content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
from typing import List
from unittest import TestCase
class TestListMethods(TestCase):
""" This test case is verifying basic list data type methods. """
def test_append(self) -> None:
""" Test checks if given elements are adding to array """
flist: List[...] = []
for i in range(1, 4):
flist.append(i)
self.assertEqual(flist, [1, 2, 3])
def test_extend(self) -> None:
""" Test checks if given elements extends an array """
flist: List[int] = [1, 2, 3]
flist.extend(range(4, 6))
self.assertEqual(flist[-2:], [4, 5])
def test_insert(self) -> None:
""" Test checks if given element is inserted into array """
flist: List[int] = [1, 2, 3]
flist.insert(3, 4)
self.assertEqual(flist, [1, 2, 3, 4])
def test_pop(self) -> None:
""" Test checks if given element is deleted from an array """
flist: List[int] = [1, 2, 3]
flist.pop(1)
self.assertEqual(flist, [1, 3])
| [
6738,
19720,
1330,
7343,
198,
6738,
555,
715,
395,
1330,
6208,
20448,
628,
198,
4871,
6208,
8053,
46202,
7,
14402,
20448,
2599,
198,
220,
220,
220,
37227,
770,
1332,
1339,
318,
45505,
4096,
1351,
1366,
2099,
5050,
13,
37227,
628,
220,
... | 2.310811 | 444 |
#!/usr/bin/python
import sys
import os
import subprocess
from os.path import join, isdir
import numpy as np
import fileinput
from numpy.random import permutation
##------------------------------------------------------------------
import torch
from torch import autograd, nn, optim
from torch.autograd import Variable
import torch.nn.functional as F
import torch.nn.parallel
import torch.backends.cudnn as cudnn
#----------------------------------------
from random import shuffle
import matplotlib
import matplotlib.pyplot as plt
plt.switch_backend('agg')
matplotlib.pyplot.viridis()
os.environ['PYTHONUNBUFFERED'] = '1'
import glob
from statistics import mean
import json
import kaldi_io
#*************************************************************************************************************************
####### Loading the Parser and default arguments
#import pdb;pdb.set_trace()
sys.path.insert(0,'/mnt/matylda3/vydana/HOW2_EXP/Gen_V1/ATTNCODE/Basic_Attention_V1')
import Attention_arg
from Attention_arg import parser
args = parser.parse_args()
###save architecture for decoding
model_path_name=join(args.model_dir,'model_architecture_')
with open(model_path_name, 'w') as f:
json.dump(args.__dict__, f, indent=2)
#####setting the gpus in the gpu cluster
#**********************************
if args.gpu:
cuda_command = 'nvidia-smi --query-gpu=memory.free,memory.total --format=csv | tail -n+2 | ' \
'awk \'BEGIN{FS=" "}{if ($1/$3 > 0.98) print NR-1}\''
oooo=subprocess.check_output(cuda_command, shell=True)
dev_id=str(oooo).lstrip('b').strip("'").split('\\n')[0]
os.environ["CUDA_VISIBLE_DEVICES"]=dev_id
gpu_no=os.environ["CUDA_VISIBLE_DEVICES"]
print("Using gpu number" + str(gpu_no))
dummy_variable=torch.zeros((10,10))
dummy_variable = dummy_variable.cuda() if args.gpu else dummy_variable
#----------------------------------------------------------------
#=================================================================
#sys.path.insert(0,'/mnt/matylda3/vydana/HOW2_EXP/Gen_V1/ATTNCODE/Basic_Attention_V1')
from Dataloader_for_AM_v1 import DataLoader
from Initializing_model_LSTM_SS_v2_args import Initialize_Att_model
from Load_sp_model import Load_sp_models
from Training_loop import train_val_model
from Spec_Augument import Spec_Aug_freqCont as Spec_Aug
from CMVN import CMVN
from utils__ import weights_init,reduce_learning_rate,read_as_list,gaussian_noise,plotting
from user_defined_losses import preprocess,compute_cer
from Decoding_loop import get_cer_for_beam
#===================================================================
if not isdir(args.model_dir):
os.makedirs(args.model_dir)
png_dir=args.model_dir+'_png'
if not isdir(png_dir):
os.makedirs(png_dir)
############################################
#================================================================================
#=======================================================
# def validate_the_model(arsg,epoch,dev_gen,model_encoder,model_decoder,encoder_optim,decoder_optim):
# #=======================================================
# model_encoder.eval()
# model_decoder.eval()
# #=======================================================
# Vl_CER=[]; Vl_BPE_CER=[];L_val_cost=[]
# val_examples=0
# for vl_smp in range(args.max_val_examples):
# B1 = dev_gen.next()
# smp_feat = B1.get('smp_feat')
# val_examples+=smp_feat.shape[0]
# assert B1 is not None, "None should never come out of the DataLoader"
# ##brak when the examples are more
# if (val_examples >= args.max_val_examples):
# break;
# #--------------------------------------
# Val_Output_trainval_dict=train_val_model(args=args,
# model_encoder = model_encoder,
# model_decoder = model_decoder,
# encoder_optim = encoder_optim,
# decoder_optim = decoder_optim,
# data_dict = B1,
# weight_noise_flag=False,
# spec_aug_flag=False,
# trainflag = False)
# L_val_cost.append(Val_Output_trainval_dict.get('cost_cpu'))
# Vl_CER.append(Val_Output_trainval_dict.get('Char_cer'))
# Vl_BPE_CER.append(Val_Output_trainval_dict.get('Word_cer'))
# attention_map=Val_Output_trainval_dict.get('attention_record').data.cpu().numpy()
# #======================================================
# #======================================================
# if (vl_smp%args.vl_disp==0) or (val_examples==args.max_val_examples-1):
# print("val epoch:==:>",epoch,"val smp no:==:>",vl_smp,"val_cost:==:>",mean(L_val_cost),"CER:",mean(Vl_CER),'BPE_CER',mean(Vl_BPE_CER),flush=True)
# if args.plot_fig_validation:
# plot_name=join(png_dir,'val_epoch'+str(epoch)+'_attention_single_file_'+str(vl_smp)+'.png')
# ##print(plot_name)
# plotting(plot_name,attention_map)
# #----------------------------------------------------
# #====================================================
# Vl_Output_dict={'L_val_cost':L_val_cost,
# 'Vl_CER':Vl_CER,
# 'Vl_BPE_CER':Vl_BPE_CER }
# return Vl_Output_dict
#=============================================================================================
#=============================================================================================
if __name__ == '__main__':
main()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
11748,
25064,
198,
11748,
28686,
198,
11748,
850,
14681,
198,
6738,
28686,
13,
6978,
1330,
4654,
11,
318,
15908,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
2393,
15414,
198,
6738,
299,
321... | 2.156104 | 2,998 |
from unittest import TestCase, main
import os
from tempfile import TemporaryDirectory
from absl import logging
import openfst_python as fst
from pydecoders.graph.graph_builder import GraphBuilder
if __name__ == '__main__':
logging.set_verbosity(logging.INFO)
main()
| [
198,
6738,
555,
715,
395,
1330,
6208,
20448,
11,
1388,
198,
11748,
28686,
198,
6738,
20218,
7753,
1330,
46042,
43055,
198,
6738,
2352,
75,
1330,
18931,
198,
11748,
1280,
69,
301,
62,
29412,
355,
277,
301,
198,
6738,
279,
5173,
721,
37... | 3.172414 | 87 |
from setuptools import setup
setup(
name='cvxbind',
version='1.0',
description='Python Oculus Rift DK2 Driver',
author='Jacob Panikulam',
author_email='jpanikulam@ufl.edu',
url='https://www.python.org/',
entry_points={
"console_scripts": ["cvxbind=cvxbind.main:main"]
},
package_dir={
'': '.',
},
packages=[
'cvxbind',
],
test_suite="test"
)
| [
6738,
900,
37623,
10141,
1330,
9058,
198,
198,
40406,
7,
198,
220,
220,
220,
1438,
11639,
33967,
87,
21653,
3256,
198,
220,
220,
220,
2196,
11639,
16,
13,
15,
3256,
198,
220,
220,
220,
6764,
11639,
37906,
22334,
21222,
32975,
17,
1243... | 2.176166 | 193 |
import tensorflow as tf
def my_model(features):
""" Base model definitions """
return tf.layers.dense(features, 1024) | [
11748,
11192,
273,
11125,
355,
48700,
198,
198,
4299,
616,
62,
19849,
7,
40890,
2599,
198,
220,
37227,
7308,
2746,
17336,
37227,
198,
220,
1441,
48700,
13,
75,
6962,
13,
67,
1072,
7,
40890,
11,
28119,
8
] | 3.297297 | 37 |
"""
This module contains class definitions for wFSAs.
:Authors: - Wilker Aziz
"""
from collections import defaultdict
from .symbol import Terminal
class WDFSA(object):
"""
This is a deterministic wFSA.
TODO: extend it to handle nondeterminism.
"""
def n_states(self):
"""Number of states."""
return len(self._arcs) # perhaps _arcs is an unfortunate, _arcs is indexed by states, thus len(_arcs) == n_states
def n_arcs(self):
"""Count number of arcs."""
return sum(1 for a in self.iterarcs())
def n_symbols(self):
"""Number of different symbols."""
return len(self._vocabulary)
def _create_state(self, state):
"""This is meant for private use, it allocates memory for one or more states.
:returns:
whether or not any state was added."""
if len(self._arcs) <= state:
for i in range(len(self._arcs), state + 1):
self._arcs.append(defaultdict(lambda: defaultdict(float)))
return True
return False
def iterstates(self):
"""Iterate through all states in order of allocation."""
return range(len(self._arcs))
def iterinitial(self):
"""Iterate through all initial states in no particular order."""
return iter(self._initial_states)
def iterfinal(self):
"""Iterate through all final states in no particular order."""
return iter(self._final_states)
def itersymbols(self):
"""Iterate through all symbols labelling transitions."""
return iter(self._vocabulary)
def iterarcs(self):
"""Iterate through all arcs/transitions in no particular order.
arc:
a tuple of the kind (origin, destination, label, weight).
"""
for sfrom, arcs_by_sym in enumerate(self._arcs):
for sym, w_by_sto in arcs_by_sym.items():
for sto, w in w_by_sto.items():
yield (sfrom, sto, sym, w)
def get_arcs(self, origin, symbol):
"""Return a list of pairs representing a destination and a weight.
:param origin: origin state.
:param symbol: label.
"""
if len(self._arcs) <= origin:
raise ValueError('Origin state %d does not exist' % origin)
return list(self._arcs[origin].get(symbol, {}).items())
def is_initial(self, state):
"""Whether or not a state is initial."""
return state in self._initial_states
def is_final(self, state):
"""Whether or not a state is final."""
return state in self._final_states
def add_arc(self, sfrom, sto, symbol, weight):
"""Add an arc creating the necessary states."""
self._create_state(sfrom) # create sfrom if necessary
self._create_state(sto) # create sto if necessary
self._arcs[sfrom][symbol][sto] = weight
self._vocabulary.add(symbol)
def make_initial(self, state):
"""Make a state initial."""
self._initial_states.add(state)
def make_final(self, state):
"""Make a state final."""
self._final_states.add(state)
def path_weight(self, path, semiring):
"""Returns the weight of a path given by a sequence of tuples of the kind (origin, destination, sym)"""
total = semiring.one
for (origin, destination, sym) in path:
arcs = self._arcs[origin].get(sym, None)
if arcs is None:
raise ValueError('Invalid transition origin=%s sym=%s' % (origin, sym))
w = arcs.get(destination, None)
if w is None:
raise ValueError('Invalid transition origin=%s destination=%s sym=%s' % (origin, destination, sym))
total = semiring.times(total, w)
return total
def arc_weight(self, origin, destination, sym):
"""Returns the weight of an arc."""
if not (0 <= origin < len(self._arcs)):
raise ValueError('Unknown state origin=%s' % (origin))
arcs = self._arcs[origin].get(sym, None)
if arcs is None:
raise ValueError('Invalid transition origin=%s sym=%s' % (origin, sym))
w = arcs.get(destination, None)
if w is None:
raise ValueError('Invalid transition origin=%s destination=%s sym=%s' % (origin, destination, sym))
return w
def make_linear_fsa(sentence, semiring, terminal_constructor=Terminal):
"""
Return a linea (unweighted) FSA representing an input sentence.
:param sentence:
a string or a sequence of tokens
:param semiring:
how we perform operations.
:param terminal_constructor:
which class we use to construct terminal symbols.
:returns:
WDFSA
>>> from grasp.semiring import Prob
>>> fsa = make_linear_fsa('a dog barks', Prob)
>>> fsa.n_states()
4
>>> fsa.n_arcs()
3
>>> fsa.n_symbols()
3
>>> list(fsa.iterinitial()) == [0]
True
>>> list(fsa.iterfinal()) == [3]
True
"""
wfsa = WDFSA()
if type(sentence) is str:
tokens = sentence.split()
else:
tokens = list(sentence)
for i, token in enumerate(tokens):
wfsa.add_arc(i, i + 1, terminal_constructor(token), semiring.one)
wfsa.make_initial(0)
wfsa.make_final(len(tokens))
return wfsa
| [
37811,
198,
1212,
8265,
4909,
1398,
17336,
329,
266,
10652,
1722,
13,
198,
198,
25,
30515,
669,
25,
532,
5187,
6122,
7578,
528,
198,
37811,
198,
198,
6738,
17268,
1330,
4277,
11600,
198,
6738,
764,
1837,
23650,
1330,
24523,
628,
198,
... | 2.379341 | 2,275 |
import numpy as np
import statsmodels.api as sm
from timeit import default_timer as timer # https://stackoverflow.com/questions/7370801/how-to-measure-elapsed-time-in-python
from .Model import Model
### EDIT BELOW HERE ###
# fit the model
# get the coefficients
# get the pvalues
# calculate the confidence intervals
| [
11748,
299,
32152,
355,
45941,
198,
11748,
9756,
27530,
13,
15042,
355,
895,
198,
6738,
640,
270,
1330,
4277,
62,
45016,
355,
19781,
1303,
3740,
1378,
25558,
2502,
11125,
13,
785,
14,
6138,
507,
14,
4790,
32583,
486,
14,
4919,
12,
146... | 3.151786 | 112 |
import numpy as np
from dbsp_drp import splicing
| [
11748,
299,
32152,
355,
45941,
198,
198,
6738,
288,
24145,
62,
7109,
79,
1330,
4328,
6345,
198
] | 2.941176 | 17 |
import functools
import warnings
from sqlalchemy import Table, sql, types, MetaData
from sqlalchemy.orm import relationship
from sqlalchemy.orm.util import class_mapper
from sqlalchemy.exc import SAWarning
from sqlalchemy.ext.declarative import declarative_base
from geoalchemy import Geometry, GeometryColumn
from geoalchemy import (Point, LineString, Polygon,
MultiPoint, MultiLineString, MultiPolygon)
from papyrus.geo_interface import GeoInterface
from papyrus.xsd import tag
_class_cache = {}
_geometry_type_mappings = dict(
[(t.name, t) for t in (Point, LineString, Polygon,
MultiPoint, MultiLineString, MultiPolygon)])
Base = declarative_base()
SQL_GEOMETRY_COLUMNS = """
SELECT
f_table_schema,
f_table_name,
f_geometry_column,
srid,
type
FROM
geometry_columns
WHERE
f_table_schema = :table_schema AND
f_table_name = :table_name AND
f_geometry_column = :geometry_column
"""
def init(engine):
"""
Initialize the db reflection module. Give the declarative base
class an engine, required for the reflection.
"""
Base.metadata.bind = engine
# A specific "association proxy" implementation
def get_class(tablename, DBSession=None, exclude_properties=None):
"""
Get the SQLAlchemy mapped class for "tablename". If no class exists
for "tablename" one is created, and added to the cache. "tablename"
must reference a valid string. If there's no table identified by
tablename in the database a NoSuchTableError SQLAlchemy exception
is raised.
"""
tablename, schema = _get_schema(tablename)
cache_key = (schema, tablename, exclude_properties)
if cache_key in _class_cache:
return _class_cache[cache_key]
table = get_table(tablename, schema, DBSession)
# create the mapped class
cls = _create_class(table, exclude_properties)
# add class to cache
_class_cache[cache_key] = cls
return cls
| [
11748,
1257,
310,
10141,
198,
11748,
14601,
198,
198,
6738,
44161,
282,
26599,
1330,
8655,
11,
44161,
11,
3858,
11,
30277,
6601,
198,
6738,
44161,
282,
26599,
13,
579,
1330,
2776,
198,
6738,
44161,
282,
26599,
13,
579,
13,
22602,
1330,
... | 2.701731 | 751 |
# Created by Kelvin_Clark on 1/31/2022, 12:59 PM
from typing import Optional
from sqlalchemy import and_
from sqlalchemy.orm import Session
from app.data.entities.user import User
from app.utils.security.password_util import verify_password_hash
| [
2,
15622,
416,
46577,
62,
43250,
319,
352,
14,
3132,
14,
1238,
1828,
11,
1105,
25,
3270,
3122,
198,
6738,
19720,
1330,
32233,
198,
198,
6738,
44161,
282,
26599,
1330,
290,
62,
198,
6738,
44161,
282,
26599,
13,
579,
1330,
23575,
198,
... | 3.438356 | 73 |
r1 = float(input('Primeiro segmento: '))
r2 = float(input('Segundo segmento: '))
r3 = float(input('Terceiro segmento: '))
if r1 < r2 + r3 and r2 < r1 + r3 and r3 < r2 + r1:
print('Podem formar um triangulo')
if r1 == r2 and r2 == r3 and r1 == r3: #pode ser feito também com: 'if r1 == r2 == r3:', sem utilizar a função AND
print('Equilátero')
elif r1 == r2 or r1 == r3 or r2 == r3:
print('Isósceles')
elif r1 != r2 and r1 != r3 and r2 != r3:
print('Todos os lados são diferentes')
else:
print('Não podem formar um triangulo!')
| [
81,
16,
796,
12178,
7,
15414,
10786,
26405,
7058,
10618,
78,
25,
705,
4008,
198,
81,
17,
796,
12178,
7,
15414,
10786,
41030,
41204,
10618,
78,
25,
705,
4008,
198,
81,
18,
796,
12178,
7,
15414,
10786,
15156,
344,
7058,
10618,
78,
25,... | 2.140152 | 264 |
import discord
import random
from discord.ext import commands
token = open("token.txt", "r").read()
client = discord.Client()
bot = commands.Bot(command_prefix='!joker')
@client.event
#didnot work on bot join
@client.event
##@bot.command()
##async def mention(ctx, user : discord.Member):
## await ctx.send(user.mention)
@client.event
## if "play" in message.content.lower():
## vc = await discord.channel()
## vc.play(discord.FFmpegPCMAudio('bhai.mp3'), after=lambda e: print('done', e))
## vc.is_playing()
## if "pause" in message.content.lower():
## vc.pause()
## if "resume" in message.content.lower():
## vc.resume()
## if "stop" in message.content.lower():
## vc.stop()
client.run(token)
| [
11748,
36446,
201,
198,
11748,
4738,
201,
198,
6738,
36446,
13,
2302,
1330,
9729,
201,
198,
30001,
796,
1280,
7203,
30001,
13,
14116,
1600,
366,
81,
11074,
961,
3419,
201,
198,
16366,
796,
36446,
13,
11792,
3419,
201,
198,
13645,
796,
... | 2.105134 | 409 |
import os
class Node(object):
"""
Convenience class for creating Qt Tree Model Views
:param str name: name of the node
:param Node parent: Node parent
"""
def find_child_by_name(self, name):
"""Get immediate child Node by its full name. This allows for searches
to find nodes with duplicate base names but different paths.
:param str name: Fully-qualified child node name
"""
# ToDo: Handle children with duplicate names
for child in self.children:
if child.full_name() == name:
return child
return None
@property
def name(self):
"""
The name of the node
:getter: Get the Node's name
:setter: Set the Node's name
:type: string
"""
return self._name
@name.setter
@property
def children(self):
"""
A collection of child Nodes
:getter: Return the Node's children
:setter: Replace current children with incoming list
:type: list
"""
return self._children[:]
@children.setter
@children.deleter
@property
def parent(self):
"""
Change the current parent relationship of the node
:getter: Return the current parent node
:setter: Re-parent the node to a new parent
:type: Node
"""
return self._parent
@parent.setter
@parent.deleter
def full_name(self):
"""Return Node's fully-qualified name"""
names = [n.name for n in self.list_parents()]
names.append(self.name)
return os.path.join(*names)
def post_parent(self, value):
"""
Convenience method to control what happens after the node is parented
"""
# this is here mostly to help integrate with apps that need to run other
# parenting ops outside of the view
pass
def row(self):
"""
Row index based on parent relationship
"""
if self.parent:
return self.parent.children.index(self)
return None
def list_all_relatives(self, out=None):
"""
Recursively list all downstream relationships (children/grandchildren)
"""
out = out or []
for child in self._children:
out.append(child)
child.list_all_relatives(out=out)
return out
def list_parents(self):
"""
List all upstream relationships (parent/grand parents)
"""
out = []
parent = self.parent
while parent:
out.append(parent)
parent = parent.parent
out.reverse()
return out
def is_descendant_of(self, node):
"""Return True if current node is a descendant of another node
:param Node node: Node to search for
:return: True if current node is a downstream relative of other
:rtype: bool
"""
return self._check_if_upstream(self, node)
def is_ancestor_of(self, node):
"""Return True if current node is an ancestor of another node
:param Node node: Node to search from
:return: True if current node is an upstream relative of other
:rtype: bool
"""
return self._check_if_upstream(node, self)
@staticmethod
def _check_if_upstream(start, end):
"""Return True if end node is an upstream ancestor of start
:param Node start: Node to begin search from
:param Node end: Node to search for upstream
:return: True if end is upstream of start
:rtype: bool
"""
curr_node = start
while curr_node.parent:
if curr_node.parent == end:
return True
curr_node = curr_node.parent
return False
| [
11748,
28686,
201,
198,
201,
198,
201,
198,
4871,
19081,
7,
15252,
2599,
201,
198,
220,
220,
220,
37227,
201,
198,
220,
220,
220,
1482,
574,
1240,
1398,
329,
4441,
33734,
12200,
9104,
29978,
201,
198,
201,
198,
220,
220,
220,
1058,
... | 2.243243 | 1,776 |
import asyncio
import time
from bcreg.credssubmitter import CONTROLLER_HEALTH_URL, CONTROLLER_HEALTH_WAIT, CONTROLLER_HEALTH_TIMEOUT, check_controller_health
print("Pinging " + CONTROLLER_HEALTH_URL + " ...")
try:
loop = asyncio.get_event_loop()
start_time = time.perf_counter()
while (time.perf_counter() - start_time) < CONTROLLER_HEALTH_TIMEOUT:
controller_health = loop.run_until_complete(check_controller_health(wait=False))
print("Status of " + CONTROLLER_HEALTH_URL + " is " + str(controller_health))
time.sleep(CONTROLLER_HEALTH_WAIT)
except Exception as e:
print("Exception", e)
raise
| [
198,
11748,
30351,
952,
198,
11748,
640,
198,
198,
6738,
47125,
2301,
13,
66,
445,
824,
549,
37974,
1330,
27342,
46,
3069,
1137,
62,
13909,
40818,
62,
21886,
11,
27342,
46,
3069,
1137,
62,
13909,
40818,
62,
15543,
2043,
11,
27342,
46,... | 2.568 | 250 |
import pickle
import os
| [
11748,
2298,
293,
201,
198,
11748,
28686,
201
] | 3.125 | 8 |
#!/usr/bin/env python 3
# MIT License
#
# Copyright (c) 2020 Carlos Gil Gonzalez
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""upydevice dev tools"""
import os
import json
dev_dir = '.upydevices'
dev_path = "{}/{}".format(os.environ['HOME'], dev_dir)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
513,
198,
198,
2,
17168,
13789,
198,
2,
198,
2,
15069,
357,
66,
8,
12131,
17409,
10689,
24416,
198,
2,
198,
2,
2448,
3411,
318,
29376,
7520,
11,
1479,
286,
3877,
11,
284,
597,
1048,
167... | 3.59322 | 354 |
#!/usr/bin/env python3
from skinnywms.wmssvr import application
application.run(debug=True, threaded=False)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
6738,
29494,
86,
907,
13,
26377,
824,
37020,
1330,
3586,
198,
31438,
13,
5143,
7,
24442,
28,
17821,
11,
40945,
28,
25101,
8,
198
] | 3.176471 | 34 |
# -*- coding: utf-8 -*-
import twitter
import requests
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
11748,
17044,
198,
11748,
7007,
628
] | 2.8 | 20 |
import stp.play as play
import stp.tactic as tactic
from rj_gameplay.tactic import pass_tactic, pass_seek, nmark_tactic, goalie_tactic, clear_tactic, wall_tactic
import stp.skill as skill
import stp.role as role
from stp.role.assignment.naive import NaiveRoleAssignment
import stp.rc as rc
from typing import Dict, Generic, Iterator, List, Optional, Tuple, Type, TypeVar
import numpy as np
from rj_gameplay.calculations import wall_calculations
class RestartPlay(play.IPlay):
"""One robot passes to another. Some markers.
"""
class DirectRestartPlay(play.IPlay):
"""One robot passes to another. Some markers.
"""
| [
11748,
336,
79,
13,
1759,
355,
711,
198,
11748,
336,
79,
13,
83,
12009,
355,
18543,
198,
198,
6738,
374,
73,
62,
6057,
1759,
13,
83,
12009,
1330,
1208,
62,
83,
12009,
11,
1208,
62,
36163,
11,
299,
4102,
62,
83,
12009,
11,
31071,
... | 3.033493 | 209 |
import cv2
import numpy as np
if __name__ == "__main__":
img = cv2.imread('/Users/liyue/Desktop/QC/QRCode/土地謄本/0_origin.jpg')
irimg = InRangeImage(img)
cv2.waitKey(0)
cv2.destroyAllWindows() | [
11748,
269,
85,
17,
198,
11748,
299,
32152,
355,
45941,
198,
220,
220,
220,
220,
220,
220,
220,
220,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
220,
220,
33705,
796,
269,
85,
17,
13,
320,
961,
10786,
1... | 1.902655 | 113 |
import itertools
import operator
import random
from fractions import Fraction
from typing import Callable
from raiden.exceptions import UndefinedMediationFee
from raiden.transfer import channel, routes, secret_registry
from raiden.transfer.architecture import Event, StateChange, SuccessOrError, TransitionResult
from raiden.transfer.channel import get_balance
from raiden.transfer.events import SendProcessed
from raiden.transfer.identifiers import CANONICAL_IDENTIFIER_UNORDERED_QUEUE
from raiden.transfer.mediated_transfer.events import (
EventUnexpectedSecretReveal,
EventUnlockClaimFailed,
EventUnlockClaimSuccess,
EventUnlockFailed,
EventUnlockSuccess,
SendLockedTransfer,
SendRefundTransfer,
SendSecretReveal,
)
from raiden.transfer.mediated_transfer.mediation_fee import FeeScheduleState, Interpolate
from raiden.transfer.mediated_transfer.state import (
LockedTransferSignedState,
LockedTransferUnsignedState,
MediationPairState,
MediatorTransferState,
WaitingTransferState,
)
from raiden.transfer.mediated_transfer.state_change import (
ActionInitMediator,
ReceiveLockExpired,
ReceiveSecretReveal,
ReceiveTransferRefund,
)
from raiden.transfer.state import (
ChannelState,
NettingChannelState,
NetworkState,
RouteState,
message_identifier_from_prng,
)
from raiden.transfer.state_change import (
ActionChangeNodeNetworkState,
Block,
ContractReceiveSecretReveal,
ReceiveUnlock,
)
from raiden.transfer.utils import is_valid_secret_reveal
from raiden.utils.typing import (
MYPY_ANNOTATION,
Address,
BlockExpiration,
BlockHash,
BlockNumber,
BlockTimeout,
ChannelID,
Dict,
List,
LockType,
NodeNetworkStateMap,
Optional,
PaymentWithFeeAmount,
Secret,
SecretHash,
TokenAmount,
TokenNetworkAddress,
Tuple,
Union,
cast,
typecheck,
)
STATE_SECRET_KNOWN = (
"payee_secret_revealed",
"payee_contract_unlock",
"payee_balance_proof",
"payer_secret_revealed",
"payer_waiting_unlock",
"payer_balance_proof",
)
STATE_TRANSFER_PAID = ("payee_contract_unlock", "payee_balance_proof", "payer_balance_proof")
# TODO: fix expired state, it is not final
STATE_TRANSFER_FINAL = (
"payee_contract_unlock",
"payee_balance_proof",
"payee_expired",
"payer_balance_proof",
"payer_expired",
)
def is_lock_valid(expiration: BlockExpiration, block_number: BlockNumber) -> bool:
""" True if the lock has not expired. """
return block_number <= BlockNumber(expiration)
def is_safe_to_wait(
lock_expiration: BlockExpiration, reveal_timeout: BlockTimeout, block_number: BlockNumber
) -> SuccessOrError:
"""True if waiting is safe, i.e. there are more than enough blocks to safely
unlock on chain.
"""
# reveal timeout will not ever be larger than the lock_expiration otherwise
# the expected block_number is negative
assert block_number > 0, "block_number must be larger than zero"
assert reveal_timeout > 0, "reveal_timeout must be larger than zero"
assert lock_expiration > reveal_timeout, "lock_expiration must be larger than reveal_timeout"
lock_timeout = lock_expiration - block_number
# A node may wait for a new balance proof while there are reveal_timeout
# blocks left, at that block and onwards it is not safe to wait.
if lock_timeout > reveal_timeout:
return SuccessOrError()
return SuccessOrError(
f"lock timeout is unsafe."
f" timeout must be larger than {reveal_timeout}, but it is {lock_timeout}."
f" expiration: {lock_expiration} block_number: {block_number}"
)
def is_send_transfer_almost_equal(
send_channel: NettingChannelState, # pylint: disable=unused-argument
send: LockedTransferUnsignedState,
received: LockedTransferSignedState,
) -> bool:
""" True if both transfers are for the same mediated transfer. """
# The only thing that may change is the direction of the transfer
return (
send.payment_identifier == received.payment_identifier
and send.token == received.token
# FIXME: Checking the transferred amount would make a lot of sense, but
# this is hard to do precisely without larger changes to the
# codebase. With the uncertainty about how we want to deal with
# refunds and backtracking in the long term, this check is
# skipped for now.
# and send.lock.amount == received.lock.amount - send_channel.fee_schedule.flat
and send.lock.expiration == received.lock.expiration
and send.lock.secrethash == received.lock.secrethash
and send.initiator == received.initiator
and send.target == received.target
)
def get_payee_channel(
channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
transfer_pair: MediationPairState,
) -> Optional[NettingChannelState]:
""" Returns the payee channel of a given transfer pair or None if it's not found """
payee_channel_identifier = transfer_pair.payee_transfer.balance_proof.channel_identifier
return channelidentifiers_to_channels.get(payee_channel_identifier)
def get_payer_channel(
channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
transfer_pair: MediationPairState,
) -> Optional[NettingChannelState]:
""" Returns the payer channel of a given transfer pair or None if it's not found """
payer_channel_identifier = transfer_pair.payer_transfer.balance_proof.channel_identifier
return channelidentifiers_to_channels.get(payer_channel_identifier)
def get_pending_transfer_pairs(
transfers_pair: List[MediationPairState],
) -> List[MediationPairState]:
""" Return the transfer pairs that are not at a final state. """
pending_pairs = [
pair
for pair in transfers_pair
if pair.payee_state not in STATE_TRANSFER_FINAL
or pair.payer_state not in STATE_TRANSFER_FINAL
]
return pending_pairs
def find_intersection(fee_func: Interpolate, line: Callable[[int], Fraction]) -> Optional[float]:
"""Returns the x value where both functions intersect
`fee_func` is a piecewise linear function while `line` is a straight line
and takes the one of fee_func's indexes as argument.
Returns `None` if there is no intersection within `fee_func`s domain, which
indicates a lack of capacity.
"""
i = 0
y = fee_func.y_list[i]
compare = operator.lt if y < line(i) else operator.gt
while compare(y, line(i)):
i += 1
if i == len(fee_func.x_list):
# Not enough capacity to send
return None
y = fee_func.y_list[i]
# We found the linear section where the solution is. Now interpolate!
x1 = fee_func.x_list[i - 1]
x2 = fee_func.x_list[i]
yf1 = fee_func.y_list[i - 1]
yf2 = fee_func.y_list[i]
yl1 = line(i - 1)
yl2 = line(i)
return (yl1 - yf1) * (x2 - x1) / ((yf2 - yf1) - (yl2 - yl1)) + x1
def get_amount_without_fees(
    amount_with_fees: PaymentWithFeeAmount,
    channel_in: NettingChannelState,
    channel_out: NettingChannelState,
) -> Optional[PaymentWithFeeAmount]:
    """Return the amount after fees are taken.

    Returns ``None`` when the fee schedule is undefined for these balances,
    when capacity is insufficient, or when the fees would consume the whole
    transferred amount.
    """
    balance_in = get_balance(channel_in.our_state, channel_in.partner_state)
    balance_out = get_balance(channel_out.our_state, channel_out.partner_state)
    # Amount the partner can still send to us on the incoming channel.
    receivable = TokenAmount(
        channel_in.our_total_deposit + channel_in.partner_total_deposit - balance_in
    )
    assert (
        channel_in.fee_schedule.cap_fees == channel_out.fee_schedule.cap_fees
    ), "Both channels must have the same cap_fees setting for the same mediator."
    try:
        # Combined mediation fee function across the in/out channel pair.
        fee_func = FeeScheduleState.mediation_fee_func(
            schedule_in=channel_in.fee_schedule,
            schedule_out=channel_out.fee_schedule,
            balance_in=balance_in,
            balance_out=balance_out,
            receivable=receivable,
            amount_with_fees=amount_with_fees,
            cap_fees=channel_in.fee_schedule.cap_fees,
        )
        # The forwarded amount x satisfies x + fee(x) == amount_with_fees,
        # i.e. the intersection of fee_func with the line amount_with_fees - x.
        amount_without_fees = find_intersection(
            fee_func, lambda i: amount_with_fees - fee_func.x_list[i]
        )
    except UndefinedMediationFee:
        return None
    if amount_without_fees is None:
        # Insufficient capacity
        return None
    if amount_without_fees <= 0:
        # The node can't cover its mediations fees from the transferred amount.
        return None
    return PaymentWithFeeAmount(int(round(amount_without_fees)))
def sanity_check(
    state: MediatorTransferState,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
) -> None:
    """Check invariants that must hold for a mediator state.

    Raises ``AssertionError`` when any invariant is violated: a paid transfer
    without a known secret, mismatched secret hashes, payer/payee transfers
    that differ beyond the allowed fields, or pairs in invalid states.
    """
    # if a transfer is paid we must know the secret
    all_transfers_states = itertools.chain(
        (pair.payee_state for pair in state.transfers_pair),
        (pair.payer_state for pair in state.transfers_pair),
    )
    if any(state in STATE_TRANSFER_PAID for state in all_transfers_states):
        assert state.secret is not None, "Mediator's state must have secret"
    # the "transitivity" for these values is checked below as part of
    # almost_equal check
    if state.transfers_pair:
        first_pair = state.transfers_pair[0]
        assert (
            state.secrethash == first_pair.payer_transfer.lock.secrethash
        ), "Secret hash mismatch"
    # Each pair's outgoing (payee) transfer must be consistent with the
    # incoming (payer) transfer it mediates.
    for pair in state.transfers_pair:
        payee_channel = get_payee_channel(
            channelidentifiers_to_channels=channelidentifiers_to_channels, transfer_pair=pair
        )
        # Channel could have been removed
        if not payee_channel:
            continue
        assert is_send_transfer_almost_equal(
            send_channel=payee_channel, send=pair.payee_transfer, received=pair.payer_transfer
        ), "Payee and payer transfers are too different"
        assert pair.payer_state in pair.valid_payer_states, "payer_state not in valid payer states"
        assert pair.payee_state in pair.valid_payee_states, "payee_state not in valid payee states"
    # Consecutive pairs form a refund chain: the payee of one pair is the
    # payer of the next.
    for original, refund in zip(state.transfers_pair[:-1], state.transfers_pair[1:]):
        assert original.payee_address == refund.payer_address, "payee/payer address mismatch"
        payer_channel = get_payer_channel(
            channelidentifiers_to_channels=channelidentifiers_to_channels, transfer_pair=refund
        )
        # Channel could have been removed
        if not payer_channel:
            continue
        transfer_sent = original.payee_transfer
        transfer_received = refund.payer_transfer
        assert is_send_transfer_almost_equal(
            send_channel=payer_channel, send=transfer_sent, received=transfer_received
        ), "Payee and payer transfers are too different (refund)"
    # A waiting transfer must also be consistent with the last sent transfer.
    if state.waiting_transfer and state.transfers_pair:
        last_transfer_pair = state.transfers_pair[-1]
        payee_channel = get_payee_channel(
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            transfer_pair=last_transfer_pair,
        )
        # Channel could have been removed
        if payee_channel:
            transfer_sent = last_transfer_pair.payee_transfer
            transfer_received = state.waiting_transfer.transfer
            assert is_send_transfer_almost_equal(
                send_channel=payee_channel, send=transfer_sent, received=transfer_received
            ), "Payee and payer transfers are too different (waiting transfer)"
def clear_if_finalized(
    iteration: TransitionResult,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
) -> TransitionResult[Optional[MediatorTransferState]]:
    """Clear the mediator task if all the locks have been finalized.

    A lock is considered finalized if it has been removed from the pending locks
    offchain, either because the transfer was unlocked or expired, or because the
    channel was settled on chain and therefore the channel is removed."""
    state = cast(MediatorTransferState, iteration.new_state)
    if state is None:
        return iteration  # type: ignore
    # Only clear the task if all channels have the lock cleared.
    secrethash = state.secrethash
    for pair in state.transfers_pair:
        # Incoming side: the partner's pending locks hold the payer lock.
        payer_channel = get_payer_channel(channelidentifiers_to_channels, pair)
        if payer_channel and channel.is_lock_pending(payer_channel.partner_state, secrethash):
            return iteration
        # Outgoing side: our pending locks hold the payee lock.
        payee_channel = get_payee_channel(channelidentifiers_to_channels, pair)
        if payee_channel and channel.is_lock_pending(payee_channel.our_state, secrethash):
            return iteration
    if state.waiting_transfer:
        waiting_transfer = state.waiting_transfer.transfer
        waiting_channel_identifier = waiting_transfer.balance_proof.channel_identifier
        waiting_channel = channelidentifiers_to_channels.get(waiting_channel_identifier)
        if waiting_channel and channel.is_lock_pending(waiting_channel.partner_state, secrethash):
            return iteration
    # No pending lock anywhere: drop the state but keep the events.
    return TransitionResult(None, iteration.events)
def forward_transfer_pair(
    payer_transfer: LockedTransferSignedState,
    payer_channel: NettingChannelState,
    payee_channel: NettingChannelState,
    route_state: RouteState,
    route_state_table: List[RouteState],
    pseudo_random_generator: random.Random,
    block_number: BlockNumber,
) -> Tuple[Optional[MediationPairState], List[Event]]:
    """Given a payer transfer tries the given route to proceed with the mediation.

    Args:
        payer_transfer: The transfer received from the payer_channel.
        payer_channel: The channel the payer transfer arrived on.
        payee_channel: The candidate outgoing channel.
        route_state: route to be tried.
        route_state_table: list of all candidate routes
        pseudo_random_generator: Number generator to generate a message id.
        block_number: The current block number.

    Returns:
        The new mediation pair and the events to send, or ``(None, [])`` when
        the route cannot be used (no capacity after fees, or unsafe timeout).
    """
    # Deduct this node's mediation fee; None/0 means the route is unusable.
    amount_after_fees = get_amount_without_fees(
        amount_with_fees=payer_transfer.lock.amount,
        channel_in=payer_channel,
        channel_out=payee_channel,
    )
    if not amount_after_fees:
        return None, []
    # Remaining lifetime of the received lock; the outgoing transfer keeps
    # the same absolute expiration.
    lock_timeout = BlockTimeout(payer_transfer.lock.expiration - block_number)
    safe_to_use_channel = channel.is_channel_usable_for_mediation(
        channel_state=payee_channel, transfer_amount=amount_after_fees, lock_timeout=lock_timeout
    )
    if not safe_to_use_channel:
        return None, []
    assert payee_channel.settle_timeout >= lock_timeout, "settle_timeout must be >= lock_timeout"
    # Forward only the remaining routes, minus the one being taken now.
    route_states = routes.prune_route_table(
        route_states=route_state_table, selected_route=route_state
    )
    message_identifier = message_identifier_from_prng(pseudo_random_generator)
    lockedtransfer_event = channel.send_lockedtransfer(
        channel_state=payee_channel,
        initiator=payer_transfer.initiator,
        target=payer_transfer.target,
        amount=amount_after_fees,
        message_identifier=message_identifier,
        payment_identifier=payer_transfer.payment_identifier,
        expiration=payer_transfer.lock.expiration,
        secrethash=payer_transfer.lock.secrethash,
        route_states=route_states,
    )
    mediated_events: List[Event] = [lockedtransfer_event]
    # create transfer pair
    transfer_pair = MediationPairState(
        payer_transfer=payer_transfer,
        payee_address=payee_channel.partner_state.address,
        payee_transfer=lockedtransfer_event.transfer,
    )
    return transfer_pair, mediated_events
def backward_transfer_pair(
    backward_channel: NettingChannelState,
    payer_transfer: LockedTransferSignedState,
    pseudo_random_generator: random.Random,
    block_number: BlockNumber,
) -> Tuple[Optional[MediationPairState], List[Event]]:
    """Sends a transfer backwards, allowing the previous hop to try a new
    route.

    When all the routes available for this node failed, send a transfer
    backwards with the same amount and secrethash, allowing the previous hop to
    do a retry.

    Args:
        backward_channel: The original channel which sent the mediated transfer
            to this node.
        payer_transfer: The *latest* payer transfer which is backing the
            mediation.
        pseudo_random_generator: Number generator to generate a message id.
        block_number: The current block number.

    Returns:
        The mediator pair and the corresponding refund event, or
        ``(None, [])`` when the refund lock would not have a safe expiration.
    """
    transfer_pair = None
    events: List[Event] = list()
    lock = payer_transfer.lock
    lock_timeout = BlockTimeout(lock.expiration - block_number)
    # Ensure the refund transfer's lock has a safe expiration, otherwise don't
    # do anything and wait for the received lock to expire.
    if channel.is_channel_usable_for_mediation(backward_channel, lock.amount, lock_timeout):
        message_identifier = message_identifier_from_prng(pseudo_random_generator)
        # The refund's route only contains this node's own address.
        backward_route_state = RouteState(
            route=[backward_channel.our_state.address],
        )
        refund_transfer = channel.send_refundtransfer(
            channel_state=backward_channel,
            initiator=payer_transfer.initiator,
            target=payer_transfer.target,
            # `amount` should be `get_lock_amount_after_fees(...)`, but fees
            # for refunds are currently not defined, so we assume fee=0 to keep
            # it simple, for now.
            amount=lock.amount,
            message_identifier=message_identifier,
            payment_identifier=payer_transfer.payment_identifier,
            expiration=lock.expiration,
            secrethash=lock.secrethash,
            route_state=backward_route_state,
        )
        transfer_pair = MediationPairState(
            payer_transfer=payer_transfer,
            payee_address=backward_channel.partner_state.address,
            payee_transfer=refund_transfer.transfer,
        )
        events.append(refund_transfer)
    return transfer_pair, events
def set_offchain_secret(
    state: MediatorTransferState,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    secret: Secret,
    secrethash: SecretHash,
) -> List[Event]:
    """Set the secret to all mediated transfers.

    Registers the secret off-chain on both the payer and payee channel of
    every pair. Returns an ``EventUnexpectedSecretReveal`` when a
    ``waiting_transfer`` exists, since the secret should never be revealed
    in that situation.
    """
    state.secret = secret
    for pair in state.transfers_pair:
        payer_channel = channelidentifiers_to_channels.get(
            pair.payer_transfer.balance_proof.channel_identifier
        )
        if payer_channel:
            channel.register_offchain_secret(payer_channel, secret, secrethash)
        payee_channel = channelidentifiers_to_channels.get(
            pair.payee_transfer.balance_proof.channel_identifier
        )
        if payee_channel:
            channel.register_offchain_secret(payee_channel, secret, secrethash)
    # The secret should never be revealed if `waiting_transfer` is not None.
    # For this to happen this node must have received a transfer, which it did
    # *not* mediate, and nevertheless the secret was revealed.
    #
    # This can only be possible if the initiator reveals the secret without the
    # target's secret request, or if the node which sent the `waiting_transfer`
    # has sent another transfer which reached the target (meaning someone along
    # the path will lose tokens).
    if state.waiting_transfer:
        payer_channel = channelidentifiers_to_channels.get(
            state.waiting_transfer.transfer.balance_proof.channel_identifier
        )
        if payer_channel:
            channel.register_offchain_secret(payer_channel, secret, secrethash)
        unexpected_reveal = EventUnexpectedSecretReveal(
            secrethash=secrethash, reason="The mediator has a waiting transfer."
        )
        return [unexpected_reveal]
    return list()
def set_onchain_secret(
    state: MediatorTransferState,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    secret: Secret,
    secrethash: SecretHash,
    block_number: BlockNumber,
) -> List[Event]:
    """Set the secret to all mediated transfers.

    The secret should have been learned from the secret registry.

    Registers the secret on-chain (with the block number at which it was
    revealed) on both channels of every pair. As with the off-chain variant,
    a pending ``waiting_transfer`` produces an
    ``EventUnexpectedSecretReveal``.
    """
    state.secret = secret
    for pair in state.transfers_pair:
        payer_channel = channelidentifiers_to_channels.get(
            pair.payer_transfer.balance_proof.channel_identifier
        )
        if payer_channel:
            channel.register_onchain_secret(payer_channel, secret, secrethash, block_number)
        payee_channel = channelidentifiers_to_channels.get(
            pair.payee_transfer.balance_proof.channel_identifier
        )
        if payee_channel:
            channel.register_onchain_secret(
                channel_state=payee_channel,
                secret=secret,
                secrethash=secrethash,
                secret_reveal_block_number=block_number,
            )
    # Like the off-chain secret reveal, the secret should never be revealed
    # on-chain if there is a waiting transfer.
    if state.waiting_transfer:
        payer_channel = channelidentifiers_to_channels.get(
            state.waiting_transfer.transfer.balance_proof.channel_identifier
        )
        if payer_channel:
            channel.register_onchain_secret(
                channel_state=payer_channel,
                secret=secret,
                secrethash=secrethash,
                secret_reveal_block_number=block_number,
            )
        unexpected_reveal = EventUnexpectedSecretReveal(
            secrethash=secrethash, reason="The mediator has a waiting transfer."
        )
        return [unexpected_reveal]
    return list()
def set_offchain_reveal_state(
    transfers_pair: List[MediationPairState], payee_address: Address
) -> None:
    """Mark every transfer *sent* to ``payee_address`` as secret-revealed."""
    matching_pairs = (p for p in transfers_pair if p.payee_address == payee_address)
    for matched in matching_pairs:
        matched.payee_state = "payee_secret_revealed"
def events_for_expired_pairs(
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    transfers_pair: List[MediationPairState],
    waiting_transfer: Optional[WaitingTransferState],
    block_number: BlockNumber,
) -> List[Event]:
    """ Informational events for expired locks. """
    pending_transfers_pairs = get_pending_transfer_pairs(transfers_pair)
    events: List[Event] = list()
    for pair in pending_transfers_pairs:
        payer_balance_proof = pair.payer_transfer.balance_proof
        payer_channel = channelidentifiers_to_channels.get(payer_balance_proof.channel_identifier)
        if not payer_channel:
            continue
        has_payer_transfer_expired = channel.is_transfer_expired(
            transfer=pair.payer_transfer, affected_channel=payer_channel, block_number=block_number
        )
        if has_payer_transfer_expired:
            # For safety, the correct behavior is:
            #
            # - If the payee has been paid, then the payer must pay too.
            #
            # And the corollary:
            #
            # - If the payer transfer has expired, then the payee transfer must
            #   have expired too.
            #
            # The problem is that this corollary cannot be asserted. If a user
            # is running Raiden without a monitoring service, then it may go
            # offline after having paid a transfer to a payee, but without
            # getting a balance proof of the payer, and once it comes back
            # online the transfer may have expired.
            #
            # assert pair.payee_state == 'payee_expired'
            pair.payer_state = "payer_expired"
            unlock_claim_failed = EventUnlockClaimFailed(
                pair.payer_transfer.payment_identifier,
                pair.payer_transfer.lock.secrethash,
                "lock expired",
            )
            events.append(unlock_claim_failed)
    # A waiting transfer expires once the receiver-side expiration threshold
    # for its lock has passed.
    if waiting_transfer:
        expiration_threshold = channel.get_receiver_expiration_threshold(
            waiting_transfer.transfer.lock.expiration
        )
        should_waiting_transfer_expire = (
            waiting_transfer.state != "expired" and expiration_threshold <= block_number
        )
        if should_waiting_transfer_expire:
            waiting_transfer.state = "expired"
            unlock_claim_failed = EventUnlockClaimFailed(
                waiting_transfer.transfer.payment_identifier,
                waiting_transfer.transfer.lock.secrethash,
                "lock expired",
            )
            events.append(unlock_claim_failed)
    return events
def events_for_secretreveal(
    transfers_pair: List[MediationPairState],
    secret: Secret,
    pseudo_random_generator: random.Random,
) -> List[Event]:
    """Reveal the secret off-chain.

    The secret is revealed off-chain even if there is a pending transaction to
    reveal it on-chain, this allows the unlock to happen off-chain, which is
    faster.

    This node is named N, suppose there is a mediated transfer with two refund
    transfers, one from B and one from C:

        A-N-B...B-N-C..C-N-D

    Under normal operation N will first learn the secret from D, then reveal to
    C, wait for C to inform the secret is known before revealing it to B, and
    again wait for B before revealing the secret to A.

    If B somehow sent a reveal secret before C and D, then the secret will be
    revealed to A, but not C and D, meaning the secret won't be propagated
    forward. Even if D sent a reveal secret at about the same time, the secret
    will only be revealed to B upon confirmation from C.

    If the proof doesn't arrive in time and the lock's expiration is at risk, N
    won't lose tokens since it knows the secret can go on-chain at any time.
    """
    events: List[Event] = list()
    # Iterate newest-to-oldest so the secret propagates backwards along the
    # mediation chain, one hop at a time.
    for pair in reversed(transfers_pair):
        payee_knows_secret = pair.payee_state in STATE_SECRET_KNOWN
        payer_knows_secret = pair.payer_state in STATE_SECRET_KNOWN
        is_transfer_pending = pair.payer_state == "payer_pending"
        should_send_secret = payee_knows_secret and not payer_knows_secret and is_transfer_pending
        if should_send_secret:
            message_identifier = message_identifier_from_prng(pseudo_random_generator)
            pair.payer_state = "payer_secret_revealed"
            payer_transfer = pair.payer_transfer
            revealsecret = SendSecretReveal(
                recipient=payer_transfer.balance_proof.sender,
                message_identifier=message_identifier,
                secret=secret,
                canonical_identifier=CANONICAL_IDENTIFIER_UNORDERED_QUEUE,
            )
            events.append(revealsecret)
    return events
def events_for_balanceproof(
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    transfers_pair: List[MediationPairState],
    pseudo_random_generator: random.Random,
    block_number: BlockNumber,
    secret: Secret,
    secrethash: SecretHash,
) -> List[Event]:
    """ While it's safe do the off-chain unlock. """
    events: List[Event] = list()
    for pair in reversed(transfers_pair):
        payee_knows_secret = pair.payee_state in STATE_SECRET_KNOWN
        payee_payed = pair.payee_state in STATE_TRANSFER_PAID
        payee_channel = get_payee_channel(channelidentifiers_to_channels, pair)
        payee_channel_open = (
            payee_channel and channel.get_status(payee_channel) == ChannelState.STATE_OPENED
        )
        payer_channel = get_payer_channel(channelidentifiers_to_channels, pair)
        # The mediator must not send to the payee a balance proof if the lock
        # is in the danger zone, because the payer may not do the same and the
        # on-chain unlock may fail. If the lock is nearing it's expiration
        # block, then on-chain unlock should be done, and if successful it can
        # be unlocked off-chain.
        is_safe_to_send_balanceproof = False
        if payer_channel:
            is_safe_to_send_balanceproof = is_safe_to_wait(
                pair.payer_transfer.lock.expiration, payer_channel.reveal_timeout, block_number
            ).ok
        should_send_balanceproof_to_payee = (
            payee_channel_open
            and payee_knows_secret
            and not payee_payed
            and is_safe_to_send_balanceproof
        )
        if should_send_balanceproof_to_payee:
            # At this point we are sure that payee_channel exists due to the
            # payee_channel_open check above. So let mypy know about this
            assert payee_channel, MYPY_ANNOTATION
            pair.payee_state = "payee_balance_proof"
            message_identifier = message_identifier_from_prng(pseudo_random_generator)
            unlock_lock = channel.send_unlock(
                channel_state=payee_channel,
                message_identifier=message_identifier,
                payment_identifier=pair.payee_transfer.payment_identifier,
                secret=secret,
                secrethash=secrethash,
                block_number=block_number,
            )
            unlock_success = EventUnlockSuccess(
                pair.payer_transfer.payment_identifier, pair.payer_transfer.lock.secrethash
            )
            events.append(unlock_lock)
            events.append(unlock_success)
    return events
def events_for_onchain_secretreveal_if_dangerzone(
    channelmap: Dict[ChannelID, NettingChannelState],
    secrethash: SecretHash,
    transfers_pair: List[MediationPairState],
    block_number: BlockNumber,
    block_hash: BlockHash,
) -> List[Event]:
    """Reveal the secret on-chain if the lock enters the unsafe region and the
    secret is not yet on-chain.
    """
    events: List[Event] = list()
    # Collect the existing payer channels; channels may have been removed.
    all_payer_channels = []
    for pair in transfers_pair:
        channel_state = get_payer_channel(channelmap, pair)
        if channel_state:
            all_payer_channels.append(channel_state)
    transaction_sent = has_secret_registration_started(
        all_payer_channels, transfers_pair, secrethash
    )
    # Only consider the transfers which have a pair. This means if we have a
    # waiting transfer and for some reason the node knows the secret, it will
    # not try to register it. Otherwise it would be possible for an attacker to
    # reveal the secret late, just to force the node to send an unecessary
    # transaction.
    for pair in get_pending_transfer_pairs(transfers_pair):
        payer_channel = get_payer_channel(channelmap, pair)
        if not payer_channel:
            continue
        lock = pair.payer_transfer.lock
        safe_to_wait = is_safe_to_wait(lock.expiration, payer_channel.reveal_timeout, block_number)
        secret_known = channel.is_secret_known(
            payer_channel.partner_state, pair.payer_transfer.lock.secrethash
        )
        if not safe_to_wait and secret_known:
            pair.payer_state = "payer_waiting_secret_reveal"
            # At most one registration transaction is sent for the secret.
            if not transaction_sent:
                secret = channel.get_secret(payer_channel.partner_state, lock.secrethash)
                assert secret, "the secret should be known at this point"
                reveal_events = secret_registry.events_for_onchain_secretreveal(
                    channel_state=payer_channel,
                    secret=secret,
                    expiration=lock.expiration,
                    block_hash=block_hash,
                )
                events.extend(reveal_events)
                transaction_sent = True
    return events
def events_for_onchain_secretreveal_if_closed(
    channelmap: Dict[ChannelID, NettingChannelState],
    transfers_pair: List[MediationPairState],
    secret: Secret,
    secrethash: SecretHash,
    block_hash: BlockHash,
) -> List[Event]:
    """Register the secret on-chain if the payer channel is already closed and
    the mediator learned the secret off-chain.

    Balance proofs are not exchanged for closed channels, so there is no reason
    to wait for the unsafe region to register secret.

    Note:
        If the secret is learned before the channel is closed, then the channel
        will register the secrets in bulk, not the transfer.
    """
    events: List[Event] = list()
    # Collect the existing payer channels; channels may have been removed.
    all_payer_channels = []
    for pair in transfers_pair:
        channel_state = get_payer_channel(channelmap, pair)
        if channel_state:
            all_payer_channels.append(channel_state)
    transaction_sent = has_secret_registration_started(
        all_payer_channels, transfers_pair, secrethash
    )
    # Just like the case for entering the danger zone, this will only consider
    # the transfers which have a pair.
    for pending_pair in get_pending_transfer_pairs(transfers_pair):
        payer_channel = get_payer_channel(channelmap, pending_pair)
        # Don't register the secret on-chain if the channel is open or settled
        if payer_channel and channel.get_status(payer_channel) == ChannelState.STATE_CLOSED:
            pending_pair.payer_state = "payer_waiting_secret_reveal"
            # At most one registration transaction is sent for the secret.
            if not transaction_sent:
                partner_state = payer_channel.partner_state
                lock = channel.get_lock(partner_state, secrethash)
                # The mediator task lives as long as there are any pending
                # locks, it may be the case that some of the transfer_pairs got
                # resolved off-chain, but others didn't. For this reason we
                # must check if the lock is still part of the channel
                if lock:
                    reveal_events = secret_registry.events_for_onchain_secretreveal(
                        channel_state=payer_channel,
                        secret=secret,
                        expiration=lock.expiration,
                        block_hash=block_hash,
                    )
                    events.extend(reveal_events)
                    transaction_sent = True
    return events
def events_to_remove_expired_locks(
    mediator_state: MediatorTransferState,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    block_number: BlockNumber,
    pseudo_random_generator: random.Random,
) -> List[Event]:
    """Clear the channels which have expired locks.

    This only considers the *sent* transfers, received transfers can only be
    updated by the partner.
    """
    events: List[Event] = list()
    for transfer_pair in mediator_state.transfers_pair:
        balance_proof = transfer_pair.payee_transfer.balance_proof
        channel_identifier = balance_proof.channel_identifier
        channel_state = channelidentifiers_to_channels.get(channel_identifier)
        if not channel_state:
            continue
        secrethash = mediator_state.secrethash
        # The lock may be in either the locked or the unlocked bucket of our
        # end state, but never both.
        lock: Union[None, LockType] = None
        if secrethash in channel_state.our_state.secrethashes_to_lockedlocks:
            assert (
                secrethash not in channel_state.our_state.secrethashes_to_unlockedlocks
            ), "Locks for secrethash are already unlocked"
            lock = channel_state.our_state.secrethashes_to_lockedlocks.get(secrethash)
        elif secrethash in channel_state.our_state.secrethashes_to_unlockedlocks:
            lock = channel_state.our_state.secrethashes_to_unlockedlocks.get(secrethash)
        if lock:
            lock_expiration_threshold = channel.get_sender_expiration_threshold(lock.expiration)
            has_lock_expired = channel.is_lock_expired(
                end_state=channel_state.our_state,
                lock=lock,
                block_number=block_number,
                lock_expiration_threshold=lock_expiration_threshold,
            )
            is_channel_open = channel.get_status(channel_state) == ChannelState.STATE_OPENED
            # Only an open channel can exchange the LockExpired message.
            if has_lock_expired and is_channel_open:
                transfer_pair.payee_state = "payee_expired"
                expired_lock_events = channel.send_lock_expired(
                    channel_state=channel_state,
                    locked_lock=lock,
                    pseudo_random_generator=pseudo_random_generator,
                )
                events.extend(expired_lock_events)
                unlock_failed = EventUnlockFailed(
                    transfer_pair.payee_transfer.payment_identifier,
                    transfer_pair.payee_transfer.lock.secrethash,
                    "lock expired",
                )
                events.append(unlock_failed)
    return events
def secret_learned(
    state: MediatorTransferState,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    pseudo_random_generator: random.Random,
    block_number: BlockNumber,
    block_hash: BlockHash,
    secret: Secret,
    secrethash: SecretHash,
    payee_address: Address,
) -> TransitionResult[MediatorTransferState]:
    """Unlock the payee lock, reveal the lock to the payer, and if necessary
    register the secret on-chain.
    """
    # Record the secret on every pair's channels and mark the revealing payee.
    offchain_set_events = set_offchain_secret(
        state, channelidentifiers_to_channels, secret, secrethash
    )
    set_offchain_reveal_state(state.transfers_pair, payee_address)
    # Payer channels that are already closed need an on-chain registration.
    onchain_reveal_events = events_for_onchain_secretreveal_if_closed(
        channelmap=channelidentifiers_to_channels,
        transfers_pair=state.transfers_pair,
        secret=secret,
        secrethash=secrethash,
        block_hash=block_hash,
    )
    offchain_reveal_events = events_for_secretreveal(
        state.transfers_pair, secret, pseudo_random_generator
    )
    balance_proof_events = events_for_balanceproof(
        channelidentifiers_to_channels,
        state.transfers_pair,
        pseudo_random_generator,
        block_number,
        secret,
        secrethash,
    )
    all_events = (
        offchain_set_events
        + offchain_reveal_events
        + balance_proof_events
        + onchain_reveal_events
    )
    return TransitionResult(state, all_events)
def mediate_transfer(
    state: MediatorTransferState,
    candidate_route_states: List[RouteState],
    payer_channel: NettingChannelState,
    addresses_to_channel: Dict[Tuple[TokenNetworkAddress, Address], NettingChannelState],
    nodeaddresses_to_networkstates: NodeNetworkStateMap,
    pseudo_random_generator: random.Random,
    payer_transfer: LockedTransferSignedState,
    block_number: BlockNumber,
) -> TransitionResult[MediatorTransferState]:
    """Try a new route or fail back to a refund.

    The mediator can safely try a new route knowing that the tokens from
    payer_transfer will cover the expenses of the mediation. If there is no
    route available that may be used at the moment of the call the mediator may
    send a refund back to the payer, allowing the payer to try a different
    route.
    """
    assert (
        payer_channel.partner_state.address == payer_transfer.balance_proof.sender
    ), "Transfer must be signed by sender"
    # Drop routes whose next hop is currently unreachable.
    candidate_route_states = routes.filter_reachable_routes(
        route_states=candidate_route_states,
        nodeaddresses_to_networkstates=nodeaddresses_to_networkstates,
    )
    # Makes sure we filter routes that have already been used.
    #
    # So in a setup like this, we want to make sure that node 2, having tried to
    # route the transfer through 3 will also try 5 before sending it backwards to 1
    #
    #    1 -> 2 -> 3 -> 4
    #         v         ^
    #         5 -> 6 -> 7
    candidate_route_states = routes.filter_acceptable_routes(
        route_states=candidate_route_states,
        blacklisted_channel_ids=state.refunded_channels,
        addresses_to_channel=addresses_to_channel,
        token_network_address=payer_channel.token_network_address,
    )
    # Mediate through the first valid route
    for route_state in candidate_route_states:
        # A swap may change the token network for the outgoing hop; default
        # to the incoming token network otherwise.
        target_token_network = route_state.swaps.get(
            route_state.route[0], payer_channel.token_network_address
        )
        payee_channel = addresses_to_channel.get((target_token_network, route_state.route[1]))
        if not payee_channel:
            continue
        mediation_transfer_pair, mediation_events = forward_transfer_pair(
            payer_transfer=payer_transfer,
            payer_channel=payer_channel,
            payee_channel=payee_channel,
            route_state=route_state,
            pseudo_random_generator=pseudo_random_generator,
            block_number=block_number,
            route_state_table=candidate_route_states,
        )
        if mediation_transfer_pair is not None:
            state.transfers_pair.append(mediation_transfer_pair)
            return TransitionResult(state, mediation_events)
    # Could not mediate, try to refund
    if state.transfers_pair:
        original_pair = state.transfers_pair[0]
        original_channel = addresses_to_channel.get(
            (
                original_pair.payer_transfer.balance_proof.token_network_address,
                original_pair.payer_transfer.payer_address,
            )
        )
    else:
        original_channel = payer_channel
    if original_channel:
        refund_transfer_pair, refund_events = backward_transfer_pair(
            original_channel, payer_transfer, pseudo_random_generator, block_number
        )
        if refund_transfer_pair:
            state.transfers_pair.append(refund_transfer_pair)
            return TransitionResult(state, refund_events)
    # Neither mediation nor refund possible, wait for an opportunity to do either
    state.waiting_transfer = WaitingTransferState(payer_transfer)
    return TransitionResult(state, [])
def handle_block(
    mediator_state: MediatorTransferState,
    state_change: Block,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    addresses_to_channel: Dict[Tuple[TokenNetworkAddress, Address], NettingChannelState],
    nodeaddresses_to_networkstates: NodeNetworkStateMap,
    pseudo_random_generator: random.Random,
) -> TransitionResult[MediatorTransferState]:
    """After Raiden learns about a new block this function must be called to
    handle expiration of the hash time locks.

    Args:
        mediator_state: The current mediator state.
        state_change: The ``Block`` state change carrying the new block
            number and hash.

    Return:
        TransitionResult: The resulting iteration
    """
    mediate_events: List[Event] = []
    if mediator_state.waiting_transfer:
        secrethash = mediator_state.waiting_transfer.transfer.lock.secrethash
        payer_channel = channelidentifiers_to_channels.get(
            mediator_state.waiting_transfer.transfer.balance_proof.channel_identifier
        )
        if payer_channel is not None:
            # If the transfer is waiting, because its expiry was later than the settlement timeout
            # of the channel, we can retry the mediation on a new block. The call to
            # `mediate_transfer` will re-evaluate the timeouts and mediate if possible.
            mediation_attempt = mediate_transfer(
                state=mediator_state,
                candidate_route_states=mediator_state.routes,
                payer_channel=payer_channel,
                addresses_to_channel=addresses_to_channel,
                nodeaddresses_to_networkstates=nodeaddresses_to_networkstates,
                pseudo_random_generator=pseudo_random_generator,
                payer_transfer=mediator_state.waiting_transfer.transfer,
                block_number=state_change.block_number,
            )
            mediator_state = mediation_attempt.new_state
            mediate_events = mediation_attempt.events
            # The retry succeeded iff one of the produced events forwards (or
            # refunds) a transfer for the same secrethash.
            success_filter = lambda event: (
                isinstance(event, (SendLockedTransfer, SendRefundTransfer))
                and event.transfer.lock.secrethash == secrethash
            )
            mediation_happened = any(filter(success_filter, mediate_events))
            if mediation_happened:
                mediator_state.waiting_transfer = None
    expired_locks_events = events_to_remove_expired_locks(
        mediator_state=mediator_state,
        channelidentifiers_to_channels=channelidentifiers_to_channels,
        block_number=state_change.block_number,
        pseudo_random_generator=pseudo_random_generator,
    )
    secret_reveal_events = events_for_onchain_secretreveal_if_dangerzone(
        channelmap=channelidentifiers_to_channels,
        secrethash=mediator_state.secrethash,
        transfers_pair=mediator_state.transfers_pair,
        block_number=state_change.block_number,
        block_hash=state_change.block_hash,
    )
    unlock_fail_events = events_for_expired_pairs(
        channelidentifiers_to_channels=channelidentifiers_to_channels,
        transfers_pair=mediator_state.transfers_pair,
        waiting_transfer=mediator_state.waiting_transfer,
        block_number=state_change.block_number,
    )
    iteration = TransitionResult(
        mediator_state,
        mediate_events + unlock_fail_events + secret_reveal_events + expired_locks_events,
    )
    return iteration
def handle_refundtransfer(
    mediator_state: MediatorTransferState,
    mediator_state_change: ReceiveTransferRefund,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    addresses_to_channel: Dict[Tuple[TokenNetworkAddress, Address], NettingChannelState],
    nodeaddresses_to_networkstates: NodeNetworkStateMap,
    pseudo_random_generator: random.Random,
    block_number: BlockNumber,
) -> TransitionResult[MediatorTransferState]:
    """Validate and handle a ReceiveTransferRefund mediator_state change.

    A node might participate in mediated transfer more than once because of
    refund transfers, eg. A-B-C-B-D-T, B tried to mediate the transfer through
    C, which didn't have an available route to proceed and refunds B, at this
    point B is part of the path again and will try a new partner to proceed
    with the mediation through D, D finally reaches the target T.

    In the above scenario B has two pairs of payer and payee transfers:

        payer:A payee:C from the first SendLockedTransfer
        payer:C payee:D from the following SendRefundTransfer

    Args:
        mediator_state: Current mediator_state.
        mediator_state_change: The mediator_state change.
    Returns:
        TransitionResult: The resulting iteration.
    """
    events: List[Event] = list()
    # Once the secret is known the lock cannot be refunded anymore; only
    # process the refund while the secret is still unknown.
    if mediator_state.secret is None:
        # The last sent transfer is the only one that may be refunded, all the
        # previous ones are refunded already.
        transfer_pair = mediator_state.transfers_pair[-1]
        payee_transfer = transfer_pair.payee_transfer
        payer_transfer = mediator_state_change.transfer
        channel_identifier = payer_transfer.balance_proof.channel_identifier
        payer_channel = channelidentifiers_to_channels.get(channel_identifier)
        # Unknown channel: nothing can be validated, ignore the state change.
        if not payer_channel:
            return TransitionResult(mediator_state, list())
        is_valid, channel_events, _ = channel.handle_refundtransfer(
            received_transfer=payee_transfer,
            channel_state=payer_channel,
            refund=mediator_state_change,
        )
        # An invalid refund still produces channel events (e.g. an error
        # message to send back); return them without retrying mediation.
        if not is_valid:
            return TransitionResult(mediator_state, channel_events)
        # Remember the refunding channel so it is not chosen again as a route.
        mediator_state.refunded_channels.append(
            payer_channel.canonical_identifier.channel_identifier
        )
        # Retry the mediation with the refund transfer as the new payer
        # transfer; a different route may be picked this time.
        iteration = mediate_transfer(
            state=mediator_state,
            candidate_route_states=mediator_state.routes,
            payer_channel=payer_channel,
            addresses_to_channel=addresses_to_channel,
            nodeaddresses_to_networkstates=nodeaddresses_to_networkstates,
            pseudo_random_generator=pseudo_random_generator,
            payer_transfer=payer_transfer,
            block_number=block_number,
        )
        events.extend(channel_events)
        events.extend(iteration.events)

    iteration = TransitionResult(mediator_state, events)
    return iteration
def handle_offchain_secretreveal(
    mediator_state: MediatorTransferState,
    mediator_state_change: ReceiveSecretReveal,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    pseudo_random_generator: random.Random,
    block_number: BlockNumber,
    block_hash: BlockHash,
) -> TransitionResult[MediatorTransferState]:
    """Handle an off-chain secret reveal, emitting SendUnlock/RevealSecret
    events when the reveal is valid, the secret was not yet known and the
    payer transfer has not expired.
    """
    reveal_is_valid = is_valid_secret_reveal(
        state_change=mediator_state_change, transfer_secrethash=mediator_state.secrethash
    )
    secret_is_unknown = mediator_state.secret is None

    # A SecretReveal must be rejected if the payer transfer already expired;
    # the last transfer pair is the one used for this check.
    last_pair = mediator_state.transfers_pair[-1]
    payer_channel = channelidentifiers_to_channels.get(
        last_pair.payer_transfer.balance_proof.channel_identifier
    )
    if not payer_channel:
        # Without the payer channel the reveal cannot be processed.
        return TransitionResult(mediator_state, list())

    payer_transfer_expired = channel.is_transfer_expired(
        transfer=last_pair.payer_transfer,
        affected_channel=payer_channel,
        block_number=block_number,
    )

    if secret_is_unknown and reveal_is_valid and not payer_transfer_expired:
        return secret_learned(
            state=mediator_state,
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            pseudo_random_generator=pseudo_random_generator,
            block_number=block_number,
            block_hash=block_hash,
            secret=mediator_state_change.secret,
            secrethash=mediator_state_change.secrethash,
            payee_address=mediator_state_change.sender,
        )

    return TransitionResult(mediator_state, list())
def handle_onchain_secretreveal(
    mediator_state: MediatorTransferState,
    onchain_secret_reveal: ContractReceiveSecretReveal,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    pseudo_random_generator: random.Random,
    block_number: BlockNumber,
) -> TransitionResult[MediatorTransferState]:
    """The secret was revealed on-chain, set the state of all transfers to
    secret known.
    """
    # Reject reveals that do not match this mediator's secrethash.
    if not is_valid_secret_reveal(
        state_change=onchain_secret_reveal, transfer_secrethash=mediator_state.secrethash
    ):
        return TransitionResult(mediator_state, list())

    secret = onchain_secret_reveal.secret
    secrethash = onchain_secret_reveal.secrethash
    # Use the block number at which the on-chain event was emitted, not the
    # current block.
    emission_block_number = onchain_secret_reveal.block_number

    reveal_events = set_onchain_secret(
        state=mediator_state,
        channelidentifiers_to_channels=channelidentifiers_to_channels,
        secret=secret,
        secrethash=secrethash,
        block_number=emission_block_number,
    )
    balance_proof_events = events_for_balanceproof(
        channelidentifiers_to_channels=channelidentifiers_to_channels,
        transfers_pair=mediator_state.transfers_pair,
        pseudo_random_generator=pseudo_random_generator,
        block_number=emission_block_number,
        secret=secret,
        secrethash=secrethash,
    )
    return TransitionResult(mediator_state, reveal_events + balance_proof_events)
def handle_unlock(
    mediator_state: MediatorTransferState,
    state_change: ReceiveUnlock,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
) -> TransitionResult[MediatorTransferState]:
    """ Handle a ReceiveUnlock state change. """
    events = list()
    balance_proof_sender = state_change.balance_proof.sender
    channel_identifier = state_change.balance_proof.channel_identifier

    # Find the pair whose payer transfer came from the sender of this unlock.
    for pair in mediator_state.transfers_pair:
        if pair.payer_transfer.balance_proof.sender == balance_proof_sender:
            channel_state = channelidentifiers_to_channels.get(channel_identifier)

            if channel_state:
                is_valid, channel_events, _ = channel.handle_unlock(channel_state, state_change)
                events.extend(channel_events)

                if is_valid:
                    # The payer unlocked, so the claim against the payee
                    # transfer was successful.
                    unlock = EventUnlockClaimSuccess(
                        pair.payee_transfer.payment_identifier, pair.payee_transfer.lock.secrethash
                    )
                    events.append(unlock)

                    # Acknowledge the unlock message to its sender.
                    send_processed = SendProcessed(
                        recipient=balance_proof_sender,
                        message_identifier=state_change.message_identifier,
                        canonical_identifier=CANONICAL_IDENTIFIER_UNORDERED_QUEUE,
                    )
                    events.append(send_processed)

                    pair.payer_state = "payer_balance_proof"

    iteration = TransitionResult(mediator_state, events)
    return iteration
def handle_node_change_network_state(
    mediator_state: MediatorTransferState,
    state_change: ActionChangeNodeNetworkState,
    addresses_to_channel: Dict[Tuple[TokenNetworkAddress, Address], NettingChannelState],
    pseudo_random_generator: random.Random,
    block_number: BlockNumber,
) -> TransitionResult:
    """If a certain node comes online:

    1. Check if a channel exists with that node
    2. Check that this channel is a route, check if the route is valid.
    3. Check that the transfer was stuck because there was no route available.
    4. Send the transfer again to this now-available route.
    """
    # Only a node becoming reachable can unblock a stuck transfer.
    if state_change.network_state != NetworkState.REACHABLE:
        return TransitionResult(mediator_state, list())

    # Find the first route whose next hop is the node that came online.
    try:
        route = next(
            route
            for route in mediator_state.routes
            if route.next_hop_address == state_change.node_address
        )
    except StopIteration:
        return TransitionResult(mediator_state, list())

    # Nothing to retry if no transfer is waiting for a route.
    if mediator_state.waiting_transfer is None:
        return TransitionResult(mediator_state, list())

    transfer = mediator_state.waiting_transfer.transfer
    payer_channel = addresses_to_channel.get(
        (transfer.balance_proof.token_network_address, transfer.balance_proof.sender)
    )
    payee_channel = addresses_to_channel.get(
        (transfer.balance_proof.token_network_address, route.route[1])  # TODO: change TN for swaps
    )
    if not payee_channel or not payer_channel:
        return TransitionResult(mediator_state, list())

    # The payee channel must be open for the transfer to be retried.
    payee_channel_open = channel.get_status(payee_channel) == ChannelState.STATE_OPENED
    if not payee_channel_open:
        return TransitionResult(mediator_state, list())

    return mediate_transfer(
        state=mediator_state,
        candidate_route_states=mediator_state.routes,
        payer_channel=payer_channel,
        addresses_to_channel=addresses_to_channel,
        nodeaddresses_to_networkstates={state_change.node_address: state_change.network_state},
        pseudo_random_generator=pseudo_random_generator,
        payer_transfer=mediator_state.waiting_transfer.transfer,
        block_number=block_number,
    )
def state_transition(
    mediator_state: Optional[MediatorTransferState],
    state_change: StateChange,
    channelidentifiers_to_channels: Dict[ChannelID, NettingChannelState],
    addresses_to_channel: Dict[Tuple[TokenNetworkAddress, Address], NettingChannelState],
    nodeaddresses_to_networkstates: NodeNetworkStateMap,
    pseudo_random_generator: random.Random,
    block_number: BlockNumber,
    block_hash: BlockHash,
) -> TransitionResult[Optional[MediatorTransferState]]:
    """ State machine for a node mediating a transfer.

    Dispatches each state change to its dedicated handler and returns the
    handler's TransitionResult (or an empty one for unknown state changes).
    """
    # pylint: disable=too-many-branches
    # Notes:
    # - A user cannot cancel a mediated transfer after it was initiated, she
    #   may only reject to mediate before hand. This is because the mediator
    #   doesn't control the secret reveal and needs to wait for the lock
    #   expiration before safely discarding the transfer.
    iteration = TransitionResult(mediator_state, list())

    # NOTE(review): exact type comparison (`type(...) ==`) rather than
    # isinstance is used throughout — presumably to avoid matching
    # subclasses; confirm before changing to isinstance-based dispatch.
    if type(state_change) == ActionInitMediator:
        assert isinstance(state_change, ActionInitMediator), MYPY_ANNOTATION
        # Initialization is only valid when no mediator state exists yet.
        if mediator_state is None:
            iteration = handle_init(
                state_change=state_change,
                channelidentifiers_to_channels=channelidentifiers_to_channels,
                addresses_to_channel=addresses_to_channel,
                nodeaddresses_to_networkstates=nodeaddresses_to_networkstates,
                pseudo_random_generator=pseudo_random_generator,
                block_number=block_number,
            )
    elif type(state_change) == Block:
        assert isinstance(state_change, Block), MYPY_ANNOTATION
        assert mediator_state, "Block should be accompanied by a valid mediator state"
        iteration = handle_block(
            mediator_state=mediator_state,
            state_change=state_change,
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            addresses_to_channel=addresses_to_channel,
            nodeaddresses_to_networkstates=nodeaddresses_to_networkstates,
            pseudo_random_generator=pseudo_random_generator,
        )
    elif type(state_change) == ReceiveTransferRefund:
        assert isinstance(state_change, ReceiveTransferRefund), MYPY_ANNOTATION
        msg = "ReceiveTransferRefund should be accompanied by a valid mediator state"
        assert mediator_state, msg
        iteration = handle_refundtransfer(
            mediator_state=mediator_state,
            mediator_state_change=state_change,
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            addresses_to_channel=addresses_to_channel,
            nodeaddresses_to_networkstates=nodeaddresses_to_networkstates,
            pseudo_random_generator=pseudo_random_generator,
            block_number=block_number,
        )
    elif type(state_change) == ReceiveSecretReveal:
        assert isinstance(state_change, ReceiveSecretReveal), MYPY_ANNOTATION
        msg = "ReceiveSecretReveal should be accompanied by a valid mediator state"
        assert mediator_state, msg
        iteration = handle_offchain_secretreveal(
            mediator_state=mediator_state,
            mediator_state_change=state_change,
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            pseudo_random_generator=pseudo_random_generator,
            block_number=block_number,
            block_hash=block_hash,
        )
    elif type(state_change) == ContractReceiveSecretReveal:
        assert isinstance(state_change, ContractReceiveSecretReveal), MYPY_ANNOTATION
        msg = "ContractReceiveSecretReveal should be accompanied by a valid mediator state"
        assert mediator_state, msg
        iteration = handle_onchain_secretreveal(
            mediator_state=mediator_state,
            onchain_secret_reveal=state_change,
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            pseudo_random_generator=pseudo_random_generator,
            block_number=block_number,
        )
    elif type(state_change) == ReceiveUnlock:
        assert isinstance(state_change, ReceiveUnlock), MYPY_ANNOTATION
        assert mediator_state, "ReceiveUnlock should be accompanied by a valid mediator state"
        iteration = handle_unlock(
            mediator_state=mediator_state,
            state_change=state_change,
            channelidentifiers_to_channels=channelidentifiers_to_channels,
        )
    elif type(state_change) == ReceiveLockExpired:
        assert isinstance(state_change, ReceiveLockExpired), MYPY_ANNOTATION
        assert mediator_state, "ReceiveLockExpired should be accompanied by a valid mediator state"
        iteration = handle_lock_expired(
            mediator_state=mediator_state,
            state_change=state_change,
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            block_number=block_number,
        )
    elif type(state_change) == ActionChangeNodeNetworkState:
        assert isinstance(state_change, ActionChangeNodeNetworkState), MYPY_ANNOTATION
        msg = "ActionChangeNodeNetworkState should be accompanied by a valid mediator state"
        assert mediator_state, msg
        iteration = handle_node_change_network_state(
            mediator_state=mediator_state,
            state_change=state_change,
            addresses_to_channel=addresses_to_channel,
            pseudo_random_generator=pseudo_random_generator,
            block_number=block_number,
        )

    # this is the place for paranoia: validate the resulting state before
    # handing it back to the caller.
    if iteration.new_state is not None:
        typecheck(iteration.new_state, MediatorTransferState)
        sanity_check(iteration.new_state, channelidentifiers_to_channels)

    return clear_if_finalized(iteration, channelidentifiers_to_channels)
| [
11748,
340,
861,
10141,
198,
11748,
10088,
198,
11748,
4738,
198,
6738,
49876,
1330,
376,
7861,
198,
6738,
19720,
1330,
4889,
540,
198,
198,
6738,
9513,
268,
13,
1069,
11755,
1330,
13794,
18156,
9921,
3920,
37,
1453,
198,
6738,
9513,
26... | 2.548803 | 23,892 |
from django.apps import AppConfig
| [
6738,
42625,
14208,
13,
18211,
1330,
2034,
16934,
628
] | 3.888889 | 9 |
# coding: utf-8
# In[1]:
import tensorflow as tf
import utils as utils
import aux_network_func as af
import data_processor as dp
#Alphabet maps objects to integer ids
from alphabet import Alphabet
import network as network
import dill
import numpy as np
import os
import time
import datetime
from tensorflow.python import debug as tf_debug
# In[2]:
tf.__version__
# --- Configuration -------------------------------------------------------
#usage : python BasicTextPreprocessing_CNN_CRF.py
#here 'word' is the name of the alphabet class instance
print("Loading data...")
word_alphabet = Alphabet('word')
#'label_name' is 'pos' or 'ner'
label_name ="ner"
label_alphabet = Alphabet(label_name)
logger = utils.get_logger("MainCode")
embedding = "glove"
# NOTE(review): hard-coded absolute path to the embeddings — adjust per machine.
embedding_path = "/home/yuchen/useful_data/glove.twitter.27B.100d.txt.gz" # "glove.6B.100d.gz"
oov = 'embedding'
fine_tune = True
# Model Hyperparameters
#tf.flags.DEFINE_integer("embedding_dim", 128, "Dimensionality of character embedding (default: 128)") #not used
tf.flags.DEFINE_string("train_path", "../final_data/train.final.featured", "Train Path") # eng.train.iobes.act
tf.flags.DEFINE_string("test_path", "../final_data/test.final", "Test Path") # eng.testa.iobes.act
tf.flags.DEFINE_string("dev_path", "../final_data/dev.final.featured", "dev Path") # eng.testb.iobes.act
tf.flags.DEFINE_float("dropout_keep_prob", 0.5, "Dropout keep probability (default: 0.5)")
tf.flags.DEFINE_float("grad_clip", 5, "value for gradient clipping to avoid exploding/vanishing gradient(default: 5.0) in LSTM")
tf.flags.DEFINE_float("max_global_clip", 5.0, "value for gradient clipping to avoid exploding/vanishing gradient overall(default: 1.0)")
# Training parameters
tf.flags.DEFINE_integer("batch_size", 64, "Batch Size (default: 64)")
tf.flags.DEFINE_integer("word_col", 0, "position of the word in input file (default: 0)")
tf.flags.DEFINE_integer("label_col", 5, "position of the label in input file (default: 3)")
tf.flags.DEFINE_integer("n_hidden_LSTM", 50, "Number of hidden units in LSTM (default: 200)")
tf.flags.DEFINE_integer("num_epochs", 100, "Number of training epochs (default: 200)")
tf.flags.DEFINE_integer("num_filters", 30, "Number of filters to apply for char CNN (default: 30)")
tf.flags.DEFINE_integer("filter_size", 3, "filter_size (default: 3 )")
tf.flags.DEFINE_integer("evaluate_every", 100, "Evaluate model on dev set after this many steps (default: 100)")
tf.flags.DEFINE_integer("char_embedd_dim", 30, "char_embedd_dim(default: 30)")
tf.flags.DEFINE_integer("Optimizer", 1, "Adam : 1 , SGD:2")
tf.flags.DEFINE_integer("num_checkpoints", 5, "Number of checkpoints to store (default: 5)")
tf.flags.DEFINE_float("starter_learning_rate", 0.015, "Initial learning rate for the optimizer. (default: 1e-3)")
tf.flags.DEFINE_float("decay_rate", 0.015, "How much to decay the learning rate. (default: 0.015)")
# Misc Parameters
tf.flags.DEFINE_boolean("allow_soft_placement", True, "Allow device soft device placement")
tf.flags.DEFINE_boolean("log_device_placement", False, "Log placement of ops on devices")
tf.flags.DEFINE_boolean("PadZeroBegin", False, "where to pad zero in the input")
FLAGS = tf.flags.FLAGS
FLAGS._parse_flags()
Flags_Dict = utils.print_FLAGS(FLAGS,logger)
train_path = FLAGS.train_path
test_path = FLAGS.test_path
dev_path = FLAGS.dev_path
word_column = FLAGS.word_col
label_column = FLAGS.label_col
# Output directory for models and summaries (timestamped per run)
timestamp = str(int(time.time()))
out_dir = os.path.abspath(os.path.join(os.path.curdir, "runs", timestamp))
print("Writing to {}\n".format(out_dir))
# --- Data loading and preprocessing --------------------------------------
# read training data
logger.info("Reading data from training set...")
word_sentences_train, _, word_index_sentences_train, label_index_sentences_train = dp.read_conll_sequence_labeling(
    train_path, word_alphabet, label_alphabet, word_column, label_column,out_dir=out_dir)
# if oov is "random" and do not fine tune, close word_alphabet
if oov == "random" and not fine_tune:
    logger.info("Close word alphabet.")
    word_alphabet.close()
# read dev data
logger.info("Reading data from dev set...")
word_sentences_dev, _, word_index_sentences_dev, label_index_sentences_dev = dp.read_conll_sequence_labeling(
    dev_path, word_alphabet, label_alphabet, word_column, label_column)
# close alphabets : by close we mean we cannot add any more words to the word vocabulary.
# TODO: change to close this after train set alone
word_alphabet.close()
label_alphabet.close()
# we subtract 1 because the zero index is never used (reserved for padding/unknown).
logger.info("word alphabet size: %d" % (word_alphabet.size() - 1))
logger.info("label alphabet size: %d" % (label_alphabet.size() - 1))
# get maximum length : this is mainly for padding.
max_length_train = utils.get_max_length(word_sentences_train)
max_length_dev = utils.get_max_length(word_sentences_dev)
#max_length_test = utils.get_max_length(word_sentences_test)
max_length = min(dp.MAX_LENGTH, max(max_length_train, max_length_dev))
logger.info("Maximum length (i.e max words ) of training set is %d" % max_length_train)
logger.info("Maximum length (i.e max words ) of dev set is %d" % max_length_dev)
#logger.info("Maximum length (i.e max words ) of test set is %d" % max_length_test)
logger.info("Maximum length (i.e max words ) used for training is %d" % max_length)
# Pad word/label index sequences up to max_length.
logger.info("Padding training text and lables ...")
word_index_sentences_train_pad,train_seq_length = utils.padSequence(word_index_sentences_train,max_length, beginZero=FLAGS.PadZeroBegin)
label_index_sentences_train_pad,_= utils.padSequence(label_index_sentences_train,max_length, beginZero=FLAGS.PadZeroBegin)
logger.info("Padding dev text and lables ...")
word_index_sentences_dev_pad,dev_seq_length = utils.padSequence(word_index_sentences_dev,max_length, beginZero=FLAGS.PadZeroBegin)
label_index_sentences_dev_pad,_= utils.padSequence(label_index_sentences_dev,max_length, beginZero=FLAGS.PadZeroBegin)
# Build the character-level alphabet and indices (for the char CNN).
logger.info("Creating character set FROM training set ...")
char_alphabet = Alphabet('character')
char_index_train,max_char_per_word_train= dp.generate_character_data(word_sentences_train,
    char_alphabet=char_alphabet,setType="Train")
# close character alphabet. We close it because the embedding table is going to be random.
char_alphabet.close()
logger.info("Creating character set FROM dev set ...")
char_index_dev,max_char_per_word_dev= dp.generate_character_data(word_sentences_dev,
    char_alphabet=char_alphabet, setType="Dev")
logger.info("character alphabet size: %d" % (char_alphabet.size() - 1))
max_char_per_word = min(dp.MAX_CHAR_PER_WORD, max_char_per_word_train,max_char_per_word_dev)
logger.info("Maximum character length is %d" %max_char_per_word)
logger.info("Constructing embedding table ...")
#TODO : modify network to use this
char_embedd_table = dp.build_char_embedd_table(char_alphabet,char_embedd_dim=FLAGS.char_embedd_dim)
logger.info("Padding Training set ...")
char_index_train_pad = dp.construct_padded_char(char_index_train, char_alphabet, max_sent_length=max_length,max_char_per_word=max_char_per_word)
logger.info("Padding Dev set ...")
char_index_dev_pad = dp.construct_padded_char(char_index_dev, char_alphabet, max_sent_length=max_length,max_char_per_word=max_char_per_word)
#logger.info("Generating data with fine tuning...")
embedd_dict, embedd_dim, caseless = utils.load_word_embedding_dict(embedding, embedding_path,logger)
logger.info("Dimension of embedding is %d, Caseless: %d" % (embedd_dim, caseless))
# Create an embedding table: words found in glove get glove vectors, the rest random values.
embedd_table = dp.build_embedd_table(word_alphabet, embedd_dict, embedd_dim, caseless)
word_vocab = word_alphabet.instances
word_vocab_size = len(word_vocab)
char_vocab = char_alphabet.instances
char_vocab_size = len(char_vocab)
num_classes = len(label_alphabet.instances) + 1 #to account for zero index we dont use
#logger.info("length of the embedding table is %d" , embedd_table.shape[0])
# Store the parameters so the test-time script can reload the same configuration.
Flags_Dict['sequence_length']=max_length
Flags_Dict['num_classes']=num_classes
Flags_Dict['word_vocab_size']=word_vocab_size
Flags_Dict['char_vocab_size']=char_vocab_size
Flags_Dict['max_char_per_word']=max_char_per_word
Flags_Dict['embedd_dim']=embedd_dim
Flags_Dict['out_dir']=out_dir
Flags_Dict['model_path']=out_dir
# Checkpoint directory. Tensorflow assumes this directory already exists so we need to create it
checkpoint_dir = os.path.abspath(os.path.join(out_dir, "checkpoints"))
Flags_Dict['checkpoint_dir']=checkpoint_dir
# Persist config and alphabets with dill so evaluation can restore them.
dill.dump(Flags_Dict,open(os.path.join(out_dir, "config.pkl"),'wb'))
dill.dump(char_alphabet,open(os.path.join(out_dir, "char_alphabet.pkl"),'wb'))
dill.dump(word_alphabet,open(os.path.join(out_dir, "word_alphabet.pkl"),'wb'))
dill.dump(label_alphabet,open(os.path.join(out_dir, "label_alphabet.pkl"),'wb'))
# --- Graph construction and training loop --------------------------------
tf.reset_default_graph()
session_conf = tf.ConfigProto(
    allow_soft_placement=FLAGS.allow_soft_placement,
    log_device_placement=FLAGS.log_device_placement)
with tf.Session(config=session_conf) as sess:
    # Best-so-far trackers used for checkpointing on dev improvement.
    best_accuracy = 0
    best_overall_accuracy = 0
    best_accuracy_test = 0
    best_overall_accuracy_test = 0
    best_step = 0
    BiLSTM = network.textBiLSTM(sequence_length=max_length, num_classes=num_classes, word_vocab_size=word_vocab_size,
        word_embedd_dim=embedd_dim,n_hidden_LSTM =FLAGS.n_hidden_LSTM,max_char_per_word=max_char_per_word,
        char_vocab_size=char_vocab_size,char_embedd_dim = FLAGS.char_embedd_dim,grad_clip=FLAGS.grad_clip,num_filters=FLAGS.num_filters,filter_size= FLAGS.filter_size)
    # Define Training procedure
    global_step = tf.Variable(0, name="global_step", trainable=False)
    decay_step = int(len(word_index_sentences_train_pad)/FLAGS.batch_size) #we want to decay per epoch. Comes to around 1444 for batch of 100
    #print("decay_step :",decay_step)
    learning_rate = tf.train.exponential_decay(FLAGS.starter_learning_rate, global_step,decay_step, FLAGS.decay_rate, staircase=True)
    if(FLAGS.Optimizer==2):
        optimizer = tf.train.GradientDescentOptimizer(learning_rate) #also try GradientDescentOptimizer , AdamOptimizer
    elif(FLAGS.Optimizer==1):
        optimizer = tf.train.AdamOptimizer(learning_rate)
    #This is the first part of minimize()
    grads_and_vars = optimizer.compute_gradients(BiLSTM.loss)
    #clipped_grads_and_vars = [(tf.clip_by_norm(grad, FLAGS.max_global_clip), var) for grad, var in grads_and_vars]
    #we will do grad_clipping for LSTM only
    #capped_gvs = [(tf.clip_by_value(grad, -FLAGS.max_global_clip, FLAGS.max_global_clip), var) for grad, var in grads_and_vars]
    # the following block is a hack for clip by norm
    #grad_list = [grad for grad, var in grads_and_vars]
    #var_list = [var for grad, var in grads_and_vars]
    #capped_gvs = tf.clip_by_global_norm(grad_list, clip_norm=FLAGS.max_global_norm)
    #grads_and_vars_pair = zip(capped_gvs,var)
    #This is the second part of minimize()
    train_op = optimizer.apply_gradients(grads_and_vars, global_step=global_step)
    # Keep track of gradient values and sparsity (optional)
    grad_summaries = []
    for g, v in grads_and_vars:
        if g is not None:
            grad_hist_summary = tf.summary.histogram("{}/grad/hist".format(v.name), g)
            sparsity_summary = tf.summary.scalar("{}/grad/sparsity".format(v.name), tf.nn.zero_fraction(g))
            grad_summaries.append(grad_hist_summary)
            grad_summaries.append(sparsity_summary)
    grad_summaries_merged = tf.summary.merge(grad_summaries)
    # Summaries for loss and accuracy
    loss_summary = tf.summary.scalar("loss", BiLSTM.loss)
    #acc_summary = tf.summary.scalar("accuracy", BiLSTM.accuracy)
    # Train Summaries
    train_summary_op = tf.summary.merge([loss_summary, grad_summaries_merged])
    train_summary_dir = os.path.join(out_dir, "summaries", "train")
    train_summary_writer = tf.summary.FileWriter(train_summary_dir, sess.graph)
    # Dev summaries
    dev_summary_op = tf.summary.merge([loss_summary])
    dev_summary_dir = os.path.join(out_dir, "summaries", "dev")
    dev_summary_writer = tf.summary.FileWriter(dev_summary_dir, sess.graph)
    checkpoint_prefix = os.path.join(checkpoint_dir, "model")
    if not os.path.exists(checkpoint_dir):
        os.makedirs(checkpoint_dir)
    saver = tf.train.Saver(tf.global_variables(), max_to_keep=FLAGS.num_checkpoints)
    # variables need to be initialized before we can use them
    sess.run(tf.global_variables_initializer())
    #debug block
    #sess = tf_debug.LocalCLIDebugWrapperSession(sess)
    #sess.add_tensor_filter("has_inf_or_nan", tf_debug.has_inf_or_nan)
    def train_step(session,BiLSTM,PadZeroBegin,max_length,x_batch, y_batch,act_seq_lengths,dropout_keep_prob,embedd_table,char_batch,char_embedd_table):
        """
        A single training step: builds the feed dict, runs the train op and
        records the training summaries for this step.
        """
        feed_dict=af.create_feed_Dict(BiLSTM,PadZeroBegin,max_length,x_batch,y_batch,act_seq_lengths,dropout_keep_prob,embedd_table,char_batch,char_embedd_table)
        _, step, summaries, loss,logits,transition_params = session.run(
            [train_op, global_step, train_summary_op, BiLSTM.loss,BiLSTM.logits,BiLSTM.transition_params],
            feed_dict)
        time_str = datetime.datetime.now().isoformat()
        #print("{}: step {}, loss {:g}".format(time_str, step, loss))
        train_summary_writer.add_summary(summaries, step)
    # Generate batches
    batches = utils.batch_iter(
        list(zip(word_index_sentences_train_pad, label_index_sentences_train_pad ,train_seq_length,char_index_train_pad)), FLAGS.batch_size, FLAGS.num_epochs)
    # Training loop. For each batch...
    for batch in batches:
        x_batch, y_batch,act_seq_lengths,char_batch = zip(*batch)
        train_step(sess,BiLSTM,FLAGS.PadZeroBegin,max_length,x_batch, y_batch,act_seq_lengths,FLAGS.dropout_keep_prob,
            embedd_table,char_batch,char_embedd_table)
        current_step = tf.train.global_step(sess, global_step)
        if current_step % FLAGS.evaluate_every == 0:
            print("\nEvaluation:")
            # NOTE(review): `dev_step` is not defined in this file — presumably
            # imported elsewhere (e.g. aux_network_func); confirm, otherwise
            # this raises NameError at the first evaluation.
            new_accuracy,accuracy_low_classes,fname,output=dev_step(sess,BiLSTM,FLAGS.PadZeroBegin,max_length,word_index_sentences_dev_pad,
                label_index_sentences_dev_pad ,dev_seq_length,FLAGS.dropout_keep_prob,
                embedd_table,current_step,char_index_dev_pad,char_embedd_table, writer=dev_summary_writer)
            print("")
            f = open(fname,'w')
            f.write(output)
            f.close()
            os.system('python ../data/wnuteval.py '+fname)
            # Checkpoint only when the dev accuracy on the tracked classes improves.
            if (accuracy_low_classes > best_accuracy):
                os.system('rm *.txt')
                f = open(fname,'w')
                f.write(output)
                f.close()
                path = saver.save(sess, checkpoint_prefix, global_step=current_step)
                best_accuracy = accuracy_low_classes
                best_step = current_step
                best_overall_accuracy = new_accuracy
                print("New Best Dev Model: Saved model checkpoint to {}\n".format(path))
                #run test data
                #new_accuracy_test,accuracy_low_classes_test = af.test_step(logger= logger,session=sess,BiLSTM=BiLSTM,PadZeroBegin=FLAGS.PadZeroBegin,max_length=max_length,
                #    test_path=test_path,dropout_keep_prob=FLAGS.dropout_keep_prob,step=current_step,out_dir=out_dir,char_alphabet=char_alphabet,
                #    label_alphabet=label_alphabet,word_alphabet=word_alphabet,word_column=word_column,label_column=label_column,
                #    char_embedd_dim=FLAGS.char_embedd_dim,max_char_per_word=max_char_per_word)
                #if (accuracy_low_classes_test > best_accuracy_test):
                #    best_accuracy_test = accuracy_low_classes_test
                #    best_step_test = current_step
                #    best_overall_accuracy_test = new_accuracy_test
            print("DEV: best_accuracy on NER : %f best_step: %d best_overall_accuracy: %d" %(best_accuracy,best_step,best_overall_accuracy))
            #print("TEST : best_accuracy on NER : %f best_step: %d best_overall_accuracy: %d" %(best_accuracy_test,best_step_test,best_overall_accuracy_test))
| [
198,
2,
19617,
25,
3384,
69,
12,
23,
198,
198,
2,
554,
58,
16,
5974,
198,
198,
11748,
11192,
273,
11125,
355,
48700,
198,
11748,
3384,
4487,
355,
3384,
4487,
198,
11748,
27506,
62,
27349,
62,
20786,
355,
6580,
198,
11748,
1366,
62,
... | 2.522605 | 6,481 |
from django.urls import path
from myMusicApp.albums.views import add_album, edit_album, delete_album, album_details
# URL routes for the albums app: create, edit, delete and detail views.
# Edit/delete/details take the album primary key as an <int:pk> path converter.
urlpatterns = [
    path('add/', add_album, name='add album'),
    path('edit/<int:pk>', edit_album, name='edit album'),
    path('delete/<int:pk>', delete_album, name='delete album'),
    path('details/<int:pk>', album_details, name='album details')
]
| [
6738,
42625,
14208,
13,
6371,
82,
1330,
3108,
198,
198,
6738,
616,
22648,
4677,
13,
40916,
82,
13,
33571,
1330,
751,
62,
40916,
11,
4370,
62,
40916,
11,
12233,
62,
40916,
11,
5062,
62,
36604,
198,
198,
6371,
33279,
82,
796,
685,
198... | 2.748148 | 135 |
# x_7_7
#
#
from ast import pattern
import re
pin = '1234' # 暗証番号
pattern1 = r'[0-9]{4}'
pattern2 = r'[2-9]{4}'
pattern3 = r'[0-9]{3}'
pattern4 = r'[0-9]{3, 5}'
print(is_match(r'[0-9]{4}', pin))
| [
2,
2124,
62,
22,
62,
22,
198,
2,
198,
2,
198,
198,
6738,
6468,
1330,
3912,
198,
11748,
302,
628,
198,
198,
11635,
796,
705,
1065,
2682,
6,
220,
1303,
10545,
248,
245,
164,
101,
120,
45911,
103,
20998,
115,
198,
198,
33279,
16,
7... | 1.624 | 125 |
from django.contrib import admin
from django.utils.safestring import mark_safe
# Register your models here.
from .models import UserProfile,Article,Category,Siteinfo,Acimage
# NOTE(review): the original registered models with UserProfileAdmin,
# CategoryAdmin, ArticleAdmin, SiteinfoAdmin and AcimageAdmin without
# defining or importing them, which raises NameError when Django imports
# this module. Minimal ModelAdmin subclasses are defined here so the
# registrations work; if these classes exist elsewhere in the project,
# import them instead and delete these stubs.


class UserProfileAdmin(admin.ModelAdmin):
    """Admin options for UserProfile."""


class CategoryAdmin(admin.ModelAdmin):
    """Admin options for Category."""


class ArticleAdmin(admin.ModelAdmin):
    """Admin options for Article."""


class SiteinfoAdmin(admin.ModelAdmin):
    """Admin options for Siteinfo."""


class AcimageAdmin(admin.ModelAdmin):
    """Admin options for Acimage."""


admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Article, ArticleAdmin)
admin.site.register(Siteinfo, SiteinfoAdmin)
admin.site.register(Acimage, AcimageAdmin)
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
198,
6738,
42625,
14208,
13,
26791,
13,
49585,
395,
1806,
1330,
1317,
62,
21230,
198,
2,
17296,
534,
4981,
994,
13,
198,
198,
6738,
764,
27530,
1330,
11787,
37046,
11,
14906,
11,
27313,
... | 3.432203 | 118 |
from ._fftconvolve import fftconvolve2d
from ..structures.weights import gauss
from math import ceil
def fftgauss(img, sigma, theta=0, pad_type=None, **kwargs):
    '''
    Smooths the input image with a gaussian kernel. Uses the fft method and allows
    specifying a custom pad type with **kwargs from numpy.pad documentation. Smoothing
    a color image will smooth each color channel individually.

    Parameters
    ----------
    img : (M,N) or (M,N,3) array
        An image to be smoothed
    sigma : tuple, list or float
        Sequence defining the standard deviation of the gaussian in x and y
        directions.  A single value will assign the same value to x and y st.
        devs..
    theta : float, optional
        The rotation of the gaussian in radians.
    pad_type : string, optional
        The padding type to be used. For additional information see numpy.pad .
        Defaults to constant.
    kwargs : varies
        See numpy.pad . Defaults to constant_values=0.

    Returns
    -------
    smoothed_image : ndarray
        A smoothed image. Keeps same shape and same number of color channels.

    Notes
    -----
    There are many gaussian smoothing functions. This one is unique because it
    automatically handles color images. It also allows defining very unique
    gaussian kernels with strain and orientation.
    '''
    s = img.shape[:2]
    K = gauss(sigma, theta, size=s)

    # Kernel radius passed to fftconvolve2d: for an anisotropic sigma use
    # twice the (ceiling of the) largest std dev; a scalar sigma keeps the
    # original behaviour of r == sigma.
    # BUGFIX: the docstring advertises tuple input, but the original check was
    # ``isinstance(sigma, list)`` only, so a *tuple* sigma fell through to the
    # scalar branch and leaked the tuple itself into ``r``.
    if isinstance(sigma, (list, tuple)):
        r = 2 * ceil(max(sigma))
    else:
        r = sigma
    return fftconvolve2d(img, K, r=r, pad_type=pad_type, centered=True, **kwargs)
| [
6738,
47540,
487,
83,
42946,
6442,
1330,
277,
701,
42946,
6442,
17,
67,
198,
6738,
11485,
7249,
942,
13,
43775,
1330,
31986,
1046,
198,
198,
6738,
10688,
1330,
2906,
346,
198,
198,
4299,
277,
701,
4908,
1046,
7,
9600,
11,
264,
13495,
... | 2.80605 | 562 |
from flask import Flask,jsonify,request
import os
import json
import codecs
import xlwt
from flask_apscheduler import APScheduler
import time
import random
import math
import xlrd
# Flask application instance for this service.
app = Flask(__name__)
# Ensure the working directories exist before any request is served.
if not os.path.exists("Database"):
    os.mkdir("Database")
if not os.path.exists("Backup"):
    os.mkdir("Backup")
# In-memory stores; presumably populated by the (missing) request handlers --
# verify against the full file.
mData={}
mID={}
# Load the commuting spreadsheet once at startup.  NOTE: '数据' is the runtime
# sheet name (Chinese for "data") and must not be changed.
xls=xlrd.open_workbook('commuting distance and time.xls')
sheet=xls.sheet_by_name('数据')
data=[]
# Rows 0-1 are presumably headers; copy the remaining rows into ``data``.
for i in range(2,sheet.nrows):
    mdata=sheet.row_values(i)
    data.append(mdata)
    pass
# NOTE(review): ``after_request`` is not defined anywhere in this excerpt --
# this line raises NameError as shown.  Confirm against the full file.
app.after_request(after_request)
# NOTE(review): the decorated handler functions appear to have been stripped
# from this excerpt -- two route decorators directly precede the ``if`` below,
# which is a SyntaxError in this form.
@app.route('/getData',methods=['POST'])
@app.route('/saveData',methods=['POST'])
if __name__=='__main__':
    scheduler=APScheduler()
    scheduler.init_app(app)
    scheduler.start()
    app.run(host="0.0.0.0",port=80)
| [
6738,
42903,
1330,
46947,
11,
17752,
1958,
11,
25927,
198,
11748,
28686,
198,
11748,
33918,
198,
11748,
40481,
82,
198,
11748,
2124,
75,
46569,
198,
6738,
42903,
62,
499,
1416,
704,
18173,
1330,
3486,
50,
1740,
18173,
198,
11748,
640,
1... | 2.39939 | 328 |
# -*- coding: utf-8 -*-
# pylint: disable=wrong-import-position
"""
Function for computing expected number of photons to survive from a
time-independent Cartesian-binned table.
"""
from __future__ import absolute_import, division, print_function
__all__ = ['pexp_xyz']
__author__ = 'P. Eller, J.L. Lanfranchi'
__license__ = '''Copyright 2017 Philipp Eller and Justin L. Lanfranchi
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.'''
from os.path import abspath, dirname
import sys
import numpy as np
# When executed as a standalone script (not imported as part of the package),
# prepend the repository root (three levels up from this file) to sys.path so
# that ``from retro import ...`` below resolves.
if __name__ == '__main__' and __package__ is None:
    RETRO_DIR = dirname(dirname(dirname(abspath(__file__))))
    if RETRO_DIR not in sys.path:
        sys.path.append(RETRO_DIR)
from retro import DFLT_NUMBA_JIT_KWARGS, numba_jit
@numba_jit(**DFLT_NUMBA_JIT_KWARGS)
def pexp_xyz(sources, x_min, y_min, z_min, nx, ny, nz, binwidth,
             survival_prob, avg_photon_x, avg_photon_y, avg_photon_z,
             use_directionality):
    """Compute the expected number of detected photons in _all_ DOMs at _all_
    times.

    Parameters
    ----------
    sources
        Sequence of photon sources; each element must expose 'x', 'y', 'z'
        and 'photons' fields (e.g. a numpy record array).
    x_min, y_min, z_min
        Lower edge of the Cartesian binning grid along each axis.
    nx, ny, nz
        Number of bins along each axis; sources falling outside the grid are
        skipped.
    binwidth
        Bin width; the same width is used for the x, y and z axes.
    survival_prob
        Array indexed as [x_idx, y_idx, z_idx] giving the photon survival
        probability per bin.
    avg_photon_x, avg_photon_y, avg_photon_z
        Per-bin average photon direction components; currently unused since
        directionality is not implemented yet.
    use_directionality : bool
        Must be False for now; passing True raises NotImplementedError.

    Returns
    -------
    expected_photon_count : float
        Sum over in-grid sources of photons * survival probability.
    """
    expected_photon_count = 0.0
    for source in sources:
        # Locate the bin containing the source; skip sources off the grid.
        x_idx = int((source['x'] - x_min) // binwidth)
        if x_idx < 0 or x_idx >= nx:
            continue
        y_idx = int((source['y'] - y_min) // binwidth)
        if y_idx < 0 or y_idx >= ny:
            continue
        z_idx = int((source['z'] - z_min) // binwidth)
        if z_idx < 0 or z_idx >= nz:
            continue
        sp = survival_prob[x_idx, y_idx, z_idx]
        surviving_count = source['photons'] * sp

        # TODO: Incorporate photon direction info
        if use_directionality:
            raise NotImplementedError('Directionality cannot be used yet')

        expected_photon_count += surviving_count

    return expected_photon_count
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
279,
2645,
600,
25,
15560,
28,
36460,
12,
11748,
12,
9150,
198,
198,
37811,
198,
22203,
329,
14492,
2938,
1271,
286,
44378,
284,
7866,
422,
257,
198,
2435,
12,
3475... | 2.493401 | 985 |
from django.apps import AppConfig
| [
6738,
42625,
14208,
13,
18211,
1330,
2034,
16934,
628
] | 3.888889 | 9 |
# Copyright 2011 OpenStack Foundation
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from conveyoragent import context
from conveyoragent.engine.api import extensions
from conveyoragent.engine.api.wsgi import wsgi
from conveyoragent.engine.server import manager
# Module-level logger for this extension module.
LOG = logging.getLogger(__name__)
# Admin request context created once at import time; presumably shared by the
# extension's resources -- confirm usage in the full file.
defaultContext = context.get_admin_context()
class Migration(extensions.ExtensionDescriptor):
    """Enable admin actions."""
    # Human-readable extension name.
    name = "Migration"
    # Alias used to reference this extension in requests/configuration.
    alias = "conveyoragent-migration"
    # XML namespace advertised for this extension.
    namespace = "http://docs.openstack.org/v2vgateway/ext/migration/api/v1"
    # Timestamp of the last change to this extension definition.
    updated = "2016-01-29T00:00:00+00:00"
    # TODO(review): placeholder -- no new resources defined yet.
    # TODO(review): placeholder -- no existing resources extended yet.
| [
2,
15069,
2813,
4946,
25896,
5693,
198,
2,
15069,
2813,
10799,
8909,
16685,
198,
2,
1439,
6923,
33876,
13,
198,
2,
198,
2,
220,
220,
220,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,... | 3.265823 | 395 |
#!/usr/bin/env python2.7
#
# Copyright 2017 Google Inc, 2019 Open GEE Contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Serves a specified globe to Google Earth EC.
Google Earth EC can be running on the same machine
or can be accessing this server through a proxy servers
sharing the same key.
"""
import tornado.httpserver
import tornado.ioloop
import tornado.web
import local_server
import portable_globe
import portable_server_base
import portable_web_interface
from platform_specific_functions import prepare_for_io_loop
class FlatFileHandler(portable_server_base.BaseHandler):
  """Class for handling flatfile requests."""

  @tornado.web.asynchronous
  def get(self):
    """Handle GET request for packets."""
    # Assumes a query string is present; a URI without '?' would raise
    # IndexError here -- TODO(review) confirm upstream routing guarantees it.
    argument_str = self.request.uri.split("?")[1]
    arguments = argument_str.split("-")
    # "lf-..." requests are icon/legend fetches served as PNG; anything else
    # is an opaque packet stream.
    if arguments[0] == "lf":
      self.set_header("Content-Type", "image/png")
    else:
      self.set_header("Content-Type", "application/octet-stream")
    # Composite globes (glc) serve from their base layer; plain globes use
    # the non-composite sentinel layer.
    if tornado.web.globe_.IsComposite():
      tornado.web.local_server_.LocalFlatFileHandler(
          self, portable_globe.COMPOSITE_BASE_LAYER)
    else:
      tornado.web.local_server_.LocalFlatFileHandler(
          self, portable_globe.NON_COMPOSITE_LAYER)
    # @asynchronous disables auto-finish, so finish explicitly.
    self.finish()
class CompositeFlatFileHandler(portable_server_base.BaseHandler):
  """Class for handling flatfile requests to glc layers."""

  @tornado.web.asynchronous
  def get(self, layer_id):
    """Handle GET request for packets.

    Args:
      layer_id: id (captured from the URL) of the glc layer to serve from.
    """
    # Assumes a query string is present (split on '?').
    argument_str = self.request.uri.split("?")[1]
    arguments = argument_str.split("-")
    # "lf-..." requests are icon/legend fetches served as PNG.
    if arguments[0] == "lf":
      self.set_header("Content-Type", "image/png")
    else:
      self.set_header("Content-Type", "application/octet-stream")
    tornado.web.local_server_.LocalFlatFileHandler(self, int(layer_id))
    # @asynchronous disables auto-finish, so finish explicitly.
    self.finish()
class DbRootHandler(portable_server_base.BaseHandler):
  """Class for returning the dbRoot."""

  @tornado.web.asynchronous
  def get(self):
    """Handle GET request for the dbroot."""
    self.set_header("Content-Type", "application/octet-stream")
    # Only 3D globes have a dbRoot; a bad request is logged to stdout and an
    # empty response is finished below.
    if not tornado.web.globe_.Is3d():
      print "Bad request: dbRoot from non-3D globe."
    else:
      if tornado.web.globe_.IsComposite():
        tornado.web.local_server_.LocalDbRootHandler(
            self, portable_globe.COMPOSITE_BASE_LAYER)
      else:
        tornado.web.local_server_.LocalDbRootHandler(
            self, portable_globe.NON_COMPOSITE_LAYER)
    # @asynchronous disables auto-finish, so finish explicitly.
    self.finish()
class CompositeDbRootHandler(portable_server_base.BaseHandler):
  """Class for returning the meta dbRoot of a glc or dbRoots of its layers."""

  @tornado.web.asynchronous
  def get(self, layer_id):
    """Handle GET request for the dbroot.

    Args:
      layer_id: id (captured from the URL) of the glc layer to serve from.
    """
    self.set_header("Content-Type", "application/octet-stream")
    # Layer-addressed dbRoots only make sense for 3D composite globes; bad
    # requests are logged to stdout and an empty response is finished below.
    if not tornado.web.globe_.Is3d():
      print "Bad request: dbRoot from non-3D globe."
    elif not tornado.web.globe_.IsComposite():
      print "Bad request: composite request for glb."
    else:
      tornado.web.local_server_.LocalDbRootHandler(self, int(layer_id))
    self.finish()
class CompositeVectorLayerHandler(portable_server_base.BaseHandler):
  """Class for returning vector layer data."""

  @tornado.web.asynchronous
  def get(self, layer_id, path):
    """Handle GET request for vector layer data.

    Args:
      layer_id: id (captured from the URL) of the glc layer.
      path: path of the kml resource within the layer.
    """
    # Python 2: force the (possibly unicode) path to a plain ascii byte
    # string, dropping non-ascii characters.
    path = path.encode("ascii", "ignore")
    self.set_header("Content-Type", "text/html")
    if not tornado.web.globe_.IsComposite():
      print "Bad request: composite request for glb."
    else:
      tornado.web.local_server_.LocalLayerVectorFileHandler(
          self, path, int(layer_id))
    self.finish()
class DocsHandler(portable_server_base.BaseHandler):
  """Class for returning the content of files directly from disk."""

  @tornado.web.asynchronous
  def get(self, path):
    """Handle GET request for some document.

    For example it is used for javascript files for
    the Google Earth web browser plugin.

    Args:
      path: Path to file to be returned.
    """
    # Python 2: force the path to a plain ascii byte string.
    path = path.encode("ascii", "ignore")
    # Infer the content type from the last three characters of the path;
    # anything that is not gif/png is served as html.
    if path[-3:].lower() == "gif":
      self.set_header("Content-Type", "image/gif")
    elif path[-3:].lower() == "png":
      self.set_header("Content-Type", "image/png")
    else:
      self.set_header("Content-Type", "text/html")
    tornado.web.local_server_.LocalDocsHandler(self, path)
    self.finish()
class CompositeDocsHandler(portable_server_base.BaseHandler):
  """Class for returning the content of files directly from disk."""

  @tornado.web.asynchronous
  def get(self, layer_id, path):
    """Handle GET request for some document.

    For example it is used for javascript files for
    the Google Earth web browser plugin.

    Args:
      layer_id: Id of layer within the composite.
      path: Path to file to be returned.
    """
    # Python 2: force the path to a plain ascii byte string.
    path = path.encode("ascii", "ignore")
    # Infer the content type from the last three characters of the path;
    # anything that is not gif/png is served as html.
    if path[-3:].lower() == "gif":
      self.set_header("Content-Type", "image/gif")
    elif path[-3:].lower() == "png":
      self.set_header("Content-Type", "image/png")
    else:
      self.set_header("Content-Type", "text/html")
    tornado.web.local_server_.LocalLayerDocsHandler(
        self, path, int(layer_id))
    self.finish()
class BalloonHandler(portable_server_base.BaseHandler):
  """Class for returning the content for a balloon."""

  @tornado.web.asynchronous
  def get(self):
    """Handle GET request for FT balloon data."""
    self.set_header("Content-Type", "text/html")
    # Balloon files are named after the feature id with ':' replaced by '-'
    # (':' is not filename-safe on all platforms).
    ftid = self.request.arguments["ftid"][0].replace(":", "-")
    path = "earth/vector_layer/balloon_%s.html" % ftid
    tornado.web.local_server_.LocalDocsHandler(self, path)
    self.finish()
class IconHandler(FlatFileHandler):
  """Class for returning icons."""

  @tornado.web.asynchronous
  def get(self, icon):
    """Handle GET request for icon.

    Args:
      icon: icon path captured from the URL.
    """
    # Python 2: force the icon path to a plain ascii byte string.
    icon = icon.encode("ascii", "ignore")
    self.set_header("Content-Type", "image/png")
    if tornado.web.globe_.IsComposite():
      tornado.web.local_server_.LocalIconHandler(
          self, icon, portable_globe.COMPOSITE_BASE_LAYER)
    else:
      tornado.web.local_server_.LocalIconHandler(
          self, icon, portable_globe.NON_COMPOSITE_LAYER)
    self.finish()
class CompositeIconHandler(FlatFileHandler):
  """Class for returning icons."""

  @tornado.web.asynchronous
  def get(self, icon, layer_id):
    """Handle GET request for icon.

    Args:
      icon: icon path captured from the URL.
      layer_id: id of the glc layer to pull the icon from.
    """
    # Python 2: force the icon path to a plain ascii byte string.
    icon = icon.encode("ascii", "ignore")
    self.set_header("Content-Type", "image/png")
    tornado.web.local_server_.LocalIconHandler(self, icon, int(layer_id))
    self.finish()
class KmlSearchHandler(portable_server_base.BaseHandler):
  """Class for returning search results as kml."""

  @tornado.web.asynchronous
  def get(self):
    """Handle GET request for kml search results."""
    # Served as text/plain; the KML payload is written by the local server.
    self.set_header("Content-Type", "text/plain")
    tornado.web.local_server_.LocalKmlSearchHandler(self)
    self.finish()
class JsonSearchHandler(portable_server_base.BaseHandler):
  """Class for returning search results as json."""

  @tornado.web.asynchronous
  def get(self):
    """Handle GET request for json search results."""
    # Served as text/plain; the JSON payload is written by the local server.
    self.set_header("Content-Type", "text/plain")
    tornado.web.local_server_.LocalJsonSearchHandler(self)
    self.finish()
class CompositeQueryHandler(portable_server_base.BaseHandler):
  """Class for handling "query" requests."""

  @tornado.web.asynchronous
  def get(self, layer_id):
    """Handle GET request for JSON file for plugin.

    Dispatches on the ``request`` query argument: Json, ImageryMaps,
    VectorMapsRaster or Icon.  Unknown requests are logged to stdout.

    Args:
      layer_id: id (captured from the URL) of the glc layer.
    """
    if self.request.arguments["request"][0] == "Json":
      self.set_header("Content-Type", "text/plain; charset=utf-8")
      # "is2d=t" selects the 2D flavor of the JSON description.
      if ("is2d" in self.request.arguments.keys() and
          self.request.arguments["is2d"][0] == "t"):
        tornado.web.local_server_.LocalJsonHandler(self, True)
      else:
        tornado.web.local_server_.LocalJsonHandler(self, False)
    elif self.request.arguments["request"][0] == "ImageryMaps":
      # Mbtiles-backed globes serve PNG tiles from the base layer; other
      # composites serve JPEG tiles from the addressed layer.
      if tornado.web.globe_.IsMbtiles():
        self.set_header("Content-Type", "image/png")
        tornado.web.local_server_.LocalMapTileHandler(
            self, True, portable_globe.COMPOSITE_BASE_LAYER)
      else:
        self.set_header("Content-Type", "image/jpeg")
        tornado.web.local_server_.LocalMapTileHandler(
            self, True, int(layer_id))
    elif self.request.arguments["request"][0] == "VectorMapsRaster":
      self.set_header("Content-Type", "image/png")
      tornado.web.local_server_.LocalMapTileHandler(
          self, False, int(layer_id))
    elif self.request.arguments["request"][0] == "Icon":
      self.set_header("Content-Type", "image/png")
      (icon_path, use_layer, use_local) = (
          tornado.web.local_server_.ConvertIconPath(
              self.request.arguments["icon_path"][0]))
      layer_id = int(layer_id)
      # Icons not tied to a layer fall back to the non-composite sentinel.
      if not use_layer:
        layer_id = portable_globe.NON_COMPOSITE_LAYER
      tornado.web.local_server_.LocalIconHandler(
          self, icon_path, layer_id, use_local)
    else:
      self.set_header("Content-Type", "text/plain")
      print "Unknown query request: ", self.request.uri
    self.finish()
class QueryHandler(portable_server_base.BaseHandler):
  """Class for handling "query" requests."""

  @tornado.web.asynchronous
  def get(self):
    """Handle GET request for JSON file for plugin.

    Dispatches on the ``request`` query argument: Json, ImageryMaps,
    VectorMapsRaster or Icon.  Unknown requests are logged to stdout.
    """
    if self.request.arguments["request"][0] == "Json":
      # Optional "v" argument selects the JSON description version
      # (defaults to 1).
      if "v" in self.request.arguments:
        json_version = int(self.request.arguments["v"][0])
      else:
        json_version = 1
      self.set_header("Content-Type", "text/plain; charset=utf-8")
      # TODO: Need way to distinguish 2d/3d for
      # TODO: composite with both.
      if ("is2d" in self.request.arguments.keys() and
          self.request.arguments["is2d"][0] == "t"):
        tornado.web.local_server_.LocalJsonHandler(self, True, json_version)
      else:
        tornado.web.local_server_.LocalJsonHandler(self, False, json_version)
    elif self.request.arguments["request"][0] == "ImageryMaps":
      self.set_header("Content-Type", "image/jpeg")
      if tornado.web.globe_.IsComposite():
        tornado.web.local_server_.LocalMapTileHandler(
            self, True, portable_globe.COMPOSITE_BASE_LAYER)
      else:
        tornado.web.local_server_.LocalMapTileHandler(
            self, True, portable_globe.NON_COMPOSITE_LAYER)
    elif self.request.arguments["request"][0] == "VectorMapsRaster":
      self.set_header("Content-Type", "image/png")
      if tornado.web.globe_.IsComposite():
        tornado.web.local_server_.LocalMapTileHandler(
            self, False, portable_globe.COMPOSITE_BASE_LAYER)
      else:
        tornado.web.local_server_.LocalMapTileHandler(
            self, False, portable_globe.NON_COMPOSITE_LAYER)
    elif self.request.arguments["request"][0] == "Icon":
      self.set_header("Content-Type", "image/png")
      # Composite globes need the icon path converted to a layer-aware form;
      # plain globes just strip the "icons/" prefix.
      if tornado.web.globe_.IsComposite():
        (icon_path, use_layer, use_local) = (
            tornado.web.local_server_.ConvertIconPath(
                self.request.arguments["icon_path"][0]))
        if use_layer:
          layer_id = portable_globe.COMPOSITE_BASE_LAYER
        else:
          layer_id = portable_globe.NON_COMPOSITE_LAYER
        tornado.web.local_server_.LocalIconHandler(
            self, icon_path, layer_id, use_local)
      else:
        tornado.web.local_server_.LocalIconHandler(
            # Strips off "icons/" prefix from the path
            self, self.request.arguments["icon_path"][0][6:],
            portable_globe.NON_COMPOSITE_LAYER)
    else:
      self.set_header("Content-Type", "text/plain")
      print "Unknown query request: ", self.request.uri
    self.finish()
class MapsGen204Handler(portable_server_base.BaseHandler):
  """Class for handling /maps/gen_204 request."""

  def get(self):
    """Handle GET request for gen_204 request.

    Intentionally returns an empty body; gen_204 is a Maps API telemetry
    beacon that expects no content.
    """
    self.set_header("Content-Type", "text/plain")
    # TODO: Consider parsing and storing Maps API usage.
    self.finish()
class PingHandler(portable_server_base.BaseHandler):
  """Class for handling ping request to check if server is up."""

  def get(self):
    """Handle GET request for ping."""
    self.set_header("Content-Type", "text/plain")
    tornado.web.local_server_.LocalPingHandler(self)
    self.finish()
class InfoHandler(portable_server_base.BaseHandler):
  """Class for getting information about current globe."""

  def get(self):
    """Handle GET request for unknown path."""
    self.set_header("Content-Type", "text/plain")
    tornado.web.local_server_.LocalInfoHandler(self)
    self.finish()
def main():
  """Main for portable server.

  Builds the URL routing table, installs the globe and local-server singletons
  as module-level attributes on tornado.web (they are read by the handlers
  above), then starts the HTTP server and the IO loop.
  """
  # Route order matters: earlier patterns win, so local/ext and the most
  # specific composite (glc) patterns come before the generic ones, and the
  # catch-all SetUpHandler comes last.
  application = tornado.web.Application([
      # Important to look for local requests first.
      (r"/local/(.*)", portable_server_base.LocalDocsHandler),
      (r"/ext/(.*)", portable_server_base.ExtHandler),
      (r".*/(\d+)/kh/flatfile/lf-(.*)", CompositeIconHandler),
      (r".*/(\d+)/kh/flatfile", CompositeFlatFileHandler),
      (r".*/(\d+)/kh/dbRoot.*", CompositeDbRootHandler),
      (r".*/(\d+)/kmllayer/(.*)", CompositeVectorLayerHandler),
      (r".*/flatfile/lf-(.*)", IconHandler),
      (r".*/flatfile", FlatFileHandler),
      (r".*/dbRoot.*", DbRootHandler),
      (r".*/MapsAdapter", JsonSearchHandler),
      (r".*/ECV4Adapter", KmlSearchHandler),
      (r".*/Portable2dPoiSearch", JsonSearchHandler),
      (r".*/Portable3dPoiSearch", KmlSearchHandler),
      (r".*/icons/(.*)", IconHandler),
      (r"/ping", PingHandler),
      (r"/info", InfoHandler),
      (r".*/(\d+)/query", CompositeQueryHandler),
      (r".*/query", QueryHandler),
      (r".*/(\d+)/(js/.*)", CompositeDocsHandler),
      (r".*/(\d+)/(kml/.*)", CompositeDocsHandler),
      (r".*/(\d+)/(license/.*)", CompositeDocsHandler),
      (r".*/(\d+)/(earth/.*)", CompositeDocsHandler),
      (r".*/(\d+)/(maps/.*)", CompositeDocsHandler),
      (r".*/(js/.*)", DocsHandler),
      (r".*/(kml/.*)", DocsHandler),
      (r".*/(license/.*)", DocsHandler),
      (r".*/(earth/.*)", DocsHandler),
      (r"/maps/gen_204", MapsGen204Handler),
      (r".*/(maps/.*)", DocsHandler),
      (r"/eb_balloon", BalloonHandler),
      (r"/(.*)", portable_web_interface.SetUpHandler),
      ])
  prepare_for_io_loop()
  # Singletons consumed by the handlers via tornado.web.globe_ /
  # tornado.web.local_server_.
  tornado.web.globe_ = portable_globe.Globe()
  tornado.web.local_server_ = local_server.LocalServer()
  http_server = tornado.httpserver.HTTPServer(application)
  # When broadcasting is disabled, bind to loopback only so the globe is not
  # reachable from other machines.
  if tornado.web.globe_.config_.DisableBroadcasting():
    http_server.listen(tornado.web.globe_.Port(), address="127.0.0.1")
  else:
    http_server.listen(tornado.web.globe_.Port())
  tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
  main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
17,
13,
22,
198,
2,
198,
2,
15069,
2177,
3012,
3457,
11,
13130,
4946,
402,
6500,
25767,
669,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
... | 2.61856 | 5,765 |
#
# PySNMP MIB module HH3C-DLDP2-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HH3C-DLDP2-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:13:05 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint")
hh3cCommon, = mibBuilder.importSymbols("HH3C-OID-MIB", "hh3cCommon")
ifIndex, ifDescr = mibBuilder.importSymbols("IF-MIB", "ifIndex", "ifDescr")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, IpAddress, MibIdentifier, Unsigned32, Counter64, NotificationType, iso, ObjectIdentity, Integer32, TimeTicks, Counter32, Bits, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "IpAddress", "MibIdentifier", "Unsigned32", "Counter64", "NotificationType", "iso", "ObjectIdentity", "Integer32", "TimeTicks", "Counter32", "Bits", "ModuleIdentity")
TextualConvention, TruthValue, DisplayString, MacAddress = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "TruthValue", "DisplayString", "MacAddress")
hh3cDldp2 = ModuleIdentity((1, 3, 6, 1, 4, 1, 25506, 2, 117))
hh3cDldp2.setRevisions(('2011-12-26 15:30',))
if mibBuilder.loadTexts: hh3cDldp2.setLastUpdated('201112261530Z')
if mibBuilder.loadTexts: hh3cDldp2.setOrganization('Hangzhou H3C Technologies. Co., Ltd.')
hh3cDldp2ScalarGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 117, 1))
hh3cDldp2GlobalEnable = MibScalar((1, 3, 6, 1, 4, 1, 25506, 2, 117, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hh3cDldp2GlobalEnable.setStatus('current')
hh3cDldp2Interval = MibScalar((1, 3, 6, 1, 4, 1, 25506, 2, 117, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100)).clone(5)).setUnits('second').setMaxAccess("readwrite")
if mibBuilder.loadTexts: hh3cDldp2Interval.setStatus('current')
hh3cDldp2AuthMode = MibScalar((1, 3, 6, 1, 4, 1, 25506, 2, 117, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("none", 2), ("simple", 3), ("md5", 4))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hh3cDldp2AuthMode.setStatus('current')
hh3cDldp2AuthPassword = MibScalar((1, 3, 6, 1, 4, 1, 25506, 2, 117, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hh3cDldp2AuthPassword.setStatus('current')
hh3cDldp2UniShutdown = MibScalar((1, 3, 6, 1, 4, 1, 25506, 2, 117, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unknown", 1), ("auto", 2), ("manual", 3))).clone('auto')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hh3cDldp2UniShutdown.setStatus('current')
hh3cDldp2TableGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2))
hh3cDldp2PortConfigTable = MibTable((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 1), )
if mibBuilder.loadTexts: hh3cDldp2PortConfigTable.setStatus('current')
hh3cDldp2PortConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: hh3cDldp2PortConfigEntry.setStatus('current')
hh3cDldp2PortEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 1, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hh3cDldp2PortEnable.setStatus('current')
hh3cDldp2PortStatusTable = MibTable((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 2), )
if mibBuilder.loadTexts: hh3cDldp2PortStatusTable.setStatus('current')
hh3cDldp2PortStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: hh3cDldp2PortStatusEntry.setStatus('current')
hh3cDldp2PortOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("unknown", 1), ("initial", 2), ("inactive", 3), ("unidirectional", 4), ("bidirectional", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hh3cDldp2PortOperStatus.setStatus('current')
hh3cDldp2PortLinkStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unknown", 1), ("down", 2), ("up", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hh3cDldp2PortLinkStatus.setStatus('current')
hh3cDldp2NeighborTable = MibTable((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 3), )
if mibBuilder.loadTexts: hh3cDldp2NeighborTable.setStatus('current')
hh3cDldp2NeighborEntry = MibTableRow((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "HH3C-DLDP2-MIB", "hh3cDldp2NeighborBridgeMac"), (0, "HH3C-DLDP2-MIB", "hh3cDldp2NeighborPortIndex"))
if mibBuilder.loadTexts: hh3cDldp2NeighborEntry.setStatus('current')
hh3cDldp2NeighborBridgeMac = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 3, 1, 1), MacAddress())
if mibBuilder.loadTexts: hh3cDldp2NeighborBridgeMac.setStatus('current')
hh3cDldp2NeighborPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: hh3cDldp2NeighborPortIndex.setStatus('current')
hh3cDldp2NeighborStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unknown", 1), ("unconfirmed", 2), ("confirmed", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hh3cDldp2NeighborStatus.setStatus('current')
hh3cDldp2NeighborAgingTime = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 117, 2, 3, 1, 4), Integer32()).setUnits('second').setMaxAccess("readonly")
if mibBuilder.loadTexts: hh3cDldp2NeighborAgingTime.setStatus('current')
hh3cDldp2TrapBindObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 117, 3))
hh3cDldp2Trap = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 117, 4))
hh3cDldp2TrapPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 117, 4, 0))
hh3cDldp2TrapUniLink = NotificationType((1, 3, 6, 1, 4, 1, 25506, 2, 117, 4, 0, 1)).setObjects(("IF-MIB", "ifIndex"), ("IF-MIB", "ifDescr"))
if mibBuilder.loadTexts: hh3cDldp2TrapUniLink.setStatus('current')
hh3cDldp2TrapBidLink = NotificationType((1, 3, 6, 1, 4, 1, 25506, 2, 117, 4, 0, 2)).setObjects(("IF-MIB", "ifIndex"), ("IF-MIB", "ifDescr"))
if mibBuilder.loadTexts: hh3cDldp2TrapBidLink.setStatus('current')
mibBuilder.exportSymbols("HH3C-DLDP2-MIB", hh3cDldp2NeighborTable=hh3cDldp2NeighborTable, hh3cDldp2AuthPassword=hh3cDldp2AuthPassword, hh3cDldp2NeighborAgingTime=hh3cDldp2NeighborAgingTime, hh3cDldp2PortConfigTable=hh3cDldp2PortConfigTable, hh3cDldp2TrapPrefix=hh3cDldp2TrapPrefix, hh3cDldp2Trap=hh3cDldp2Trap, hh3cDldp2GlobalEnable=hh3cDldp2GlobalEnable, hh3cDldp2=hh3cDldp2, hh3cDldp2PortConfigEntry=hh3cDldp2PortConfigEntry, hh3cDldp2PortStatusTable=hh3cDldp2PortStatusTable, PYSNMP_MODULE_ID=hh3cDldp2, hh3cDldp2UniShutdown=hh3cDldp2UniShutdown, hh3cDldp2PortEnable=hh3cDldp2PortEnable, hh3cDldp2NeighborBridgeMac=hh3cDldp2NeighborBridgeMac, hh3cDldp2TrapBindObjects=hh3cDldp2TrapBindObjects, hh3cDldp2PortStatusEntry=hh3cDldp2PortStatusEntry, hh3cDldp2AuthMode=hh3cDldp2AuthMode, hh3cDldp2ScalarGroup=hh3cDldp2ScalarGroup, hh3cDldp2TableGroup=hh3cDldp2TableGroup, hh3cDldp2TrapUniLink=hh3cDldp2TrapUniLink, hh3cDldp2NeighborEntry=hh3cDldp2NeighborEntry, hh3cDldp2PortLinkStatus=hh3cDldp2PortLinkStatus, hh3cDldp2NeighborStatus=hh3cDldp2NeighborStatus, hh3cDldp2TrapBidLink=hh3cDldp2TrapBidLink, hh3cDldp2NeighborPortIndex=hh3cDldp2NeighborPortIndex, hh3cDldp2PortOperStatus=hh3cDldp2PortOperStatus, hh3cDldp2Interval=hh3cDldp2Interval)
| [
2,
198,
2,
9485,
15571,
7378,
337,
9865,
8265,
47138,
18,
34,
12,
19260,
6322,
17,
12,
8895,
33,
357,
4023,
1378,
16184,
76,
489,
8937,
13,
785,
14,
79,
893,
11632,
8,
198,
2,
7054,
45,
13,
16,
2723,
2393,
1378,
14,
14490,
14,
... | 2.339271 | 3,593 |
import numpy as np
from functools import reduce
from app.utils import get_reflection_indexes, transformations_codes
from app.config import (
X_MAX_TRANSLATED,
X_MIN_TRANSLATED,
Y_MAX_TRANSLATED,
Y_MIN_TRANSLATED,
MAX_NORMALIZED_VALUE,
MIN_NORMALIZED_VALUE,
)
def build_translation_matrix(Tx, Ty, Tz):
    """Return the 4x4 translation matrix for offsets Tx, Ty, Tz.

    Row-vector convention (translation terms live in the last row):
        [1   0   0   0]
        [0   1   0   0]
        [0   0   1   0]
        [Tx  Ty  Tz  1]
    """
    translation = np.identity(4)
    # Fill the first three entries of the bottom row in one shot.
    translation[3, :3] = (Tx, Ty, Tz)
    return translation
def build_scaling_matrix(Sx, Sy, Sz):
    """Return the 4x4 scaling matrix with factors Sx, Sy, Sz.

        [Sx  0   0   0]
        [0   Sy  0   0]
        [0   0   Sz  0]
        [0   0   0   1]
    """
    # A diagonal matrix is exactly the scaling transform; the trailing 1.0
    # keeps the homogeneous coordinate untouched (and the dtype float).
    return np.diag([Sx, Sy, Sz, 1.0])
def build_rotation_matrix(dX, dY, dZ):
    """Return the composed 4x4 rotation matrix Rx @ Ry @ Rz.

    Angles dX, dY, dZ are in degrees and rotate about the x, y and z axes
    respectively, using the same row-vector convention as the other builders:

        Rx = [1    0       0      0]    Ry = [cos   0  -sin  0]
             [0    cos     sin    0]         [0     1   0    0]
             [0   -sin     cos    0]         [sin   0   cos  0]
             [0    0       0      1]         [0     0   0    1]

        Rz = [ cos  sin  0  0]
             [-sin  cos  0  0]
             [ 0    0    1  0]
             [ 0    0    0  1]
    """
    ax, ay, az = np.deg2rad(dX), np.deg2rad(dY), np.deg2rad(dZ)
    cx, sx = np.cos(ax), np.sin(ax)
    cy, sy = np.cos(ay), np.sin(ay)
    cz, sz = np.cos(az), np.sin(az)
    Rx = np.array([
        [1.0, 0.0, 0.0, 0.0],
        [0.0, cx, sx, 0.0],
        [0.0, -sx, cx, 0.0],
        [0.0, 0.0, 0.0, 1.0],
    ])
    Ry = np.array([
        [cy, 0.0, -sy, 0.0],
        [0.0, 1.0, 0.0, 0.0],
        [sy, 0.0, cy, 0.0],
        [0.0, 0.0, 0.0, 1.0],
    ])
    Rz = np.array([
        [cz, sz, 0.0, 0.0],
        [-sz, cz, 0.0, 0.0],
        [0.0, 0.0, 1.0, 0.0],
        [0.0, 0.0, 0.0, 1.0],
    ])
    # Left-to-right composition: identical to reduce(np.dot, [Rx, Ry, Rz]).
    return Rx @ Ry @ Rz
def build_reflection_matrix(over):
    """Return a 3x3 reflection matrix.

    Starts from the identity and flips the sign of the entries selected by
    ``get_reflection_indexes(over)``, where ``over`` names the reflection
    target: x, y or the origin.
    """
    reflection = np.identity(3)
    for idx in get_reflection_indexes(over):
        reflection[idx] = -1
    return reflection
def bezier_blending_functions(t):
    """Return the four cubic Bézier blending functions evaluated at t.

    Equivalent to multiplying the parameter vector [t³ t² t 1] by the Bézier
    basis matrix, which yields the Bernstein polynomials:

        [(1 - t)³,  3t(1 - t)²,  3t²(1 - t),  t³]
    """
    u = 1 - t  # complementary parameter, reused by three of the four terms
    return np.array([u ** 3, 3 * t * u ** 2, 3 * t ** 2 * u, t ** 3])
def bezier_matrix():
    """Return the 4x4 cubic Bézier basis matrix.

        [-1  3 -3  1]
        [ 3 -6  3  0]
        [-3  3  0  0]
        [ 1  0  0  0]
    """
    basis = [
        [-1, 3, -3, 1],
        [3, -6, 3, 0],
        [-3, 3, 0, 0],
        [1, 0, 0, 0],
    ]
    return np.array(basis)
def build_bspline_matrix():
    """Return the uniform cubic B-Spline basis matrix.

        [-1/6  1/2  -1/2  1/6]
        [ 1/2   -1   1/2    0]
        [-1/2    0   1/2    0]
        [ 1/6  2/3   1/6    0]
    """
    # Written as the integer basis divided by 6; every quotient rounds to the
    # same double as the per-entry fractions in the docstring.
    return np.array([
        [-1, 3, -3, 1],
        [3, -6, 3, 0],
        [-3, 0, 3, 0],
        [1, 4, 1, 0],
    ]) / 6
def calculate_initial_differences(delta, a, b, c, d):
    """
    Calculate the forward-difference start vector for the cubic
    a·t³ + b·t² + c·t + d stepped by δ = delta:
    [ d ]
    [ aδ³ + bδ² + cδ]
    [ 6aδ³ + 2bδ² ]
    [ 6aδ³ ]
    """
    delta_sq = delta * delta
    delta_cu = delta_sq * delta
    third_diff = 6 * a * delta_cu
    return [
        d,
        a * delta_cu + b * delta_sq + c * delta,
        third_diff + 2 * b * delta_sq,
        third_diff,
    ]
# Dispatch table: transformation code -> factory building its homogeneous
# matrix.  Codes: "rf" reflection, "rt" rotation, "sc" scaling,
# "tr" translation; the "r_" prefixed variants presumably select the
# transform applied relative to a reference point rather than the origin —
# TODO confirm against the callers.  build_translation_matrix is defined
# elsewhere in this module (not visible in this chunk).
transformations_functions_dict = {
    "rf": build_reflection_matrix,
    "rt": build_rotation_matrix,
    "r_rt": build_rotation_matrix,
    "sc": build_scaling_matrix,
    "r_sc": build_scaling_matrix,
    "tr": build_translation_matrix,
}
| [
11748,
299,
32152,
355,
45941,
198,
6738,
1257,
310,
10141,
1330,
4646,
198,
6738,
598,
13,
26791,
1330,
651,
62,
5420,
1564,
62,
9630,
274,
11,
38226,
62,
40148,
198,
6738,
598,
13,
11250,
1330,
357,
198,
220,
220,
220,
1395,
62,
2... | 1.688274 | 2,422 |
"""
Alternative calculation to the v_calc_staff_monthly set of views.
This one differs by having period as a column. There is not a significant
performance difference beteween the two until
it comes to aggregation (which for the other, could require an unpivot).
"""
from planning_system.db.schema.views.finance.v_calc_finances import period_table
def definition():
    """
    Calculates monthly staff salary cost.

    Returns the SQL text of a view that, per staff line and period,
    computes: month proportion worked x FTE x (spine-point value +
    allowances) / 12.  The period column comes from a CROSS JOIN with
    ``period_table``, which is what distinguishes this view from the
    v_calc_staff_monthly family (see module docstring).
    """
    # NOTE(review): only {period_table} is interpolated; everything else in
    # the f-string is literal SQL.
    v = f"""
    SELECT --Staff info
    s.staff_line_id, s.post_status_id, s.set_id, ISNULL(s.staff_id, s.staff_line_id) as staff_id,
    ISNULL(s.pension_id,'N') as pension_id, ISNULL(s.travel_scheme,0) as travel_scheme,
    --Set info
    s.acad_year, s.set_cat_id,
    --Monthly salary
    dbo.udfGetMonthProp(s.acad_year, s.period, s.start_date, s.end_date)
    *vFTE.FTE
    *(
        ISNULL(ss.value,0)
        +ISNULL(s.allowances,0)
    )/12 as value,
    s.sp, s.period
    FROM (SELECT i.*, fs.acad_year, fs.set_cat_id, p.period,
          dbo.udfGetMonthSpinePoint(fs.acad_year, p.period, i.start_date, i.current_spine, i.grade) as sp
          FROM input_pay_staff i
          INNER JOIN f_set fs ON fs.set_id = i.set_id
          CROSS JOIN {period_table} p) s
    LEFT OUTER JOIN staff_spine ss ON ss.spine=s.sp
                                   AND s.acad_year=ss.acad_year
                                   AND s.set_cat_id=ss.set_cat_id
    INNER JOIN v_calc_staff_fte vFTE on vFTE.staff_line_id=s.staff_line_id
    """
    return v
| [
37811,
198,
49788,
17952,
284,
262,
410,
62,
9948,
66,
62,
28120,
62,
8424,
306,
900,
286,
5009,
13,
220,
198,
1212,
530,
24242,
416,
1719,
2278,
355,
257,
5721,
13,
1318,
318,
407,
257,
2383,
198,
26585,
3580,
731,
413,
6429,
262,
... | 2.125894 | 699 |
"""
19. Remove Nth Node From End of List
Given linked list: 1->2->3->4->5, and n = 2.
After removing the second node from the end, the linked list becomes 1->2->3->5.
"""
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
| [
37811,
201,
198,
1129,
13,
17220,
399,
400,
19081,
3574,
5268,
286,
7343,
201,
198,
15056,
6692,
1351,
25,
352,
3784,
17,
3784,
18,
3784,
19,
3784,
20,
11,
290,
299,
796,
362,
13,
201,
198,
201,
198,
3260,
10829,
262,
1218,
10139,
... | 2.230263 | 152 |
import json
import os
from convlab2.util.multiwoz.state import default_state
from convlab2.dst.rule.multiwoz.dst_util import normalize_value
from convlab2.dst.dst import DST
from convlab2.util.multiwoz.multiwoz_slot_trans import REF_SYS_DA
class RuleDST(DST):
    """Rule based DST which trivially updates new values from NLU result to states.

    Attributes:
        state(dict):
            Dialog state. Function ``convlab2.util.multiwoz.state.default_state`` returns a default state.
        value_dict(dict):
            It helps check whether ``user_act`` has correct content.
    """
    # NOTE(review): ``self.value_dict`` is expected to be populated by
    # __init__ (not shown in this chunk); ``self.state`` by init_session().

    def update(self, user_act=None):
        """Merge an NLU result into the tracked dialog state.

        :param user_act: list of [intent, domain, slot, value] quadruples.
        :return: the updated ``self.state`` dict.
        """
        for intent, domain, slot, value in user_act:
            domain = domain.lower()
            intent = intent.lower()
            # These pseudo-domains carry no belief-state slots to update.
            if domain in ['unk', 'general', 'booking']:
                continue
            if intent == 'inform':
                # Map the user-act slot name to the belief-state key;
                # fall back to the raw slot name when unmapped.
                k = REF_SYS_DA[domain.capitalize()].get(slot, slot)
                if k is None:
                    continue
                try:
                    assert domain in self.state['belief_state']
                except AssertionError:
                    # Narrowed from a bare `except:` — the try body can only
                    # raise AssertionError, and a bare except would also have
                    # swallowed KeyboardInterrupt/SystemExit.
                    raise Exception('Error: domain <{}> not in new belief state'.format(domain))
                domain_dic = self.state['belief_state'][domain]
                assert 'semi' in domain_dic
                assert 'book' in domain_dic
                if k in domain_dic['semi']:
                    # Canonicalize free-text values against the ontology.
                    nvalue = normalize_value(self.value_dict, domain, k, value)
                    self.state['belief_state'][domain]['semi'][k] = nvalue
                elif k in domain_dic['book']:
                    self.state['belief_state'][domain]['book'][k] = value
                elif k.lower() in domain_dic['book']:
                    self.state['belief_state'][domain]['book'][k.lower()] = value
                elif k == 'trainID' and domain == 'train':
                    self.state['belief_state'][domain]['book'][k] = normalize_value(self.value_dict, domain, k, value)
                elif k != 'none':
                    # raise Exception('unknown slot name <{}> of domain <{}>'.format(k, domain))
                    # Unknown slots are logged rather than fatal.
                    with open('unknown_slot.log', 'a+') as f:
                        f.write('unknown slot name <{}> of domain <{}>\n'.format(k, domain))
            elif intent == 'request':
                k = REF_SYS_DA[domain.capitalize()].get(slot, slot)
                if domain not in self.state['request_state']:
                    self.state['request_state'][domain] = {}
                if k not in self.state['request_state'][domain]:
                    self.state['request_state'][domain][k] = 0
        # self.state['user_action'] = user_act # should be added outside DST module
        return self.state

    def init_session(self):
        """Initialize ``self.state`` with a default state, which ``convlab2.util.multiwoz.state.default_state`` returns."""
        self.state = default_state()
if __name__ == '__main__':
# from convlab2.dst.rule.multiwoz import RuleDST
dst = RuleDST()
# Action is a dict. Its keys are strings(domain-type pairs, both uppercase and lowercase is OK) and its values are list of lists.
# The domain may be one of ('Attraction', 'Hospital', 'Booking', 'Hotel', 'Restaurant', 'Taxi', 'Train', 'Police').
# The type may be "inform" or "request".
# For example, the action below has a key "Hotel-Inform", in which "Hotel" is domain and "Inform" is action type.
# Each list in the value of "Hotel-Inform" is a slot-value pair. "Area" is slot and "east" is value. "Star" is slot and "4" is value.
action = [
["Inform", "Hotel", "Area", "east"],
["Inform", "Hotel", "Stars", "4"]
]
# method `update` updates the attribute `state` of tracker, and returns it.
state = dst.update(action)
assert state == dst.state
assert state == {'user_action': [],
'system_action': [],
'belief_state': {'police': {'book': {'booked': []}, 'semi': {}},
'hotel': {'book': {'booked': [], 'people': '', 'day': '', 'stay': ''},
'semi': {'name': '',
'area': 'east',
'parking': '',
'pricerange': '',
'stars': '4',
'internet': '',
'type': ''}},
'attraction': {'book': {'booked': []},
'semi': {'type': '', 'name': '', 'area': ''}},
'restaurant': {'book': {'booked': [], 'people': '', 'day': '', 'time': ''},
'semi': {'food': '', 'pricerange': '', 'name': '', 'area': ''}},
'hospital': {'book': {'booked': []}, 'semi': {'department': ''}},
'taxi': {'book': {'booked': []},
'semi': {'leaveAt': '',
'destination': '',
'departure': '',
'arriveBy': ''}},
'train': {'book': {'booked': [], 'people': ''},
'semi': {'leaveAt': '',
'destination': '',
'day': '',
'arriveBy': '',
'departure': ''}}},
'request_state': {},
'terminated': False,
'history': []}
# Please call `init_session` before a new dialog. This initializes the attribute `state` of tracker with a default state, which `convlab2.util.multiwoz.state.default_state` returns. But You needn't call it before the first dialog, because tracker gets a default state in its constructor.
dst.init_session()
action = [["Inform", "Train", "Arrive", "19:45"]]
state = dst.update(action)
assert state == {'user_action': [],
'system_action': [],
'belief_state': {'police': {'book': {'booked': []}, 'semi': {}},
'hotel': {'book': {'booked': [], 'people': '', 'day': '', 'stay': ''},
'semi': {'name': '',
'area': '',
'parking': '',
'pricerange': '',
'stars': '',
'internet': '',
'type': ''}},
'attraction': {'book': {'booked': []},
'semi': {'type': '', 'name': '', 'area': ''}},
'restaurant': {'book': {'booked': [], 'people': '', 'day': '', 'time': ''},
'semi': {'food': '', 'pricerange': '', 'name': '', 'area': ''}},
'hospital': {'book': {'booked': []}, 'semi': {'department': ''}},
'taxi': {'book': {'booked': []},
'semi': {'leaveAt': '',
'destination': '',
'departure': '',
'arriveBy': ''}},
'train': {'book': {'booked': [], 'people': ''},
'semi': {'leaveAt': '',
'destination': '',
'day': '',
'arriveBy': '19:45',
'departure': ''}}},
'request_state': {},
'terminated': False,
'history': []}
| [
11748,
33918,
198,
11748,
28686,
198,
198,
6738,
3063,
23912,
17,
13,
22602,
13,
16680,
14246,
8590,
13,
5219,
1330,
4277,
62,
5219,
198,
6738,
3063,
23912,
17,
13,
67,
301,
13,
25135,
13,
16680,
14246,
8590,
13,
67,
301,
62,
22602,
... | 1.663396 | 5,300 |
from toontown.coghq.SpecImports import *
from toontown.toonbase import ToontownGlobals
CogParent = 100007
CogParent1 = 100009
BattlePlace1 = 100004
BattlePlace2 = 100005
BattleCellId = 0
BattleCellId1 = 1
BattleCells = {BattleCellId: {'parentEntId': BattlePlace1,
'pos': Point3(0, 0, 0)},
BattleCellId1: {'parentEntId': BattlePlace2,
'pos': Point3(0, 0, 0)}}
CogData = [{'parentEntId': CogParent,
'boss': 0,
'level': ToontownGlobals.CashbotMintCogLevel,
'battleCell': BattleCellId,
'pos': Point3(-8, 4, 0),
'h': 180,
'behavior': 'stand',
'path': None,
'skeleton': 1},
{'parentEntId': CogParent,
'boss': 0,
'level': ToontownGlobals.CashbotMintCogLevel + 1,
'battleCell': BattleCellId,
'pos': Point3(-3, 4, 0),
'h': 180,
'behavior': 'stand',
'path': None,
'skeleton': 1},
{'parentEntId': CogParent,
'boss': 0,
'level': ToontownGlobals.CashbotMintCogLevel,
'battleCell': BattleCellId,
'pos': Point3(3, 4, 0),
'h': 180,
'behavior': 'stand',
'path': None,
'skeleton': 1},
{'parentEntId': CogParent,
'boss': 0,
'level': ToontownGlobals.CashbotMintCogLevel + 1,
'battleCell': BattleCellId,
'pos': Point3(8, 4, 0),
'h': 180,
'behavior': 'stand',
'path': None,
'skeleton': 1},
{'parentEntId': CogParent1,
'boss': 0,
'level': ToontownGlobals.CashbotMintCogLevel,
'battleCell': BattleCellId1,
'pos': Point3(-8, 4, 0),
'h': 180,
'behavior': 'stand',
'path': None,
'skeleton': 1},
{'parentEntId': CogParent1,
'boss': 0,
'level': ToontownGlobals.CashbotMintCogLevel + 1,
'battleCell': BattleCellId1,
'pos': Point3(-3, 4, 0),
'h': 180,
'behavior': 'stand',
'path': None,
'skeleton': 1},
{'parentEntId': CogParent1,
'boss': 0,
'level': ToontownGlobals.CashbotMintCogLevel,
'battleCell': BattleCellId1,
'pos': Point3(3, 4, 0),
'h': 180,
'behavior': 'stand',
'path': None,
'skeleton': 1},
{'parentEntId': CogParent1,
'boss': 0,
'level': ToontownGlobals.CashbotMintCogLevel + 1,
'battleCell': BattleCellId1,
'pos': Point3(8, 4, 0),
'h': 180,
'behavior': 'stand',
'path': None,
'skeleton': 1}]
ReserveCogData = []
| [
6738,
284,
756,
593,
13,
1073,
456,
80,
13,
22882,
3546,
3742,
1330,
1635,
198,
6738,
284,
756,
593,
13,
1462,
261,
8692,
1330,
1675,
756,
593,
9861,
672,
874,
198,
34,
519,
24546,
796,
1802,
25816,
198,
34,
519,
24546,
16,
796,
1... | 2.315846 | 934 |
from typing import List, Optional, Sequence
from sqlalchemy.orm import DeclarativeMeta
from ..clause_binder.clause_binder import ClauseBinder
from ..query import CRUDQuery
from .select_provider import SelectProvider
from .insert_provider import InsertProvider
from .update_provider import UpdateProvider
from .delete_provider import DeleteProvider
from .count_provider import CountProvider
| [
6738,
19720,
1330,
7343,
11,
32233,
11,
45835,
198,
6738,
44161,
282,
26599,
13,
579,
1330,
16691,
283,
876,
48526,
198,
6738,
11485,
565,
682,
62,
65,
5540,
13,
565,
682,
62,
65,
5540,
1330,
28081,
33,
5540,
198,
6738,
11485,
22766,
... | 4.030928 | 97 |
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.gis',
# all-auth
'django.contrib.sites',
'allauth',
'allauth.account',
'allauth.socialaccount',
# openwisp modules
'openwisp_users',
# openwisp-controller
'openwisp_controller.pki',
'openwisp_controller.config',
'openwisp_controller.geo',
'openwisp_controller.connection',
'flat_json_widget',
'openwisp_notifications',
'openwisp_utils.admin_theme',
# admin
'django.contrib.admin',
'django.forms',
# other dependencies
'sortedm2m',
'reversion',
'leaflet',
# rest framework
'rest_framework',
'rest_framework_gis',
'django_filters',
# other packages
'private_storage',
'channels',
'drf_yasg',
]
EXTENDED_APPS = [
'django_x509',
'django_loci',
]
| [
38604,
7036,
1961,
62,
2969,
3705,
796,
685,
198,
220,
220,
220,
705,
28241,
14208,
13,
3642,
822,
13,
18439,
3256,
198,
220,
220,
220,
705,
28241,
14208,
13,
3642,
822,
13,
11299,
19199,
3256,
198,
220,
220,
220,
705,
28241,
14208,
... | 2.215103 | 437 |
import warnings
from amuse.datamodel import *
# Backwards-compatibility shim: re-export the new module's names and warn
# importers of the old path to migrate.
warnings.warn("amuse.support.data.core has moved to amuse.datamodel", DeprecationWarning)
| [
11748,
14601,
198,
198,
6738,
26072,
13,
19608,
321,
375,
417,
1330,
1635,
198,
198,
40539,
654,
13,
40539,
7203,
321,
1904,
13,
11284,
13,
7890,
13,
7295,
468,
3888,
284,
26072,
13,
19608,
321,
375,
417,
1600,
2129,
8344,
341,
20361,... | 3.111111 | 45 |
$NetBSD: patch-Lib_ctypes_macholib_dyld.py,v 1.1 2020/11/17 19:33:26 sjmulder Exp $
Support for macOS 11 and Apple Silicon (ARM). Mostly backported from:
https://github.com/python/cpython/pull/22855
--- Lib/ctypes/macholib/dyld.py.orig 2020-08-15 05:20:16.000000000 +0000
+++ Lib/ctypes/macholib/dyld.py
@@ -6,6 +6,11 @@ import os
from ctypes.macholib.framework import framework_info
from ctypes.macholib.dylib import dylib_info
from itertools import *
+try:
+ from _ctypes import _dyld_shared_cache_contains_path
+except ImportError:
+ def _dyld_shared_cache_contains_path(*args):
+ raise NotImplementedError
__all__ = [
'dyld_find', 'framework_find',
@@ -122,8 +127,15 @@ def dyld_find(name, executable_path=None
dyld_executable_path_search(name, executable_path),
dyld_default_search(name, env),
), env):
+
if os.path.isfile(path):
return path
+ try:
+ if _dyld_shared_cache_contains_path(path):
+ return path
+ except NotImplementedError:
+ pass
+
raise ValueError("dylib %s could not be found" % (name,))
def framework_find(fn, executable_path=None, env=None):
| [
3,
7934,
21800,
25,
8529,
12,
25835,
62,
310,
9497,
62,
76,
620,
349,
571,
62,
9892,
335,
13,
9078,
11,
85,
352,
13,
16,
12131,
14,
1157,
14,
1558,
678,
25,
2091,
25,
2075,
264,
73,
30300,
6499,
5518,
720,
198,
198,
15514,
329,
... | 2.259259 | 540 |
from dataclasses import dataclass
@dataclass
class Client:
    """A client with the sets of ingredients they like and dislike.

    NOTE(review): the previous docstring said "a pizza with ingredients",
    but the fields clearly describe a client's preferences.
    """
    id: int  # unique client identifier
    likes: set  # ingredient names the client likes
    dislikes: set  # ingredient names the client dislikes
| [
6738,
4818,
330,
28958,
1330,
4818,
330,
31172,
628,
198,
31,
19608,
330,
31172,
198,
4871,
20985,
25,
198,
220,
220,
220,
37227,
9487,
10200,
257,
14256,
351,
9391,
526,
15931,
198,
220,
220,
220,
4686,
25,
493,
198,
220,
220,
220,
... | 2.981481 | 54 |
from django.http import request
from django.views import generic
import OrchardVision.settings as settings
import broker.models as models
from broker.models import Tree
from html import escape
import json | [
6738,
42625,
14208,
13,
4023,
1330,
2581,
198,
6738,
42625,
14208,
13,
33571,
1330,
14276,
198,
198,
11748,
1471,
30215,
44206,
13,
33692,
355,
6460,
198,
11748,
20426,
13,
27530,
355,
4981,
198,
6738,
20426,
13,
27530,
1330,
12200,
198,
... | 4.291667 | 48 |
__author__ = 'Davide Monfrecola'
from phantomrestclient import auth
from phantomrestclient import launchconfigurations
from phantomrestclient import domains
from phantomrestclient import sites
from phantomrestclient import phantomrequests
# only for testing purpouse
if __name__ == "__main__":
#d = domains.Domains()
#d.get_all()
#lc = launchconfigurations.LaunchConfigurations()
#lc.get_all()
lc = sites.Sites()
print(lc.get_all()) | [
834,
9800,
834,
796,
705,
35,
615,
485,
2892,
19503,
4033,
64,
6,
198,
198,
6738,
36381,
2118,
16366,
1330,
6284,
198,
6738,
36381,
2118,
16366,
1330,
4219,
11250,
20074,
198,
6738,
36381,
2118,
16366,
1330,
18209,
198,
6738,
36381,
211... | 3.136986 | 146 |
# -*- coding: utf-8 -*-
import tkinter
root = tkinter.Tk()
root.wm_title("Tkinter04 Demo")
label1 = tkinter.Label(root, text=u"账号:").grid(row=0, sticky="w")
label2 = tkinter.Label(root, text=u"密码:").grid(row=1, sticky="w")
label3 = tkinter.Label(root, text=u"")
var = tkinter.Variable()
var.set("tester")
entry1 = tkinter.Entry(root, textvariable=var) # textvariable属性绑定变量
entry2 = tkinter.Entry(root)
entry2["show"] = "*" # 设置show属性,实现“不可见输入”
entry1.grid(row=0, column=1, sticky="e")
entry2.grid(row=1, column=1, sticky="e")
label3.grid(row=3, column=1, sticky="w")
btn = tkinter.Button(root, text=u"登录", command=reg)
btn.grid(row=2, column=1, sticky="e")
root.minsize(180, 80)
root.mainloop()
# ### grid布局
# grid()函数:
# - 参数row :指定位于的行,从0开始;
# - 参数column :指定位于的列,从0开始;
# - 参数sticky :组件开始的方向,“n,s,w,e”表示上下左右;
# - 参数ipadx和ipady :内边距的x方向与y方向,默认边距是0;
# - 参数padx和pady :外边距的x方向与y方向,默认边距是0;
# - 参数rowspan和columnspan :表示跨越的行数和列数;
#
# ### 注意
# pack布局和grid布局不能同时使用;
# 对于较复杂的布局,建议使用grid布局;
#
# ### 输入框(Entry)
# 获取输入的文本信息;
# 具体信息可查看源码文件__init__.py中的Entry类(“Python安装目录\Lib\tkinter\__init__.py”);
# get()方法获取输入框的内容,使用时不需要任何参数;
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
201,
198,
11748,
256,
74,
3849,
201,
198,
201,
198,
15763,
796,
256,
74,
3849,
13,
51,
74,
3419,
201,
198,
15763,
13,
26377,
62,
7839,
7203,
51,
74,
3849,
3023,
34588,
49... | 1.214286 | 966 |
#!/usr/bin/env python
#
import os
import femagtools.jhb
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
198,
11748,
28686,
198,
11748,
2796,
363,
31391,
13,
73,
71,
65,
628,
628
] | 2.458333 | 24 |
import logging
from pytest_play.providers import BaseProvider
from sqlalchemy.pool import NullPool
from sqlalchemy import (
create_engine,
text,
)
class SQLProvider(BaseProvider):
    """ SQL provider: pytest-play commands backed by SQLAlchemy engines. """

    def get_db(self, database_url):
        """ Return a cached db engine if available """
        # Engines are memoized per database_url on the play engine itself,
        # so repeated commands reuse the same SQLAlchemy engine.
        if not hasattr(self.engine, 'play_sql'):
            self.engine.play_sql = {}
        db = self.engine.play_sql.get(database_url)
        if not db:
            # NullPool: no persistent connections kept between statements.
            db = create_engine(
                database_url,
                poolclass=NullPool)
            self.engine.play_sql[database_url] = db
        return db

    def _make_assertion(self, command, **kwargs):
        """ Make an assertion based on python
            expression against kwargs
        """
        assertion = command.get('assertion', None)
        if assertion:
            # Delegate to the python provider's `assert` command.
            self.engine.execute_command(
                {'provider': 'python',
                 'type': 'assert',
                 'expression': assertion
                 },
                **kwargs,
            )

    def _make_variable(self, command, **kwargs):
        """ Make a variable based on python
            expression against kwargs
        """
        expression = command.get('variable_expression', None)
        if expression:
            # Delegate to the python provider's `store_variable` command.
            self.engine.execute_command(
                {'provider': 'python',
                 'type': 'store_variable',
                 'name': command['variable'],
                 'expression': expression
                 },
                **kwargs,
            )
| [
11748,
18931,
198,
6738,
12972,
9288,
62,
1759,
13,
15234,
4157,
1330,
7308,
29495,
198,
6738,
44161,
282,
26599,
13,
7742,
1330,
35886,
27201,
198,
6738,
44161,
282,
26599,
1330,
357,
198,
220,
220,
220,
2251,
62,
18392,
11,
198,
220,
... | 2.069921 | 758 |
import torch
from .common import QLayer
from ..functions import xnor_connect
import warnings
warnings.simplefilter("always",DeprecationWarning)
warnings.warn("Module not finished due to gradient implementation on conv layer !", ImportWarning)
| [
11748,
28034,
220,
198,
6738,
764,
11321,
1330,
1195,
49925,
198,
6738,
11485,
12543,
2733,
1330,
2124,
13099,
62,
8443,
198,
11748,
14601,
198,
40539,
654,
13,
36439,
24455,
7203,
33770,
1600,
12156,
8344,
341,
20361,
8,
198,
40539,
654,... | 4.04918 | 61 |
def test_dump_molecular_structures(
    request,
    tmp_path,
    molecule_db,
    name_db,
    case_data,
):
    """
    Dump molecular structures.

    This test dumps molecules to files and to a MongoDB database
    so that they can be visually inspected.

    Parameters
    ----------
    request : :class:`pytest.FixtureRequest`
        Holds information about the requesting test.

    tmp_path : :class:`pathlib2.Path`
        A path into which the structure of the molecule is saved.

    molecule_db : :class:`.MoleculeDatabase`
        A database into which the structure of the molecule is saved.

    name_db : :class:`.ValueDatabase`
        A database into which the name of the molecule is saved.

    Returns
    -------
    None : :class:`NoneType`

    """
    # Write a .mol file for offline inspection.
    case_data.molecule.write(tmp_path / 'molecule.mol')
    # Persist the structure, then key it by the requesting test's node name
    # so it can be located in the database later.
    molecule_db.put(case_data.molecule)
    name_db.put(case_data.molecule, request.node.name)
| [
4299,
1332,
62,
39455,
62,
76,
2305,
10440,
62,
7249,
942,
7,
198,
220,
220,
220,
2581,
11,
198,
220,
220,
220,
45218,
62,
6978,
11,
198,
220,
220,
220,
27756,
62,
9945,
11,
198,
220,
220,
220,
1438,
62,
9945,
11,
198,
220,
220,... | 2.702312 | 346 |
# app/conversations/models.py
# Django modules
from django.db import models
# Locals
from app.users.models import MyCustomUser
# Create your models here.
# NAMA MODEL/TABEL: Conversation
# NAMA MODEL/TABEL: Message
| [
2,
598,
14,
1102,
690,
602,
14,
27530,
13,
9078,
198,
198,
2,
37770,
13103,
198,
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
198,
2,
15181,
874,
198,
6738,
598,
13,
18417,
13,
27530,
1330,
2011,
15022,
12982,
198,
198,
2,
13610,
... | 3 | 74 |
import angr
| [
11748,
281,
2164,
628
] | 3.25 | 4 |
from functools import wraps
from evalml.exceptions import PipelineNotYetFittedError
from evalml.utils.base_meta import BaseMeta
class PipelineBaseMeta(BaseMeta):
    """Metaclass that overrides creating a new pipeline by wrapping methods with validators and setters"""

    @classmethod
    def check_for_fit(cls, method):
        """`check_for_fit` wraps a method that validates if `self._is_fitted` is `True`.

        It raises an exception if `False` and calls and returns the wrapped method if `True`.
        """
        # The decorated wrapper was missing here (`@wraps(method)` directly
        # followed by `return` is a SyntaxError); reconstructed from the
        # docstring's contract.
        @wraps(method)
        def _check_for_fit(self, *args, **kwargs):
            klass = type(self).__name__
            if not self._is_fitted:
                raise PipelineNotYetFittedError(
                    f"This {klass} is not fitted yet. You must fit {klass} before calling {method.__name__}."
                )
            return method(self, *args, **kwargs)

        return _check_for_fit
class TimeSeriesPipelineBaseMeta(PipelineBaseMeta):
    """Metaclass that overrides creating a new time series pipeline by wrapping methods with validators and setters"""

    @classmethod
    def check_for_fit(cls, method):
        """`check_for_fit` wraps a method that validates if `self._is_fitted` is `True`.

        It raises an exception if `False` and calls and returns the wrapped method if `True`.
        """
        # The decorated wrapper was missing here (`@wraps(method)` directly
        # followed by `return` is a SyntaxError); reconstructed from the
        # docstring's contract, mirroring PipelineBaseMeta.
        @wraps(method)
        def _check_for_fit(self, *args, **kwargs):
            klass = type(self).__name__
            if not self._is_fitted:
                raise PipelineNotYetFittedError(
                    f"This {klass} is not fitted yet. You must fit {klass} before calling {method.__name__}."
                )
            return method(self, *args, **kwargs)

        return _check_for_fit
| [
198,
198,
6738,
1257,
310,
10141,
1330,
27521,
198,
198,
6738,
5418,
4029,
13,
1069,
11755,
1330,
37709,
3673,
11486,
37,
2175,
12331,
198,
6738,
5418,
4029,
13,
26791,
13,
8692,
62,
28961,
1330,
7308,
48526,
628,
198,
4871,
37709,
1488... | 2.877717 | 368 |
import scrappers
import scrappers.mixins
class FoxNews(scrappers.mixins.RSSScrapper, scrappers.Scrapper):
    """The Fox News RSS feeds scrapper.

    Combines the generic ``Scrapper`` base with the ``RSSScrapper`` mixin;
    feed URLs/configuration are presumably supplied by those bases — TODO
    confirm against their definitions (not visible in this chunk).
    """
| [
11748,
19320,
11799,
198,
11748,
19320,
11799,
13,
19816,
1040,
628,
198,
4871,
5426,
9980,
7,
1416,
430,
11799,
13,
19816,
1040,
13,
49,
5432,
3351,
430,
2848,
11,
19320,
11799,
13,
3351,
430,
2848,
2599,
198,
220,
220,
220,
37227,
4... | 2.836364 | 55 |
#!/usr/bin/env python
# Copyright (c) 2018 Anish Athalye (me@anishathalye.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import argparse
import socket
import sys
import os
import select
import time
__version__ = '0.1.2'
# Default Seashells service endpoint.
SERVER_IP = 'seashells.io'
SERVER_PORT = 1337
# Socket receive / input read chunk sizes, in bytes.
RECV_BUFFER_SIZE = 1024
READ_BUFFER_SIZE = 1024
SOCKET_TIMEOUT = 10  # seconds
# NOTE(review): main() is not visible in this chunk — it is expected to be
# defined earlier in the file.
if __name__ == '__main__':
    main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
2,
15069,
357,
66,
8,
2864,
1052,
680,
13548,
3400,
68,
357,
1326,
31,
7115,
776,
3400,
68,
13,
785,
8,
198,
2,
198,
2,
2448,
3411,
318,
29376,
7520,
11,
1479,
286,
3877,
11... | 3.435208 | 409 |
from faker import Faker
from generators.uniform_distribution_gen import UniformDistributionGen
from generators.random_relation_gen import RandomRelationGen
from base.field_base import FieldBase
from generators.normal_distribution_gen import NormalDistributionGen
from generators.first_name_generator import FirstNameGenerator
from generators.last_name_generator import LastNameGenerator
from generators.universal_function_generator import UniversalFunctionGenerator
from generators.print_relations_generator import PrintRelationsGenerator
from base.model_base import ModelBase
from base.class_base import ClassBase
if __name__ == "__main__":
model = ModelBase()
# Person
cb_a = ClassBase(model, A, 10)
cb_b = ClassBase(model, B, 10)
cb_c = ClassBase(model, C, 10)
FieldBase(cb_a, PrintRelationsGenerator(),
"alpha", related_fields=["C.alpha", "C.beta", "C.gamma"])
FieldBase(cb_a, RandomRelationGen(cb_c), "C")
FieldBase(cb_b, PrintRelationsGenerator(),
"alpha", related_fields=["C.alpha", "C.beta", "C.gamma"])
FieldBase(cb_b, RandomRelationGen(cb_c), "C")
FieldBase(cb_c, PrintRelationsGenerator(),
"alpha", related_fields=["beta"])
FieldBase(cb_c, PrintRelationsGenerator(),
"beta", related_fields=["gamma"])
FieldBase(cb_c, PrintRelationsGenerator(),
"gamma", related_fields=["delta"])
FieldBase(cb_c, UniversalFunctionGenerator(
f=Faker().paragraph, nb_sentences=1),
"delta")
model.create_instances()
model.map_field_graph_full()
model.print_generation_order()
model.draw_field_graph()
model.fill_in_instances()
print("")
| [
6738,
277,
3110,
1330,
376,
3110,
198,
6738,
27298,
13,
403,
6933,
62,
17080,
3890,
62,
5235,
1330,
35712,
20344,
3890,
13746,
198,
6738,
27298,
13,
25120,
62,
49501,
62,
5235,
1330,
14534,
6892,
341,
13746,
198,
6738,
2779,
13,
3245,
... | 2.7184 | 625 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Native adb client-to-server protocol implementation.
This is an async implementation, suitable for including in an event loop.
"""
import os
import sys
import stat
import errno
import struct
import signal
import typing
from devtest import logging
from devtest import timers
from devtest import ringbuffer
from devtest.io import socket
from devtest.io import streams
from devtest.os import process
from devtest.os import procutils
from devtest.os import exitstatus
from devtest.io.reactor import (get_kernel, spawn, block_in_thread, sleep,
SignalEvent, timeout_after, TaskTimeout)
from . import logcat
ADB = procutils.which("adb")
if ADB is None:
raise ImportError("The adb program was not found in PATH. "
"This module will not work.")
ADB_PORT = 5037
MAX_PAYLOAD_V2 = 256 * 1024
MAX_PAYLOAD = MAX_PAYLOAD_V2
class AdbProtocolError(Error):
"""An error in the protocol was detected."""
class AdbCommandFail(Error):
"""An error indicated by the server."""
def _run_adb(adbcommand):
    """Run the adb binary with *adbcommand* (whitespace-separated arguments)."""
    argv = [ADB] + adbcommand.split()
    return process.check_output(argv)
class AdbConnection:
"""Asynchronous adb socket wrapper."""
# On the host: 1st OKAY is connect, 2nd OKAY is status.
class AdbClient:
"""An adb client, synchronous.
For general host side operations.
"""
def get_device(self, serial):
"""Get a AndroidDeviceClient instance.
"""
return AndroidDeviceClient(
serial, host=self._conn.host, port=self._conn.port)
def get_state(self, serial):
"""Get the current state of a device.
Arguments:
serial: str of the device serial number.
Returns:
one of {"device", "unauthorized", "bootloader"}
or None if serial not found.
"""
for dev in self.get_device_list():
if dev.serial == serial:
return dev.state
def get_device_list(self):
"""Get list of attached devices.
Returns:
list of AndroidDevice instances.
"""
dl = []
resp = self._message(b"host:devices-l")
for line in resp.splitlines():
dl.append(_device_factory(line))
return dl
def forward(self, serial, hostport, devport):
"""Tell server to start forwarding TCP ports.
"""
msg = b"host-serial:%b:forward:tcp:%d;tcp:%d" % (serial.encode("ascii"),
hostport, devport)
get_kernel().run(_command_transact(self._conn, msg))
def list_forward(self):
"""Return a list of currently forwarded ports.
Returns:
Tuple of (serial, host_port, device_port).
"""
resp = self._message(b"host:list-forward")
fl = []
for line in resp.splitlines():
# <serial> " " <local> " " <remote> "\n"
serno, host_port, remote_port = line.split()
fl.append((serno.decode("ascii"),
int(host_port.split(b":")[1]),
int(remote_port.split(b":")[1])))
return fl
@property
def server_version(self):
"""The server's version number."""
resp = self._message(b"host:version")
return int(resp, 16)
def getAsyncAndroidDeviceClient(serial, host="localhost", port=ADB_PORT):
"""Get initialized _AsyncAndroidDeviceClient object from sync code.
"""
return get_kernel().run(AsyncAndroidDeviceClient(serial, host, port))
async def AsyncAndroidDeviceClient(serial, host="localhost", port=ADB_PORT):
"""Get initialized _AsyncAndroidDeviceClient instance from async code."""
ac = _AsyncAndroidDeviceClient(serial, host, port)
await ac._init()
return ac
class _AsyncAndroidDeviceClient:
"""An active adb client per device.
For device specific operations.
For use in asynchronous event loops.
"""
async def forward(self, hostport, devport):
"""Tell server to start forwarding TCP ports.
"""
msg = b"host-serial:%b:forward:tcp:%d;tcp:%d" % (self.serial,
hostport, devport)
await _command_transact(self._conn, msg)
async def kill_forward(self, hostport):
"""Tell server to remove forwarding TCP ports.
"""
msg = b"host-serial:%b:killforward:tcp:%d" % (self.serial, hostport)
await _command_transact(self._conn, msg)
async def kill_forward_all(self):
"""Tell server to remove all forwarding TCP ports.
"""
msg = b"host-serial:%b:killforward-all" % (self.serial,)
await _command_transact(self._conn, msg)
async def list_forward(self):
"""Return a list of currently forwarded ports.
Returns:
Tuple of (host_port, device_port).
"""
msg = b"host-serial:%b:list-forward" % (self.serial,)
resp = await self._message(msg)
fl = []
for line in resp.splitlines():
# <serial> " " <local> " " <remote> "\n"
serno, host_port, remote_port = line.split()
if serno == self.serial:
fl.append((int(host_port.split(b":")[1]),
int(remote_port.split(b":")[1])))
return fl
async def wait_for(self, state: str):
"""Wait for device to be in a particular state.
State must be one of {"any", "bootloader", "device", "recovery", "sideload"}
"""
if state not in {"any", "bootloader", "device", "recovery", "sideload"}:
raise ValueError("Invalid state to wait for.")
msg = b"host-serial:%b:wait-for-usb-%b" % (self.serial, state.encode("ascii"))
await _command_transact(self._conn, msg)
async def command(self, cmdline, usepty=False):
"""Run a non-interactive shell command.
Uses ring buffers to collect outputs to avoid a runaway device command
from filling host memory. However, this might possibly truncate output.
Returns:
stdout (string): output of command
stderr (string): error output of command
exitstatus (ExitStatus): the exit status of the command.
"""
if self.features is None:
await self.get_features()
if "shell_v2" not in self.features:
raise AdbCommandFail("Only shell v2 protocol currently supported.")
cmdline, name = _fix_command_line(cmdline)
await _start_shell(self.serial, self._conn, usepty, cmdline)
sp = ShellProtocol(self._conn.socket)
stdout = ringbuffer.RingBuffer(MAX_PAYLOAD)
stderr = ringbuffer.RingBuffer(MAX_PAYLOAD)
resp = await sp.run(None, stdout, stderr)
await self._conn.close()
rc = resp[0]
if rc & 0x80:
rc = -(rc & 0x7F)
return (stdout.read().decode("utf8"),
stderr.read().decode("utf8"),
exitstatus.ExitStatus(
None,
name="{}@{}".format(name, self.serial.decode("ascii")),
returncode=rc)
)
async def stat(self, path):
    """stat a remote file or directory.

    Return os.stat_result with attributes from remote path.
    """
    proto = SyncProtocol(self.serial)
    await proto.connect_with(self._conn)
    try:
        return await proto.stat(path)
    finally:
        # Always tear down the sync session and the transport.
        await proto.quit()
        await self._conn.close()
async def push(self, localfiles: list, remotepath: str, sync: bool = False):
    """Push a list of local files to remote file or directory.
    """
    proto = SyncProtocol(self.serial)
    await proto.connect_with(self._conn)
    try:
        return await proto.push(localfiles, remotepath, sync)
    finally:
        # Always tear down the sync session and the transport.
        await proto.quit()
        await self._conn.close()
async def pull_file(self, remotepath: str, filelike: typing.BinaryIO):
    """Pull a remote file into given file-like object.
    """
    proto = SyncProtocol(self.serial)
    await proto.connect_with(self._conn)
    try:
        return await proto.pull_file(remotepath, filelike)
    finally:
        # Always tear down the sync session and the transport.
        await proto.quit()
        await self._conn.close()
async def start(self, cmdline, stdoutstream, stderrstream):
    """Start a process on device with the shell protocol.

    Args:
        cmdline: list or str command line to run.
        stdoutstream, stderrstream: file-like objects receiving output.

    Returns a curio Task object wrapping the ShellProtocol run.
    """
    # Fix: the base name returned by _fix_command_line was unused here;
    # discard it explicitly, matching the idiom used by logcat().
    cmdline, _ = _fix_command_line(cmdline)
    await _start_shell(self.serial, self._conn, False, cmdline)
    sp = ShellProtocol(self._conn.socket)
    return await spawn(sp.run(None, stdoutstream, stderrstream))
async def spawn(self, cmdline):
    """Start a process on device in raw mode.

    Args:
        cmdline: list or str command line to run.

    Return:
        DeviceProcess with active connection.
    """
    cmdline, name = _fix_command_line(cmdline)
    logging.info("adb.spawn({})".format(cmdline))
    # Raw exec: no shell protocol framing, the socket carries stdio directly.
    sock = await _start_exec(self.serial, self._conn, cmdline)
    return DeviceProcess(sock, name)
async def install(self, apkfile, allow_test=True, installer=None,
                  onsdcard=False, onflash=False, allow_downgrade=True,
                  grant_all=True):
    """Install an APK.

    Performs a streaming installation. Returns True if Success.

    Args:
        apkfile: path to a local, regular APK file.
        allow_test: allow test packages (-t).
        installer: installer package name (-i).
        onsdcard: install on SD card (-s).
        onflash: install on internal flash (-f).
        allow_downgrade: allow version downgrade (-d).
        grant_all: grant all runtime permissions (-g).
    """
    # TODO(dart) other options
    st = os.stat(apkfile)  # TODO(dart) fix potential long blocker
    if not stat.S_ISREG(st.st_mode):
        raise ValueError("The apkfile must be a regular file.")
    cmdline = ["cmd", "package", "install"]
    if allow_test:
        cmdline.append("-t")
    if installer is not None:
        cmdline.extend(["-i", str(installer)])
    if onsdcard:
        cmdline.append("-s")
    if onflash:
        cmdline.append("-f")
    if allow_downgrade:
        cmdline.append("-d")
    if grant_all:
        cmdline.append("-g")
    # -S announces the number of bytes that will be streamed.
    cmdline.extend(["-S", str(st.st_size)])
    cmdline, name = _fix_command_line(cmdline)
    sock = await _start_exec(self.serial, self._conn, cmdline)
    p = DeviceProcess(sock, name)
    del sock
    # Stream the APK body, then read the installer's status line.
    async with streams.aopen(apkfile, "rb") as afo:
        await p.copy_from(afo)
    status_response = await p.read(4096)
    await p.close()
    return b'Success' in status_response
    # TODO(dart) install sessions
# TODO(dart) install sessions
async def package(self, cmd, *args, user=None, **kwargs):
    """Manage packages on the device.

    Equivalent of 'pm' command.
    """
    argv = ['cmd', 'package']
    if user is not None:
        argv.extend(["--user", str(user)])
    argv.append(cmd)
    for arg in args:
        argv.append(str(arg))
    for opt, optarg in kwargs.items():
        argv.append("--" + opt)
        # None/True mark pure flags; any other value is passed through.
        if optarg not in (None, True):
            argv.append(str(optarg))
    stdout, stderr, status = await self.command(argv)
    if not status:
        raise AdbCommandFail(stderr)
    return stdout
async def logcat_clear(self):
    """Clear the device's logcat buffer."""
    _out, _err, status = await self.command(["logcat", "-c"])
    if not status:
        raise AdbCommandFail("Didn't clear logcat")
async def logcat(self, stdoutstream, stderrstream, format="threadtime",
                 buffers="default", modifiers=None, binary=False,
                 regex=None, dump=False, logtags=""):
    """Coroutine for streaming logcat output to the provided file-like
    streams.

    Args:
        stdoutstream, stderrstream: file-like objects to write log events to.
        binary: bool output binary format if True.
        regex: A Perl compatible regular expression to match messages against.
        dump: dump the log and exit (-d) instead of streaming.
        format: str of one of the following:
            "brief", "long", "process", "raw", "tag", "thread",
            "threadtime", "time".
        buffers: list or comma separated string of:
            'main', 'system', 'radio', 'events', 'crash', 'default' or 'all'
        modifiers: str or list of one or more of:
            "epoch", "monotonic", "uid", "usec", "UTC", "year", "zone"
        logtags: str of space separated filter expressions; overridden by
            the ANDROID_LOG_TAGS environment variable when set.
    """
    # Environment overrides any explicitly supplied logtags.
    logtags = os.environ.get("ANDROID_LOG_TAGS", logtags)
    cmdline = ['exec', 'logcat']
    # buffers: accept comma-joined string or list; one -b per buffer.
    if isinstance(buffers, str):
        buffers = buffers.split(",")
    for bufname in buffers:
        cmdline.extend(["-b", bufname])
    if binary:
        cmdline.append("-B")
    if regex:
        cmdline.extend(["-e", regex])
    if dump:
        cmdline.append("-d")
    # output format: validate base format, then append modifiers as
    # comma-separated suffixes of -v.
    if format not in {"brief", "long", "process", "raw", "tag",
                      "thread", "threadtime", "time"}:
        raise ValueError("Bad format type.")
    if modifiers:
        if isinstance(modifiers, str):
            modifiers = modifiers.split(",")
        for modifier in modifiers:
            if modifier in {"epoch", "monotonic", "uid", "usec", "UTC",
                            "year", "zone"}:
                format += ("," + modifier)
            else:
                raise ValueError("Invalid logcat format modifier")
    cmdline.extend(["-v", format])
    # logtags: escape embedded quotes so the remote shell keeps them intact.
    if logtags:
        logtags = logtags.replace('"', '\\"')
        cmdline.extend(logtags.split())
    # go!
    cmdline, _ = _fix_command_line(cmdline)
    await _start_shell(self.serial, self._conn, False, cmdline)
    sp = ShellProtocol(self._conn.socket)
    await sp.run(None, stdoutstream, stderrstream)
class AndroidDeviceClient:
    """An active adb client per device.

    For synchronous (blocking) style code. Each method drives the
    corresponding async device-client coroutine to completion on the curio
    kernel.
    """
    # BUG FIX: `reboot` was wrapped in two stacked bare @property decorators,
    # producing property(property(f)); accessing it then fails because a
    # property object is not callable. reboot is an action like remount/root
    # below, so it is a plain method.
    def reboot(self):
        """Reboot the device."""
        return get_kernel().run(self._aadb.reboot())

    def remount(self):
        """Remount filesystem read-write."""
        return get_kernel().run(self._aadb.remount())

    def root(self):
        """Become root on the device."""
        return get_kernel().run(self._aadb.root())

    def unroot(self):
        """Become non-root on the device."""
        return get_kernel().run(self._aadb.unroot())

    def forward(self, hostport, devport):
        """Tell server to start forwarding TCP ports.
        """
        return get_kernel().run(self._aadb.forward(hostport, devport))

    def kill_forward(self, hostport):
        """Tell server to remove forwarding TCP ports.
        """
        return get_kernel().run(self._aadb.kill_forward(hostport))

    def list_forward(self):
        """Get a list of currently forwarded ports."""
        return get_kernel().run(self._aadb.list_forward())

    def wait_for(self, state: str):
        """Wait for device to be in a particular state.

        State must be one of {"any", "bootloader", "device", "recovery", "sideload"}
        """
        return get_kernel().run(self._aadb.wait_for(state))

    def command(self, cmdline, usepty=False):
        """Run a non-interactive shell command.

        Uses ring buffers to collect outputs to avoid a runaway device command
        from filling host memory. However, this might possibly truncate output.

        Returns:
            stdout (string): output of command
            stderr (string): error output of command
            exitstatus (ExitStatus): the exit status of the command.
        """
        return get_kernel().run(self._aadb.command(cmdline, usepty))

    def spawn(self, cmdline):
        """Start a process on device in raw mode.

        Return:
            DeviceProcess with active connection.
        """
        return get_kernel().run(self._aadb.spawn(cmdline))

    def install(self, apkfile, **kwargs):
        """Install an APK.

        Default flags are best for testing, but you can override. See async
        method.
        """
        coro = self._aadb.install(apkfile, **kwargs)
        return get_kernel().run(coro)

    def package(self, cmd, *args, user=None, **kwargs):
        """Manage packages.

        Equivalent of 'pm' command.
        """
        coro = self._aadb.package(cmd, *args, user=user, **kwargs)
        return get_kernel().run(coro)

    def list(self, name, cb):
        """Perform a directory listing.

        Arguments:
            name: str, name of directory
            cb: callable with signature cb(os.stat_result, filename)
        """
        # BUG FIX: previously forwarded the undefined name `acb`, which
        # raised NameError on every call; forward the `cb` parameter.
        coro = self._aadb.list(name, cb)
        return get_kernel().run(coro)

    def stat(self, path: str):
        """stat a remote file or directory.

        Return os.stat_result with attributes from remote path.
        """
        coro = self._aadb.stat(path)
        return get_kernel().run(coro)

    def push(self, localfiles: list, remotepath: str, sync: bool = False):
        """Push a list of local files to remote file or directory.
        """
        coro = self._aadb.push(localfiles, remotepath, sync)
        return get_kernel().run(coro)

    def pull(self, remotepath, localpath):
        """Pull a single file from device to local file system.
        """
        coro = self._aadb.pull(remotepath, localpath)
        return get_kernel().run(coro)

    def pull_file(self, remotepath: str, filelike: typing.BinaryIO):
        """Pull a file into local memory, as bytes.

        A path to a file on the device.
        """
        coro = self._aadb.pull_file(remotepath, filelike)
        return get_kernel().run(coro)

    def reconnect(self):
        """Reconnect from device side."""
        return get_kernel().run(self._aadb.reconnect())

    def logcat_clear(self):
        """Clear logcat buffer."""
        return get_kernel().run(self._aadb.logcat_clear())

    async def logcat(self, stdoutstream, stderrstream, format="threadtime",
                     buffers="default", modifiers=None, binary=False,
                     regex=None, logtags=""):
        """Coroutine for streaming logcat output to the provided file-like
        streams.
        """
        await self._aadb.logcat(stdoutstream, stderrstream, format=format,
                                buffers=buffers, modifiers=modifiers,
                                binary=binary, regex=regex, logtags=logtags)
class DeviceProcess:
    """Represents an attached process on the device.
    """
    async def copy_to(self, otherfile):
        """Copy output from this process to another file stream."""
        while True:
            data = await self.socket.recv(MAX_PAYLOAD)
            # Empty read means the remote side closed the stream.
            if not data:
                break
            await otherfile.write(data)

    async def copy_from(self, otherfile):
        """Copy output from another file stream to this process."""
        while True:
            data = await otherfile.read(MAX_PAYLOAD)
            # Empty read means the source file is exhausted.
            if not data:
                break
            await self.socket.sendall(data)
def _fix_command_line(cmdline):
"""Fix the command.
If a list, quote the components if required.
Return encoded command line as bytes and the command base name.
"""
if isinstance(cmdline, list):
name = cmdline[0]
cmdline = " ".join('"{}"'.format(s) if " " in s else str(s) for s in cmdline)
else:
name = cmdline.split()[0]
return cmdline.encode("utf8"), name
# Perform shell request, connection stays open
# Send command to specific device with orderly shutdown
# Root command transaction is special since device adbd restarts.
class ShellProtocol:
    """Implement the shell protocol v2."""
    # Packet ID constants for the multiplexed shell v2 stream.
    IDSTDIN = 0
    IDSTDOUT = 1
    IDSTDERR = 2
    IDEXIT = 3
    CLOSESTDIN = 4
    # Window size change (an ASCII version of struct winsize).
    WINDOWSIZECHANGE = 5
    # Indicates an invalid or unknown packet.
    INVALID = 255
class SyncProtocol:
    """Implementation of Android SYNC protocol.

    Only recent devices are supported.
    """
    # Encode a 4-byte ASCII tag as its little-endian uint32 wire value.
    mkid = lambda code: int.from_bytes(code, byteorder='little')  # noqa
    ID_LSTAT_V1 = mkid(b'STAT')
    ID_STAT_V2 = mkid(b'STA2')
    ID_LSTAT_V2 = mkid(b'LST2')
    ID_LIST = mkid(b'LIST')
    ID_SEND = mkid(b'SEND')
    ID_RECV = mkid(b'RECV')
    ID_DENT = mkid(b'DENT')
    ID_DONE = mkid(b'DONE')
    ID_DATA = mkid(b'DATA')
    ID_OKAY = mkid(b'OKAY')
    ID_FAIL = mkid(b'FAIL')
    ID_QUIT = mkid(b'QUIT')
    del mkid
    SYNC_DATA_MAX = 65536
    SYNCMSG_DATA = struct.Struct("<II")  # id, size
    # id; mode; size; time; namelen;
    SYNCMSG_DIRENT = struct.Struct("<IIIII")
    # id; error; dev; ino; mode; nlink; uid; gid; size; atime; mtime; ctime;
    SYNCMSG_STAT_V2 = struct.Struct("<IIQQIIIIQqqq")
    SYNCMSG_STATUS = struct.Struct("<II")  # id, msglen

    async def list(self, path, cb_coro):
        """List a directory on device.

        Calls ``await cb_coro(stat, name)`` for each directory entry.
        Returns True on a clean DONE, False on an unexpected message id.
        """
        await self.send_request(SyncProtocol.ID_LIST, path)
        while True:
            resp = await self.socket.recv(SyncProtocol.SYNCMSG_DIRENT.size)
            msgid, mode, size, time, namelen = SyncProtocol.SYNCMSG_DIRENT.unpack(resp)
            if msgid == SyncProtocol.ID_DONE:
                return True
            if msgid != SyncProtocol.ID_DENT:
                return False
            # The entry name follows the fixed-size DENT header.
            name = await self.socket.recv(namelen)
            # NOTE(review): several os.stat_result fields are filled with
            # None here (ino, dev, nlink, atime/ctime) -- callers must not
            # rely on them; confirm this is intentional.
            stat = os.stat_result((mode, None, None, None, 0, 0, size, None, float(time), None))
            await cb_coro(stat, name.decode("utf-8"))

    async def stat(self, remotepath):
        """Stat a path.

        Returns an os.stat_result; raises OSError with the remote errno on
        failure, AdbProtocolError on an unexpected response id.
        """
        s = SyncProtocol.SYNCMSG_STAT_V2
        await self.send_request(SyncProtocol.ID_STAT_V2, remotepath)
        resp = await self.socket.recv(s.size)
        stat_id, err, dev, ino, mode, nlink, uid, gid, size, atime, mtime, ctime = s.unpack(resp)
        if stat_id == SyncProtocol.ID_STAT_V2:
            if err != 0:
                raise OSError(err, errno.errorcode[err], remotepath)
            sr = os.stat_result((mode, ino, dev, nlink, uid, gid, size,
                                 float(atime), float(mtime), float(ctime),  # floats
                                 # int nanoseconds, but not really
                                 atime * 1e9, mtime * 1e9, ctime * 1e9))
            return sr
        else:
            raise AdbProtocolError("SyncProtocol: invalid response type.")

    async def push(self, localfiles, remotepath, sync=False):
        """Push files to device destination.

        Args:
            localfiles: list of local paths (files or directories).
            remotepath: destination file or directory on the device.
            sync: skip files whose remote size and mtime already match.
        """
        # Classify the destination: missing, directory, or regular file.
        try:
            dest_st = await self.stat(remotepath)
        except FileNotFoundError:
            dst_exists = False
            dst_isdir = False
        else:
            dst_exists = True
            if stat.S_ISDIR(dest_st.st_mode):
                dst_isdir = True
            elif stat.S_ISREG(dest_st.st_mode):
                dst_isdir = False
            else:
                raise ValueError("push: destination is not a directory or "
                                 "regular file")
        if not dst_isdir:
            if len(localfiles) > 1:
                raise ValueError("push: destination is not a dir when copying "
                                 "multiple files.")
            if dst_exists:
                raise ValueError("push: destination exists")
        for localfile in localfiles:
            local_st = os.stat(localfile)
            if stat.S_ISDIR(local_st.st_mode):
                rpath = os.path.join(remotepath, os.path.basename(localfile))
                await self._copy_local_dir_remote(localfile, rpath, sync)
            if stat.S_ISREG(local_st.st_mode):
                if dst_isdir:
                    rpath = os.path.join(remotepath, os.path.basename(localfile))
                # NOTE(review): when the destination is NOT a directory,
                # rpath is never assigned in this branch before use below --
                # looks like a latent NameError for single-file pushes to a
                # new path; confirm against the elided code.
                # If synchronize requested, just stat remote and return if size
                # and mtime are equal.
                if sync:
                    try:
                        dst_stat = await self.stat(rpath)
                    except OSError:
                        pass
                    else:
                        if (local_st.st_size == dst_stat.st_size and
                                local_st.st_mtime == dst_stat.st_mtime):
                            return
                await self._sync_send(localfile.encode("utf8"),
                                      rpath.encode("utf8"),
                                      local_st)
class LogcatHandler:
    """Host side logcat handler that receives logcat messages in binary mode
    over raw connection.
    """
    # Wire layout of logger_entry_v4 (little-endian).
    LOGCAT_MESSAGE = struct.Struct("<HHiIIIII")  # logger_entry_v4
    # uint16_t len;       length of the payload
    # uint16_t hdr_size;  sizeof(struct logger_entry_v4)
    # int32_t pid;        generating process's pid
    # uint32_t tid;       generating process's tid
    # uint32_t sec;       seconds since Epoch
    # uint32_t nsec;      nanoseconds
    # uint32_t lid;       log id of the payload, bottom 4 bits currently
    # uint32_t uid;       generating process's uid
    # char msg[0];        the entry's payload

    def clear(self):
        """Clear logcat buffers."""
        return get_kernel().run(self._aadb.logcat_clear())

    def dump(self):
        """Dump logs to stdout until interrupted."""
        # self._dump is defined elsewhere in this class (not shown here).
        return get_kernel().run(self._dump())

    def dump_to(self, localfile, logtags=None):
        """Dump all current logs to a file, in binary format."""
        return get_kernel().run(self._dump_to(localfile, logtags))

    def watch_for(self, tag=None, priority=None, text=None, timeout=90):
        """Watch for first occurence of a particular set of tag, priority, or
        message text.

        If tag is given watch for first of that tag.
        If tag and priority is given watch for tag only with that priority.
        if tag and text is given watch for tag with the given text in the
        message part.
        If text is given look for first occurence of text in message part.

        Raises:
            ValueError: if neither tag nor text is supplied.
        """
        if tag is None and text is None:
            raise ValueError("watch_for: must supply one or both of 'tag' or "
                             "'text' parameters.")
        return get_kernel().run(self._watch_for(tag, priority, text, timeout))
class AndroidDevice:
    """Information about attached Android device.

    No connection necessary. Parsed from an adb device-list line such as:
    """
    # b'HTxxxserial device usb:1-1.1 product:marlin model:Pixel_XL device:marlin\n'
# Manual smoke test: exercises AdbClient and AndroidDeviceClient against the
# first attached device.
if __name__ == "__main__":
    from devtest import debugger
    debugger.autodebug()
    start_server()
    print("Test AdbClient:")
    c = AdbClient()
    print(" Server version:", c.server_version)
    for devinfo in c.get_device_list():
        print(" ", devinfo)
    print("Forwards:", c.list_forward())
    c.close()
    del c
    print("Test AndroidDeviceClient:")
    # NOTE(review): relies on `devinfo` left over from the loop above --
    # raises NameError when no device is attached.
    ac = AndroidDeviceClient(devinfo.serial)
    ac.wait_for("device")
    print(" features:", ac.features)
    print(" state:", ac.get_state())
    print(" running 'ls /sdcard':")
    stdout, stderr, es = ac.command(["ls", "/sdcard"])
    print(" ", es)
    print(" stdout:", repr(stdout))
    print(" stderr:", repr(stderr))
    print("forward list:")
    print(repr(ac.list_forward()))
    ac.close()
    del ac
    # Test async with logcat. ^C to stop it.
    kern = get_kernel()
    # NOTE(review): `dostuff` is not defined anywhere in this file --
    # this line will raise NameError; confirm the intended coroutine.
    kern.run(dostuff)
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
| [
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,
921,
743,
7330,
257,
4866,
286,
262,
13789,
379,
198,
19... | 2.229281 | 12,609 |
import os
import sys
import json
import argparse
import numpy as np
import nibabel as nib
import scipy.io as sio
from matplotlib import cm
from json import encoder
from matplotlib import colors as mcolors
encoder.FLOAT_REPR = lambda o: format(o, '.2f')
def build_wmc(tck_file, tractID_list):
    """
    Build the wmc structure.

    Args:
        tck_file: path to a nibabel-readable tractogram (.tck) file.
        tractID_list: 1-D sequence of integer tract IDs; for each tract an
            'estimated_idx_<name>.npy' index file must exist, and
            'tract_name_list.txt' must list one name per tract.

    Side effects: writes tracts/<ID>.json per tract, tracts/tracts.json and
    classification.mat into the current working directory.
    """
    print("building wmc structure")
    tractogram = nib.streamlines.load(tck_file)
    tractogram = tractogram.streamlines
    labels = np.zeros((len(tractogram), 1))
    # Fix: exist_ok lets the script be re-run without crashing on a
    # pre-existing 'tracts' directory.
    os.makedirs('tracts', exist_ok=True)
    tractsfile = []
    # NOTE(review): assumes tractID_list[-1] is the largest ID -- confirm.
    names = np.full(tractID_list[-1], 'NC', dtype=object)
    with open('tract_name_list.txt') as f:
        tract_name_list = f.read().splitlines()
    # Deterministic color assignment across runs.
    np.random.seed(0)
    colors = dict(mcolors.BASE_COLORS, **mcolors.CSS4_COLORS)
    by_hsv = sorted((tuple(mcolors.rgb_to_hsv(mcolors.to_rgba(color)[:3])), name)
                    for name, color in colors.items())
    permuted_colors = np.random.permutation(by_hsv)
    for t, tractID in enumerate(tractID_list):
        tract_name = tract_name_list[t]
        idx_fname = 'estimated_idx_%s.npy' % tract_name
        idx_tract = np.load(idx_fname)
        labels[idx_tract] = tractID
        # Build the per-tract json file.
        filename = '%s.json' % tractID
        tract = tractogram[idx_tract]
        count = len(tract)
        streamlines = np.zeros([count], dtype=object)
        for e in range(count):
            streamlines[e] = np.transpose(tract[e]).round(2)
        color = list(permuted_colors[tractID][0])
        print("sub-sampling for json")
        # Fix: the sample-size variable previously shadowed the builtin max().
        n_sample = min(count, 1000)
        jsonfibers = np.reshape(streamlines[:n_sample], [n_sample, 1]).tolist()
        for i in range(n_sample):
            jsonfibers[i] = [jsonfibers[i][0].tolist()]
        with open('tracts/%s' % filename, 'w') as outfile:
            jsonfile = {'name': tract_name, 'color': color, 'coords': jsonfibers}
            json.dump(jsonfile, outfile)
        # Human-readable display name, e.g. "left_arcuate" -> "Arcuate left".
        splitname = tract_name.split('_')
        fullname = splitname[-1].capitalize() + ' ' + ' '.join(splitname[0:-1])
        tractsfile.append({"name": fullname, "color": color, "filename": filename})
        names[tractID - 1] = tract_name
    print("saving classification.mat")
    sio.savemat('classification.mat',
                {"classification": {"names": names, "index": labels}})
    with open('tracts/tracts.json', 'w') as outfile:
        json.dump(tractsfile, outfile, separators=(',', ': '), indent=4)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-tractogram', nargs='?', const=1, default='',
                        help='The tractogram file')
    args = parser.parse_args()
    with open('config.json') as f:
        data = json.load(f)
    # NOTE(review): eval() on a config value executes arbitrary code from
    # config.json -- only safe if the config file is fully trusted.
    tractID_list = np.array(eval(data["tractID_list"]), ndmin=1)
    build_wmc(args.tractogram, tractID_list)
    sys.exit()
| [
11748,
28686,
198,
11748,
25064,
198,
11748,
33918,
198,
11748,
1822,
29572,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
33272,
9608,
355,
33272,
198,
11748,
629,
541,
88,
13,
952,
355,
264,
952,
198,
6738,
2603,
29487,
8019,
1330,
... | 2.219891 | 1,287 |
import environs

# Load environment variables (including any .env file in the project root).
env = environs.Env()
env.read_env()

# Debug flag for the application; defaults to False when APP_DEBUG is unset.
APP_DEBUG = env.bool('APP_DEBUG', False)
| [
11748,
17365,
343,
684,
198,
198,
24330,
796,
17365,
343,
684,
13,
4834,
85,
3419,
198,
24330,
13,
961,
62,
24330,
3419,
198,
198,
24805,
62,
30531,
796,
17365,
13,
30388,
10786,
24805,
62,
30531,
3256,
10352,
8,
198
] | 2.435897 | 39 |
# Todo functions
# Characters intended to be stripped from words (consumed by cleantext,
# which is expected to be defined elsewhere).
punctuation = ",./?'|}{[]=+-_;:<>!@#$%^&*()`~"

# Fix: use context managers so the files are closed even if reading fails
# (previously the handles stayed open through processing and a failure
# before the explicit close() calls leaked them).
with open("FDText.txt") as fd:
    fdText = fd.read()
with open("huckfinn.txt") as hf:
    hfText = hf.read()

fdArray = fdText.split()
hfArray = hfText.split()

# NOTE(review): cleantext() is not defined in this file -- it must be
# provided elsewhere before this script can run.
fdArray2 = cleantext(fdArray)
hfArray2 = cleantext(hfArray)

print(hfArray2)
| [
198,
2,
51,
24313,
5499,
628,
628,
198,
79,
16260,
2288,
796,
33172,
19571,
8348,
91,
18477,
21737,
28,
10,
12,
62,
26,
25,
27,
29,
0,
31,
29953,
4,
61,
5,
9,
3419,
63,
93,
1,
198,
16344,
796,
1280,
7203,
26009,
8206,
13,
1411... | 2.039735 | 151 |
from random import randint
from random import choice
from random import uniform
from collections import namedtuple
from typing import List, Dict
# Configuration record for the name generator:
#   vowels / consonants: candidate letter pools
#   max_initial_consonants / max_vowels / max_final_consonants: per-syllable
#       cluster size limits
#   max_syllables: upper bound on syllables per generated word
#   consonant_frequencies: relative weights for consonant selection
_Options = namedtuple("Options",
                      ["vowels",
                       "consonants",
                       "max_initial_consonants",
                       "max_vowels",
                       "max_final_consonants",
                       "max_syllables",
                       "consonant_frequencies"])

# Superseded by the namedtuple above; kept for reference.
# class Options:
#     def __init__(self, vowels, consonants, max_initial_consonants, max_vowels,
#                  max_final_consonants, max_syllables, consonant_frequencies):
#         self.vowels = vowels
#         self.consonants = consonants
#         self.max_initial_consonants = max_initial_consonants
#         self.max_vowels = max_vowels
#         self.max_final_consonants = max_final_consonants
#         self.max_syllables = max_syllables
#         self.consonant_frequencies = consonant_frequencies

# Sentinel markers for word start/end used by the generator.
START = '~'
END = '$'
| [
6738,
4738,
1330,
43720,
600,
198,
6738,
4738,
1330,
3572,
198,
6738,
4738,
1330,
8187,
198,
6738,
17268,
1330,
3706,
83,
29291,
198,
6738,
19720,
1330,
7343,
11,
360,
713,
628,
198,
62,
29046,
796,
3706,
83,
29291,
7203,
29046,
1600,
... | 2 | 508 |
import os
import sys
sys.path.append(os.path.dirname(__file__))
from libs.AbstractLanguageDetectionModel import AbstractLanguageDetectionModel
from libs.langfromchars.mobi.LangFromChars import LangFromChars
| [
11748,
28686,
198,
11748,
25064,
198,
17597,
13,
6978,
13,
33295,
7,
418,
13,
6978,
13,
15908,
3672,
7,
834,
7753,
834,
4008,
198,
198,
6738,
9195,
82,
13,
23839,
32065,
11242,
3213,
17633,
1330,
27741,
32065,
11242,
3213,
17633,
198,
... | 3.296875 | 64 |
# coding: utf-8
#
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for core.domain.beam_job_services."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import datetime
import itertools
import json
import subprocess
from core.domain import beam_job_services
from core.platform import models
from core.tests import test_utils
from jobs import registry as jobs_registry
import python_utils
(beam_job_models,) = models.Registry.import_models([models.NAMES.beam_job])
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
2,
198,
2,
15069,
33448,
383,
9385,
544,
46665,
13,
1439,
6923,
33876,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
74... | 3.59375 | 320 |
# Motion-sensor watcher: polls a GPIO input line and drives a remote OSC
# target (e.g. a recorder) -- starts playback/recording on motion, stops and
# saves after 30 minutes without motion.
import sys
import gpiod
import time
import liblo

with gpiod.Chip('gpiochip0') as chip:
    if (len(sys.argv) > 1):
        # Use arg specified line
        line = chip.get_line(int(sys.argv[1]))
    else:
        # TXD1 (GPIO14)
        line = chip.get_line(14)
    # Request the line as a plain input, attributed to this script.
    line.request(consumer=sys.argv[0], type=gpiod.LINE_REQ_DIR_IN)
    recording = False
    try:
        if (len(sys.argv) > 2):
            # Send all messages to arg specified target
            target = liblo.Address(sys.argv[2])
        else:
            # Send all messages to port 8000 on localhost
            target = liblo.Address(8000)
        # Poll the sensor roughly every 100 ms.
        while True:
            val = line.get_value()
            if (val):
                # Could be expensive
                motionTimestamp = time.time()
                if (not recording):
                    # Send message "/record"
                    liblo.send(target, "/record")
                    # Send message "/play" with int argument
                    liblo.send(target, "/play", 1)
                    recording = True
                    print("play")
            else:
                # Send only if no motion was detected for 30 minutes
                if (recording and time.time() - motionTimestamp > 1800):
                    # Send message "/play" with int argument
                    liblo.send(target, "/play", 0)
                    # Send messages to save the project
                    time.sleep(1)
                    liblo.send(target, "/project/save")
                    recording = False;
                    print("stop")
            time.sleep(0.1)
    except liblo.AddressError as err:
        print(err)
    except KeyboardInterrupt:
        # Conventional exit code for SIGINT.
        sys.exit(130)
| [
11748,
25064,
198,
11748,
27809,
2101,
198,
11748,
640,
198,
11748,
9195,
5439,
198,
198,
4480,
27809,
2101,
13,
49985,
10786,
31197,
952,
35902,
15,
11537,
355,
11594,
25,
198,
220,
220,
220,
611,
357,
11925,
7,
17597,
13,
853,
85,
8... | 1.90133 | 902 |
# from django.test import TestCase
import unittest
import requests, os, json, sys, logging
from django.http import JsonResponse
from nyokaserver.nyokaServerClass import NyokaServer
| [
2,
422,
42625,
14208,
13,
9288,
1330,
6208,
20448,
198,
11748,
555,
715,
395,
198,
11748,
7007,
11,
28686,
11,
33918,
11,
25064,
11,
18931,
198,
6738,
42625,
14208,
13,
4023,
1330,
449,
1559,
31077,
198,
6738,
299,
88,
482,
6005,
332,... | 3.480769 | 52 |
#!/usr/bin/python
import urllib
import urllib2
from collections import OrderedDict as OrderedDict
import json
import base64
import logging
import os
# own modules
from datalogger import Timeseries as Timeseries
from datalogger import TimeseriesArray as TimeseriesArray
from datalogger import TimeseriesArrayStats as TimeseriesArrayStats
from datalogger import TimeseriesStats as TimeseriesStats
from datalogger import QuantileArray as QuantileArray
class DataLoggerWeb(object):
"""
class wot work with DataLogger Web Application
"""
def __init__(self, datalogger_url=None):
    """
    parameters:
    datalogger_url <str> baseURL to use for every call; when None the URL
        is read from /etc/datalogger/datalogger.conf
    """
    if datalogger_url is not None:
        self.__datalogger_url = datalogger_url
    else:
        logging.debug("reading datalogger_url from config file")
        conffile = "/etc/datalogger/datalogger.conf"
        if os.path.isfile(conffile):
            # NOTE(review): the file handle is never closed, and "rb" mode
            # combined with str splitting only works on Python 2 -- confirm.
            for row in open(conffile, "rb").read().split("\n"):
                if len(row) > 0 and row[0] != "#":
                    key, value = row.split("=")
                    self.__dict__[key.strip()] = value.strip()
                    logging.debug("%s = %s", key.strip(), self.__dict__[key.strip()])
                    # NOTE(review): the URL is re-assigned for every parsed
                    # key, so the LAST key=value line wins regardless of its
                    # name -- presumably the file holds only datalogger_url;
                    # verify.
                    self.__datalogger_url = self.__dict__[key.strip()]
            logging.info(self.__dict__)
    assert self.__datalogger_url is not None
def __get_url(self, method, uri_params, query_params):
"""
creates url to call with urllib
parameters:
uri_parameters <dict> are appended in URL
query_parameters <dict> are appended after ? in GET Requests
returns:
<str> URL to call with urllib
"""
url = "/".join((self.__datalogger_url, method))
url = "/".join((url, "/".join((value for key, value in uri_params.items()))))
if len(query_params.keys()) > 0:
url = "?".join((url, "&".join(("%s=%s" % (key, value) for key, value in query_params.items()))))
logging.debug("created url: %s", url)
return url
def __get_json(self, method, uri_params, query_params):
    """
    call url and parse the returned data with json.loads

    parameters:
    method <str> Web Application Function to call
    uri_params <dict>
    query_params <dict>

    returns:
    <object> returned from json.loads(returned data from urllib),
    or None if the payload could not be decoded
    """
    raw = self.__get_raw_data(method, uri_params, query_params)
    try:
        ret = json.loads(raw)
        #logging.debug("JSON Output:\n%s", ret)
        return ret
    except ValueError as exc:
        # NOTE(review): decode failures are logged and the method falls
        # through, implicitly returning None -- callers must handle None.
        logging.exception(exc)
        logging.error("JSON decode error, raw output: %s", raw)
def __get_raw_data(self, method, uri_params, query_params):
    """
    call url and return the raw data received

    parameters:
    method <str> Web Application Function to call
    uri_params <dict>
    query_params <dict>

    returns:
    <str> body returned from urllib

    raises:
    whatever urllib2.urlopen raises (logged before re-raising)
    """
    url = self.__get_url(method, uri_params, query_params)
    try:
        res = urllib2.urlopen(url)
        logging.debug("got Status code : %s", res.code)
        raw = res.read()
        #logging.debug("Raw Output: %s", raw)
        return raw
    except Exception as exc:
        # Log with traceback, then propagate the original error.
        logging.exception(exc)
        logging.error("Error occured calling %s", url)
        raise exc
def __get_json_chunked(self, method, uri_params, query_params):
    """
    call url, read the (possibly chunked) response body and JSON-decode it

    parameters:
    method <str> Web Application Function to call
    uri_params <dict>
    query_params <dict>

    returns:
    <object> decoded from the accumulated JSON response
    """
    url = self.__get_url(method, uri_params, query_params)
    try:
        res = urllib2.urlopen(url)
        logging.debug("got Status code : %s", res.code)
        data = ""
        raw = res.read()
        while raw:
            try:
                data += raw
                raw = res.read()
            except ValueError as exc:
                # Some chunked responses terminate with a ValueError from
                # the underlying http library; decode what was received.
                logging.exception(exc)
                return json.loads(data)
        # BUG FIX: a normal end of stream previously fell out of the loop
        # and returned None -- the payload was only decoded on the
        # ValueError path above. Decode it here as well.
        return json.loads(data)
    except Exception as exc:
        logging.exception(exc)
        logging.error("Error occured calling %s", url)
        raise exc
def get_projects(self):
    """
    get list of available projects

    returns:
    <list>
    """
    return self.__get_json("get_projects", {}, {})
def get_tablenames(self, project):
    """
    get list of tablenames of this project

    parameters:
    project <str>

    returns:
    <list>
    """
    return self.__get_json("get_tablenames", {"project": project}, {})
def get_wikiname(self, project, tablename):
    """
    get wikiname for given project tablename

    parameters:
    project <str>
    tablename <str>

    returns:
    <str>
    """
    uri_params = {"project": project, "tablename": tablename}
    return self.__get_json("get_wikiname", uri_params, {})
def get_headers(self, project, tablename):
    """
    get headers of raw data of this particular project/tablename combination

    parameters:
    project <str>
    tablename <str>

    returns:
    <list> of headers
    """
    uri_params = {"project": project, "tablename": tablename}
    return self.__get_json("get_headers", uri_params, {})
def get_index_keynames(self, project, tablename):
    """
    get index_keynames of this particular project/tablename combination

    parameters:
    project <str>
    tablename <str>

    returns:
    <list> of index_keynames
    """
    uri_params = {"project": project, "tablename": tablename}
    return self.__get_json("get_index_keynames", uri_params, {})
def get_value_keynames(self, project, tablename):
    """
    get value_keynames of this particular project/tablename combination

    parameters:
    project <str>
    tablename <str>

    returns:
    <list> of value_keynames
    """
    uri_params = {"project": project, "tablename": tablename}
    return self.__get_json("get_value_keynames", uri_params, {})
def get_ts_keyname(self, project, tablename):
    """
    get ts_keyname of this particular project/tablename combination

    parameters:
    project <str>
    tablename <str>

    returns:
    <str> ts_keyname used
    """
    uri_params = {"project": project, "tablename": tablename}
    return self.__get_json("get_ts_keyname", uri_params, {})
def get_last_business_day_datestring(self):
    """
    get last business day datestring from WebApplication

    returns:
    <str> datestring like 2015-12-31
    """
    return self.__get_json("get_last_business_day_datestring", {}, {})
def get_datewalk(self, datestring1, datestring2):
    """
    get a list of datestrings between the two given dates

    parameters:
    datestring1 <str> like 2015-12-01
    datestring2 <str> like 2015-12-31
    """
    # Order matters in the URL path, so use an OrderedDict.
    uri_params = OrderedDict([("datestring1", datestring1),
                              ("datestring2", datestring2)])
    return self.__get_json("get_datewalk", uri_params, {})
def get_caches(self, project, tablename, datestring):
    """
    get available caches of this particular project/tablename/datestring

    parameters:
    project <str>
    tablename <str>
    datestring <str>

    returns:
    <dict> of caches available
    """
    uri_params = {
        "project": project,
        "tablename": tablename,
        "datestring": datestring,
    }
    return self.__get_json("get_caches", uri_params, {})
def get_tsa(self, project, tablename, datestring):
    """
    get TimeseriesArray object for this particular project/tablename/datestring combination

    parameters:
    project <str>
    tablename <str>
    datestring <str>

    returns:
    <TimeseriesArray>
    """
    # Fetch schema metadata first, then stream the rows into a new array.
    index_keynames = self.get_index_keynames(project, tablename)
    value_keynames = self.get_value_keynames(project, tablename)
    ts_keyname = self.get_ts_keyname(project, tablename)
    tsa = TimeseriesArray(index_keynames, value_keynames, ts_keyname)
    uri_params = {
        "project" : project,
        "tablename" : tablename,
        "datestring" : datestring,
    }
    query_params = {}
    data = self.__get_json_chunked("get_tsa", uri_params, query_params)
    for row in data:
        tsa.add(row)
    return tsa
def get_tsa_adv(self, project, tablename, datestring, groupkeys, group_func_name, index_pattern):
"""
get TimeseriesArray object for this particular project/tablename/datestring combination
parameters:
project <str>
tablename <str>
datestring <str>
groupkeys <tuple>
group_func_name <str>
index_pattern <str>
returns:
<TimeseriesArray>
"""
value_keynames = self.get_value_keynames(project, tablename)
ts_keyname = self.get_ts_keyname(project, tablename)
tsa = None
if groupkeys is None:
index_keynames = self.get_index_keynames(project, tablename)
tsa = TimeseriesArray(index_keynames, value_keynames, ts_keyname)
else:
tsa = TimeseriesArray(groupkeys, value_keynames, ts_keyname)
uri_params = OrderedDict()
uri_params["project"] = project
uri_params["tablename"] = tablename
uri_params["datestring"] = datestring
uri_params["groupkey_enc"] = base64.b64encode(unicode(groupkeys))
uri_params["group_func_name"] = group_func_name
uri_params["index_pattern"] = base64.b64encode(unicode(index_pattern))
query_params = {}
data = self.__get_json_chunked("get_tsa_adv", uri_params, query_params)
for row in data:
tsa.add(row)
return tsa
def get_ts(self, project, tablename, datestring, key):
"""
get Timeseries object for this particular project/tablename/datestring/key combination
parameters:
project <str>
tablename <str>
datestring <str>
key <tuple> key to identify particular Timeseries
returns:
<TimeseriesArray>
"""
index_keynames = self.get_index_keynames(project, tablename)
value_keynames = self.get_value_keynames(project, tablename)
ts_keyname = self.get_ts_keyname(project, tablename)
tsa = TimeseriesArray(index_keynames, value_keynames, ts_keyname)
uri_params = {
"project" : project,
"tablename" : tablename,
"datestring" : datestring,
"key" : base64.b64encode(unicode(key)),
}
query_params = {}
data = self.__get_json_chunked("get_ts", uri_params, query_params)
for row in data:
tsa.add(row)
return tsa
def get_tsastats(self, project, tablename, datestring):
"""
get TimeseriesStatsArray object for this particular project/tablename/datestring combination
parameters:
project <str>
tablename <str>
datestring <str>
returns:
<TimeseriesStatsArray>
"""
uri_params = {
"project" : project,
"tablename" : tablename,
"datestring" : datestring,
}
query_params = {}
data = self.__get_raw_data("get_tsastats", uri_params, query_params)
tsastats = TimeseriesArrayStats.from_json(data)
return tsastats
def get_stat_func_names(self):
"""
get statistical functions defined in TimeseriesArrayStats
returns:
<list>
"""
uri_params = {}
query_params = {}
data = self.__get_json("get_stat_func_names", uri_params, query_params)
return data
def get_quantile(self, project, tablename, datestring):
"""
get QuantileArray object for this particular project/tablename/datestring combination
parameters:
project <str>
tablename <str>
datestring <str>
returns:
<QuantileArray>
"""
uri_params = {
"project" : project,
"tablename" : tablename,
"datestring" : datestring,
}
query_params = {}
data = self.__get_raw_data("get_quantile", uri_params, query_params)
quantile = QuantileArray.from_json(data)
return quantile
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
198,
11748,
2956,
297,
571,
198,
11748,
2956,
297,
571,
17,
198,
6738,
17268,
1330,
14230,
1068,
35,
713,
355,
14230,
1068,
35,
713,
198,
11748,
33918,
198,
11748,
2779,
2414,
198,
11748,
189... | 2.099271 | 6,588 |
# String case methods demo: lower, upper, title, capitalize, isupper,
# islower, istitle, swapcase.  Strings are immutable: every method
# returns a NEW string and never modifies the original.
g = "HI"
"HI".lower() # first way: call the method on a literal (result discarded)
g.lower() # second way: call it on a variable (result discarded too)
print(g.lower()) # "hi"
print(g) # still "HI" -- lower() did not change g
g = g.lower() # rebind g to keep the lowercased result
print(g) # "hi"
# ######################################################## #
g = "Python"
print(g.lower()) # "Python" >> "python"
print(g.upper()) # "Python" >> "PYTHON"
txt = "Welcome to my world" # NOTE(review): txt is defined but never used below
print(g.title()) # title-cases each word, e.g. "welcome to my world" >> "Welcome To My World"
print(g.capitalize()) # first char upper, rest lower, e.g. "welcome to my world" >> "Welcome to my world"
g = "python"
print(g.islower()) # "python" >>> True (all cased characters are lowercase)
g = "pythoN"
print(g.islower()) # "pythoN" >>> False
# isupper checks for Upper Case:
# "H" >>> True
# "h" >>> False
# istitle:
# "Hello World!" >>> True
# "Hello world!" >>> False
g = "pYTHON"
print(g) # "pYTHON"
print(g.swapcase()) # flips the case of every character: "Python"
2,
2793,
11,
6727,
11,
3670,
11,
35160,
11,
318,
45828,
11,
318,
21037,
198,
2,
318,
7839,
11,
16075,
7442,
198,
70,
796,
366,
25374,
1,
198,
198,
1,
25374,
1911,
21037,
3419,
1303,
717,
71,
2446,
198,
70,
13,
21037,
3419,
220,
... | 2.498498 | 333 |
"""Variables prefixed with `DEFAULT` should be able to be overridden by
configuration file and command-line arguments."""
UNIT = 100000000 # The same across assets.
# Versions
VERSION_MAJOR = 9
VERSION_MINOR = 59
VERSION_REVISION = 4
VERSION_STRING = str(VERSION_MAJOR) + '.' + str(VERSION_MINOR) + '.' + str(VERSION_REVISION)
VERSION_FILE = 'https://counterpartyxcp.github.io/counterparty-lib/counterpartylib/protocol_changes.json'
# Counterparty protocol
TXTYPE_FORMAT = '>I'
TWO_WEEKS = 2 * 7 * 24 * 3600
MAX_EXPIRATION = 4 * 2016 # Two months
MEMPOOL_BLOCK_HASH = 'mempool'
MEMPOOL_BLOCK_INDEX = 9999999
# SQLite3
MAX_INT = 2**63 - 1
# Bitcoin Core
OP_RETURN_MAX_SIZE = 40 # bytes
# Currency agnosticism
BTC = 'DOGE'
XCP = 'XDP'
BTC_NAME = 'Dogecoin'
XCP_NAME = 'Dogeparty-coinwarp'
APP_NAME = XCP_NAME.lower()
DEFAULT_RPC_PORT_TESTNET = 15000
DEFAULT_RPC_PORT = 5000
DEFAULT_BACKEND_RPC_PORT_TESTNET = 44555
DEFAULT_BACKEND_RPC_PORT = 22555
DEFAULT_BACKEND_PORT_TESTNET = 44555
DEFAULT_BACKEND_PORT = 22555
UNSPENDABLE_TESTNET = 'ndogepartyxxxxxxxxxxxxxxxxxxwpsZCH'
UNSPENDABLE_MAINNET = 'DDogepartyxxxxxxxxxxxxxxxxxxw1dfzr'
ADDRESSVERSION_TESTNET = b'q'
PRIVATEKEY_VERSION_TESTNET =b'\xf1'
ADDRESSVERSION_MAINNET = b'\x1e'
PRIVATEKEY_VERSION_MAINNET = b'\x9e'
MAGIC_BYTES_TESTNET = b'\xfc\xc1\xb7\xdc' # For bip-0010
MAGIC_BYTES_MAINNET = b'\xc0\xc0\xc0\xc0' # For bip-0010
BLOCK_FIRST_TESTNET_TESTCOIN = 124678
BURN_START_TESTNET_TESTCOIN = BLOCK_FIRST_TESTNET_TESTCOIN
BURN_END_TESTNET_TESTCOIN = 26280000 # Fifty years, at 1 minute per block.
BLOCK_FIRST_TESTNET = BLOCK_FIRST_TESTNET_TESTCOIN
#BLOCK_FIRST_TESTNET_HASH = ''
BURN_START_TESTNET = BURN_START_TESTNET_TESTCOIN
BURN_END_TESTNET = 26280000 # Fifty years, at 1 minute per block.
BLOCK_FIRST_MAINNET_TESTCOIN = 335642
BURN_START_MAINNET_TESTCOIN = BLOCK_FIRST_MAINNET_TESTCOIN
BURN_END_MAINNET_TESTCOIN = 26280000 # Fifty years, at 1 minute per block.
BLOCK_FIRST_MAINNET = BLOCK_FIRST_MAINNET_TESTCOIN
BLOCK_FIRST_MAINNET_HASH = 'd7d6f6bdd0c422defacf8544e6d4c96f8cbe249772872ec76de830dd99958f49'
BURN_START_MAINNET = BURN_START_MAINNET_TESTCOIN
BURN_END_MAINNET = BURN_START_MAINNET + 30*24*60 # 30 days burn period with 1 min target time per block.
# Protocol defaults
# NOTE: If the DUST_SIZE constants are changed, they MUST also be changed in counterblockd/dogepartylib/config.py as well
DEFAULT_REGULAR_DUST_SIZE = UNIT # 1 DOGE; there is not dust limit in Dogecoin, but every txout < 1 DOGE, cost 1 DOGE in fee
DEFAULT_MULTISIG_DUST_SIZE = UNIT * 2 # 2 DOGE.
DEFAULT_OP_RETURN_VALUE = 0 # 0 DOGE.
DEFAULT_FEE_PER_KB = UNIT # 1 DOGE.
# UI defaults
DEFAULT_FEE_FRACTION_REQUIRED = .009 # 0.90%
DEFAULT_FEE_FRACTION_PROVIDED = .01 # 1.00%
DEFAULT_REQUESTS_TIMEOUT = 20
DEFAULT_RPC_BATCH_SIZE = 20
# Custom exit codes
EXITCODE_UPDATE_REQUIRED = 5
DEFAULT_CHECK_ASSET_CONSERVATION = True
NAMED_ASSET_MAXLEN = 14
BACKEND_RAW_TRANSACTIONS_CACHE_SIZE = 20000
BACKEND_RPC_BATCH_NUM_WORKERS = 6
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| [
37811,
23907,
2977,
7694,
2966,
351,
4600,
7206,
38865,
63,
815,
307,
1498,
284,
307,
23170,
4651,
416,
198,
11250,
3924,
2393,
290,
3141,
12,
1370,
7159,
526,
15931,
198,
198,
4944,
2043,
796,
1802,
10535,
220,
220,
220,
220,
220,
22... | 2.260174 | 1,376 |
from .models import Profile, Project, Vote
from django.forms import ModelForm, widgets
from django import forms | [
6738,
764,
27530,
1330,
13118,
11,
4935,
11,
19175,
198,
6738,
42625,
14208,
13,
23914,
1330,
9104,
8479,
11,
40803,
198,
6738,
42625,
14208,
1330,
5107
] | 4.269231 | 26 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###################################################################
# Author: Mu yanru
# Date : 2019.2
# Email : muyanru345@163.com
###################################################################
import random
from dayu_widgets3.combo_box import MComboBox
from dayu_widgets3.divider import MDivider
from dayu_widgets3.field_mixin import MFieldMixin
from dayu_widgets3.label import MLabel
from dayu_widgets3.menu import MMenu
from dayu_widgets3 import dayu_theme
from dayu_widgets3.qt import *
if __name__ == '__main__':
    # Demo entry point: build a Qt application, apply the dayu theme to the
    # example widget, and run the event loop until the window closes.
    import sys
    app = QApplication(sys.argv)
    # NOTE(review): ComboBoxExample is not defined or imported in this chunk;
    # presumably it is defined earlier in this file -- verify.
    test = ComboBoxExample()
    from dayu_widgets3 import dayu_theme
    dayu_theme.apply(test)
    test.show()
    sys.exit(app.exec_())
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
29113,
29113,
21017,
198,
2,
6434,
25,
8252,
331,
272,
622,
198,
2,
7536,
220,
1058,
13130,
13,
17,
198,
2,
9570,
1... | 2.949807 | 259 |
import librosa
import numpy as np
import scipy.signal
import torch
import textgrid
import pickle
import os
import torch
import torch.nn.functional as F
import s3prl.hub as hub
import w2v2_model_jeff
# Window functions selectable by name (used for spectrogram computation).
# NOTE(review): scipy.signal.hamming/hann/blackman/bartlett were removed in
# recent SciPy releases in favor of scipy.signal.windows.* -- verify the
# pinned SciPy version.
WINDOWS = {'hamming': scipy.signal.hamming,
           'hann': scipy.signal.hann,
           'blackman': scipy.signal.blackman,
           'bartlett': scipy.signal.bartlett}
if __name__ == '__main__':
    # Demo driver for the feature-extraction helpers (compute_spectrogram,
    # read_textgrid, slice_feature, hubert_feature_extraction,
    # setup_vg_hubert, vghubert_feature_extraction), which are defined
    # elsewhere in this file (not visible in this chunk).
    wav_file = 'data/SpokenCOCO/wavs-speaker/m1vjq8cayvs6c9/m1vjq8cayvs6c9-32KTQ2V7RDFP25PU1AP8KXEZU8I9MO_92648_385961.wav'
    grid_file = 'data/SpokenCOCO/wavs-speaker-aligned/m1vjq8cayvs6c9/m1vjq8cayvs6c9-32KTQ2V7RDFP25PU1AP8KXEZU8I9MO_92648_385961.TextGrid'
    text_file = 'data/SpokenCOCO/wavs-speaker/m1vjq8cayvs6c9/m1vjq8cayvs6c9-32KTQ2V7RDFP25PU1AP8KXEZU8I9MO_92648_385961.txt'
    # example extracting segment-averaged logmelspec
    frame_stride=0.01
    logspec, nframes = compute_spectrogram(wav_file) # (40, 530)
    word_list, word_string = read_textgrid(grid_file, text_file, nframes, frame_stride=frame_stride)
    sentence_segment_spec, num_of_words, _ = slice_feature(logspec, word_list, \
                                                           target_sent_padded_length=50, sent_level_padding=True, \
                                                           target_segment_padded_length=None, return_whole=False) # (50, 40)
    print(logspec.shape, nframes, sentence_segment_spec.shape, num_of_words) # (40, 530) 530 (50, 40) 12
    print('************************************')
    # example extracting whole-segment logmelspec
    frame_stride=0.01
    logspec, nframes = compute_spectrogram(wav_file) # (40, 530)
    word_list, word_string = read_textgrid(grid_file, text_file, nframes, frame_stride=frame_stride)
    sentence_segment_spec, num_of_words, segment_len_list = slice_feature(logspec, word_list, \
                                                           target_sent_padded_length=50, sent_level_padding=True, \
                                                           target_segment_padded_length=int(7.90/frame_stride), return_whole=True) # (50, 790, 40)
    print(logspec.shape, nframes, sentence_segment_spec.shape, num_of_words) # (40, 530) 530 (50, 790, 40) 12
    print(word_list, word_string)
    print('************************************')
    # example extracting whole-segment hubert
    # setup upstream model first (hubert operates at 50 Hz, hence 0.02s stride)
    frame_stride=0.02
    HUBERT = getattr(hub, 'hubert_base')()
    # load pre-trained model
    if torch.cuda.is_available():
        device = 'cuda'
    else: device = 'cpu'
    upstream_model = HUBERT.to(device)
    upstream_model = upstream_model.eval() # important -- this disables layerdrop of w2v2/hubert
    # upstream model feature extraction
    hubert_repre, nframes = hubert_feature_extraction(wav_file, upstream_model, layer=12, device=device) # (768, 264)
    word_list, word_string = read_textgrid(grid_file, text_file, nframes, frame_stride=frame_stride)
    sentence_segment_spec, num_of_words, segment_len_list = slice_feature(hubert_repre, word_list, \
                                                           target_sent_padded_length=50, sent_level_padding=True, \
                                                           target_segment_padded_length=int(7.90/frame_stride), return_whole=True) # (50, 395, 768)
    print(hubert_repre.shape, nframes, sentence_segment_spec.shape, num_of_words, segment_len_list) # (768, 264) 264 (50, 395, 768) 12 [8, 19, 22, 8, 12, 8, 30, 19, 10, 24, 11, 46]
    print(word_list, word_string) # [('a', 35.0, 43.0), ('town', 43.0, 61.5), ('square', 61.5, 84.5), ('is', 84.5, 92.0), ('full', 92.0, 104.49999999999999), ('of', 104.49999999999999, 112.00000000000001), ('people', 112.00000000000001, 142.5), ('riding', 149.0, 167.5), ('their', 167.5, 178.0), ('bikes', 178.0, 201.5), ('and', 201.5, 213.49999999999997), ('skateboarding', 213.49999999999997, 259.0)] a town square is full of people riding their bikes and skateboarding
    print('************************************')
    # example extracting whole-segment VG-hubert
    upstream_model = setup_vg_hubert(model_type='disc-81', snapshot='best', device=device)
    vghubert_repre, vghubert_nframes = vghubert_feature_extraction(wav_file, upstream_model,
                                                                   layer=11, device=device)
    print(vghubert_repre.shape, nframes) # (768, 264) 264
    print('************************************')
    # example concatenating the extracted hubert and vghubert representations
    # (concatenation is along the feature axis, giving 768+768=1536 dims)
    concat_hubert_and_vghubert_repre = np.concatenate((hubert_repre, vghubert_repre), axis=0)
    print(concat_hubert_and_vghubert_repre.shape) # (1536, 264)
    print('************************************')
    # example extracting seg_feats VG-hubert (w/ [CLS] attention weighting)
    wav_file = 'wavs/train/392/m3h8xuyggvigfx-3UOUJI6MTDEYG9C1DSM8RAHBHXQUX1_455974_141516.wav'
    wav_file_fixed = 'data/SpokenCOCO/wavs-speaker/m3h8xuyggvigfx/m3h8xuyggvigfx-3UOUJI6MTDEYG9C1DSM8RAHBHXQUX1_455974_141516.wav'
    with open('data/SpokenCOCO/Jason_word_discovery/mbr_104_1030_top10/test_data_dict.pkl', 'rb') as f:
        data = pickle.load(f)
    attn_boundaries = data[wav_file]['boundaries'] # load attention segment boundaries for [CLS]-weighted mean-pool
    spf = data[wav_file]['spf']
    upstream_model = setup_vg_hubert(model_type='disc-81', snapshot='15', device=device) # load disc-81, snapshot 15
    vghubert_repre, vghubert_nframes = vghubert_feature_extraction(wav_file_fixed,
                                                                   upstream_model,
                                                                   layer=11,
                                                                   device=device,
                                                                   cls_mean_pool=True,
                                                                   spf=spf,
                                                                   boundaries=attn_boundaries)
    print(vghubert_repre.shape, nframes) # (768, 13) 264
    print('************************************')
| [
11748,
9195,
4951,
64,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
629,
541,
88,
13,
12683,
282,
198,
11748,
28034,
198,
11748,
2420,
25928,
198,
11748,
2298,
293,
198,
11748,
28686,
198,
198,
11748,
28034,
220,
198,
11748,
28034,
1... | 2.037098 | 3,046 |
from pytl866.context import Tl866Context
from pytl866.driver import DriverError, \
VPP_98, VPP_126, VPP_140, VPP_166, VPP_144, VPP_171, VPP_185, VPP_212, \
VDD_30, VDD_35, VDD_46, VDD_51, VDD_43, VDD_48, VDD_60, VDD_65
| [
6738,
12972,
28781,
42240,
13,
22866,
1330,
309,
75,
42240,
21947,
198,
6738,
12972,
28781,
42240,
13,
26230,
1330,
12434,
12331,
11,
3467,
198,
220,
220,
220,
569,
10246,
62,
4089,
11,
569,
10246,
62,
19420,
11,
569,
10246,
62,
15187,
... | 2.026786 | 112 |
#!/usr/bin/env python3
# coding: utf-8
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""operator dsl function: strided_slice"""
import copy
import numpy as np
import akg.topi
import akg.tvm
from akg.utils import validation_check as vc_util
def check_args(begin, end, strides, begin_mask, end_mask, ellipsis_mask, new_axis_mask, shrink_axis_mask):
    """Validate strided_slice arguments, raising on any inconsistency.

    Args:
        begin (list): start indexes, one entry per (possibly masked) dim.
        end (list): end indexes; must have the same length as begin.
        strides (list or None): step per dim; every entry must be non-zero.
        begin_mask (int): per-dim bitmask; must fit in len(begin) bits.
        end_mask (int): per-dim bitmask; must fit in len(begin) bits.
        ellipsis_mask (int): per-dim bitmask; at most one bit may be set.
        new_axis_mask (int): per-dim bitmask; must fit in len(begin) bits.
        shrink_axis_mask (int): per-dim bitmask; must fit in len(begin) bits.

    Raises:
        Exception: if lengths differ, a stride is zero, or a mask is out of range.
    """
    if len(begin) != len(end):
        raise Exception("len(begin) is {}, len(end) is {}. They must be identical!".format(len(begin), len(end)))
    if strides is not None:
        if len(begin) != len(strides):
            raise Exception("len(begin) is {}, len(strides) is {}. They must be identical!".
                            format(len(begin), len(strides)))
        for i, s in enumerate(strides):
            if s == 0:
                # Report the offending index (the original formatted the whole
                # list into a placeholder that reads as an index).
                raise Exception("Value in strides[{}] must not be 0!".format(i))
    # Every mask is a bitmask over len(begin) dimensions.
    limit = 2 ** len(begin)
    if begin_mask < 0 or begin_mask >= limit:
        raise Exception("Illegal begin_mask[{}]".format(begin_mask))
    if end_mask < 0 or end_mask >= limit:
        raise Exception("Illegal end_mask[{}]".format(end_mask))
    if ellipsis_mask < 0 or ellipsis_mask >= limit:
        raise Exception("Illegal ellipsis_mask[{}]".format(ellipsis_mask))
    if ellipsis_mask != 0:  # ellipsis_mask must be a power of two (only one ellipsis)
        if ellipsis_mask & (ellipsis_mask - 1) != 0:
            raise Exception("ellipsis_mask[{}] is not power of two (only one ellipsis).".format(ellipsis_mask))
    if new_axis_mask < 0 or new_axis_mask >= limit:
        raise Exception("Illegal new_axis_mask[{}]".format(new_axis_mask))
    if shrink_axis_mask < 0 or shrink_axis_mask >= limit:
        raise Exception("Illegal shrink_axis_mask[{}]".format(shrink_axis_mask))
def args_to_slices(begin, end, strides, begin_mask, end_mask, ellipsis_mask, new_axis_mask, shrink_axis_mask):
    """Translate strided-slice arguments into a list of Python slice items.

    Each position yields one of: Ellipsis, np.newaxis, a bare int
    (shrunk axis), or a slice object (masked bounds become None).
    """
    check_args(begin, end, strides, begin_mask, end_mask, ellipsis_mask, new_axis_mask, shrink_axis_mask)

    def bit(mask, pos):
        # Test bit `pos` of `mask`.
        return (mask >> pos) & 1

    result = []
    for pos in range(len(begin)):
        if bit(ellipsis_mask, pos):
            item = Ellipsis
        elif bit(new_axis_mask, pos):
            item = np.newaxis
        elif bit(shrink_axis_mask, pos):
            item = begin[pos]
        else:
            item = slice(None if bit(begin_mask, pos) else begin[pos],
                         None if bit(end_mask, pos) else end[pos],
                         strides[pos])
        result.append(item)
    return result
def slices_to_args(slices=()):
    """Inverse of args_to_slices: derive (begin, end, strides, masks) from a
    tuple of slice items (slice objects, ints, Ellipsis, np.newaxis).

    Returns:
        tuple: (begin, end, strides, begin_mask, end_mask, ellipsis_mask,
        new_axis_mask, shrink_axis_mask).
    """
    begin, end, strides = [], [], []
    begin_mask = end_mask = 0
    ellipsis_mask = new_axis_mask = 0
    shrink_axis_mask = 0
    for pos, item in enumerate(slices):
        bit = 1 << pos
        if isinstance(item, slice):
            # Unset bounds are recorded as 0 with the corresponding mask bit set.
            if item.start is None:
                begin_mask |= bit
            if item.stop is None:
                end_mask |= bit
            begin.append(item.start if item.start is not None else 0)
            end.append(item.stop if item.stop is not None else 0)
            strides.append(item.step if item.step is not None else 1)
        elif item is np.newaxis:
            new_axis_mask |= bit
            begin.append(0)
            end.append(0)
            strides.append(1)
        elif item is Ellipsis:
            ellipsis_mask |= bit
            begin.append(0)
            end.append(0)
            strides.append(1)
        elif isinstance(item, int):
            # Bare integer index: slice of width one, then shrink that axis.
            shrink_axis_mask |= bit
            begin.append(item)
            end.append(item + 1)
            strides.append(1)
        else:
            raise Exception("arg ", item, ' is invalid')
    return begin, end, strides, begin_mask, end_mask, ellipsis_mask, new_axis_mask, shrink_axis_mask
def complete_args(inputs_shape, begin, end, strides, begin_mask,
                  end_mask, ellipsis_mask, new_axis_mask, shrink_axis_mask):
    """Expand masked/abbreviated strided-slice arguments into explicit ones.

    Resolves the (single) ellipsis, strips new-axis entries, and applies the
    begin/end masks, so that the returned begin/end/strides describe one
    concrete slice per dimension of inputs_shape.

    Args:
        inputs_shape (list): shape of the tensor being sliced.
        begin (list): raw start indexes (may be masked).
        end (list): raw end indexes (may be masked).
        strides (list): raw step sizes.
        begin_mask (int): per-dim bitmasks, as in tf.strided_slice.
        end_mask (int): see begin_mask.
        ellipsis_mask (int): see begin_mask; at most one bit set.
        new_axis_mask (int): see begin_mask.
        shrink_axis_mask (int): see begin_mask.

    Returns:
        tuple: (begin, end, strides, new_axis_index, shrink_list) where
        new_axis_index lists the positions at which length-1 axes are to be
        inserted and shrink_list flags the dimensions to be removed.
    """
    check_args(begin, end, strides, begin_mask, end_mask, ellipsis_mask, new_axis_mask, shrink_axis_mask)
    # step0: shallow-copy begin, end, strides so the caller's lists are not mutated
    begin = copy.copy(begin)
    end = copy.copy(end)
    strides = copy.copy(strides)
    # step1: expand each mask into a per-dimension bit list and count new axes
    # (the original re-ran check_args here on unchanged arguments; that
    # redundant second validation has been removed)
    begin_list = [(begin_mask >> dim) & 1 for dim in range(len(begin))]
    end_list = [(end_mask >> dim) & 1 for dim in range(len(begin))]
    ellipsis_list = [(ellipsis_mask >> dim) & 1 for dim in range(len(begin))]
    new_axis_list = [(new_axis_mask >> dim) & 1 for dim in range(len(begin))]
    new_axis_count = len([dim for dim in range(len(begin)) if (new_axis_mask >> dim) & 1])
    shrink_list = [(shrink_axis_mask >> dim) & 1 for dim in range(len(begin))]
    # step2: replace the ellipsis entry with one full-range entry per
    # dimension it stands for
    ellipsis_idx = None
    for idx, x in enumerate(ellipsis_list):
        if x:
            ellipsis_idx = idx
            break
    if ellipsis_idx is not None:
        # number of input dims covered by the ellipsis (new axes consume a
        # slice entry but no input dimension)
        ellipsis_length = len(inputs_shape) - (len(begin) - 1 - new_axis_count)
        idx = ellipsis_idx
        begin.pop(idx)
        end.pop(idx)
        strides.pop(idx)
        begin_list.pop(idx)
        end_list.pop(idx)
        ellipsis_list.pop(idx)
        new_axis_list.pop(idx)
        shrink_list.pop(idx)
        for _ in range(ellipsis_length):
            # full-range placeholder: begin/end unset (mask bits 1), stride 1
            begin.insert(idx, None)
            end.insert(idx, None)
            strides.insert(idx, 1)
            begin_list.insert(idx, 1)
            end_list.insert(idx, 1)
            ellipsis_list.insert(idx, 0)
            new_axis_list.insert(idx, 0)
            shrink_list.insert(idx, 0)
    # step3: remove new_axis entries using new_axis_list (iterate in reverse
    # so earlier indexes stay valid while popping)
    new_axis_index = [idx for idx, x in enumerate(new_axis_list) if x]
    for idx in new_axis_index[::-1]:
        begin.pop(idx)
        end.pop(idx)
        strides.pop(idx)
        begin_list.pop(idx)
        end_list.pop(idx)
        ellipsis_list.pop(idx)
        shrink_list.pop(idx)
        new_axis_list.pop(idx)
    # step4: update (begin, end, strides) using (shrink_list, begin_list, end_list)
    for dim, bgn in enumerate(begin):
        if shrink_list[dim]:
            # shrunk axis: keep exactly one element starting at bgn
            end[dim] = bgn + 1
            strides[dim] = 1
            continue
        if begin_list[dim]:
            begin[dim] = 0
        if end_list[dim]:
            end[dim] = inputs_shape[dim]
    return begin, end, strides, new_axis_index, shrink_list
@vc_util.check_input_type(akg.tvm.tensor.Tensor, ((list, tuple), int), ((list, tuple), int),
                          ((list, tuple), int), int, int, int, int, int)
def strided_slice(inputs, begin, end, strides,
                  begin_mask, end_mask, ellipsis_mask, new_axis_mask, shrink_axis_mask):
    """
    Generate an array by slicing input tensor

    Args:
        inputs (tvm.tensor.Tensor): Tensor of type float16, float32.
        begin (Union[list, tuple, int]): The start indexes for slicing.
        end (Union[list, tuple, int]): The end indexes for slicing.
        strides (Union[list, tuple, int]): The strides for slicing.
        begin_mask (int): int32 mask for begin indexes.
        end_mask (int): int32 mask for end indexes.
        ellipsis_mask (int): int32 mask for inserting unspecified dimensions.
        new_axis_mask (int): int32 mask for new dim with length 1.
        shrink_axis_mask (int): int32 mask for shrinking the dims.

    Returns:
        tvm.tensor.Tensor, with the same dtype as inputs.
    """
    shape = [x.value for x in inputs.shape]
    # step0~4: complete begin, end, strides
    begin, end, strides, new_axis_index, shrink_list = complete_args(shape, begin, end, strides,
                                                                    begin_mask, end_mask, ellipsis_mask,
                                                                    new_axis_mask, shrink_axis_mask)
    # step5: use topi to do strided_slice using begin, end, strides
    if (shape == [1] and begin == end):
        # degenerate case: identity copy of a length-1 tensor
        return akg.tvm.compute(shape, lambda *i: inputs(*i), name="out")
    if inputs.dtype == "uint8":
        # uint8 is handled by casting through int8 and back (step below
        # restores the dtype) -- presumably topi lacks a uint8 path; verify.
        inputs_cast = akg.topi.cast(inputs, "int8")
    else:
        inputs_cast = inputs
    out1 = akg.topi.strided_slice(inputs_cast, begin, end, strides)
    # step6: increase out_tensor's dim using new_axis_index
    new_shape = list(out1.shape)
    for idx in new_axis_index[::-1]:
        new_shape.insert(idx, 1)
    # step7: decrease out_tensor's dim using shrink_list
    # (shrink positions were computed before new axes were stripped, so the
    # new-axis placeholders are re-inserted first to line the indexes up)
    for idx in new_axis_index[::-1]:
        shrink_list.insert(idx, 0)
    shrink_axis_index = [idx for idx, x in enumerate(shrink_list) if x]
    for idx in shrink_axis_index[::-1]:
        new_shape.pop(idx)
    # step8: reshape out_tensor
    out2 = akg.topi.reshape(out1, tuple(new_shape))
    if inputs.dtype == "uint8":
        out2 = akg.topi.cast(out2, "uint8")
    return out2
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
19617,
25,
3384,
69,
12,
23,
198,
2,
15069,
13130,
43208,
21852,
1766,
1539,
12052,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34... | 2.226685 | 4,257 |
from typing import List, Dict, Iterable, Union, Callable
from enum import Enum, auto
import json
from pathlib import Path
from pysam import VariantRecord
# An iterable of pysam VCF records.
VariantRecords = Iterable[VariantRecord]
# A chromosome/contig name.
Chrom = str
# Maps each chromosome name to an int -- presumably its length; verify.
ChromSizes = Dict[Chrom, int]
class SeqRegion:
"""Mapping between vcf records in two coordinate spaces
"""
@property
@staticmethod
# An ordered collection of SeqRegion mappings.
SeqRegions = List[SeqRegion]
# Maps each chromosome name to its SeqRegions.
SeqRegionsMap = Dict[Chrom, SeqRegions]
| [
6738,
19720,
1330,
7343,
11,
360,
713,
11,
40806,
540,
11,
4479,
11,
4889,
540,
198,
6738,
33829,
1330,
2039,
388,
11,
8295,
198,
11748,
33918,
198,
6738,
3108,
8019,
1330,
10644,
198,
198,
6738,
279,
893,
321,
1330,
38215,
23739,
198... | 2.939189 | 148 |
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
__version__ = "unknown"
# "import" __version__
for line in open("sfs/__init__.py"):
if line.startswith("__version__"):
exec(line)
break
# See http://pytest.org/latest/goodpractises.html
setup(
name="sfs",
version=__version__,
packages=find_packages(),
install_requires=[
'numpy!=1.11.0', # https://github.com/sfstoolbox/sfs-python/issues/11
'scipy',
],
author="SFS Toolbox Developers",
author_email="sfstoolbox@gmail.com",
description="Sound Field Synthesis Toolbox",
long_description=open('README.rst').read(),
license="MIT",
keywords="audio SFS WFS Ambisonics".split(),
url="http://github.com/sfstoolbox/",
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering",
],
tests_require=['pytest'],
cmdclass={'test': PyTest},
zip_safe=True,
)
| [
11748,
25064,
198,
6738,
900,
37623,
10141,
1330,
9058,
11,
1064,
62,
43789,
198,
6738,
900,
37623,
10141,
13,
21812,
13,
9288,
1330,
1332,
355,
6208,
21575,
198,
198,
834,
9641,
834,
796,
366,
34680,
1,
198,
198,
2,
366,
11748,
1,
... | 2.538126 | 459 |
import csv
import os
from statistics import mean
# Input CSV of (Date, Profit/Loss) rows and the output report path.
csvpath = os.path.join('resources', 'budget_data.csv')
output_file = os.path.join('resources', 'Financial_Analysis.txt')
with open(csvpath, 'r', newline='') as csvfile:
    csvreader = csv.reader(csvfile, delimiter=",")
    # Defining our dictionaries and lists
    dates = {}
    monthlyRev = []
    final = {}
    analysis = []
    # Builds our dictionary with profit/loss, skipping the header row.
    # NOTE: later code depends on dict preserving insertion order (CPython 3.7+).
    for row in csvreader:
        if row[0] != 'Date':
            dates[row[0]] = int(row[1])
    # Variable for total number of months, i.e., entries within the 'dates' dictionary
    totalMonth = len(dates)
    # Total net amount of profit/loss over the entire period
    totalRevenue = sum(dates.values())
    revenue = tuple(dates.values())
    month = tuple(dates.keys())
    # Adds month-over-month revenue deltas into our list
    for x in range(1, (len(revenue))):
        monthlyRev.append((int(revenue[x]) - int(revenue[x-1])))
    # Finds the average change in profit/loss over the entire period
    average = mean(monthlyRev)
    # Builds our dictionary mapping each month (after the first) to its delta
    for x in range(1, (len(month))):
        final[month[x]] = int(monthlyRev[x-1])
    # max/min over zip(values, keys) compares by value first, so these yield
    # the (delta, month) pairs with the largest and smallest deltas.
    greatInc = max(zip(final.values(), final.keys()))
    greatDec = min(zip(final.values(), final.keys()))
    # Creates our read out for financial analysis
    analysis.append('Financial Analysis')
    analysis.append('----------------------------')
    analysis.append('Total Months: ' + str(totalMonth))
    analysis.append('Total Revenue: $' + str(totalRevenue))
    analysis.append('Average Revenue Change: $' + str(average))
    analysis.append('Greatest Increase in Revenue: ' + str(greatInc[1]) + ' ($' + str(greatInc[0]) + ')')
    analysis.append('Greatest Decrease in Revenue: ' + str(greatDec[1]) + ' ($' + str(greatDec[0]) + ')')
    # Prints analysis in terminal
    print("\n".join((analysis)))
    # Writes our .txt output file
    with open(output_file, 'w') as txtfile:
        txtfile.write('\n'.join(analysis))
11748,
269,
21370,
198,
11748,
28686,
198,
6738,
7869,
1330,
1612,
198,
198,
40664,
6978,
796,
28686,
13,
6978,
13,
22179,
10786,
37540,
3256,
705,
37315,
62,
7890,
13,
40664,
11537,
198,
22915,
62,
7753,
796,
28686,
13,
6978,
13,
22179... | 2.902796 | 751 |
import helpers
from sensory_cloud.config import Config
from sensory_cloud.generated.common.common_pb2 import ServerHealthResponse
from sensory_cloud.services.health_service import HealthService
if __name__ == "__main__":
server_health = health_service_example()
| [
11748,
49385,
198,
198,
6738,
23326,
62,
17721,
13,
11250,
1330,
17056,
198,
6738,
23326,
62,
17721,
13,
27568,
13,
11321,
13,
11321,
62,
40842,
17,
1330,
9652,
18081,
31077,
198,
6738,
23326,
62,
17721,
13,
30416,
13,
13948,
62,
15271,... | 3.75 | 72 |
from genologics.lims import *
from genologics_sql.utils import *
from genologics.config import BASEURI, USERNAME, PASSWORD
from genologics_sql.tables import Process
from pprint import pprint
import genologics.entities as gent
import LIMS2DB.utils as lutils
import LIMS2DB.classes as lclasses
if __name__ == "__main__":
print '24-188975'
main('24-188975')
print '24-185529'
main('24-185529')
print '24-181946'
main('24-181946')
print '24-179366'
main('24-179366')
print '24-190011'
main('24-190011')
print '24-188979'
main('24-188979')
| [
198,
6738,
2429,
928,
873,
13,
2475,
82,
1330,
1635,
198,
6738,
2429,
928,
873,
62,
25410,
13,
26791,
1330,
1635,
198,
6738,
2429,
928,
873,
13,
11250,
1330,
49688,
47269,
11,
1294,
1137,
20608,
11,
41752,
54,
12532,
198,
6738,
2429,
... | 2.404082 | 245 |
"""Base classes for CoverageProviders.
The CoverageProviders themselves are in the file corresponding to the
service that needs coverage -- overdrive.py, metadata_wrangler.py, and
so on.
"""
import logging
from lxml import etree
from nose.tools import set_trace
from StringIO import StringIO
from core.coverage import (
CoverageFailure,
CollectionCoverageProvider,
WorkCoverageProvider,
)
from core.model import (
Collection,
ConfigurationSetting,
CoverageRecord,
DataSource,
Edition,
ExternalIntegration,
Identifier,
LicensePool,
WorkCoverageRecord,
)
from core.util.opds_writer import (
OPDSFeed
)
from core.opds_import import (
AccessNotAuthenticated,
MetadataWranglerOPDSLookup,
OPDSImporter,
OPDSXMLParser,
SimplifiedOPDSLookup,
)
from core.util.http import (
RemoteIntegrationException,
)
class RegistrarImporter(OPDSImporter):
    """Importer used when registering identifiers with the metadata wrangler.

    Any 2xx status counts as success: the wrangler accepting an identifier
    into its catalog is enough, even when no metadata is available yet.
    """

    SUCCESS_STATUS_CODES = [200, 201, 202]
class ReaperImporter(OPDSImporter):
    """Importer used when removing identifiers from the metadata wrangler.

    Success means the wrangler acknowledged the removal (200) -- or reported
    the identifier was never in the catalog to begin with (404).
    """

    SUCCESS_STATUS_CODES = [200, 404]
class OPDSImportCoverageProvider(CollectionCoverageProvider):
    """Provide coverage for identifiers by looking them up, in batches,
    using the Simplified lookup protocol.
    """
    # Number of identifiers looked up per remote request.
    DEFAULT_BATCH_SIZE = 25
    # Importer class used to parse the returned OPDS feed; subclasses may
    # override (e.g. RegistrarImporter / ReaperImporter).
    OPDS_IMPORTER_CLASS = OPDSImporter
    def __init__(self, collection, lookup_client, **kwargs):
        """Constructor.

        :param collection: The Collection being covered.
        :param lookup_client: A SimplifiedOPDSLookup object.
        """
        super(OPDSImportCoverageProvider, self).__init__(collection, **kwargs)
        self.lookup_client = lookup_client
    def process_batch(self, batch):
        """Perform a Simplified lookup and import the resulting OPDS feed.

        Returns a list mixing successfully-covered Identifiers and
        CoverageFailure objects, one entry per outcome.
        """
        (imported_editions, pools, works,
            error_messages_by_id) = self.lookup_and_import_batch(batch)
        results = []
        imported_identifiers = set()
        # We grant coverage if an Edition was created from the operation.
        for edition in imported_editions:
            identifier = edition.primary_identifier
            results.append(identifier)
            imported_identifiers.add(identifier)
        # The operation may also have updated information from a
        # number of LicensePools.
        for pool in pools:
            identifier = pool.identifier
            if identifier in imported_identifiers:
                self.finalize_license_pool(pool)
            else:
                # A LicensePool without a matching Edition is treated as a
                # transient failure so the item will be retried.
                msg = "OPDS import operation imported LicensePool, but no Edition."
                results.append(
                    self.failure(identifier, msg, transient=True)
                )
        # Anything left over is either a CoverageFailure, or an
        # Identifier that used to be a CoverageFailure, indicating
        # that a simplified:message that a normal OPDSImporter would
        # consider a 'failure' should actually be considered a
        # success.
        # NOTE(review): sorted() over mixed CoverageFailure/Identifier values
        # relies on those classes being mutually comparable -- verify.
        for failure_or_identifier in sorted(error_messages_by_id.values()):
            if isinstance(failure_or_identifier, CoverageFailure):
                # collection_or_not is presumably defined on a superclass;
                # not visible in this file -- verify.
                failure_or_identifier.collection = self.collection_or_not
            results.append(failure_or_identifier)
        return results
    def process_item(self, identifier):
        """Handle an individual item (e.g. through ensure_coverage) as a very
        small batch. Not efficient, but it works.
        """
        [result] = self.process_batch([identifier])
        return result
    def finalize_license_pool(self, pool):
        """An OPDS entry was matched with a LicensePool. Do something special
        to mark the occasion.

        By default, nothing happens. Hook for subclasses.
        """
        pass
    @property
    def api_method(self):
        """The method to call to fetch an OPDS feed from the remote server.
        """
        return self.lookup_client.lookup
    def lookup_and_import_batch(self, batch):
        """Look up a batch of identifiers and parse the resulting OPDS feed.

        This method is overridden by MockOPDSImportCoverageProvider.
        """
        # id_mapping maps our local identifiers to identifiers the
        # foreign data source will recognize.
        id_mapping = self.create_identifier_mapping(batch)
        if id_mapping:
            foreign_identifiers = id_mapping.keys()
        else:
            foreign_identifiers = batch
        response = self.api_method(foreign_identifiers)
        # import_feed_response takes id_mapping so it can map the
        # foreign identifiers back to their local counterparts.
        return self.import_feed_response(response, id_mapping)
    def create_identifier_mapping(self, batch):
        """Map the internal identifiers used for books to the corresponding
        identifiers used by the lookup client.

        By default, no identifier mapping is needed.
        """
        return None
    def import_feed_response(self, response, id_mapping):
        """Confirms OPDS feed response and imports feed through
        the appropriate OPDSImporter subclass.
        """
        self.lookup_client.check_content_type(response)
        importer = self.OPDS_IMPORTER_CLASS(
            self._db, self.collection,
            identifier_mapping=id_mapping,
            data_source_name=self.data_source.name
        )
        return importer.import_from_feed(response.text)
| [
37811,
14881,
6097,
329,
33998,
15946,
4157,
13,
198,
198,
464,
33998,
15946,
4157,
2405,
389,
287,
262,
2393,
11188,
284,
262,
198,
15271,
326,
2476,
5197,
1377,
625,
19472,
13,
9078,
11,
20150,
62,
18351,
49910,
13,
9078,
11,
290,
1... | 2.666352 | 2,116 |
from .__Fundamental_6 import *
import typing
import System.IO
import System.Collections.Generic
import System
import QuantConnect.Data.Fundamental.MultiPeriodField
import QuantConnect.Data.Fundamental
import QuantConnect.Data
import QuantConnect
import datetime
class BeginningCashPositionCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
The cash and equivalents balance at the beginning of the accounting period, as indicated on the Cash Flow statement.
BeginningCashPositionCashFlowStatement(store: IDictionary[str, Decimal])
"""
NineMonths: float
OneMonth: float
SixMonths: float
ThreeMonths: float
TwelveMonths: float
TwoMonths: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class BiologicalAssetsBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
Biological assets include plants and animals.
BiologicalAssetsBalanceSheet(store: IDictionary[str, Decimal])
"""
ThreeMonths: float
TwelveMonths: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class BookValuePerShareGrowth(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
The growth in the company's book value per share on a percentage basis. Morningstar calculates the growth percentage based on
the common shareholder's equity reported in the Balance Sheet divided by the diluted shares outstanding within the company
filings or reports.
BookValuePerShareGrowth(store: IDictionary[str, Decimal])
"""
FiveYears: float
OneYear: float
ThreeMonths: float
ThreeYears: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class BuildingsAndImprovementsBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
Fixed assets that specifically deal with the facilities a company owns. Include the improvements associated with buildings.
BuildingsAndImprovementsBalanceSheet(store: IDictionary[str, Decimal])
"""
NineMonths: float
OneMonth: float
SixMonths: float
ThreeMonths: float
TwelveMonths: float
TwoMonths: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class CapExGrowth(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
The growth in the company's capital expenditures on a percentage basis. Morningstar calculates the growth percentage based on
the capital expenditures reported in the Cash Flow Statement within the company filings or reports.
CapExGrowth(store: IDictionary[str, Decimal])
"""
FiveYears: float
OneYear: float
ThreeYears: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class CapExReportedCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
Capital expenditure, capitalized software development cost, maintenance capital expenditure, etc. as reported by the company.
CapExReportedCashFlowStatement(store: IDictionary[str, Decimal])
"""
NineMonths: float
OneMonth: float
SixMonths: float
ThreeMonths: float
TwelveMonths: float
TwoMonths: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class CapExSalesRatio(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
Capital Expenditure / Revenue
CapExSalesRatio(store: IDictionary[str, Decimal])
"""
OneYear: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class CapitalExpenditureAnnual5YrGrowth(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
This is the compound annual growth rate of the company's capital spending over the last 5 years. Capital Spending is the sum of
the Capital Expenditure items found in the Statement of Cash Flows.
CapitalExpenditureAnnual5YrGrowth(store: IDictionary[str, Decimal])
"""
FiveYears: float
OneYear: float
ThreeYears: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class CapitalExpenditureCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
Funds used by a company to acquire or upgrade physical assets such as property, industrial buildings or equipment. This
type of outlay is made by companies to maintain or increase the scope of their operations. Capital expenditures are generally
depreciated or depleted over their useful life, as distinguished from repairs, which are subtracted from the income of the current
year.
CapitalExpenditureCashFlowStatement(store: IDictionary[str, Decimal])
"""
NineMonths: float
OneMonth: float
SixMonths: float
ThreeMonths: float
TwelveMonths: float
TwoMonths: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class CapitalExpendituretoEBITDA(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
Measures the amount a company is investing in its business relative to EBITDA generated in a given PeriodAsByte.
CapitalExpendituretoEBITDA(store: IDictionary[str, Decimal])
"""
OneYear: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class CapitalLeaseObligationsBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
Current Portion of Capital Lease Obligation plus Long Term Portion of Capital Lease Obligation.
CapitalLeaseObligationsBalanceSheet(store: IDictionary[str, Decimal])
"""
NineMonths: float
OneMonth: float
SixMonths: float
ThreeMonths: float
TwelveMonths: float
TwoMonths: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class CapitalStockBalanceSheet(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
The total amount of stock authorized for issue by a corporation, including common and preferred stock.
CapitalStockBalanceSheet(store: IDictionary[str, Decimal])
"""
NineMonths: float
OneMonth: float
SixMonths: float
ThreeMonths: float
TwelveMonths: float
TwoMonths: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
class CashAdvancesandLoansMadetoOtherPartiesCashFlowStatement(QuantConnect.Data.Fundamental.MultiPeriodField):
"""
Cash outlay for cash advances and loans made to other parties.
CashAdvancesandLoansMadetoOtherPartiesCashFlowStatement(store: IDictionary[str, Decimal])
"""
TwelveMonths: float
Store: typing.List[QuantConnect.Data.Fundamental.MultiPeriodField.PeriodField]
| [
6738,
764,
834,
24553,
6860,
62,
21,
1330,
1635,
198,
11748,
19720,
198,
11748,
4482,
13,
9399,
198,
11748,
4482,
13,
5216,
26448,
13,
46189,
198,
11748,
4482,
198,
11748,
16972,
13313,
13,
6601,
13,
24553,
6860,
13,
29800,
5990,
2101,
... | 3.172048 | 2,168 |
linux_only_targets="blink bluetooth.ble_bqb netmgrapp bluetooth.blemesh_srv bluetooth.blemesh uDataapp bluetooth.blemesh_cli bluetooth.bleadv wifihalapp hdlcapp.hdlcserver acapp helloworld bluetooth.bleperipheral helloworld_nocli"
| [
23289,
62,
8807,
62,
83,
853,
1039,
2625,
2436,
676,
48208,
16271,
13,
903,
62,
65,
80,
65,
2010,
11296,
430,
381,
48208,
16271,
13,
11253,
5069,
62,
27891,
85,
48208,
16271,
13,
11253,
5069,
334,
6601,
1324,
48208,
16271,
13,
11253,
... | 2.717647 | 85 |
"""Entity for Zigbee Home Automation."""
import asyncio
import logging
import time
from homeassistant.core import callback
from homeassistant.helpers import entity
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.restore_state import RestoreEntity
from .core.const import (
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_NAME,
DATA_ZHA,
DATA_ZHA_BRIDGE_ID,
DOMAIN,
SIGNAL_REMOVE,
)
from .core.helpers import LogMixin
_LOGGER = logging.getLogger(__name__)
ENTITY_SUFFIX = "entity_suffix"
RESTART_GRACE_PERIOD = 7200 # 2 hours
class ZhaEntity(RestoreEntity, LogMixin, entity.Entity):
"""A base class for ZHA entities."""
def __init__(self, unique_id, zha_device, channels, skip_entity_id=False, **kwargs):
"""Init ZHA entity."""
self._force_update = False
self._should_poll = False
self._unique_id = unique_id
ieeetail = "".join([f"{o:02x}" for o in zha_device.ieee[:4]])
ch_names = [ch.cluster.ep_attribute for ch in channels]
ch_names = ", ".join(sorted(ch_names))
self._name = f"{zha_device.name} {ieeetail} {ch_names}"
self._state = None
self._device_state_attributes = {}
self._zha_device = zha_device
self.cluster_channels = {}
self._available = False
self._unsubs = []
self.remove_future = None
for channel in channels:
self.cluster_channels[channel.name] = channel
@property
def name(self):
"""Return Entity's default name."""
return self._name
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._unique_id
@property
def zha_device(self):
"""Return the zha device this entity is attached to."""
return self._zha_device
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
return self._device_state_attributes
@property
def force_update(self) -> bool:
"""Force update this entity."""
return self._force_update
@property
def should_poll(self) -> bool:
"""Poll state from device."""
return self._should_poll
@property
def device_info(self):
"""Return a device description for device registry."""
zha_device_info = self._zha_device.device_info
ieee = zha_device_info["ieee"]
return {
"connections": {(CONNECTION_ZIGBEE, ieee)},
"identifiers": {(DOMAIN, ieee)},
ATTR_MANUFACTURER: zha_device_info[ATTR_MANUFACTURER],
ATTR_MODEL: zha_device_info[ATTR_MODEL],
ATTR_NAME: zha_device_info[ATTR_NAME],
"via_device": (DOMAIN, self.hass.data[DATA_ZHA][DATA_ZHA_BRIDGE_ID]),
}
@property
def available(self):
"""Return entity availability."""
return self._available
@callback
def async_set_available(self, available):
"""Set entity availability."""
self._available = available
self.async_schedule_update_ha_state()
@callback
def async_update_state_attribute(self, key, value):
"""Update a single device state attribute."""
self._device_state_attributes.update({key: value})
self.async_schedule_update_ha_state()
@callback
def async_set_state(self, state):
"""Set the entity state."""
pass
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
self.remove_future = asyncio.Future()
await self.async_check_recently_seen()
await self.async_accept_signal(
None,
"{}_{}".format(self.zha_device.available_signal, "entity"),
self.async_set_available,
signal_override=True,
)
await self.async_accept_signal(
None,
"{}_{}".format(SIGNAL_REMOVE, str(self.zha_device.ieee)),
self.async_remove,
signal_override=True,
)
self._zha_device.gateway.register_entity_reference(
self._zha_device.ieee,
self.entity_id,
self._zha_device,
self.cluster_channels,
self.device_info,
self.remove_future,
)
async def async_check_recently_seen(self):
"""Check if the device was seen within the last 2 hours."""
last_state = await self.async_get_last_state()
if (
last_state
and self._zha_device.last_seen
and (time.time() - self._zha_device.last_seen < RESTART_GRACE_PERIOD)
):
self.async_set_available(True)
if not self.zha_device.is_mains_powered:
# mains powered devices will get real time state
self.async_restore_last_state(last_state)
self._zha_device.set_available(True)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect entity object when removed."""
for unsub in self._unsubs[:]:
unsub()
self._unsubs.remove(unsub)
self.zha_device.gateway.remove_entity_reference(self)
self.remove_future.set_result(True)
@callback
def async_restore_last_state(self, last_state):
"""Restore previous state."""
pass
async def async_update(self):
"""Retrieve latest state."""
for channel in self.cluster_channels.values():
if hasattr(channel, "async_update"):
await channel.async_update()
async def async_accept_signal(self, channel, signal, func, signal_override=False):
"""Accept a signal from a channel."""
unsub = None
if signal_override:
unsub = async_dispatcher_connect(self.hass, signal, func)
else:
unsub = async_dispatcher_connect(
self.hass, f"{channel.unique_id}_{signal}", func
)
self._unsubs.append(unsub)
def log(self, level, msg, *args):
"""Log a message."""
msg = f"%s: {msg}"
args = (self.entity_id,) + args
_LOGGER.log(level, msg, *args)
| [
37811,
32398,
329,
24992,
20963,
5995,
17406,
341,
526,
15931,
198,
198,
11748,
30351,
952,
198,
11748,
18931,
198,
11748,
640,
198,
198,
6738,
1363,
562,
10167,
13,
7295,
1330,
23838,
198,
6738,
1363,
562,
10167,
13,
16794,
364,
1330,
... | 2.220614 | 2,833 |
#coding=utf-8
from __future__ import absolute_import, unicode_literals
# https://github.com/qiniu/python-sdk/blob/master/qiniu/services/storage/uploader.py
# https://developer.qiniu.com/kodo/sdk/python
from qiniu import Auth,put_file,put_data,BucketManager
from .utils import QiniuError, bucket_lister
from os.path import basename,splitext
from datetime import datetime
class QiniuStorage(object):
'''
七牛云的文件上传、显示、删除
@auth:ZWJ
'''
def __init__(self,access_key,secret_key,bucket_name,bucket_domain):
"""
@para:
access_key:公钥
secret_key:私钥
bucket_name: 要上传的空间
bucket_domain:获取文件url路径时对应的私有域名
"""
self.auth=Auth(access_key,secret_key)
self.bucket_name = bucket_name
self.bucket_domain = bucket_domain
self.bucket_manager = BucketManager(self.auth)
def put_data(self,name,data):
"""
@def:put_data
@def_fun: 文件流上传
空间里的文件名不能重复,所以用_newname生成新文件名
@para:
name: 文件名
data: 上传二进制流
@ret:上传后的url路径
"""
#上传到七牛后保存的文件名
key = self._newname(name)
#生成上传 Token,可以指定过期时间等
token=self.auth.upload_token(self.bucket_name,key)
if hasattr(data,'chunks'):
data = b''.join(c for c in data.chunks())
ret, info = put_data(token, key, data) #上传文件流到七牛
if ret is None or ret['key'] != key:
raise QiniuError(info)
return self.get_url(key)
def _newname(self,name):
'''加上6位日期和6位时间标识 PG.jpg --> PG_170211_044217.jpg '''
root,ext=splitext(basename(name))
time=datetime.now().strftime('_%y%m%d_%H%M%S')
return '{}{}{}'.format(root,time,ext)
def get_url(self,key):
'''
@def:get_url
@def_fun: 返回七牛云上文件名为key的文件对应的url地址
如果是公有空间,该地址可以直接访问文件;私有空间则需用private_download_url
@para:
key: 七牛云上的文件名
@ret:域名加文件名生成的url路径
'''
url='http://{}/{}'.format(self.bucket_domain,key)
return url
def private_download_url(self,url,expires=7200):
"""
@def:private_download_url
@def_fun: 生成私有资源下载链接
@para:
url: 私有空间资源的原始URL
expires: 下载凭证有效期,默认为7200s
@ret:私有资源的下载链接
"""
return self.auth.private_download_url(url,expires)
def put_file(self,filePath):
"""
@def:put_file
@def_fun: 本地文件上传
空间里的文件名不能重复,所以用_newname生成新文件名
@para:
filePath: 待上传文件在磁盘中的绝对路径
@ret:上传后的url路径
"""
key = self._newname(filePath)
token=self.auth.upload_token(self.bucket_name,key)
ret, info = put_file(token, key, filePath)
if ret is None or ret['key'] != key:
raise QiniuError(info)
return self.get_url(key)
def exists(self,key):
'''检测七牛云上是否有文件名为key的文件'''
bucket=self.bucket_manager
ret, info = bucket.stat(self.bucket_name, key.split('/')[-1])
return ret is not None
def delete(self,key):
'''删除七牛云上文件名为key的文件'''
if not self.exists(key):
return '{} not exist in qiniu_cloud'.format(key)
bm=self.bucket_manager
ret, info = bm.delete(self.bucket_name, key.split('/')[-1])
if ret == {}:
return 'success to delete {} in qiniu_cloud'.format(key)
else:
return info
def ls_files(self,prefix="", limit=None):
"""
@def:ls_file
@def_fun: 显示七牛云上的文件名
具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/rs/list.html
@para:
prefix: 列举前缀
limit: 单次列举个数限制
@ret: 文件名组成的set()集合
"""
files=set()
dirlist = bucket_lister(self.bucket_manager, self.bucket_name,
prefix,limit)
for item in dirlist:
files.add(item['key'])
return files
| [
2,
66,
7656,
28,
40477,
12,
23,
198,
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
11,
28000,
1098,
62,
17201,
874,
198,
2,
3740,
1378,
12567,
13,
785,
14,
80,
5362,
84,
14,
29412,
12,
21282,
74,
14,
2436,
672,
14,
9866,
14,
8... | 1.411422 | 3,257 |
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
from sklearn.externals import joblib
from qstrader.price_parser import PriceParser
from qstrader.event import (SignalEvent, EventType)
from qstrader.strategy.base import AbstractStrategy
class IntradayMachineLearningPredictionStrategy(AbstractStrategy):
"""
Requires:
tickers - The list of ticker symbols
events_queue - A handle to the system events queue
"""
def _update_current_returns(self, event):
"""
Updates the array of current returns "features"
used by the machine learning model for prediction.
"""
# Adjust the feature vector to move all lags by one
# and then recalculate the returns
for i, f in reversed(list(enumerate(self.cur_prices))):
if i > 0:
self.cur_prices[i] = self.cur_prices[i - 1]
else:
self.cur_prices[i] = event.close_price / float(
PriceParser.PRICE_MULTIPLIER
)
if self.minutes > (self.lags + 1):
for i in range(0, self.lags):
self.cur_returns[i] = ((
self.cur_prices[i] / self.cur_prices[i + 1]
) - 1.0) * 100.0
def calculate_signals(self, event):
"""
Calculate the intraday machine learning
prediction strategy.
"""
if event.type == EventType.BAR:
self._update_current_returns(event)
self.minutes += 1
# Allow enough time to pass to populate the
# returns feature vector
if self.minutes > (self.lags + 2):
pred = self.model.predict(self.cur_returns.reshape((1, -1)))[0]
# Long only strategy
if not self.invested and pred == 1:
print("LONG: %s" % event.time)
self.events_queue.put(
SignalEvent(self.tickers[0], "BOT", self.qty)
)
self.invested = True
if self.invested and pred == -1:
print("CLOSING LONG: %s" % event.time)
self.events_queue.put(
SignalEvent(self.tickers[0], "SLD", self.qty)
)
self.invested = False
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
19798,
292,
355,
279,
67,
198,
198,
6738,
1341,
35720,
13,
1069,
759,
874,
1330,
1693,
8019,
198,
6738,
10662,
2536,
5067,
13,... | 2 | 1,166 |
#!/usr/bin/env python3
import argparse
import os
from junit_xml import TestSuite, TestCase
if __name__ == "__main__":
ap = argparse.ArgumentParser(add_help=True)
ap.add_argument("-o", "--out", default=None, help="Output file.")
ap.add_argument("input", help="input file")
args = ap.parse_args()
if args.input is None:
print("Input file reqired")
ap.print_usage()
exit()
with open(args.input, "r") as ifile:
if args.out != None:
with open(args.out, "w") as ofile:
ofile.write(generate_junit_xml(ifile))
else:
print(generate_junit_xml(ifile))
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
11748,
1822,
29572,
198,
11748,
28686,
198,
6738,
10891,
270,
62,
19875,
1330,
6208,
5606,
578,
11,
6208,
20448,
628,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1... | 2.221843 | 293 |
import os
import tensorflow as tf
| [
11748,
28686,
198,
198,
11748,
11192,
273,
11125,
355,
48700,
628
] | 3.272727 | 11 |
from models import MonModel, change_time
# from models.user import User
import models.user
# from models.tweet import Tweet # 这样不行
import models.tweet # 为了避免和comment交叉引用
import time
class Comment(MonModel):
"""
__fields__ = [
'_id',
('id', int, -1),
('type', str, ''),
('deleted', bool, False),
('created_time', int, 0),
('updated_time', int, 0),
"""
__fields__ = MonModel.__fields__ + [
('content', str, ''),
('user_id', int, -1),
('user_name', str, ''),
('tweet_id', int, -1),
('who_likes', list, []),
]
@classmethod
# class Comment(Model):
# def __init__(self, form, user_id=-1):
# self.id = form.get('id', None)
# self.content = form.get('content', '')
# self.user_id = form.get('user_id', user_id)
# # 注意一定要为int
# self.tweet_id = int(form.get('tweet_id', -1))
#
# def user(self):
# u = User.find_by(id=self.user_id)
# return u
#
# def tweet(self):
# t = models.tweet.Tweet.find_by(id=self.tweet_id)
# return t
| [
6738,
4981,
1330,
2892,
17633,
11,
1487,
62,
2435,
198,
2,
422,
4981,
13,
7220,
1330,
11787,
198,
11748,
4981,
13,
7220,
198,
2,
422,
4981,
13,
83,
7277,
1330,
18752,
220,
1303,
5525,
123,
247,
43718,
115,
38834,
26193,
234,
198,
11... | 1.989071 | 549 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 21 14:04:11 2020
@author: ICARD
"""
#module systeme :
import os.path
import os
import threading
import configparser
Config = configparser.ConfigParser()
verrou = threading.Lock()
def creer_config_defaut():
'''
Ici dans la fonction creer_config_defaut On va crée un fichier de configuration du serveur (un fichier INI)
D'abord on verouille le cadena pour etre sur d'etre seul à travail sur le fichier.
Ensuite on utilise l'objet 'Config' initialisé au dessus : Config = configparser.ConfigParser() pour stocker la configuration.
on créer le fichier : .serveur-http.conf et ont ecrit l'objet Config dedans qui contient la configuration du serveur.
Si j'amais on attrape un erreur on renvoie false pour prevenir que l'ecriture c'est mal déroulé.
Sinon on relache le verrou et on return True.
'''
#verouillage du verrou
verrou.acquire()
try:
#Config du serveur
Config['global'] = {
'Hote': '',
'Port': '8000',
'Rep_servi':os.path.join(os.environ['HOME'],"src", "M2207", "Projet_tp_server", 'Site')
}
#Ecriture dans le fichier INI
with open('.serveur-http.conf', 'w') as configfile:
Config.write(configfile)
except OSError:
return False
#déverouillage du verrou
verrou.release()
return True
def lire_configuration():
'''
Dans la fonction lire_configuration on va lire le fichier de configuration (fichier ini) crée avec la fonction creer_config_defaut
D'abord on verifie si le fichier existe sinon on le crée et on verifie que la création c'est bien déroulé.
Ensuite on verouille le cadena, on lit le fichier .serveur-http.conf
On récupere toutes les options et on les stocks dans la list 'clé'
Apres on récupere les valeurs associée au clé dans la boucle for.
On s'assure juste de transformer le numero de port en entier (int) avec la methode Config.getint.
Et enfin on fait appele à la fonction set_config pour écrire dans le dictionnaire CONFIGURATION.
Si j'amais on attrape un erreur on renvoie false pour prevenir que la lecture c'est mal déroulé.
Sinon on return True
'''
try:
#Vérification de l'existance du fichier INI
if os.path.isfile('.serveur-http.conf') == False:
creer_config_defaut()
if creer_config_defaut() == False:
print ("problème d'écriture du fichier de configuration")
return False
#lecture du fichier
verrou.acquire()
Config.read('.serveur-http.conf')
cle = Config.options('global')
verrou.release()
#écriture de la config dans le dictionnaire grace à la fonction set_config
for i in range (0, len(cle)):
verrou.acquire()
valeur = Config.get('global',cle[i])
#changement du type du port (de str en int)
try :
if i == 1:
valeur = Config.getint('global','Port')
except Exception:
cle[i]='Port'
valeur=80
verrou.release()
set_config(cle[i],valeur)
except OSError :
return False
return True
CONFIGURATION = {}
CODE_ETAT={
200 : "OK",
400 : "BAD REQUEST",
404 : "NOT FOUND",
405 : "METHOD NOT ALLOWED",
500 : "INTERNAL SERVER ERROR"
}
PAGE_HTML_ERREUR={
404 : b'''<html><body><center><h1>Error 404: NOT FOUND</h1></center><p>Head back to <a href=\"/\">home page</a>.</p></body></html>''',
400 : b'''<html><body><center><h1>Error 400: BAD REQUEST</h1></center><p>Head back to <a href=\"/\">home page</a>.</p></body></html>''',
405 : b'''<html><body><center><h1>Error 405: METHOD NOT ALLOWED</h1></center><p>Head back to <a href=\"/\">home page</a>.</p></body></html>''',
500 : b'''<html><body><center><h1>Error 500: INTERNAL SERVER ERROR</h1></center><p>Head back to <a href=\"/\">home page</a>.</p></body></html>'''
}
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
19480,
3158,
2310,
1478,
25,
3023,
25,
1157,
12131,
198,
198,
31,
9800,
25,
12460,
9795,
... | 2.158666 | 1,979 |
#Done by Carlos Amaral (19/07/2020)
#Try 15.1- Cubes
import matplotlib.pyplot as plt
x_values = [1, 2, 3, 4, 5]
y_values = [1, 8, 27, 64, 125]
plt.style.use('seaborn')
fig, ax = plt.subplots()
ax.plot(x_values, y_values, linewidth = 3)
#Set chart title and label axes
ax.set_title("Cubic Numbers", fontsize = 24)
ax.set_xlabel("Values", fontsize = 14)
ax.set_ylabel("Cubic of Values", fontsize = 14)
#Set size tick labels
ax.tick_params(axis = 'both', labelsize = 14)
plt.show() | [
2,
45677,
416,
17409,
44291,
282,
357,
1129,
14,
2998,
14,
42334,
8,
628,
198,
2,
23433,
1315,
13,
16,
12,
7070,
274,
198,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
220,
198,
198,
87,
62,
27160,
796,
685,
16,
... | 2.403941 | 203 |
## https://github.com/bast/somepackage
## https://github.com/navdeep-G/samplemod
if __name__ == '__main__':
BoardFeet = CalcBoardFeet(8,1,LengthInFeet=8)
print(BoardFeet)
BoardFeet = CalcBoardFeet(7,2,LengthInInches=120)
print(BoardFeet) | [
2235,
3740,
1378,
12567,
13,
785,
14,
65,
459,
14,
11246,
26495,
198,
2235,
3740,
1378,
12567,
13,
785,
14,
28341,
22089,
12,
38,
14,
39873,
4666,
628,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
628,
220,
22... | 2.354545 | 110 |
import random
for i in range(5):
print(random.randint(1, 10)) | [
11748,
4738,
198,
1640,
1312,
287,
2837,
7,
20,
2599,
198,
220,
220,
220,
3601,
7,
25120,
13,
25192,
600,
7,
16,
11,
838,
4008
] | 2.6 | 25 |
"""
This automation Quarantines and EC2 Instance, identified as above or below the configured threshold
by Hyperglance Rule(s)
This automation will operate across accounts, where the appropriate IAM Role exists.
"""
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def hyperglance_automation(boto_session, resource: dict, automation_params=''):
""" Attempts to Qurantine and EC2 Instance
Parameters
----------
boto_session : object
The boto session to use to invoke the automation
resource: dict
Dict of Resource attributes touse in the automation
automation_params : str
Automation parameters passed from the Hyperglance UI
"""
client = boto_session.client('ec2')
ec2_resource = boto_session.resource('ec2')
ec2_instance = resource['attributes']['Instance ID']
vpc_id = resource['attributes']['VPC ID']
## Check if there already is a qurantine SG, if not, create one
response = client.describe_security_groups(
Filters=[
{
'Name': 'group-name',
'Values': ['Quarantined_By_Hyperglance']
},
{
'Name': 'vpc-id',
'Values': [vpc_id]
}
]
)
quarantine_sg_id = ''
if response['SecurityGroups']:
quarantine_sg_id = response['SecurityGroups'][0]['GroupId']
logger.info("Already quarantined by security group: {}".format(quarantine_sg_id))
else:
response = client.create_security_group(
Description='Quarantine Security Group. Created by Hyperglance automations. Do NOT attach Ingress or Egress rules.',
GroupName='Quarantined_By_Hyperglance',
VpcId=vpc_id
)
quarantine_sg_id = response['GroupId']
## Remove the default Security Groups Rules
created_security_group = ec2_resource.SecurityGroup(response['GroupId'])
created_security_group.revoke_egress(
GroupId=response['GroupId'],
IpPermissions=[
{
'IpProtocol': '-1',
'IpRanges': [
{
'CidrIp': '0.0.0.0/0'
}
]
}
]
)
## Finally attach the instance to SG
ec2_resource.Instance(ec2_instance).modify_attribute(Groups=[quarantine_sg_id])
| [
37811,
198,
1212,
22771,
2264,
4741,
1127,
290,
13182,
17,
2262,
590,
11,
5174,
355,
2029,
393,
2174,
262,
17839,
11387,
198,
1525,
15079,
4743,
590,
14330,
7,
82,
8,
198,
198,
1212,
22771,
481,
8076,
1973,
5504,
11,
810,
262,
5035,
... | 2.26959 | 1,072 |
from plotly.subplots import make_subplots
import plotly.graph_objs as go
import numpy as np
import pandas as pd
| [
6738,
7110,
306,
13,
7266,
489,
1747,
1330,
787,
62,
7266,
489,
1747,
198,
11748,
7110,
306,
13,
34960,
62,
672,
8457,
355,
467,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
19798,
292,
355,
279,
67,
628,
628,
628,
628
] | 2.902439 | 41 |
from decimal import Decimal
from django.test import TestCase
from cartridge.shop.models import Cart
from cartridge.shop.tests import ShopTests
from cartridgetax.handler import default_billship_handler
| [
6738,
32465,
1330,
4280,
4402,
198,
198,
6738,
42625,
14208,
13,
9288,
1330,
6208,
20448,
198,
198,
6738,
26904,
13,
24643,
13,
27530,
1330,
13690,
198,
6738,
26904,
13,
24643,
13,
41989,
1330,
13705,
51,
3558,
198,
198,
6738,
6383,
605... | 3.745455 | 55 |
import logging
import math
import numpy as np
#TODO: Remove this line
from PySide.QtGui import QGraphicsPolygonItem, QImage
from PySide.QtGui import QColor, QGraphicsPixmapItem, QPixmap
from PySide.QtCore import QPoint, QPointF, Qt
from traits.api import Bool, Enum, DelegatesTo, Dict, HasTraits, Instance, Int, List, WeakRef, on_trait_change
from arrview import settings
from arrview.colormapper import ArrayPixmap
from arrview.roi import ROI, ROIManager
from arrview.slicer import Slicer
from arrview.tools.base import GraphicsTool, GraphicsToolFactory, MouseState
from arrview.tools.paintbrush import PaintBrushItem
log = logging.getLogger(__name__)
_paintbrush_z = 100
_foreground_roi_z = 11
_background_roi_z = 10
def _pixmap_to_ndarray(pixmap, alpha_threshold=0):
"""Convert a pixmap to a ndarray mask
Parameters
----------
pixmap : QPixmap
pixmap to convert to ndarray
alpha_threshold : float
convert pixels with alpha > than this value to 1's and values <= threshold to 0's
Returns
-------
A binary mask of the pixmap as a ndarray
"""
img = pixmap.toImage()
w, h = img.width(), img.height()
ptr = img.constBits()
arr = np.frombuffer(ptr, dtype='uint8').reshape(h, w, 4)
out = (arr[...,3] > alpha_threshold).copy()
return out
def _ndarray_to_arraypixmap(array, color=(0, 255, 0, 128)):
"""Convert a binary array to an ArrayPixmap with specified color and alpha level
Args:
array -- binary ndarray
color -- RGBA color tuple. [0, 255] for each channel
Returns:
An ArrayPixmap with of the ndarray with constant alpha value
and color. The input array is colored with *color* and *alpha*
anywhere it is equal to 1.
"""
assert array.ndim == 2, 'Only 2D arrays are supported'
assert len(color) == 4, 'Color should be a 4-tuple'
h, w = array.shape
array = array.astype('uint32')
array = (color[3] * array) << 24 \
| (color[0] * array) << 16 \
| (color[1] * array) << 8 \
| (color[2] * array)
pixdata = array.flatten()
img = QImage(pixdata, w, h, QImage.Format_ARGB32)
return ArrayPixmap(pixdata, QPixmap.fromImage(img))
| [
11748,
18931,
198,
11748,
10688,
198,
198,
11748,
299,
32152,
355,
45941,
198,
198,
2,
51,
3727,
46,
25,
17220,
428,
1627,
198,
6738,
9485,
24819,
13,
48,
83,
8205,
72,
1330,
1195,
18172,
34220,
14520,
7449,
11,
1195,
5159,
198,
198,
... | 2.562572 | 871 |
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class MusicConfig(AppConfig):
"""Configuration for ``Music`` app."""
name = 'apps.music'
verbose_name = _('Music')
| [
6738,
42625,
14208,
13,
18211,
1330,
2034,
16934,
198,
6738,
42625,
14208,
13,
26791,
13,
41519,
1330,
334,
1136,
5239,
62,
75,
12582,
355,
4808,
628,
198,
4871,
7849,
16934,
7,
4677,
16934,
2599,
198,
220,
220,
220,
37227,
38149,
329,
... | 3.041667 | 72 |
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Use Python 3 !
# Filter 2gram files to keep cities that really exist
# In :
#     "../_data/2gram-to/" directory
#     "../_out/inverted_cities.json"
# Out :
#     "../_out/2gram-to-cities/" directory
# Prerequisite:
#     01, 02
# NOTE(review): this script calls saveCity(), getOriginalName() and saveLine(),
# which are not defined here — presumably provided by the prerequisite
# scripts 01/02; confirm they are in scope before running.

import csv
import json
import time
import os

conj = "to"
dirIn = "../_data/2gram-"+conj+"/"
dirOut = "../_out/2gram-"+conj+"-cities/"

rowWord = ""  # name of the current row minus the "conj"
prevRowWord = ""  # name of the previous row minus the "conj"
currentCityName = ""
savingLines = False

cities = []
pathToCities = "../_out/inverted_cities.json"

cityOccurances = []  # result
cityCount = 0
doubles = {}  # list of the doublons

# Make sure the output directory has been emptied by hand first.
print("Is the folder {0} empty ?".format(dirOut))
answer = input()
if answer.lower() not in ['y', 'yes', 'o', 'oui']:
    print("Do it.")
    exit()

timer = time.process_time()
i = 0
for root, dirs, files in os.walk(dirIn):
    for filename in files:
        # Progress indicator: one line every 100 files.
        if i % 100 == 0:
            print(i)
        i += 1
        with open(dirIn+filename, 'r') as csvfile:
            csvRows = csv.reader(csvfile, delimiter='\t')
            for csvRow in csvRows:
                # Strip the leading conjunction and the space after it
                # ("to paris" -> "paris") to get the candidate city name.
                rowWord = csvRow[0][len(conj) + 1:]
                if rowWord != prevRowWord:
                    # The candidate changed: flush the previous city, if any.
                    if savingLines:
                        saveCity(originalName)
                    prevRowWord = rowWord
                    originalName = getOriginalName(rowWord)
                    # BUG FIX: the original tested `originalName is ""`, an
                    # identity comparison with a string literal; interning is
                    # implementation-defined, so compare by value instead.
                    if originalName == "":
                        # not a real city: skip its lines
                        savingLines = False
                    else:
                        # it's a real city, save the next lines !
                        currentCityName = originalName
                        savingLines = True
                else:
                    # Same row word as the previous line: keep accumulating.
                    if savingLines:
                        saveLine(rowWord, csvRow)
# NOTE(review): lines pending at the very end of the walk are never flushed
# with a final saveCity() call — confirm whether that is intended.

with open(dirOut + "doubles.txt", 'w') as f:
    for key in doubles:
        f.write(key + "\n")

print("Successfully found {0} cities in {1} second(s)".format(cityCount, int(time.process_time() - timer)))
timer = time.process_time()
2,
48443,
14629,
14,
8800,
14,
29412,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
5765,
11361,
513,
5145,
198,
2,
25853,
362,
4546,
3696,
284,
1394,
4736,
326,
1107,
2152,
198,
2,
554,
1058,
198,
... | 2.424171 | 844 |
import os.path as osp
from glob import glob
from .base_flow import FlowDataset
| [
11748,
28686,
13,
6978,
355,
267,
2777,
198,
6738,
15095,
1330,
15095,
198,
198,
6738,
764,
8692,
62,
11125,
1330,
27782,
27354,
292,
316,
628
] | 3.24 | 25 |
import io
import subprocess
import logging
import os
import gzip
from .vfs import VFSFile
log = logging.getLogger(__name__)
class GzipReader(VFSFile, io.RawIOBase):
    """A pipe object that takes a file-like object as input and acts itself like a stream,
    decompressing data on the fly."""
    __module__ = 'pyobs.vfs'

    def __init__(self, fd, close_fd=True):
        """Create a new GZIP reader pipe.

        The whole input is decompressed eagerly here; subsequent reads are
        served from an in-memory buffer.

        Args:
            fd: File-like object.
            close_fd: Whether or not to close the file afterwards. If False, caller has to close it by itself.

        Raises:
            RuntimeError: If the external gzip process reported an error.
        """
        io.RawIOBase.__init__(self)

        # init
        self._pos = 0
        self._fd = fd
        self._close_fd = close_fd

        # prefer the external binary, fall back to the stdlib otherwise
        use_shell = os.path.exists('/bin/gzip')

        # read and decompress raw stream
        if use_shell:
            p = subprocess.run(['/bin/gzip', '-d'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, input=fd.read())
            # NOTE(review): threshold of 1 byte kept from the original —
            # presumably to tolerate a stray byte on stderr; confirm intent.
            if len(p.stderr) > 1:
                # bug fix: %-format the message; the original passed a tuple
                # to RuntimeError, so the stderr text was never interpolated.
                raise RuntimeError('Error from gzip: %s' % p.stderr)
            self._buffer = p.stdout
        else:
            self._buffer = gzip.decompress(fd.read())

    def readable(self):
        """Stream is readable."""
        return True

    def seekable(self):
        """Stream is seekable."""
        return True

    def seek(self, offset, whence=io.SEEK_SET):
        """Seek in stream.

        Args:
            offset: Offset to move.
            whence: Origin of move, i.e. beginning, current position, or end of stream.

        Returns:
            The new absolute position, as required by the io.IOBase contract
            (bug fix: the original returned None).
        """
        # set offset
        if whence == io.SEEK_SET:
            self._pos = offset
        elif whence == io.SEEK_CUR:
            self._pos += offset
        elif whence == io.SEEK_END:
            # bug fix: seek(0, SEEK_END) must land at end-of-stream (len),
            # not on the last byte (len - 1).
            self._pos = len(self) + offset

        # clamp into [0, len]; a position equal to len means "at EOF"
        self._pos = max(0, min(len(self), self._pos))
        return self._pos

    def tell(self):
        """Give current position on stream."""
        return self._pos

    def __len__(self):
        """Length of stream buffer."""
        return len(self._buffer)

    def read(self, size=-1) -> bytearray:
        """Read number of bytes from stream.

        Args:
            size: Number of bytes to read, -1 (or None) reads until end of data.

        Returns:
            Data read from stream, starting at the current position.
        """
        if size is None or size < 0:
            # bug fix: read from the current position to the end; the
            # original returned the entire buffer regardless of position.
            data = self._buffer[self._pos:]
        else:
            # extract data to read
            data = self._buffer[self._pos:self._pos + size]
        # advance by what was actually read, so the position never overshoots
        self._pos += len(data)
        return data

    def close(self):
        """Close stream."""
        if self._close_fd:
            # close fd, if requested
            self._fd.close()
self._fd.close()
class GzipWriter(VFSFile, io.RawIOBase):
    """A pipe object that takes a file-like object as input and acts itself like a stream,
    compressing data on the fly."""
    # consistency: GzipReader sets this; present both classes as pyobs.vfs API
    __module__ = 'pyobs.vfs'

    def __init__(self, fd, close_fd=True):
        """Create a new GZIP writer pipe.

        Written data is collected in memory and only compressed and forwarded
        to *fd* on flush()/close().

        Args:
            fd: File-like object.
            close_fd: Whether or not to close the file afterwards. If False, caller has to close it by itself.
        """
        io.RawIOBase.__init__(self)

        # init buffer
        self._buffer = b''
        self._fd = fd
        self._close_fd = close_fd

        # does gzip exist?
        self._use_shell = os.path.exists('/bin/gzip')

    def writable(self):
        """Stream is writable."""
        return True

    def write(self, b: bytearray):
        """Write data into the stream.

        Args:
            b: Bytes of data to write.
        """
        self._buffer += b

    def flush(self):
        """Flush the stream.

        Compresses everything buffered so far and writes it to the wrapped
        file. NOTE(review): each flush emits an independent gzip member, so
        flushing mid-stream produces a multi-member file — confirm intent.

        Raises:
            RuntimeError: If the external gzip process reported an error.
        """
        # write compressed data
        if self._use_shell:
            p = subprocess.run(['/bin/gzip'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, input=self._buffer)
            if len(p.stderr) > 1:
                # bug fix: %-format the message; the original passed a tuple
                # to RuntimeError, so the stderr text was never interpolated.
                raise RuntimeError('Error from gzip: %s' % p.stderr)
            self._fd.write(p.stdout)
        else:
            self._fd.write(gzip.compress(self._buffer))

        # reset buffer
        self._buffer = b''

    def close(self):
        """Close the stream."""
        # flush
        self.flush()

        # close fd
        if self._close_fd:
            self._fd.close()
__all__ = ['GzipReader', 'GzipWriter']
| [
11748,
33245,
198,
11748,
850,
14681,
198,
11748,
18931,
198,
11748,
28686,
198,
11748,
308,
13344,
198,
198,
6738,
764,
85,
9501,
1330,
569,
10652,
8979,
628,
198,
6404,
796,
18931,
13,
1136,
11187,
1362,
7,
834,
3672,
834,
8,
628,
1... | 2.11398 | 2,053 |
from __future__ import absolute_import
from .densenet161 import densenet_no_top
from .resnet50_notop import resnet50_notop
| [
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
198,
6738,
764,
67,
18756,
316,
25948,
1330,
288,
18756,
316,
62,
3919,
62,
4852,
198,
6738,
764,
411,
3262,
1120,
62,
1662,
404,
1330,
581,
3262,
1120,
62,
1662,
404,
198
] | 3.075 | 40 |