id stringlengths 1 7 | text stringlengths 6 1.03M | dataset_id stringclasses 1
value |
|---|---|---|
1683165 | <filename>pushservice/src/Controller/SendLog.py
#######################################################################
#
# Push Service for Enigma-2
# Coded by betonme (c) 2012 <glaserfrank(at)gmail.com>
# Support: http://www.i-have-a-dreambox.com/wbb2/thread.php?threadid=167779
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
#######################################################################
# Config
from Components.config import ConfigYesNo, ConfigText, NoSave
# Plugin internal
from Plugins.Extensions.PushService.ControllerBase import ControllerBase
# Plugin specific
import os
import re
import fnmatch
# Constants
# Note: `_` (translation helper) is not imported here; it is expected to be
# provided globally by the Enigma-2 plugin environment.
SUBJECT = _("Found Log(s)")
BODY = _("Log(s) are attached")
class SendLog(ControllerBase):
    """
    Push-service controller that searches a directory (optionally including
    subfolders) for log files matching a filename pattern and, if configured,
    a content regex, and hands the matches to the push callback as
    attachments. After a successful push the logs can be deleted or renamed
    so they are not sent again.
    """
    ForceSingleInstance = True

    def __init__(self):
        # Is called on instance creation
        ControllerBase.__init__(self)
        self.logfiles = []

        # Default configuration
        self.setOption( 'path', NoSave(ConfigText( default = "/media/hdd/", fixed_size = False )), _("Path to check") )
        self.setOption( 'file_pattern', NoSave(ConfigText( default = "*.log", fixed_size = False )), _("Filename pattern (No RegExp)") )
        self.setOption( 'content_pattern', NoSave(ConfigText( default = ".*", fixed_size = False )), _("Content pattern (RegExp)") )
        self.setOption( 'scan_subs', NoSave(ConfigYesNo( default = False )), _("Scan subfolders") )
        self.setOption( 'rename_logs', NoSave(ConfigYesNo( default = False )), _("Rename log(s)") )
        self.setOption( 'delete_logs', NoSave(ConfigYesNo( default = False )), _("Delete log(s)") )

    def _collectMatches(self, directory, filenames, file_pattern, prog, match_all):
        """Append files in *directory* matching name and content patterns to self.logfiles."""
        for filename in fnmatch.filter(filenames, file_pattern):
            logfile = os.path.join(directory, filename)
            if match_all:
                # ".*" matches everything, so skip reading the file at all.
                self.logfiles.append(logfile)
            else:
                # `with` guarantees the handle is closed even if reading raises
                # (the original leaked the handle on exceptions).
                with open(logfile, "r") as infile:
                    for line in infile:
                        if prog.match(line):
                            self.logfiles.append(logfile)
                            break

    def run(self, callback, errback):
        # At the end a plugin has to call one of the functions: callback or errback
        # Callback should return with at least one of the parameter: Header, Body, Attachment
        # If empty or none is returned, nothing will be sent
        self.logfiles = []

        path = self.getValue('path')
        file_pattern = self.getValue('file_pattern')
        content_pattern = self.getValue('content_pattern')
        prog = re.compile(content_pattern)
        match_all = (content_pattern == ".*")

        if self.getValue('scan_subs'):
            for root, dirnames, filenames in os.walk(path):
                self._collectMatches(root, filenames, file_pattern, prog, match_all)
        else:
            self._collectMatches(path, os.listdir(path), file_pattern, prog, match_all)

        if self.logfiles:
            callback(SUBJECT, BODY, self.logfiles)
        else:
            callback()

    # Callback functions
    def callback(self):
        # Called after all services succeded
        if self.getValue('delete_logs'):
            # Delete logfiles
            for logfile in self.logfiles[:]:
                if os.path.exists(logfile):
                    os.remove(logfile)
                    self.logfiles.remove(logfile)

        elif self.getValue('rename_logs'):
            # Rename logfiles to avoid resending it
            # Adapted from autosubmit - instead of .sent we will use .pushed
            for logfile in self.logfiles[:]:
                if os.path.exists(logfile):
                    # BUG FIX: rename next to the original file. The original
                    # hard-coded "/media/hdd/" as destination, which moved logs
                    # out of subfolders and ignored the configured path.
                    os.rename(logfile, logfile + ".pushed")
                    self.logfiles.remove(logfile)

    def errback(self):
        # Called after all services has returned, but at least one has failed
        self.logfiles = []
| StarcoderdataPython |
154819 | <filename>yeast/steps/summarize_step.py
from pandas.core.groupby.generic import DataFrameGroupBy
from yeast.step import Step
from yeast.errors import YeastValidationError
class SummarizeStep(Step):
    """
    Summarize grouped columns into one row per group.

    Computes one or more numeric variables over the groups created by a
    preceding GroupByStep(). Refer to the Aggregations documentation for the
    full list of supported aggregations; the most common ones are `AggMean`,
    `AggMedian`, `AggCount`, `AggMax` and `AggMin`.

    Parameters:

    - `aggregations`: dict mapping each new column name to the aggregation
      specification that produces it, e.g. `{'new_column_name': AggMean('column')}`
    - `role`: String name of the role to control baking flows on new data.
      Default: `all`.

    Raises:

    - `YeastValidationError`: If there was not a GroupByStep before
    """

    def __init__(self, aggregations, role='all'):
        self.aggregations = aggregations
        super().__init__(needs_preparation=False, role=role)

    def do_bake(self, gdf):
        # Resolve every aggregation spec against the grouped frame, then
        # aggregate and turn the group keys back into regular columns.
        resolved = {name: spec.resolve(gdf) for name, spec in self.aggregations.items()}
        return gdf.agg(**resolved).reset_index()

    def do_validate(self, gdf):
        """Fail unless a GroupByStep was applied before this step."""
        if not isinstance(gdf, DataFrameGroupBy):
            raise YeastValidationError('This step must be executed after a GroupByStep(...)')
| StarcoderdataPython |
8296 | """
Ocropus's magic PIL-numpy array conversion routines. They express slightly
different behavior from PIL.Image.toarray().
"""
import unicodedata
import numpy as np
from PIL import Image
__all__ = ['pil2array', 'array2pil']
def pil2array(im: Image.Image, alpha: int = 0) -> np.array:
    """Convert a PIL image to a numpy array; bilevel ('1') images are
    expanded to 8-bit grayscale first.

    Note: *alpha* is accepted for API compatibility but is not used here.
    """
    source = im.convert('L') if im.mode == '1' else im
    return np.array(source)
def array2pil(a: np.array) -> Image.Image:
    """
    Convert a numpy array back into a PIL image.

    Supported inputs: uint8 arrays of rank 2 ('L') or rank 3 ('RGB'),
    and float32 arrays of rank 2 ('F').

    Raises:
        Exception: for unsupported ranks or dtypes.
    """
    # BUG FIX: np.ndarray.tostring() was deprecated in NumPy 1.19 and removed
    # in NumPy 2.0; tobytes() is the drop-in replacement.
    if a.dtype == np.dtype("B"):
        if a.ndim == 2:
            return Image.frombytes("L", (a.shape[1], a.shape[0]),
                                   a.tobytes())
        elif a.ndim == 3:
            return Image.frombytes("RGB", (a.shape[1], a.shape[0]),
                                   a.tobytes())
        else:
            raise Exception("bad image rank")
    elif a.dtype == np.dtype('float32'):
        return Image.frombytes("F", (a.shape[1], a.shape[0]), a.tobytes())
    else:
        raise Exception("unknown image type")
def is_bitonal(im: Image.Image) -> bool:
    """
    Tests a PIL.Image for bitonality.

    Args:
        im (PIL.Image.Image): Image to test

    Returns:
        True if the image contains only two different color values. False
        otherwise.
    """
    # getcolors(2) returns None when the image has more than 2 colors.
    # Call it once instead of twice — each call scans the whole image.
    colors = im.getcolors(2)
    return colors is not None and len(colors) == 2
def get_im_str(im: Image.Image) -> str:
    """Return a short identifier for *im*: its filename when known, else str(im)."""
    if hasattr(im, 'filename'):
        return im.filename
    return str(im)
# Unicode general categories regarded as printable/visible glyphs,
# built once instead of rebuilding the tuples on every call.
_PRINTABLE_CATEGORIES = frozenset((
    'LC', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu',        # letters
    'Nd', 'Nl', 'No',                          # numbers
    'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps',  # punctuation
    'Sc', 'Sk', 'Sm', 'So',                    # symbols
))


def is_printable(char: str) -> bool:
    """
    Determines if a code point is printable/visible when printed.

    Args:
        char (str): Input code point.

    Returns:
        True if printable, False otherwise.
    """
    return unicodedata.category(char) in _PRINTABLE_CATEGORIES
def make_printable(char: str) -> str:
    """
    Takes a Unicode code point and return a printable representation of it.

    Args:
        char (str): Input code point

    Returns:
        Either the original code point, the name of the code point if it is a
        combining mark, whitespace etc., or the hex code if it is a control
        symbol.
    """
    if not char or is_printable(char):
        return char
    elif unicodedata.category(char) in ('Cc', 'Cs', 'Co'):
        return '0x{:x}'.format(ord(char))
    else:
        try:
            return unicodedata.name(char)
        except ValueError:
            # BUG FIX: unicodedata.name() raises ValueError for code points
            # without a name entry (e.g. unassigned); fall back to hex.
            return '0x{:x}'.format(ord(char))
| StarcoderdataPython |
3344379 | """
WRITEME
"""
from __future__ import absolute_import, print_function, division
import logging
import warnings
import theano
from theano import gof
import theano.gof.vm
from theano.configparser import config
from six import string_types
from theano.compile.function_module import Supervisor
_logger = logging.getLogger('theano.compile.mode')
# If a string is passed as the linker argument in the constructor for
# Mode, it will be used as the key to retrieve the real linker in this
# dictionary
predefined_linkers = {
'py': gof.PerformLinker(), # Use allow_gc Theano flag
'c': gof.CLinker(), # Don't support gc. so don't check allow_gc
'c|py': gof.OpWiseCLinker(), # Use allow_gc Theano flag
'c|py_nogc': gof.OpWiseCLinker(allow_gc=False),
'vm': gof.vm.VM_Linker(use_cloop=False), # Use allow_gc Theano flag
'cvm': gof.vm.VM_Linker(use_cloop=True), # Use allow_gc Theano flag
'vm_nogc': gof.vm.VM_Linker(allow_gc=False, use_cloop=False),
'cvm_nogc': gof.vm.VM_Linker(allow_gc=False, use_cloop=True)}
def register_linker(name, linker):
    """Add a `Linker` which can be referred to by `name` in `Mode`."""
    if name not in predefined_linkers:
        predefined_linkers[name] = linker
    else:
        raise ValueError('Linker name already taken: %s' % name)
# If a string is passed as the optimizer argument in the constructor
# for Mode, it will be used as the key to retrieve the real optimizer
# in this dictionary
exclude = []
if not theano.config.cxx:
exclude = ['cxx_only']
OPT_NONE = gof.Query(include=[], exclude=exclude)
# Even if multiple merge optimizer call will be there, this shouldn't
# impact performance.
OPT_MERGE = gof.Query(include=['merge'], exclude=exclude)
OPT_FAST_RUN = gof.Query(include=['fast_run'], exclude=exclude)
OPT_FAST_RUN_STABLE = OPT_FAST_RUN.requiring('stable')
# We need fast_compile_gpu here. As on the GPU, we don't have all
# operation that exist in fast_compile, but have some that get
# introduced in fast_run, we want those optimization to also run in
# fast_compile+gpu. We can't tag them just as 'gpu', as this would
# exclude them if we exclude 'gpu'.
OPT_FAST_COMPILE = gof.Query(include=['fast_compile', 'fast_compile_gpu'],
exclude=exclude)
OPT_STABILIZE = gof.Query(include=['fast_run'], exclude=exclude)
OPT_STABILIZE.position_cutoff = 1.5000001
OPT_NONE.name = 'OPT_NONE'
OPT_MERGE.name = 'OPT_MERGE'
OPT_FAST_RUN.name = 'OPT_FAST_RUN'
OPT_FAST_RUN_STABLE.name = 'OPT_FAST_RUN_STABLE'
OPT_FAST_COMPILE.name = 'OPT_FAST_COMPILE'
OPT_STABILIZE.name = 'OPT_STABILIZE'
OPT_O2 = OPT_FAST_COMPILE.including('fusion')
OPT_O3 = OPT_FAST_RUN.excluding('inplace')
OPT_UNSAFE = OPT_O3.including('unsafe')
OPT_O2.name = 'OPT_O2'
OPT_O3.name = 'OPT_O3'
OPT_UNSAFE.name = 'OPT_UNSAFE'
predefined_optimizers = {
None: OPT_NONE,
'None': OPT_NONE,
'merge': OPT_MERGE,
'o4': OPT_FAST_RUN,
'o3': OPT_O3,
'o2': OPT_O2,
'o1': OPT_FAST_COMPILE,
'unsafe': OPT_UNSAFE,
'fast_compile': OPT_FAST_COMPILE,
'fast_run': OPT_FAST_RUN,
'fast_run_stable': OPT_FAST_RUN_STABLE,
'stabilize': OPT_STABILIZE}
def register_optimizer(name, opt):
    """Add a `Optimizer` which can be referred to by `name` in `Mode`."""
    if name not in predefined_optimizers:
        predefined_optimizers[name] = opt
    else:
        raise ValueError('Optimizer name already taken: %s' % name)
class AddDestroyHandler(gof.Optimizer):
    """
    This optimizer performs two important functions:

    1) It has a 'requirement' of the destroyhandler. This means that the fgraph
    will include it as a feature for this optimization, and keep this feature
    enabled for subsequent optimizations. All optimizations that work inplace
    on any of their inputs must run *after* this optimization to ensure that
    the DestroyHandler has been included in the fgraph.

    2) It tries to replace each output with an Op that purports to destroy it
    (but it won't I promise). If this replacement succeeds it means that
    there is a bug in theano. It should not be possible to destroy outputs.
    """

    def apply(self, fgraph):
        # Warn when the graph was built without a Supervisor feature, since
        # protected inputs would then not be guarded against inplace ops.
        has_supervisor = any(isinstance(feature, Supervisor)
                             for feature in fgraph._features)
        if not has_supervisor:
            warnings.warn("WARNING: Supervisor is not added. Please build a FunctionGraph"
                          "via theano.compile.function_module.std_graph()"
                          "or add the Supervisor class manually.",
                          stacklevel=3)

    def add_requirements(self, fgraph):
        super(AddDestroyHandler, self).add_requirements(fgraph)
        fgraph.attach_feature(gof.DestroyHandler())
class AddFeatureOptimizer(gof.Optimizer):
    """
    This optimizer adds a provided feature to the function graph.
    """

    def __init__(self, feature):
        # Feature instance that will be attached when requirements are added.
        self.feature = feature

    def add_requirements(self, fgraph):
        super(AddFeatureOptimizer, self).add_requirements(fgraph)
        fgraph.attach_feature(self.feature)
class PrintCurrentFunctionGraph(gof.Optimizer):
    """
    This optimizer is for debugging.

    Toss it into the optimization pipeline to see the state of things at any
    given point.
    """

    def __init__(self, header):
        # Label printed before the graph dump, to identify the pipeline stage.
        self.header = header

    def apply(self, fgraph):
        # Imported here to avoid a module-load-time circular import.
        import theano.printing
        print("PrintCurrentFunctionGraph:", self.header)
        theano.printing.debugprint(fgraph.outputs)
optdb = gof.SequenceDB()
optdb.register('merge1', gof.MergeOptimizer(),
0, 'fast_run', 'fast_compile', 'merge')
# After scan1 opt at 0.5 and before ShapeOpt at 1
# This should only remove nodes.
# The opt should not do anything that need shape inference.
# New nodes that don't have infer_shape need that the original node
# also don't have infer_shape
local_useless = gof.optdb.LocalGroupDB(apply_all_opts=True, profile=True)
optdb.register(
'useless',
gof.optdb.TopoDB(local_useless,
failure_callback=gof.opt.NavigatorOptimizer.warn_inplace),
0.6, 'fast_run', 'fast_compile')
optdb.register('merge1.1', gof.MergeOptimizer(),
0.65, 'fast_run', 'fast_compile', 'merge')
# rearranges elemwise expressions
optdb.register('canonicalize', gof.EquilibriumDB(ignore_newtrees=False),
1, 'fast_run', 'fast_compile', 'canonicalize_db')
# Register in the canonizer Equilibrium as a clean up opt the merge opt.
# Without this, as the equilibrium have ignore_newtrees=False, we
# won't merge all nodes if it is set as a global optimizer with
# final_opt=True.
# We need a new instance of MergeOptimizer to don't have its name
# changed by other usage of it.
optdb['canonicalize'].register("merge", gof.opt.MergeOptimizer(), 'fast_run',
"fast_compile", cleanup=True)
optdb.register('merge1.2', gof.MergeOptimizer(),
1.2, 'fast_run', 'fast_compile', 'merge')
optdb.register('Print1.21', PrintCurrentFunctionGraph('Post-canonicalize'),
1.21,) # 'fast_run', 'fast_compile')
# replace unstable subgraphs
optdb.register('stabilize', gof.EquilibriumDB(),
1.5, 'fast_run')
optdb.register('Print1.51', PrintCurrentFunctionGraph('Post-stabilize'),
1.51,) # 'fast_run', 'fast_compile')
# misc special cases for speed
optdb.register('specialize', gof.EquilibriumDB(),
2, 'fast_run', 'fast_compile_gpu')
# misc special cases for speed that break canonicalization
optdb.register('uncanonicalize', gof.EquilibriumDB(),
3, 'fast_run')
# misc special cases for speed that are dependent on the device.
optdb.register('specialize_device', gof.EquilibriumDB(),
48.6, 'fast_compile', 'fast_run') # must be after gpu stuff at 48.5
# especially constant merge
optdb.register('merge2', gof.MergeOptimizer(),
49, 'fast_run', 'merge')
optdb.register('add_destroy_handler', AddDestroyHandler(),
49.5, 'fast_run', 'inplace')
# final pass just to make sure
optdb.register('merge3', gof.MergeOptimizer(),
100, 'fast_run', 'merge')
class Mode(object):
    """
    The Mode represents a way to optimize and then link a computation graph.

    Parameters
    ----------
    optimizer : a structure of type Optimizer
        An Optimizer may simplify the math, put similar computations together,
        improve numerical stability and various other improvements.
    linker : a structure of type Linker
        A Linker decides which implementations to use (C or Python, for example)
        and how to string them together to perform the computation.

    See Also
    --------
    predefined_linkers
    predefined_optimizers
    predefined_modes

    """

    def __init__(self, linker=None, optimizer='default'):
        if linker is None:
            linker = config.linker
        # BUG FIX: the original compared with `optimizer is 'default'`, which
        # relies on CPython string interning (identity, not equality) and
        # raises a SyntaxWarning on modern interpreters.
        if optimizer == 'default':
            optimizer = config.optimizer
        Mode.__setstate__(self, (linker, optimizer))

        # self.provided_optimizer - typically the `optimizer` arg.
        # But if the `optimizer` arg is keyword corresponding to a predefined
        # Query, then this stores the query
        # self._optimizer - typically same as provided_optimizer??
        # self.__get_optimizer - returns self._optimizer (possibly querying
        # optdb with self._optimizer)
        # self.optimizer - property that returns __get_optimizer()

    def __getstate__(self):
        return (self.provided_linker, self.provided_optimizer)

    def __setstate__(self, state):
        # Resolve string/None linker and optimizer names through the
        # module-level registries, keeping the originals in provided_*.
        linker, optimizer = state
        self.provided_linker = linker
        self.provided_optimizer = optimizer
        if isinstance(linker, string_types) or linker is None:
            linker = predefined_linkers[linker]
        self.linker = linker
        if isinstance(optimizer, string_types) or optimizer is None:
            optimizer = predefined_optimizers[optimizer]
        if isinstance(optimizer, gof.Query):
            self.provided_optimizer = optimizer
        self._optimizer = optimizer
        self.call_time = 0
        self.fn_time = 0
        linker.mode = self  # TODO: WHY IS THIS HERE?

    def __str__(self):
        return "%s(linker = %s, optimizer = %s)" % (self.__class__.__name__,
                                                    self.provided_linker,
                                                    self.provided_optimizer)

    def __get_optimizer(self):
        # A Query is resolved against optdb lazily, each time it is asked for.
        if isinstance(self._optimizer, gof.Query):
            return optdb.query(self._optimizer)
        else:
            return self._optimizer

    optimizer = property(__get_optimizer)

    def get_linker_optimizer(self, linker, optimizer):
        """Resolve string/None names to concrete linker/optimizer objects."""
        if isinstance(linker, string_types) or linker is None:
            linker = predefined_linkers[linker]
        if isinstance(optimizer, string_types) or optimizer is None:
            optimizer = predefined_optimizers[optimizer]
        return (linker, optimizer)

    def including(self, *tags):
        link, opt = self.get_linker_optimizer(self.provided_linker,
                                              self.provided_optimizer)
        # N.B. opt might be a Query instance, not sure what else it might be...
        # string? Optimizer? OptDB? who knows???
        return self.clone(optimizer=opt.including(*tags))

    def register(self, *optimizations):
        """Adds new optimization instances to a mode.

        This method adds new optimization instances to a compilation mode. It
        works like the `including()` method but takes as inputs optimization
        instances to add instead of tags.

        Parameters
        ----------
        optimizations :
            Every element of `optimizations` is a tuple containing an
            optimization instance and a floating point value indicating the
            position at which to insert the optimization in the mode.

        Returns
        -------
        Mode
            Copy of the current Mode which includes the provided
            optimizations.
        """
        link, opt = self.get_linker_optimizer(self.provided_linker,
                                              self.provided_optimizer)
        return self.clone(optimizer=opt.register(*optimizations))

    def excluding(self, *tags):
        """Return a copy of this mode whose optimizer excludes *tags*."""
        link, opt = self.get_linker_optimizer(self.provided_linker,
                                              self.provided_optimizer)
        return self.clone(optimizer=opt.excluding(*tags))

    def requiring(self, *tags):
        """Return a copy of this mode whose optimizer requires *tags*."""
        link, opt = self.get_linker_optimizer(self.provided_linker,
                                              self.provided_optimizer)
        return self.clone(optimizer=opt.requiring(*tags))

    def clone(self, link_kwargs=None, optimizer="", **kwargs):
        """
        Create a new instance of this Mode.

        Keyword arguments can be provided for the linker,
        in which case its `clone` method will be called with these
        arguments.
        """
        if link_kwargs is None:
            link_kwargs = {}
        new_linker = self.linker.clone(**link_kwargs)
        if optimizer == "":
            optimizer = self.provided_optimizer
        new_mode = type(self)(linker=new_linker,
                              optimizer=optimizer)
        return new_mode
# If a string is passed as the mode argument in function or
# FunctionMaker, the Mode will be taken from this dictionary using the
# string as the key
# Use VM_linker to allow lazy evaluation by default.
FAST_COMPILE = Mode(theano.gof.vm.VM_Linker(use_cloop=False, c_thunks=False),
                    'fast_compile')
if theano.config.cxx:
    # A C++ compiler is available: use the C implementation of the VM linker.
    FAST_RUN = Mode('cvm', 'fast_run')
else:
    FAST_RUN = Mode('vm', 'fast_run')
predefined_modes = {'FAST_COMPILE': FAST_COMPILE,
                    'FAST_RUN': FAST_RUN,
                    }
# Cache slot for the default mode built by get_mode(None).
instantiated_default_mode = None
def get_mode(orig_string):
    """
    Resolve *orig_string* (or ``config.mode`` when it is None) to a Mode.

    Strings name either one of the special mode classes ('Mode', 'DebugMode',
    'NanGuardMode') or an entry of `predefined_modes`. The default mode
    (``orig_string is None``) is built once, adjusted by the
    optimizer_excluding/including/requiring config flags, and cached in
    `instantiated_default_mode`.
    """
    if orig_string is None:
        string = config.mode
    else:
        string = orig_string
    if not isinstance(string, string_types):
        return string  # it is hopefully already a mode...
    global instantiated_default_mode
    # The default mode is cached. However, config.mode can change
    # If instantiated_default_mode has the right class, use it.
    if orig_string is None and instantiated_default_mode:
        if string in predefined_modes:
            default_mode_class = predefined_modes[string].__class__.__name__
        else:
            default_mode_class = string
        if (instantiated_default_mode.__class__.__name__ ==
                default_mode_class):
            return instantiated_default_mode

    if string in ['Mode', 'DebugMode', 'NanGuardMode']:
        if string == 'DebugMode':
            # need to import later to break circular dependency.
            from .debugmode import DebugMode
            # DebugMode use its own linker.
            ret = DebugMode(optimizer=config.optimizer)
        elif string == 'NanGuardMode':
            # need to import later to break circular dependency.
            from .nanguardmode import NanGuardMode
            # NanGuardMode use its own linker.
            ret = NanGuardMode(True, True, True, optimizer=config.optimizer)
        else:
            # string == 'Mode' is the only remaining case: construct it
            # directly instead of eval()-ing the class name (the original
            # used eval, which was both unsafe and harder to trace).
            ret = Mode(linker=config.linker, optimizer=config.optimizer)
    elif string in predefined_modes:
        ret = predefined_modes[string]
    else:
        raise Exception("No predefined mode exist for string: %s" % string)

    if orig_string is None:
        # Build and cache the default mode
        if theano.config.optimizer_excluding:
            ret = ret.excluding(*theano.config.optimizer_excluding.split(':'))
        if theano.config.optimizer_including:
            ret = ret.including(*theano.config.optimizer_including.split(':'))
        if theano.config.optimizer_requiring:
            ret = ret.requiring(*theano.config.optimizer_requiring.split(':'))
        instantiated_default_mode = ret

    return ret
def get_default_mode():
    # Resolve (and cache) the mode named by config.mode.
    return get_mode(None)
def register_mode(name, mode):
    """
    Add a `Mode` which can be referred to by `name` in `function`.
    """
    if name not in predefined_modes:
        predefined_modes[name] = mode
    else:
        raise ValueError('Mode name already taken: %s' % name)
| StarcoderdataPython |
1727078 | import pytest
from lhotse.cut import CutSet
from lhotse.dataset import UnsupervisedDataset
@pytest.fixture
def libri_cut_set():
    # Small LibriSpeech cut manifest used as the fixture for these tests.
    return CutSet.from_yaml('test/fixtures/libri/cuts.yml')
def test_unsupervised_dataset(libri_cut_set):
    """The dataset wraps the single fixture cut and yields its feature matrix."""
    ds = UnsupervisedDataset(libri_cut_set)
    assert len(ds) == 1
    features = ds[0]
    assert features.shape == (1000, 23)
| StarcoderdataPython |
class node:
    """A singly linked list node holding a value and a pointer to the next node."""

    def __init__(self, data, next=None):
        self.value = data
        # BUG FIX: the original ignored the *next* argument and always set
        # None, making the optional-successor parameter useless.
        self.next = next

    def __str__(self):
        # __str__ must return a str; wrap the payload so non-string values
        # (e.g. ints) do not raise TypeError.
        return str(self.value)
def createList(l=None):
    """
    Build a linked list from the values of *l* and return its head node
    (None for an empty/missing input).

    Fixes in this version: no mutable default argument, no in-place
    mutation of the caller's list, and no IndexError on empty input.
    """
    if not l:
        return None
    head = node(l[0])
    tail = head
    for item in l[1:]:
        tail.next = node(item)
        tail = tail.next
    return head
def printList(H):
    """Print every value in the list headed by *H*, each followed by a space."""
    parts = []
    current = H
    while current is not None:
        parts.append(str(current))
        parts.append(" ")
        current = current.next
    print("".join(parts))
def mergeOrderesList(p, q):
    """Merge two linked lists ordered by numeric value into one ordered list."""
    dummy = node(None)   # sentinel head, discarded before returning
    tail = dummy
    while p is not None and q is not None:
        # Values are compared numerically; ties take from q first.
        if int(p.value) < int(q.value):
            tail.next = p
            p = p.next
        else:
            tail.next = q
            q = q.next
        tail = tail.next
    # Attach whichever list still has nodes remaining (possibly neither).
    tail.next = p if p is not None else q
    return dummy.next
#################### FIX comand ####################
# input only a number save in L1,L2
# Read two comma-separated numeric lists from one space-separated line,
# e.g. "1,3,5 2,4,6", build both linked lists and print their merge.
L1,L2 = input("Enter 2 Lists : ").split()
L1,L2 = L1.split(","),L2.split(",")
LL1 = createList(L1)
LL2 = createList(L2)
print('LL1 : ',end='')
printList(LL1)
print('LL2 : ',end='')
printList(LL2)
m = mergeOrderesList(LL1,LL2)
print('Merge Result : ',end='')
printList(m)
58894 | <filename>abstract_models/bisim/pick_puck.py
import numpy as np
class BisimPickPuckModel:
    """Bisimulation state abstraction for the pick-puck task."""

    @classmethod
    def get_state_block(cls, hand, layers, action_grid):
        """
        Map a state to an abstract block index.

        Any state with something in the hand maps to one extra block (index
        equal to the number of grid positions); otherwise the block is the
        flat grid index of the puck's position.
        """
        n_positions = action_grid.shape[0] * action_grid.shape[1]
        flat_grid = np.reshape(action_grid, (-1, 2))
        if hand is not None:
            return n_positions
        puck = layers[0][0]
        return cls.get_coords(flat_grid, puck.x, puck.y)

    @staticmethod
    def get_coords(action_grid, x, y):
        """Return the row index of (x, y) in the flattened (N, 2) coordinate grid."""
        row_matches = np.sum(action_grid == [x, y], axis=1) == 2
        return np.where(row_matches)[0][0]
| StarcoderdataPython |
18655 | <filename>eval.py<gh_stars>1-10
import torch
from torch.utils.data import Dataset
import numpy as np
import os
import pickle
from madmom.features import DBNBeatTrackingProcessor
import torch
from model import BeatTrackingNet
from utils import init_single_spec
from mir_eval.beat import evaluate
from data import BallroomDataset
from beat_tracker import predict_beats_from_spectrogram
import yaml
import sys
import pdb
# import config
with open('config.yaml', 'r') as f:
config = yaml.load(f, Loader=yaml.FullLoader)
def evaluate_model(
        model_checkpoint,
        spectrogram,
        ground_truth):
    """
    Evaluate one checkpoint on a single spectrogram: predict the beats and
    score them against the ground truth with every beat-tracking metric
    offered by mir_eval.beat.
    """
    predicted_beats = predict_beats_from_spectrogram(
        spectrogram,
        model_checkpoint)
    return evaluate(ground_truth, predicted_beats)
def evaluate_model_on_dataset(
        model_checkpoint,
        dataset,
        ground_truths):
    """
    Run through a whole instance of torch.utils.data.Dataset and compare the
    model's predictions to the given ground truths.

    Args:
        model_checkpoint: checkpoint path forwarded to evaluate_model.
        dataset: dataset whose items expose item["spectrogram"].
        ground_truths: iterable (sequence OR generator) of ground-truth beat
            annotations, one per dataset example, in dataset order.

    Returns:
        dict with "total_examples" and "scores" (metric name -> mean value).
    """
    # Create dicts to store scores and histories
    mean_scores = {}
    running_scores = {}
    total_examples = 0
    # BUG FIX: iterate the ground truths instead of indexing them — the
    # caller in this file passes a generator, and `ground_truths[i]` raises
    # TypeError on generators.
    for i, ground_truth in enumerate(ground_truths):
        spectrogram = dataset[i]["spectrogram"].unsqueeze(0)
        scores = evaluate_model(
            model_checkpoint,
            spectrogram,
            ground_truth)
        for metric in scores:
            if metric not in running_scores:
                running_scores[metric] = 0.0
            running_scores[metric] += scores[metric]
        total_examples += 1
        # Each iteration, report the index and the running score totals.
        print(f"{i}, {str(running_scores)}")

    # After all iterations, calculate mean scores.
    # BUG FIX: count examples explicitly — the original used the loop
    # variable `i` after the loop, which is a NameError on an empty dataset.
    for metric in running_scores:
        mean_scores[metric] = running_scores[metric] / total_examples

    # Return a dictionary of helpful information
    return {
        "total_examples": total_examples,
        "scores": mean_scores
    }
dataset = BallroomDataset()
# One ground-truth beat annotation per dataset example (lazy generator).
ground_truths = (dataset.get_ground_truth(i) for i in range(len(dataset)))
# Run evaluation
evaluate_model_on_dataset(config['default_checkpoint_path'],
                          dataset,
                          ground_truths)
# -*- coding: utf-8 -*-
# https://www.apache.org/licenses/LICENSE-2.0.html
# World of Tanks client mod: monkey-patches several game-client classes to
# decorate player names/badges with recent-stat data. The whole setup runs
# inside one try block so a failure in any patch disables the mod gracefully.
try:
    import traceback
    import re

    import BigWorld
    from gui.Scaleform.daapi.view.battle.shared.stats_exchage.vehicle import VehicleInfoComponent
    from gui.battle_control.arena_info.arena_dp import ArenaDataProvider
    from gui.battle_control.arena_info.player_format import PlayerFullNameFormatter, _PlayerFormatResult
    from gui.SystemMessages import SM_TYPE, pushMessage
    from gui.Scaleform.daapi.view.lobby.LobbyView import LobbyView
    from notification.settings import NOTIFICATION_TYPE
    from notification.actions_handlers import NotificationsActionsHandlers

    from mod_recent_stat_loader import ModRecentStat
    from mod_recent_stat_logging import logInfo, logError

    logInfo("Mod initialization is started.")

    modRecentStat = ModRecentStat()

    # Hook: preload stat data for the whole roster, then defer to the original.
    def buildVehiclesDataNew(self, vehicles):
        try:
            modRecentStat.loadPlayerDataByVehicleList(vehicles)
        except BaseException:
            logError("Error in buildVehiclesDataNew", traceback.format_exc())

        buildVehiclesDataOld(self, vehicles)

    # Swap-in pattern: install the hook and keep the original in *Old.
    ArenaDataProvider.buildVehiclesData, buildVehiclesDataOld = buildVehiclesDataNew, ArenaDataProvider.buildVehiclesData

    # Hook: decorate the formatted player names with stat info.
    def formatNew(self, vInfoVO, playerName=None):
        result = formatOld(self, vInfoVO, playerName)
        newPlayerName = result.playerName
        newPlayerFakeName = result.playerFakeName

        try:
            accountDBID = vInfoVO.player.accountDBID
            newPlayerName = modRecentStat.formatPlayerName(accountDBID, result.playerName)
            newPlayerFakeName = modRecentStat.formatPlayerName(accountDBID, result.playerFakeName)
        except BaseException:
            logError("Error in formatNew", traceback.format_exc())

        return _PlayerFormatResult(result.playerFullName, newPlayerName, newPlayerFakeName, result.clanAbbrev,
                                   result.regionCode, result.vehicleName)

    PlayerFullNameFormatter.format, formatOld = formatNew, PlayerFullNameFormatter.format

    # Hook: open http(s) links from notification messages in the web browser
    # instead of passing them to the default handler.
    def handleActionNew(self, model, typeID, entityID, actionName):
        needOpen = False
        try:
            needOpen = typeID == NOTIFICATION_TYPE.MESSAGE and re.match('https?://', actionName, re.I)
            if needOpen:
                BigWorld.wg_openWebBrowser(actionName)
        except BaseException:
            logError("Error in handleActionNew", traceback.format_exc())

        if not needOpen:
            handleActionOld(self, model, typeID, entityID, actionName)

    NotificationsActionsHandlers.handleAction, handleActionOld = handleActionNew, NotificationsActionsHandlers.handleAction

    # Hook: show the welcome/info messages once, after the lobby is populated.
    def LobbyView_populateNew(self):
        LobbyView_populateOld(self)

        try:
            if not modRecentStat.notificationsShowed:
                modRecentStat.notificationsShowed = True

                pushMessage(modRecentStat.getWelcomeMessage(), SM_TYPE.Information)
                pushMessage(modRecentStat.getInfoMessage(), SM_TYPE.Information)
        except BaseException:
            logError("Error in LobbyView_populateNew", traceback.format_exc())

    LobbyView._populate, LobbyView_populateOld = LobbyView_populateNew, LobbyView._populate

    # Hook: replace the player's badge with a stat-color badge when available.
    def addVehicleInfoNew(self, vInfoVO, overrides):
        returnValue = addVehicleInfoOld(self, vInfoVO, overrides)

        try:
            colorId = modRecentStat.getPlayerColorId(vInfoVO.player.accountDBID)

            if colorId is not None:
                badgesDict = {
                    "badgeType": "badge_%s" % (10 + colorId),
                }

                # TODO: show correct badge (if persists) in center:
                # if "badgeType" in self._data:
                #     badgesDict["suffixBadgeType"] = self._data["badgeType"]

                returnValue = self._data.update(badgesDict)
            else:
                self._data.pop("badgeType", None)
        except BaseException:
            logError("Error in addVehicleInfoNew", traceback.format_exc())

        return returnValue

    VehicleInfoComponent.addVehicleInfo, addVehicleInfoOld = addVehicleInfoNew, VehicleInfoComponent.addVehicleInfo

    logInfo("Mod initialization is finished.")
except BaseException as e:
    # Never crash the game client: report the failure and stay disabled.
    print("The Recent Stat of You: can't init the mod because of %s" % e)

    try:
        import traceback
        print(traceback.format_exc())
    except BaseException:
        pass
3286358 | <reponame>NumberAI/python-bandwidth-iris<gh_stars>1-10
#!/usr/bin/env python
from iris_sdk.models.maps.base_map import BaseMap
class SubscriptionsMap(BaseMap):
    """Attribute map for subscription payloads (see BaseMap for the mapping mechanics)."""
    # Mapped subscription data; populated by the BaseMap machinery.
    subscription = None
3392057 | # pip install requests
import abc
import html
import json
import os
import re
import requests
IMAGES_DIR = "../images/"
image_storage = []
class ParsedImage(abc.ABC):
    """Abstract base class for an image entry parsed from a search-result page."""

    # FIX: the original used the deprecated @abc.abstractclassmethod on an
    # instance method, and without an ABC base the abstractness was never
    # enforced; abc.ABC + @abc.abstractmethod makes the contract real.
    @abc.abstractmethod
    def get_image_src(self):
        """Return the direct URL of the image file."""
class GoogleImage(ParsedImage):
    """One image result parsed from the JSON metadata embedded in a Google Images page."""

    def __init__(self, json_image):
        # json_image is one decoded per-result JSON blob from the page source.
        self.id = json_image["id"]
        self.height = json_image["oh"]        # original height
        self.width = json_image["ow"]         # original width
        self.description1 = json_image["pt"]
        self.description2 = json_image["s"]
        self.source_url = json_image["isu"]   # page hosting the image
        self.image_src = json_image["ou"]     # direct URL of the image file

    def get_human_size(self):
        """Return the image dimensions as 'HEIGHTxWIDTH', e.g. '480x640'."""
        return str(self.height) + "x" + str(self.width)

    def get_image_src(self):
        # BUG FIX: self.image_src is a URL string; the original returned
        # `self.image_src(self)`, which raised TypeError by calling a str.
        return self.image_src
def make_dir(search_keyword):
    """Ensure the image folder for *search_keyword* exists and return its path with a trailing '/'."""
    # Spaces and '+' are normalized so the keyword yields a safe folder name.
    folder = IMAGES_DIR + search_keyword.replace(" ", "_").replace("+", "-")
    if not os.path.isdir(folder):
        os.makedirs(folder)
    return folder + "/"
def parse_google(keyword):
    """Scrape Google Images results for *keyword* into the global image_storage."""
    # TODO Accept param search
    google_url = 'https://www.google.si/search?q={0}&espv=1&site=webhp&source=lnms&tbm=isch&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg&iact=ms&start=250&num=230'
    request_header = {"User-Agent":
                          "Mozilla/5.0 (Windows NT 6.1) "
                          "AppleWebKit/537.36 (KHTML, like Gecko) "
                          "Chrome/41.0.2228.0 "
                          "Safari/537.36"}

    escaped_keyword = html.escape(keyword)
    page_source = requests.get(url=google_url.format(escaped_keyword),
                               headers=request_header).content.decode("utf-8")

    # Each result's metadata is embedded in the page as a small JSON object.
    for raw_match in re.finditer('({"clt":|{"cb":|{"cl":).+?(?=<)', page_source):
        parsed = json.loads(raw_match.group(0))
        print(parsed)
        image_storage.append(GoogleImage(parsed))
def download_and_store(base_directory, filename_keyword):
    """
    Download every image in the global image_storage and save supported
    formats into *base_directory*, naming each file with *filename_keyword*,
    a running index and the image dimensions.
    """
    iteration = 1
    for image in image_storage:
        print(image.image_src)
        response = requests.get(image.image_src)
        if response.status_code == 200:
            file_type = image.image_src.split("/")[-1]
            if file_type.endswith((".jpg", ".png", ".bmp", "tiff", "bmp")):
                file_name = base_directory + filename_keyword + "_" + str(
                    iteration) + "_" + image.get_human_size() + "." + file_type.split(".")[-1]
                # BUG FIX: the payload is bytes, so the file must be opened in
                # binary mode ("wb"); text mode ("w") raised TypeError.
                # (Also renamed the handle: `file` shadowed a builtin name.)
                with open(file_name, "wb") as out_file:
                    out_file.write(response.content)
                iteration = iteration + 1
def go():
    """Demo entry point: scrape images for a fixed keyword and save them."""
    keyword = "monkey"
    parse_google(keyword)
    target_dir = make_dir(keyword)
    download_and_store(target_dir, keyword)
go()
| StarcoderdataPython |
52817 | <reponame>XiYe20/VPTR
import tensorflow as tf
import numpy as np
from PIL import Image
from pathlib import Path
import shutil
import os
from tqdm import tqdm
#Requirements: tensorflow 2.6.0
def read_BAIR_tf2_record(records_dir, save_dir):
    """Extract BAIR robot-push tfrecord shards into per-example PNG frames.

    Args:
        records_dir: directory containing the ``*.tfrecords`` shards.
        save_dir: output root; one ``example_<n>`` sub-directory is created
            per trajectory, holding its 30 frames as ``0000.png``..``0029.png``.
    """
    ORIGINAL_HEIGHT = 64
    ORIGINAL_WIDTH = 64
    COLOR_CHAN = 3
    records_path = Path(records_dir)
    tf_record_files = sorted(list(records_path.glob('*.tfrecords')))
    dataset = tf.data.TFRecordDataset(tf_record_files)
    # NOTE(review): progress total assumes 256 trajectories per shard —
    # TODO confirm this holds for every split being converted.
    pgbar = tqdm(total = 256*len(tf_record_files), desc = 'Processing...')
    for example_id, example in enumerate(dataset):
        example_dir = Path(save_dir).joinpath(f'example_{example_id}')
        # Start each example from a clean directory.
        if example_dir.exists():
            shutil.rmtree(example_dir.absolute())
        example_dir.mkdir(parents=True, exist_ok=True)
        # Each tf.Example stores 30 time steps, keyed '<t>/image_aux1/encoded'.
        for i in range(0, 30):
            image_main_name = str(i) + '/image_main/encoded'
            image_aux1_name = str(i) + '/image_aux1/encoded'
            features = {image_aux1_name: tf.io.FixedLenFeature([1], tf.string),
                        image_main_name: tf.io.FixedLenFeature([1], tf.string)}
            features = tf.io.parse_single_example(example, features=features)
            # Raw uint8 bytes -> (64, 64, 3) image tensor.
            image_aux1 = tf.io.decode_raw(features[image_aux1_name], tf.uint8)
            image_aux1 = tf.reshape(image_aux1, shape=[1, ORIGINAL_HEIGHT * ORIGINAL_WIDTH * COLOR_CHAN])
            image_aux1 = tf.reshape(image_aux1, shape=[ORIGINAL_HEIGHT, ORIGINAL_WIDTH, COLOR_CHAN])
            frame_name = example_dir.joinpath(f'{i:04n}.png')
            frame = Image.fromarray(image_aux1.numpy(), 'RGB')
            frame.save(frame_name.absolute().as_posix())
            #frame = tf.image.encode_png(image_aux1)
            #with open(frame_name.absolute().as_posix(), 'wb') as f:
            #    f.write(frame)
        pgbar.update(1)
def resize_im(features, image_name, conf, height = None):
    """Decode a raw uint8 image feature and resize it bicubically.

    Args:
        features: parsed tf.Example feature dict.
        image_name: key of the raw image bytes inside ``features``.
        conf: configuration string; if it contains '128x128' the image is
            treated as 128x128, otherwise 64x64 (unless ``height`` is given).
        height: optional explicit square edge length, used when ``conf`` does
            not request 128x128.

    Returns:
        float32 image tensor of shape [1, size, size, 3], scaled to [0, 1].
    """
    COLOR_CHAN = 3
    # All branches of the original produced square images with identical
    # source and target sizes; pick the edge length once.
    if '128x128' in conf:
        edge = 128
    elif height is not None:  # fix: was ``height != None``
        edge = height
    else:
        edge = 64
    ORIGINAL_HEIGHT = ORIGINAL_WIDTH = IMG_HEIGHT = IMG_WIDTH = edge
    # TF2 equivalents of the removed TF1 ops (tf.decode_raw,
    # tf.image.resize_image_with_crop_or_pad, tf.image.resize_bicubic);
    # the rest of this file already requires tensorflow 2.x.
    image = tf.io.decode_raw(features[image_name], tf.uint8)
    image = tf.reshape(image, shape=[1, ORIGINAL_HEIGHT * ORIGINAL_WIDTH * COLOR_CHAN])
    image = tf.reshape(image, shape=[ORIGINAL_HEIGHT, ORIGINAL_WIDTH, COLOR_CHAN])
    if IMG_HEIGHT != IMG_WIDTH:
        raise ValueError('Unequal height and width unsupported')
    crop_size = min(ORIGINAL_HEIGHT, ORIGINAL_WIDTH)
    image = tf.image.resize_with_crop_or_pad(image, crop_size, crop_size)
    image = tf.reshape(image, [1, crop_size, crop_size, COLOR_CHAN])
    image = tf.image.resize(image, [IMG_HEIGHT, IMG_WIDTH], method='bicubic')
    image = tf.cast(image, tf.float32) / 255.0
    return image
if __name__ == '__main__':
    """
    read_BAIR_tf2_record('BAIR/softmotion30_44k/test/traj_0_to_255.tfrecords',
                         'BAIR/softmotion30_44k/test')
    """
    # Convert the training shards in place; the string literal above documents
    # the equivalent call for the test split.
    read_BAIR_tf2_record('BAIR/softmotion30_44k/train', 'BAIR/softmotion30_44k/train')
| StarcoderdataPython |
3304325 | <reponame>jfairf01/OrgoWebsite<filename>app/SnEMechs/errors.py<gh_stars>0
from flask import render_template
from . import SnEMechs
@SnEMechs.app_errorhandler(403)
def forbidden(_):
    """Render the blueprint-wide 403 (forbidden) page."""
    page = render_template('errors/403.html')
    return page, 403
@SnEMechs.app_errorhandler(404)
def page_not_found(_):
    """Render the blueprint-wide 404 (not found) page."""
    page = render_template('errors/404.html')
    return page, 404
@SnEMechs.app_errorhandler(500)
def internal_server_error(_):
    """Render the blueprint-wide 500 (internal server error) page."""
    page = render_template('errors/500.html')
    return page, 500
| StarcoderdataPython |
4806233 | <reponame>TOXiNdeep2503/makeabilitylabwebsite
#!/usr/bin/env python
# -*- coding:utf-8 -*-
""" Custom context processors that allows us to pass variables to every view
See: https://docs.djangoproject.com/en/2.0/ref/templates/api/#subclassing-context-requestcontext
https://stackoverflow.com/questions/2893724/creating-my-own-context-processor-in-django
https://stackoverflow.com/questions/36093221/how-to-put-variable-from-database-into-base-html-template
"""
from .models import News
def recent_news(request):
    """Context processor exposing the most recent news items to templates."""
    max_items = 3  # number of news entries surfaced site-wide
    latest = News.objects.order_by('-date')[:max_items]
    return {'recent_news': latest}
150157 | #! /usr/bin/env python3
import pygame
from pygame import mixer
from constants import DEFAULT_SAMPLE_RATE
from decorated_gui import DecoratedGUI
class AudioGUI (DecoratedGUI):
    """GUI variant that also prepares the pygame mixer for audio playback."""
    def __init__ (self, sample_rate=DEFAULT_SAMPLE_RATE, *args, **kwargs):
        # Everything but the sample rate is forwarded to the base GUI.
        DecoratedGUI.__init__ (self, *args, **kwargs)
        self.sample_rate = sample_rate
    def __enter__ (self):
        """Pre-initialise the mixer, then defer to the base context manager."""
        bits = 16 #the number of channels specified here is NOT
        #the channels talked about here http://www.pygame.org/docs/ref/mixer.html#pygame.mixer.get_num_channels
        sr = self.sample_rate
        # Negative size requests signed 16-bit samples; last arg = stereo.
        # NOTE(review): pre_init only takes effect once mixer.init()/pygame.init()
        # runs — presumably the base class does that; confirm the init order.
        pygame.mixer.pre_init (sr, -bits, 2)
        ##mixer.init ()
        #pygame.init ()
        #mixer .init () # TODO proper order ?
        return DecoratedGUI.__enter__ (self)
# TODO handle keys for volume ?
if __name__ == "__main__":
    import sys

    def main():
        """Run the audio GUI until the user closes it."""
        with AudioGUI(exit_on_close=False) as g:
            g.run()

    main()
    print("end main")
    # Fix: the original called the interactive helper quit() followed by an
    # unreachable sys.exit() that would have raised NameError anyway (sys was
    # never imported); a single explicit sys.exit() does the job.
    sys.exit()
| StarcoderdataPython |
6352 | <filename>hackathon/darkmattertemperaturedistribution/example.py<gh_stars>1-10
#!/usr/bin/env python
from scipy import *
from pylab import *
#from pylab import imshow
#!
#! Some graphical explorations of the Julia sets with python and pyreport
#!#########################################################################
#$
#$ We start by defining a function J:
#$ \[ J_c : z \rightarrow z^2 + c \]
#$
def J(c):
    """Return the quadratic Julia map z -> z**2 + c for a fixed parameter c."""
    def julia_map(z):
        return z ** 2 + c
    return julia_map
# Complex grid over [-1, 1] x [-1, 1] with 0.002 spacing: z = x + i*y.
[x,y] = ogrid[ -1:1:0.002, -1:1:0.002 ]
z = x + y *1j
#! If we study the divergence of function J under repeated iteration
#! depending on its initial conditions we get a very pretty graph
# Escape-time map: per grid point, count in how many of the 40 iterations
# |z|^2 already exceeded 4 (i.e. the orbit had escaped).
threshTime = zeros_like(z)
for i in range(40):
    z = J(0.285)(z)
    threshTime += z*conj(z) > 4
figure(0)
axes([0,0,1,1])
axis('off')
imshow(threshTime)
bone()
show()
#! We can also do that systematically for other values of c:
axes([0,0,1,1])
axis('off')
rcParams.update({'figure.figsize': [10.5,5]})
# Six hand-picked parameters that produce visually distinct Julia sets.
c_values = (0.285 + 0.013j, 0.45 - 0.1428j, -0.70176 -0.3842j,
            -0.835-0.2321j, -0.939 +0.167j, -0.986+0.87j)
for i,c in enumerate(c_values):
    # Reset the escape-time accumulator and the grid for each parameter.
    threshTime = zeros_like(z)
    z = x + y *1j
    for n in range(40):
        z = J(c)(z)
        threshTime += z*conj(z) > 4
    subplot(2,3,i+1)
    imshow(threshTime)
    axis('off')
show()
| StarcoderdataPython |
87055 | <reponame>WagnerNils/MMSplice_MTSplice
# import tensorflow as tf
import numpy as np
import tensorflow.keras.backend as K
from tensorflow.keras.layers import Layer
from tensorflow.keras.layers import Conv1D
from tensorflow.keras.regularizers import Regularizer
from tensorflow.keras import initializers
import scipy.interpolate as si
DNA = ["A", "C", "G", "T"]
def normalize_data_format(value):
    """Return the canonical lower-case data-format string.

    Falls back to the Keras global image data format when ``value`` is None
    and rejects anything other than 'channels_first'/'channels_last'.
    """
    if value is None:
        value = K.image_data_format()
    fmt = value.lower()
    if fmt in ('channels_first', 'channels_last'):
        return fmt
    raise ValueError('The `data_format` argument must be one of '
                     '"channels_first", "channels_last". Received: ' +
                     str(value))
class GlobalAveragePooling1D_Mask0(Layer):
    """
    Global average pooling operation for temporal data.
    Masking out 0-padded input.

    ``call`` expects a pair ``[inputs, model_inputs]``; time steps whose
    one-hot ``model_inputs`` row is all zeros (padding) are excluded from
    the average.
    """
    def __init__(self, data_format='channels_last', **kwargs):
        super(GlobalAveragePooling1D_Mask0, self).__init__(**kwargs)
        self.data_format = normalize_data_format(data_format)
    def compute_output_shape(self, input_shape):
        # Only the first tensor of the input pair determines the output shape.
        input_shape = input_shape[0]
        if self.data_format == 'channels_first':
            return (input_shape[0], input_shape[1])
        else:
            return (input_shape[0], input_shape[2])
    def call(self, inputs):
        inputs, model_inputs = inputs
        steps_axis = 1 if self.data_format == 'channels_last' else 2
        # mask is 1 where the one-hot row has any non-zero entry, else 0.
        mask = K.max(model_inputs, axis=2, keepdims=True)
        inputs *= mask
        # Average over unmasked steps only; epsilon guards against an
        # all-padding sequence dividing by zero.
        return K.sum(inputs, axis=steps_axis) / K.maximum(
            K.sum(mask, axis=steps_axis), K.epsilon())
class ConvSequence(Conv1D):
    """Conv1D over one-hot encoded biological sequences.

    Validates that the input's channel axis matches the layer vocabulary
    (DNA by default) and can derive ``input_shape`` from ``seq_length``.
    """
    VOCAB = DNA
    def __init__(self,
                 filters,
                 kernel_size,
                 strides=1,
                 padding='valid',
                 dilation_rate=1,
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 seq_length=None,
                 **kwargs):
        # override input shape: (seq_length, vocabulary size)
        if seq_length:
            kwargs["input_shape"] = (seq_length, len(self.VOCAB))
            kwargs.pop("batch_input_shape", None)
        super(ConvSequence, self).__init__(
            filters=filters,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            dilation_rate=dilation_rate,
            activation=activation,
            use_bias=use_bias,
            kernel_initializer=kernel_initializer,
            bias_initializer=bias_initializer,
            kernel_regularizer=kernel_regularizer,
            bias_regularizer=bias_regularizer,
            activity_regularizer=activity_regularizer,
            kernel_constraint=kernel_constraint,
            bias_constraint=bias_constraint,
            **kwargs)
        self.seq_length = seq_length
    def build(self, input_shape):
        # Reject inputs whose channel axis does not match the vocabulary.
        if int(input_shape[-1]) != len(self.VOCAB):
            raise ValueError("{cls} requires input_shape[-1] == {n}. Given: {s}".
                             format(cls=self.__class__.__name__, n=len(self.VOCAB), s=input_shape[-1]))
        return super(ConvSequence, self).build(input_shape)
    def get_config(self):
        # Persist seq_length so the layer round-trips through serialization.
        config = super(ConvSequence, self).get_config()
        config["seq_length"] = self.seq_length
        return config
class ConvDNA(ConvSequence):
    """ConvSequence specialised to the 4-letter DNA alphabet."""
    VOCAB = DNA
    VOCAB_name = "DNA"
def get_S(n_bases=10, spline_order=3, add_intercept=True):
    """Build the P-spline smoothness penalty matrix S = D'D.

    D is the (spline_order - 1)-th order difference operator, mirroring the
    mgcv R implementation quoted in the original source.
    """
    diff_order = spline_order - 1  # mgcv's m[2], equal to m[1] by default
    D = np.identity(n_bases)
    for _ in range(diff_order):
        D = np.diff(D, axis=0)  # same as diff() in R
    S = np.dot(D.T, D)
    S = (S + S.T) / 2  # enforce exact symmetry
    if add_intercept is True:
        # Pad a zero row and column in front so the intercept is unpenalized
        # (cbind(0, rbind(0, S)) in R).
        padded = np.zeros((S.shape[0] + 1, S.shape[1] + 1))
        padded[1:, 1:] = S
        S = padded
    return S.astype(np.float32)
def get_knots(start, end, n_bases=10, spline_order=3):
    """Build the padded knot vector for a B-spline basis on [start, end].

    Returns:
        float32 np.array of n_bases + spline_order + 1 evenly spaced knots.
    """
    x_range = end - start
    # Nudge the interval outwards so boundary points fall strictly inside.
    lo = start - x_range * 0.001
    hi = end + x_range * 0.001
    # mgcv notation: m+1 extra knots are added on each side of the
    # n_bases - m interior knots.
    m = spline_order - 1
    n_inner = n_bases - m
    spacing = (hi - lo) / (n_inner - 1)
    knots = np.linspace(start=lo - spacing * (m + 1),
                        stop=hi + spacing * (m + 1),
                        num=n_inner + 2 * m + 2)
    return knots.astype(np.float32)
def get_X_spline(x, knots, n_bases=10, spline_order=3, add_intercept=True):
    """Evaluate the B-spline basis functions at the points ``x``.

    Args:
        x: np.array of dim 1 with the evaluation points.
        knots: knot vector of length n_bases + spline_order + 1.
        n_bases: number of basis functions (columns of the result).
        spline_order: B-spline order.
        add_intercept: prepend a constant column of ones when True.

    Returns:
        np.array of shape [len(x), n_bases + (add_intercept)]

    # BSpline formula
    https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.BSpline.html#scipy.interpolate.BSpline
    """
    # Fix: the original used ``len(x.shape) is not 1`` — identity comparison
    # with an int literal, which only works by CPython's small-int caching.
    if len(x.shape) != 1:
        raise ValueError("x has to be 1 dimensional")
    # Evaluate each basis function by setting a single coefficient to 1.
    tck = [knots, np.zeros(n_bases), spline_order]
    X = np.zeros([len(x), n_bases])
    for i in range(n_bases):
        coefs = np.zeros(n_bases)
        coefs[i] = 1.0
        tck[1] = coefs
        X[:, i] = si.splev(x, tck, der=0)
    if add_intercept is True:
        ones = np.ones_like(X[:, :1])
        X = np.hstack([ones, X])
    return X.astype(np.float32)
class BSpline():
    """Class for computing the B-spline funcions b_i(x) and
    constructing the penality matrix S.

    # Arguments
        start: float or int; start of the region
        end: float or int; end of the region
        n_bases: int; number of spline bases
        spline_order: int; spline order

    # Methods
        - **getS(add_intercept=False)** - Get the penalty matrix S
              - Arguments
                     - **add_intercept**: bool. If true, intercept column is added to the returned matrix.
              - Returns
                     - `np.array`, of shape `(n_bases + add_intercept, n_bases + add_intercept)`
        - **predict(x, add_intercept=False)** - For some x, predict the bn(x) for each base
              - Arguments
                     - **x**: np.array; Vector of dimension 1
                     - **add_intercept**: bool; If True, intercept column is added to the to the final array
              - Returns
                     - `np.array`, of shape `(len(x), n_bases + (add_intercept))`
    """
    def __init__(self, start=0, end=1, n_bases=10, spline_order=3):
        self.start = start
        self.end = end
        self.n_bases = n_bases
        self.spline_order = spline_order
        # Precompute the knot vector and the (intercept-free) penalty matrix.
        self.knots = get_knots(self.start, self.end,
                               self.n_bases, self.spline_order)
        self.S = get_S(self.n_bases, self.spline_order, add_intercept=False)
    def __repr__(self):
        return "BSpline(start={0}, end={1}, n_bases={2}, spline_order={3})".\
            format(self.start, self.end, self.n_bases, self.spline_order)
    def getS(self, add_intercept=False):
        """Get the penalty matrix S
        Returns
            np.array, of shape (n_bases + add_intercept, n_bases + add_intercept)
        """
        S = self.S
        if add_intercept is True:
            # S <- cbind(0, rbind(0, S)) # in R
            # (zero row/column keeps the intercept unpenalized)
            zeros = np.zeros_like(S[:1, :])
            S = np.vstack([zeros, S])
            zeros = np.zeros_like(S[:, :1])
            S = np.hstack([zeros, S])
        return S
    def predict(self, x, add_intercept=False):
        """For some x, predict the bn(x) for each base
        Arguments:
            x: np.array; Vector of dimension 1
            add_intercept: bool; should we add the intercept to the final array
        Returns:
            np.array, of shape (len(x), n_bases + (add_intercept))
        """
        # sanity check
        # NOTE(review): ``raise Warning`` raises an exception rather than
        # emitting a warning — confirm whether out-of-range x should abort.
        if x.min() < self.start:
            raise Warning("x.min() < self.start")
        if x.max() > self.end:
            raise Warning("x.max() > self.end")
        return get_X_spline(x=x,
                            knots=self.knots,
                            n_bases=self.n_bases,
                            spline_order=self.spline_order,
                            add_intercept=add_intercept)
    def get_config(self):
        return {"start": self.start,
                "end": self.end,
                "n_bases": self.n_bases,
                "spline_order": self.spline_order
                }
    @classmethod
    def from_config(cls, config):
        # Alternate constructor: rebuild from get_config() output.
        return cls(**config)
class GAMRegularizer(Regularizer):
    """Keras weight regularizer combining L2 and GAM smoothness penalties."""
    def __init__(self, n_bases=10, spline_order=3, l2_smooth=0., l2=0.):
        """Regularizer for GAM's
        # Arguments
            n_bases: number of b-spline bases
            order: spline order (2 for quadratic, 3 for qubic splines)
            l2_smooth: float; Smoothness penalty (penalize w' * S * w)
            l2: float; L2 regularization factor - overall weights regularizer
        """
        # convert S to numpy-array if it's a list
        self.n_bases = n_bases
        self.spline_order = spline_order
        self.l2_smooth = K.cast_to_floatx(l2_smooth)
        self.l2 = K.cast_to_floatx(l2)
        # convert to K.constant
        # Penalty matrix is fixed by (n_bases, spline_order), so build once.
        self.S = K.constant(
            K.cast_to_floatx(
                get_S(n_bases, spline_order, add_intercept=False)
            ))
    def __call__(self, x):
        # x.shape = (n_bases, n_spline_tracks)
        # from conv: (kernel_width=1, n_bases, n_spline_tracks)
        from_conv = len(K.int_shape(x)) == 3
        if from_conv:
            # Drop the kernel-width axis so both cases share one code path.
            x = K.squeeze(x, 0)
        n_spline_tracks = K.cast_to_floatx(K.int_shape(x)[1])
        regularization = 0.
        if self.l2:
            # Average the plain L2 penalty over the spline tracks.
            regularization += K.sum(self.l2 * K.square(x)) / n_spline_tracks
        if self.l2_smooth:
            # https://keras.io/backend/#batch_dot
            # equivalent to mean( diag(x' * S * x) )
            regularization += self.l2_smooth * \
                K.mean(K.batch_dot(x, K.dot(self.S, x), axes=1))
        return regularization
    def get_config(self):
        # convert S to list()
        # S itself is derived from n_bases/spline_order, so it need not be saved.
        return {'n_bases': self.n_bases,
                'spline_order': self.spline_order,
                'l2_smooth': float(self.l2_smooth),
                'l2': float(self.l2),
                }
class SplineWeight1D(Layer):
    """Up- or down-weight positions in the activation array of 1D convolutions:
    `x^{out}_{ijk} = x^{in}_{ijk}* (1 + f_S^k(j)) \;,`
    where f_S is the spline transformation.
    # Arguments
        n_bases: int; Number of spline bases used for the positional effect.
        l2_smooth: (float) L2 regularization strength for the second
        order differences in positional bias' smooth splines. (GAM smoothing regularization)
        l2: (float) L2 regularization strength for the spline base coefficients.
        use_bias: boolean; should we add a bias to the transition
        bias_initializer: bias initializer - from `keras.initializers`
    """
    def __name__(self):
        return "SplineWeight1D"
    def __init__(self,
                 # spline type
                 n_bases=10,
                 spline_degree=3,
                 share_splines=False,
                 # regularization
                 l2_smooth=0,
                 l2=0,
                 use_bias=False,
                 bias_initializer='zeros',
                 **kwargs):
        self.n_bases = n_bases
        self.spline_degree = spline_degree
        self.share_splines = share_splines
        self.l2 = l2
        self.l2_smooth = l2_smooth
        self.use_bias = use_bias
        self.bias_initializer = initializers.get(bias_initializer)
        super(SplineWeight1D, self).__init__(**kwargs)
    def build(self, input_shape):
        # input_shape = (None, steps, filters)
        start = 0
        end = int(input_shape[1])
        filters = int(input_shape[2])
        # One spline per filter, or a single spline shared by all filters.
        if self.share_splines:
            n_spline_tracks = 1
        else:
            n_spline_tracks = filters
        # setup the bspline object
        self.bs = BSpline(start, end - 1,
                          n_bases=self.n_bases,
                          spline_order=self.spline_degree
                          )
        # create X_spline,
        self.positions = np.arange(end)
        # shape = (end, self.n_bases)
        self.X_spline = self.bs.predict(self.positions, add_intercept=False)
        # convert to the right precision and K.constant
        self.X_spline_K = K.constant(K.cast_to_floatx(self.X_spline))
        # add weights - all set to 0
        # Zero init makes the layer start as the identity (1 + f = 1).
        self.kernel = self.add_weight(shape=(self.n_bases, n_spline_tracks),
                                      initializer='zeros',
                                      name='kernel',
                                      regularizer=GAMRegularizer(self.n_bases, self.spline_degree,
                                                                 self.l2_smooth, self.l2),
                                      trainable=True)
        if self.use_bias:
            self.bias = self.add_weight((n_spline_tracks, ),
                                        initializer=self.bias_initializer,
                                        name='bias',
                                        regularizer=None)
        # Be sure to call this somewhere!
        super(SplineWeight1D, self).build(input_shape)
    def call(self, x):
        # Per-position multiplier: 1 + f_S(position), per spline track.
        spline_track = K.dot(self.X_spline_K, self.kernel)
        if self.use_bias:
            spline_track = K.bias_add(spline_track, self.bias)
        # if self.spline_exp:
        #     spline_track = K.exp(spline_track)
        # else:
        spline_track = spline_track + 1
        # multiply together the two coefficients
        output = spline_track * x
        return output
    def compute_output_shape(self, input_shape):
        return input_shape
    def get_config(self):
        config = {
            'n_bases': self.n_bases,
            'spline_degree': self.spline_degree,
            'share_splines': self.share_splines,
            # 'spline_exp': self.spline_exp,
            'l2_smooth': self.l2_smooth,
            'l2': self.l2,
            'use_bias': self.use_bias,
            'bias_initializer': initializers.serialize(self.bias_initializer),
        }
        base_config = super(SplineWeight1D, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
    def positional_effect(self):
        # Evaluate the learned positional multiplier offset f_S at each position.
        w = self.get_weights()[0]
        pos_effect = np.dot(self.X_spline, w)
        return {"positional_effect": pos_effect, "positions": self.positions}
| StarcoderdataPython |
197502 | <reponame>gawainguo/Flask-AC<filename>flask_ac/ac_manager.py
'''
This module provide ACManager class
'''
from flask import g
from flask_ac import ptree
class ACManager(object):
    '''
    ACManager is the definition of an access control manager, which holds
    the loaders and other configuration for access control.

    An ACManager instance can be bound to several apps by using the
    init_app method in your application's factory.

    To initialize ACManager, the permissions object is required.  This dict
    object provides all the permissions with name and key (name is the
    description of the permission, and key is generally a string
    representing the permission in a data model).  Each permission may also
    carry a list of dicts representing the permissions nested under it.
    This tree-like structure is called a ptree in flask_ac, and the actual
    ptree object is generated when the ACManager instance is initialized.
    '''
    def __init__(self, permissions, app=None, roles_loader=None,
                 permissions_loader=None, default_error_handler=None):
        # TODO: support for cfg or other format permission
        self.ptree = self.build_ptree_from_obj(permissions)
        self.roles_loader = roles_loader
        self.permissions_loader = permissions_loader
        self.default_error_handler = default_error_handler
        # Bind to the app immediately if one was provided.
        if app is not None:
            self.init_app(app)
    def init_app(self, app):
        '''
        Bound the ACManager instance so the flask or other application can
        access manager anywhere with context
        '''
        app.ac_manager = self
    def get_valid_permissions(self, permissions):
        '''
        Get all valid nodes by traversal top-down from root to leaf
        '''
        return ptree.get_nodes_in_path(self.ptree, permissions)
    def allow_access(self, roles=None, permissions=None):
        '''Return True when the current user holds one of ``roles``
        or one of the (path-expanded) ``permissions``; roles win.'''
        if roles:
            return self._allow_access_by_role(roles)
        valid_permissions = self.get_valid_permissions(permissions)
        return self._allow_access_by_permissions(valid_permissions)
    def _allow_access_by_role(self, roles):
        # Grant access when the required and held role sets overlap.
        roles = set(roles)
        user_roles = set(self.get_user_roles())
        return len(self._get_intersection(roles, user_roles)) > 0
    def _allow_access_by_permissions(self, permissions):
        # Grant access when the required and held permission sets overlap.
        permissions = set(permissions)
        user_permissions = set(self.get_user_permissions())
        return len(self._get_intersection(permissions, user_permissions)) > 0
    def _get_intersection(self, set_1, set_2):
        return set_1.intersection(set_2)
    def get_user_roles(self):
        """
        Get current user role names
        return: list of role names
        """
        # Bug fix: the original tested ``self.permissions_loader`` here, so a
        # manager configured with only a custom roles_loader silently fell
        # back to the default loader (and vice versa would crash).
        if self.roles_loader:
            return self.roles_loader()
        return self._roles_loader()
    def get_user_permissions(self):
        """
        Get current user permissions keys
        return: list of permision keys
        """
        if self.permissions_loader:
            return self.permissions_loader()
        return self._permissions_loader()
    def _roles_loader(self):
        '''
        Default roles loader
        By default, ac_manager will query user from flask global g.user
        To query from session or other place, provide the loader when
        initializing the ac_manager instance
        '''
        user = g.user
        roles = user.get('roles', [])
        return [role.get('name') for role in roles]
    def _permissions_loader(self):
        '''
        Default permissions loader
        By default, ac_manager will query user from flask global g.user
        To query from session or other place, provide the loader when
        initializing the ac_manager instance
        '''
        user = g.user
        roles = user.get('roles')
        permissions = []
        for role in roles:
            permissions += role.get('permissions')
        return permissions
    def build_ptree_from_obj(self, obj):
        '''
        Build ptree from provider permission object
        '''
        return ptree.create_ptree_from_obj(obj)
    def get_child_permissions(self, permission):
        '''
        Get all child permissions of provided permissions
        return: Permission objects
        '''
        return ptree.get_child_permissions(permission)
    def get_permissions_by_keys(self, permission_keys):
        '''
        Get Permission object by permission names
        return: Permission object
        '''
        permissions = ptree.get_permissions_by_keys(
            self.ptree, permission_keys)
        return permissions
| StarcoderdataPython |
1734756 | <gh_stars>10-100
#!/usr/bin/env python
# Created by <NAME> on 18-2-13.
class Solution:
    def convertToTitle(self, n):
        """
        Convert a positive column number to its Excel column title
        (1 -> 'A', 26 -> 'Z', 27 -> 'AA', ...).

        :type n: int
        :rtype: str
        """
        letters = []
        while n > 0:
            # Bijective base-26: shift by one so 'Z' maps to 26, not 0.
            n, rem = divmod(n - 1, 26)
            letters.append(chr(rem + ord('A')))
        return ''.join(reversed(letters))
if __name__ == '__main__':
    # Smoke test: column 1 should print 'A'.
    s = Solution()
    print(s.convertToTitle(1))
| StarcoderdataPython |
60665 | <filename>Hearthstone/test.py<gh_stars>0
from CardGenerator import CardGenerator
test = CardGenerator()
test.generate(2)  # the argument is the mana cost of the generated card (the original comment said 4 but the call passes 2)
| StarcoderdataPython |
1624573 | <reponame>akinoriosamura/retinaface-tf2
import tensorflow as tf
def MultiStepLR(initial_learning_rate, lr_steps, lr_rate, name='MultiStepLR'):
    """Multi-steps learning rate scheduler.

    The rate starts at ``initial_learning_rate`` and is multiplied by
    ``lr_rate`` each time a boundary in ``lr_steps`` is crossed.
    """
    values = [initial_learning_rate]
    for _ in lr_steps:
        values.append(values[-1] * lr_rate)
    return tf.keras.optimizers.schedules.PiecewiseConstantDecay(
        boundaries=lr_steps, values=values)
def MultiStepWarmUpLR(initial_learning_rate, lr_steps, lr_rate,
                      warmup_steps=0., min_lr=0.,
                      name='MultiStepWarmUpLR'):
    """Multi-steps learning rate scheduler with a linear warm up phase."""
    assert warmup_steps <= lr_steps[0]
    assert min_lr <= initial_learning_rate
    # Cumulatively decayed plateau values, one per boundary crossing.
    values = [initial_learning_rate]
    for _ in lr_steps:
        values.append(values[-1] * lr_rate)
    return PiecewiseConstantWarmUpDecay(
        boundaries=lr_steps, values=values, warmup_steps=warmup_steps,
        min_lr=min_lr)
def CosineAnnealingLR_Restart(initial_learning_rate, t_period, lr_min):
    """Cosine annealing learning rate scheduler with warm restarts
    every ``t_period`` steps, bottoming out at ``lr_min``."""
    floor_fraction = lr_min / initial_learning_rate
    return tf.keras.experimental.CosineDecayRestarts(
        initial_learning_rate=initial_learning_rate,
        first_decay_steps=t_period, t_mul=1.0, m_mul=1.0,
        alpha=floor_fraction)
class PiecewiseConstantWarmUpDecay(
        tf.keras.optimizers.schedules.LearningRateSchedule):
    """A LearningRateSchedule with warm up schedule.

    Linearly ramps from ``min_lr`` to ``values[0]`` over ``warmup_steps``,
    then behaves as a piecewise-constant schedule over ``boundaries``.
    Modified from tf.keras.optimizers.schedules.PiecewiseConstantDecay.
    """
    def __init__(self, boundaries, values, warmup_steps, min_lr,
                 name=None):
        super(PiecewiseConstantWarmUpDecay, self).__init__()
        if len(boundaries) != len(values) - 1:
            raise ValueError(
                "The length of boundaries should be 1 less than the "
                "length of values")
        self.boundaries = boundaries
        self.values = values
        self.name = name
        self.warmup_steps = warmup_steps
        self.min_lr = min_lr
    def __call__(self, step):
        with tf.name_scope(self.name or "PiecewiseConstantWarmUp"):
            step = tf.cast(tf.convert_to_tensor(step), tf.float32)
            pred_fn_pairs = []
            warmup_steps = self.warmup_steps
            boundaries = self.boundaries
            values = self.values
            min_lr = self.min_lr
            # Linear warm up: min_lr -> values[0].
            pred_fn_pairs.append(
                (step <= warmup_steps,
                 lambda: min_lr + step * (values[0] - min_lr) / warmup_steps))
            # First plateau, between warm up and the first boundary.
            pred_fn_pairs.append(
                (tf.logical_and(step <= boundaries[0],
                                step > warmup_steps),
                 lambda: tf.constant(values[0])))
            # Final plateau, after the last boundary.
            pred_fn_pairs.append(
                (step > boundaries[-1], lambda: tf.constant(values[-1])))
            for low, high, v in zip(boundaries[:-1], boundaries[1:],
                                    values[1:-1]):
                pred = (step > low) & (step <= high)
                # Bug fix: bind v at definition time with a default argument.
                # The original used a plain ``lambda: tf.constant(v)`` (despite
                # its own comment saying to bind), so every middle branch
                # closed over the loop variable and returned the LAST value.
                pred_fn_pairs.append((pred, lambda v=v: tf.constant(v)))
            # The default isn't needed here because our conditions are mutually
            # exclusive and exhaustive, but tf.case requires it.
            return tf.case(pred_fn_pairs, lambda: tf.constant(values[0]),
                           exclusive=True)
    def get_config(self):
        return {
            "boundaries": self.boundaries,
            "values": self.values,
            "warmup_steps": self.warmup_steps,
            "min_lr": self.min_lr,
            "name": self.name
        }
if __name__ == "__main__":
    # Visual sanity check: plot one of the schedules over 10k iterations.
    lr_scheduler = MultiStepWarmUpLR(1e-2, [5500, 6800], 0.1,
                                     warmup_steps=500, min_lr=1e-3)
    # lr_scheduler = MultiStepWarmUpLR(1e-3, [5500, 6800], 0.1)
    # lr_scheduler = MultiStepLR(1e-4, [500, 1000, 2000, 3000], 0.5)
    # lr_scheduler = CosineAnnealingLR_Restart(2e-4, 2500, 1e-7)
    ##############################
    # Draw figure
    ##############################
    # Sample the schedule every 10 steps.
    N_iter = 10000
    step_list = list(range(0, N_iter, 10))
    lr_list = []
    for i in step_list:
        current_lr = lr_scheduler(i).numpy()
        lr_list.append(current_lr)
    import matplotlib as mpl
    from matplotlib import pyplot as plt
    import matplotlib.ticker as mtick
    mpl.style.use('default')
    import seaborn
    seaborn.set(style='whitegrid')
    seaborn.set_context('paper')
    plt.figure(1)
    plt.subplot(111)
    plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
    plt.title('Title', fontsize=16, color='k')
    plt.plot(step_list, lr_list, linewidth=1.5, label='learning rate scheme')
    legend = plt.legend(loc='upper right', shadow=False)
    ax = plt.gca()
    # Relabel the x axis in thousands of iterations ('5K', '10K', ...).
    labels = ax.get_xticks().tolist()
    for k, v in enumerate(labels):
        labels[k] = str(int(v / 1000)) + 'K'
    ax.set_xticklabels(labels)
    ax.yaxis.set_major_formatter(mtick.FormatStrFormatter('%.1e'))
    ax.set_ylabel('Learning rate')
    ax.set_xlabel('Iteration')
    fig = plt.gcf()
    plt.show()
| StarcoderdataPython |
1748927 | <gh_stars>0
""" Test AS3 Client """
import json
import tempfile
import shutil
from os import path
from f5sdk import exceptions
from f5sdk.utils import http_utils
from ....global_test_imports import pytest, Mock, PropertyMock
from ....shared import constants
from ....shared import mock_utils
REQUESTS = constants.MOCK['requests']  # dotted path of requests used as the mock patch target
# Version metadata shape served by the extension metadata CDN: one entry per
# version string, exactly one of which is flagged ``latest``.
EXAMPLE_VERSION_INFO = {
    'x.x.x': {
        'latest': True
    },
    'x.x.y': {
        'latest': False
    }
}
# Top-level metadata document: each supported component exposes its versions.
EXAMPLE_EXTENSION_METADATA = {
    'components': {
        'as3': {
            'versions': EXAMPLE_VERSION_INFO
        },
        'do': {
            'versions': EXAMPLE_VERSION_INFO
        },
        'ts': {
            'versions': EXAMPLE_VERSION_INFO
        },
        'cf': {
            'versions': EXAMPLE_VERSION_INFO
        }
    }
}
# Known-good fixture values per component: pinned version, iControl LX package
# name, the RPM package name and the immediately preceding version.
FIXED_INFO = {
    'as3': {
        'version': '3.10.0',
        'name': 'f5-appsvcs',
        'package_name': 'f5-appsvcs-3.10.0-5.noarch',
        'previous_version': '3.9.0',
    },
    'do': {
        'version': '1.10.0',
        'name': 'f5-declarative-onboarding',
        'package_name': 'f5-declarative-onboarding-1.10.0-2.noarch',
        'previous_version': '1.9.0',
    },
    'ts': {
        'version': '1.10.0',
        'name': 'f5-telemetry',
        'package_name': 'f5-telemetry-1.10.0-2.noarch',
        'previous_version': '1.9.0',
    },
    'cf': {
        'version': '1.1.0',
        'name': 'f5-cloud-failover',
        'package_name': 'f5-cloud-failover-1.1.0-0.noarch',
        'previous_version': '1.0.0',
    }
}
# pylint: disable=too-many-public-methods
@pytest.mark.parametrize("component", ["as3", "do", "ts", "cf"])
class TestExtensionClients(object):
"""Test Extension Clients - Iterates through each parametrized component
and performs tests. Any test that applies to all components could go here
"""
@classmethod
def setup_class(cls):
"""" Setup func """
cls.test_tmp_dir = tempfile.mkdtemp()
@classmethod
def teardown_class(cls):
"""" Teardown func """
shutil.rmtree(cls.test_tmp_dir)
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_init(component, create_extension_client):
"""Test: Initialize extension client
Assertions
----------
- 'package' attribute exists
- 'service' attribute exists
"""
extension_client = create_extension_client(component=component)
assert extension_client.package
assert extension_client.service
@staticmethod
@pytest.mark.usefixtures("get_extension_client_class")
@pytest.mark.usefixtures("mgmt_client")
def test_component_version_invalid(component, mgmt_client, get_extension_client_class):
"""Test: Invalid component version
Assertions
----------
- InvalidComponentVersionError exception should be raised
"""
pytest.raises(
exceptions.InvalidComponentVersionError,
get_extension_client_class(component=component),
mgmt_client,
version='0.0.0',
use_latest_metadata=False
)
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_download_latest_metadata(component, create_extension_client, mocker):
"""Test: Download latest metadata from CDN when
- use_latest_metadata=True (which is the default)
Assertions
----------
- instantiating a extension client should download metadata
"""
mock_conditions = [
{
'type': 'url',
'value': 'cdn.f5.com',
'response': {
'body': EXAMPLE_EXTENSION_METADATA
}
}
]
mocker.patch(REQUESTS).side_effect = mock_utils.create_response(
{},
conditional=mock_conditions
)
extension_client = create_extension_client(
component=component,
use_latest_metadata=True
)
assert extension_client.version == 'x.x.x'
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_download_latest_metadata_http_error(component, create_extension_client, mocker):
"""Test: Download latest metadata from CDN continues when http error occurs
Assertions
----------
- Error/exception should be silently caught and logged
"""
mocker.patch(REQUESTS).side_effect = Exception('Error')
mock_logger = Mock()
mocker.patch('f5sdk.logger.Logger.get_logger').return_value = mock_logger
create_extension_client(component=component, use_latest_metadata=True)
assert mock_logger.warning.call_count == 1
error_message = 'Error downloading metadata file'
assert error_message in mock_logger.warning.call_args_list[0][0][0]
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_install(component, create_extension_client, mocker):
"""Test: install
Assertions
----------
- install() response should equal:
{
'component': '<component>',
'version': '<component version>'
}
"""
extension_client = create_extension_client(
component=component,
version=FIXED_INFO[component]['version']
)
mock_conditions = [
{
'type': 'url',
'value': 'github.com',
'response': {'body': 'foo'.encode()}
},
{
'type': 'url',
'value': '/mgmt/shared/file-transfer/uploads',
'response': {'body': {'id': 'xxxx'}}
},
{
'type': 'url',
'value': '/mgmt/shared/iapp/package-management-tasks',
'response': {'body': {'id': 'xxxx', 'status': 'FINISHED'}}
}
]
mocker.patch(REQUESTS).side_effect = mock_utils.create_response(
{},
conditional=mock_conditions
)
mocker.patch(REQUESTS).return_value.json = Mock(
return_value={
'id': 'xxxx',
'status': 'FINISHED',
'queryResponse': [
{
'name': FIXED_INFO[component]['name'],
'packageName': FIXED_INFO[component]['package_name']
}
]
}
)
assert extension_client.package.install() == {
'component': component,
'version': FIXED_INFO[component]['version']
}
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_install_with_package_url(component, create_extension_client, mocker, tmpdir):
    """Test: install with package_url

    Assertions
    ----------
    - install() response should equal:
        {
            'component': '<component>',
            'version': '<component version>'
        }
    """
    extension_client = create_extension_client(
        component=component,
        version=FIXED_INFO[component]['version']
    )
    # Same canned HTTP responses as the plain install test.
    mock_conditions = [
        {
            'type': 'url',
            'value': 'github.com',
            'response': {'body': 'foo'.encode()}
        },
        {
            'type': 'url',
            'value': '/mgmt/shared/file-transfer/uploads',
            'response': {'body': {'id': 'xxxx'}}
        },
        {
            'type': 'url',
            'value': '/mgmt/shared/iapp/package-management-tasks',
            'response': {'body': {'id': 'xxxx', 'status': 'FINISHED'}}
        }
    ]
    mocker.patch(REQUESTS).side_effect = mock_utils.create_response(
        {},
        conditional=mock_conditions
    )
    # NOTE(review): this second patch of REQUESTS replaces the mock above;
    # the conditional side_effect appears shadowed -- confirm intent.
    mocker.patch(REQUESTS).return_value.json = Mock(
        return_value={
            'id': 'xxxx',
            'status': 'FINISHED',
            'queryResponse': [
                {
                    'name': FIXED_INFO[component]['name'],
                    'packageName': FIXED_INFO[component]['package_name']
                }
            ]
        }
    )
    # Stub out the actual download and redirect the SDK's temp dir to tmpdir.
    mocker.patch("f5sdk.utils.http_utils.download_to_file").side_effect = Mock()
    mocker.patch("f5sdk.constants.TMP_DIR", tmpdir)
    # Case 1: remote https:// URL -- the SDK "downloads" into TMP_DIR.
    url_remote_file = "https://path/extension.rpm"
    package_name = url_remote_file.split('/')[-1]
    # create dummy file in pytest fixture tmpdir
    tmpdir.join(package_name).write("url_remote_file test")
    assert extension_client.package.install(package_url=url_remote_file) == {
        'component': component,
        'version': FIXED_INFO[component]['version']
    }
    # Case 2: local file:// URL pointing at the same dummy package.
    url_local_file = 'file://%s/%s' % (tmpdir, package_name)
    tmpdir.join(package_name).write("url_local_file test")
    assert extension_client.package.install(package_url=url_local_file) == {
        'component': component,
        'version': FIXED_INFO[component]['version']
    }
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_install_package_url_invalid(component, create_extension_client):
    """Test: install() rejects a malformed package_url.

    Assertions
    ----------
    - InputRequiredError exception should be raised
    """
    client = create_extension_client(
        component=component,
        version=FIXED_INFO[component]['version']
    )
    # A URL without a supported scheme must be rejected up front.
    with pytest.raises(exceptions.InputRequiredError):
        client.package.install(package_url="invalidUrl")
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_uninstall(component, create_extension_client, mocker):
    """Test: uninstall

    Assertions
    ----------
    - uninstall() response should equal:
        {
            'component': '<component>',
            'version': '<component version>'
        }
    """
    extension_client = create_extension_client(
        component=component,
        version=FIXED_INFO[component]['version']
    )
    # The package-management task endpoint reports the component as installed
    # so uninstall can resolve the package name to remove.
    mock_conditions = [
        {
            'type': 'url',
            'value': '/mgmt/shared/iapp/package-management-tasks',
            'response': {
                'body': {
                    'id': 'xxxx',
                    'status': 'FINISHED',
                    'queryResponse': [
                        {
                            'name': FIXED_INFO[component]['name'],
                            'packageName': FIXED_INFO[component]['package_name']
                        }
                    ]
                }
            }
        }
    ]
    mocker.patch(REQUESTS).side_effect = mock_utils.create_response(
        {},
        conditional=mock_conditions
    )
    assert extension_client.package.uninstall() == {
        'component': component,
        'version': FIXED_INFO[component]['version']
    }
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_uninstall_any_version(component, create_extension_client, mocker):
    """Test: uninstall (any version)

    Given: Extension client is provided "previous version" which
    is different than the "installed version"

    Assertions
    ----------
    - Package version in uninstall operation should be <component version>
    - uninstall() response should equal:
        {
            'component': '<component>',
            'version': '<component version>'
        }
    """
    # Client is created against the *previous* version; uninstall must still
    # target whatever package is actually installed on the device.
    extension_client = create_extension_client(
        component=component,
        version=FIXED_INFO[component]['previous_version']
    )
    mock_request = mocker.patch(REQUESTS)
    mock_conditions = [
        {
            'type': 'url',
            'value': '/mgmt/shared/iapp/package-management-tasks',
            'response': {
                'body': {
                    'id': 'xxxx',
                    'status': 'FINISHED',
                    'queryResponse': [
                        {
                            'name': FIXED_INFO[component]['name'],
                            'packageName': FIXED_INFO[component]['package_name']
                        }
                    ]
                }
            }
        }
    ]
    mock_request.side_effect = mock_utils.create_response(
        {},
        conditional=mock_conditions
    )
    assert extension_client.package.uninstall() == {
        'component': component,
        'version': FIXED_INFO[component]['version']
    }
    # Inspect the third recorded HTTP call -- presumably the DELETE task
    # creation (TODO confirm ordering) -- and check it names the installed
    # package rather than the "previous version" package.
    _, kwargs = mock_request.call_args_list[2]
    assert json.loads(kwargs['data'])['packageName'] == FIXED_INFO[component]['package_name']
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_is_installed(component, create_extension_client, mocker):
    """Test: is_installed

    Assertions
    ----------
    - is_installed() response should be a dict with installed,
      installed_version and latest_version keys
    """
    extension_client = create_extension_client(
        component=component,
        version=FIXED_INFO[component]['version']
    )
    # The query task reports the fixed component package as present.
    mocker.patch(REQUESTS).return_value.json = Mock(
        return_value={
            'id': 'xxxx',
            'status': 'FINISHED',
            'queryResponse': [
                {
                    'name': FIXED_INFO[component]['name'],
                    'packageName': FIXED_INFO[component]['package_name']
                }
            ]
        }
    )
    is_installed = extension_client.package.is_installed()
    assert is_installed['installed']
    # installed_version is parsed out of the package name.
    assert is_installed['installed_version'] == FIXED_INFO[component]['version']
    assert is_installed['latest_version'] != ''
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_is_installed_similar_package(component, create_extension_client, mocker):
    """Test: is_installed with second package containing component name in it

    Assertions
    ----------
    - is_installed() response should have installed=true and correct version
    """
    extension_client = create_extension_client(
        component=component,
        version=FIXED_INFO[component]['version']
    )
    # Two packages are reported: the real one and a decoy whose name merely
    # *contains* the component name.  The exact-name match must win.
    mocker.patch(REQUESTS).return_value.json = Mock(
        return_value={
            'id': 'xxxx',
            'status': 'FINISHED',
            'queryResponse': [
                {
                    'name': FIXED_INFO[component]['name'],
                    'packageName': FIXED_INFO[component]['package_name']
                },
                {
                    'name': '{}-foo'.format(FIXED_INFO[component]['name']),
                    'packageName': '{}-foo-x.x.x.noarch'.format(FIXED_INFO[component]['name'])
                }
            ]
        }
    )
    is_installed = extension_client.package.is_installed()
    assert is_installed['installed']
    assert is_installed['installed_version'] == FIXED_INFO[component]['version']
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_failed_task_status(component, create_extension_client, mocker):
    """Test: is_installed when the RPM task reports a FAILED status.

    Assertions
    ----------
    - Exception should be raised
    """
    client = create_extension_client(component=component)
    # Simulate the iControl task endpoint reporting a FAILED task.
    failed_task = {
        'id': 'xxxx',
        'status': 'FAILED'
    }
    mocker.patch(REQUESTS).return_value.json = Mock(return_value=failed_task)
    with pytest.raises(Exception):
        client.package.is_installed()
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_is_installed_two_digit_version(component, create_extension_client, mocker):
    """Test: is_installed where package name major version contains two digits

    Note: This test should live outside the generic tests perhaps...

    Assertions
    ----------
    - is_installed() installed_version response should be correctly parsed
    """
    extension_client = create_extension_client(component=component)
    # The fixture's package_name carries a two-digit major version; the
    # version parser must not truncate it.
    mocker.patch(REQUESTS).return_value.json = Mock(
        return_value={
            'id': 'xxxx',
            'status': 'FINISHED',
            'queryResponse': [
                {
                    'name': FIXED_INFO[component]['name'],
                    'packageName': FIXED_INFO[component]['package_name']
                }
            ]
        }
    )
    is_installed = extension_client.package.is_installed()
    assert is_installed['installed_version'] == FIXED_INFO[component]['version']
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_is_not_installed(component, create_extension_client, mocker):
    """Test: is_installed when the component package is absent

    Assertions
    ----------
    - is_installed() should report installed=False with an empty version
    """
    extension_client = create_extension_client(component=component)
    # The query task returns no matching package (empty name fields).
    mocker.patch(REQUESTS).return_value.json = Mock(
        return_value={
            'id': 'xxxx',
            'status': 'FINISHED',
            'queryResponse': [
                {
                    'name': '',
                    'packageName': ''
                }
            ]
        }
    )
    is_installed = extension_client.package.is_installed()
    assert not is_installed['installed']
    assert is_installed['installed_version'] == ''
    # latest_version comes from metadata, independent of install state.
    assert is_installed['latest_version'] != ''
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_list_versions(component, create_extension_client):
    """Test: list extension versions

    Assertions
    ----------
    - list_versions() should include the fixed component version
    """
    # The fixed version must appear among the advertised versions.
    versions = create_extension_client(component=component).package.list_versions()
    assert FIXED_INFO[component]['version'] in versions
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_show(component, create_extension_client, mocker):
    """Test: show

    Assertions
    ----------
    - show() should return the (mocked) HTTP response body
    """
    client = create_extension_client(component=component)
    expected = {'message': 'success'}
    # Any HTTP call made by the client yields the canned JSON body.
    mocker.patch(REQUESTS).return_value.json = Mock(return_value=expected)
    assert client.service.show() == expected
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_create(component, create_extension_client, mocker):
    """Test: create

    Assertions
    ----------
    - create() should return the (mocked) HTTP response body
    """
    client = create_extension_client(component=component)
    expected = {'message': 'success'}
    # Any HTTP call made by the client yields the canned JSON body.
    mocker.patch(REQUESTS).return_value.json = Mock(return_value=expected)
    assert client.service.create(config={'config': 'foo'}) == expected
@pytest.mark.usefixtures("create_extension_client")
def test_create_config_file(self, component, create_extension_client, mocker):
    """Test: create with config file

    Assertions
    ----------
    - create() response should equal requests response
    """
    extension_client = create_extension_client(component=component)
    mock_response = {'message': 'success'}
    mocker.patch(REQUESTS).return_value.json = Mock(return_value=mock_response)
    # Write the declaration to a real file in the class temp dir and pass
    # its path instead of an inline config dict.
    config_file = path.join(self.test_tmp_dir, 'config.json')
    with open(config_file, 'w') as _f:
        _f.write(json.dumps({'config': 'foo'}))
    assert extension_client.service.create(config_file=config_file) == mock_response
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_create_no_config(component, create_extension_client):
    """Test: create with neither config nor config_file provided

    Assertions
    ----------
    - InputRequiredError exception should be raised
    """
    client = create_extension_client(component=component)
    with pytest.raises(exceptions.InputRequiredError):
        client.service.create()
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_create_async(component, create_extension_client, mocker):
    """Test: create async response

    Assertions
    ----------
    - create() response should equal task requests response
    - make_request() should be called twice
    - make_request() second call uri should equal task uri
    """
    extension_client = create_extension_client(component=component)
    mock_response = {'foo': 'bar'}
    # First call returns a 202 with a task selfLink; the client must then
    # poll that task URI, whose response is returned to the caller.
    make_request_mock = mocker.patch(
        'f5sdk.utils.http_utils.make_request',
        side_effect=[({'selfLink': 'https://localhost/foo/1234'}, 202), (mock_response, 200)]
    )
    response = extension_client.service.create(config={'foo': 'bar', 'async': True})
    assert response == mock_response
    assert make_request_mock.call_count == 2
    # The second request's URI must be the task path from the selfLink.
    args, _ = make_request_mock.call_args_list[1]
    assert args[1] == '/foo/1234'
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_is_available(component, create_extension_client, mocker):
    """Test: is_available

    Assertions
    ----------
    - is_available() should be truthy when the service responds
    """
    client = create_extension_client(component=component)
    canned = {'message': 'success'}
    mocker.patch(REQUESTS).return_value.json = Mock(return_value=canned)
    assert client.service.is_available()
@staticmethod
@pytest.mark.usefixtures("create_extension_client")
def test_show_info(component, create_extension_client, mocker):
    """Test: show_info

    Assertions
    ----------
    - show_info() should return the /info endpoint's API response
    """
    client = create_extension_client(component=component)
    info = {'version': 'x.x.x.x'}
    mocker.patch(REQUESTS).return_value.json = Mock(return_value=info)
    assert client.service.show_info() == info
class TestAS3Client(object):
    """Test AS3 Client - performs any component specific tests """

    @classmethod
    def setup_class(cls):
        """Setup func: all tests in this class target the AS3 component."""
        cls.component = 'as3'

    @pytest.mark.usefixtures("create_extension_client")
    def test_uninstall_with_dependency(self, create_extension_client, mocker):
        """Test: uninstall with existing dependency

        Assertions
        ----------
        - uninstall() should log a warning about existing dependency
        """
        # AS3 < 3.10 ships a service-discovery dependency package that the
        # uninstall flow cannot remove; the package name below pins 3.9.0.
        mock_conditions = [
            {
                'type': 'url',
                'value': '/mgmt/shared/iapp/package-management-tasks',
                'response': {
                    'body': {
                        'id': 'xxxx',
                        'status': 'FINISHED',
                        'queryResponse': [
                            {
                                'name': FIXED_INFO[self.component]['name'],
                                'packageName': 'f5-appsvcs-3.9.0-3.noarch'
                            }
                        ]
                    }
                }
            }
        ]
        mocker.patch(REQUESTS).side_effect = mock_utils.create_response(
            {},
            conditional=mock_conditions
        )
        mock_logger = Mock()
        mocker.patch('f5sdk.logger.Logger.get_logger').return_value = mock_logger
        extension_client = create_extension_client(component=self.component)
        extension_client.package.uninstall()
        # A single warning is expected, mentioning the leftover dependency.
        assert mock_logger.warning.call_count == 1
        logged_message = mock_logger.warning.call_args_list[0][0][0]
        assert 'A component package dependency has not been removed' in logged_message
        assert 'See documentation for more details' in logged_message

    @pytest.mark.usefixtures("create_extension_client")
    def test_delete(self, create_extension_client, mocker):
        """Test: delete

        Assertions
        ----------
        - delete() response should equal requests response
        """
        extension_client = create_extension_client(component=self.component)
        mock_response = {'message': 'success'}
        mocker.patch(REQUESTS).return_value.json = Mock(return_value=mock_response)
        assert extension_client.service.delete() == mock_response
class TestDOClient(object):
    """Test DO Client - performs any component specific tests """

    @classmethod
    def setup_class(cls):
        """Setup func: all tests in this class target the DO component."""
        cls.component = 'do'

    @pytest.mark.usefixtures("create_extension_client")
    def test_do_show_inspect(self, create_extension_client, mocker):
        """Test: show_inspect

        Assertions
        ----------
        - show_inspect() response should be inspect endpoint API response
        """
        extension_client = create_extension_client(component=self.component)
        mock_response = {'message': 'success'}
        mocker.patch(REQUESTS).return_value.json = Mock(return_value=mock_response)
        assert extension_client.service.show_inspect() == mock_response

    @pytest.mark.usefixtures("create_extension_client")
    def test_do_show_inspect_query_parameters(self, create_extension_client, mocker):
        """Test: show_inspect(**kwargs) query parameters with a GET request to the /inspect endpoint

        For example
        https://MGMT_IP/mgmt/shared/declarative-onboarding/inspect?targetHost=X.X.X.X
        &targetPort=443&targetUsername=admin&targetPassword=<PASSWORD>

        Assertions
        ----------
        - HTTP request uri should contain the query parameters
        - show_inspect() response should be mocked response
        """
        extension_client = create_extension_client(component=self.component)
        inspect_kwargs = {
            'query_parameters': {
                'targetHost': '192.0.2.1',
                'targetPort': 443,
                'targetUsername': 'admin',
                'targetPassword': '<PASSWORD>'
            }
        }
        mock_request = mocker.patch(REQUESTS)
        mock_request.return_value.json = Mock(return_value={})
        # status_code is read as an attribute, so it is mocked as a property.
        type(mock_request.return_value).status_code = PropertyMock(return_value=200)
        show_inspect_response = extension_client.service.show_inspect(**inspect_kwargs)
        # Every supplied query parameter must appear in the request URI.
        args, _ = mock_request.call_args
        query_params = http_utils.parse_url(args[1])['query']
        for key in inspect_kwargs['query_parameters']:
            assert '{}={}'.format(key, inspect_kwargs['query_parameters'][key]) in query_params
        assert show_inspect_response == {}
class TestCFClient(object):
    """Test CF Client - performs any component specific tests """

    @classmethod
    def setup_class(cls):
        """Setup func: all tests in this class target the CF component."""
        cls.component = 'cf'

    @pytest.mark.usefixtures("create_extension_client")
    def test_cf_show_failover(self, create_extension_client, mocker):
        """Test: show_trigger

        Assertions
        ----------
        - show_trigger() response should be trigger endpoint API response
        """
        # NOTE(review): the test name says "failover" but the call under test
        # is show_trigger() -- confirm the intended method/name pairing.
        extension_client = create_extension_client(component=self.component)
        mock_response = {'message': 'success'}
        mocker.patch(REQUESTS).return_value.json = Mock(return_value=mock_response)
        assert extension_client.service.show_trigger() == mock_response

    @pytest.mark.usefixtures("create_extension_client")
    def test_cf_show_inspect(self, create_extension_client, mocker):
        """Test: show_inspect

        Assertions
        ----------
        - show_inspect() response should be inspect endpoint API response
        """
        extension_client = create_extension_client(component=self.component)
        mock_response = {'message': 'success'}
        mocker.patch(REQUESTS).return_value.json = Mock(return_value=mock_response)
        assert extension_client.service.show_inspect() == mock_response

    @pytest.mark.usefixtures("create_extension_client")
    def test_cf_trigger_failover(self, create_extension_client, mocker):
        """Test: trigger

        Assertions
        ----------
        - trigger() response should be trigger endpoint API response
        """
        extension_client = create_extension_client(component=self.component)
        mock_response = {'message': 'success'}
        mocker.patch(REQUESTS).return_value.json = Mock(return_value=mock_response)
        assert extension_client.service.trigger() == mock_response

    @pytest.mark.usefixtures("create_extension_client")
    def test_cf_reset(self, create_extension_client, mocker):
        """Test: reset

        Assertions
        ----------
        - reset() response should be reset endpoint API response
        """
        extension_client = create_extension_client(component=self.component)
        mock_response = {'message': 'success'}
        mocker.patch(REQUESTS).return_value.json = Mock(return_value=mock_response)
        assert extension_client.service.reset() == mock_response
import time
import microstats
# Shared sample data set (20 values) exercised by the stats tests below.
lst = [7, 4, 8, 6, 3.6, 8, 3, 3, 5, 2, 23, 9, 20, 7, 28, 22, 22, 6, 7, 7]
def test_gauge_value():
    """GaugeValue tracks the current value plus min/max since the last reset."""
    g = microstats.GaugeValue()
    assert g.val == 0
    g.set(20)
    g.set(25)
    g.set(10)
    # min/max reflect the extremes observed across the three set() calls.
    assert g.val == 10
    assert g.val_max == 25
    assert g.val_min == 10
    g.add(-5)
    g.add(3)
    assert g.val == 8
    assert g.val_max == 25
    assert g.val_min == 5
    g.add(6)
    g.reset()
    # reset() collapses min/max back to the current value (8 + 6 = 14).
    assert g.val == g.val_max == g.val_min == 14
def test_get_stats():
    """get_stats() summarizes a list into sum/avg/max/min/cnt."""
    # Degenerate single-element input.
    list1 = [0]
    d = microstats.get_stats(list1)
    assert d == {
        "sum": 0,
        "avg": 0,
        # 'max_p95': 0,
        # 'min_p95': 0,
        "max": 0,
        "min": 0,
        "cnt": 1,
    }
    # The shared 20-value sample from the module level.
    assert len(lst) == 20
    d = microstats.get_stats(lst)
    assert d["sum"] == 200.6
    assert d["avg"] == 10.03
    # assert d['max_p95'] == 23
    # assert d['min_p95'] == 3
    assert d["max"] == 28
    assert d["min"] == 2
def test_micro_stats():
    """End-to-end exercise of MicroStats counters, gauges, timers,
    scatter samples, unique counts and the flush cycle."""
    stats = microstats.MicroStats()
    stats.flush()
    stats.incr("Requests", 50)
    stats.incr("Requests", 10)
    stats.decr("Price", 10)
    # Absolute gauge updates followed by one delta update (41 + 10 = 51).
    stats.gauge("ConcurrentRequest", 36)
    stats.gauge("ConcurrentRequest", 21)
    stats.gauge("ConcurrentRequest", 15)
    stats.gauge("ConcurrentRequest", 41)
    stats.gauge("ConcurrentRequest", 10, delta=True)
    with stats.timer("Latency"):
        time.sleep(0.008)
    with stats.timer("Latency"):
        time.sleep(0.006)
    for val in lst:
        stats.scatter("goods", val)
    # Three distinct users ("b" counted once despite the repeat).
    stats.unique("User", "a")
    stats.unique("User", "b")
    stats.unique("User", "c")
    stats.unique("User", "b")
    # The before_flush hook overrides the gauge value at flush time.
    stats.before_flush("ConcurrentRequest", lambda: 60)
    data = stats.flush()
    print(data)
    assert data["Requests"] == 60
    assert data["Price"] == -10
    assert data["ConcurrentRequest"] == 60
    assert data["ConcurrentRequest_max"] == 60
    assert data["ConcurrentRequest_min"] == 15
    assert data["goods_sum"] == 200.6
    assert data["goods_avg"] == 10.03
    # assert data['goods_max_p95'] == 23
    # assert data['goods_min_p95'] == 3
    assert data["User"] == 3
    # A second flush: counters reset to zero, the gauge keeps its last value.
    data = stats.flush()
    assert data["ConcurrentRequest"] == 60
    assert data["ConcurrentRequest_max"] == 60
    assert data["ConcurrentRequest_min"] == 60
    assert data["Requests"] == 0
    assert data["User"] == 0
def test_stats_group():
    """Counters incremented on different sub-groups must flush independently."""
    group = microstats.StatsGroup()
    group.group1.incr("Click")
    group.group2.incr("Conversion", 5)
    flushed = group.flush()
    assert flushed["group1"]["Click"] == 1
    assert flushed["group2"]["Conversion"] == 5
from flask import Blueprint, request, jsonify
from app.handlers import service_handler as handler
import logging
"""
service_router.py renders all of the backend routes that we are exposing in our
application.
All of these routes will be prefixed with `/api/v1/`, followed by the route
These routes are as follows:
/pull (pull a container) POST request
/settings (get the settings)
/settings (post a new setting)
"""
# All routes registered on this blueprint are served under the /api/v1 prefix.
service_router = Blueprint(
    'service_router',
    __name__,
    url_prefix="/api/v1"
)
@service_router.route('/')
@service_router.route('/ping')
def service_healthcheck():
    """Liveness probe for the API service itself (GET / or GET /ping)."""
    logging.info('calling service_healthcheck')
    return jsonify(handler.handle_healthcheck())
@service_router.route('/docker-status')
def docker_healthcheck():
    """Health probe for the underlying Docker daemon (GET /docker-status)."""
    logging.info('calling docker_healthcheck')
    return jsonify(handler.handle_docker_healthcheck())
@service_router.route('/create', methods=["POST"])
def service_create_docker_container():
    """Create a container from the image named in the ``image`` query
    parameter (POST /create?image=<tag>); responds with a JSON status."""
    logging.info('calling service_create_docker_container')
    message = {}
    if request.method == "POST":
        # NOTE(review): the image tag is read from the query string, not the
        # JSON body -- confirm this matches what the frontend sends.
        image_tag = request.args.get('image')
        success = handler.handle_create_container(image_tag)
        message["success"] = success
        message["message"] = "created container successfully" if success else "failed to create"
    else:
        # Unreachable in practice: the route only accepts POST requests.
        message["message"] = "there was an error"
    return jsonify(message)
@service_router.route('/containers', methods=["GET"])
def service_get_running_containers():
    """List running containers (GET /containers).

    NOTE(review): the handler result is returned directly, without the
    jsonify() wrapper the sibling routes use -- confirm this is intended.
    """
    logging.info('calling service_get_running_containers')
    return handler.handle_get_running_containers()
@service_router.route('/container/<container_id>', methods=["GET"])
def service_get_running_container(container_id):
    """Return details for one container (GET /container/<container_id>)."""
    return jsonify(handler.handle_get_running_container(container_id))
@service_router.route('/stop/<container_id>', methods=["GET"])
def service_stop_container(container_id):
    """Stop a container by id (GET /stop/<container_id>)."""
    return jsonify(handler.handle_stop_container(container_id))
@service_router.route('/start/<container_id>', methods=["GET"])
def service_start_container(container_id):
    """Start a container by id (GET /start/<container_id>)."""
    return jsonify(handler.handle_start_container(container_id))
@service_router.route('/delete/<container_id>', methods=["DELETE"])
def service_remove_container(container_id):
    """Remove a container by id (DELETE /delete/<container_id>).

    Returns a JSON object with a ``message`` key describing the outcome.
    """
    # Guard against a missing id before hitting the Docker handler.
    if container_id is None:
        return jsonify({"message": "invalid container id"})
    success = handler.handle_remove_container(container_id)
    message = (
        "successfully removed {}".format(container_id)
        if success else "unable to remove container"
    )
    # Wrap in jsonify() for consistency with every other route in this
    # blueprint (bare-dict returns only work on Flask >= 1.1).
    return jsonify({"message": message})
#!/usr/local/bin/python3
# https://data36.com/linear-regression-in-python-numpy-polyfit/
import pandas as pd
import matplotlib.pyplot as plt
# %matplotlib inline
import numpy as np
# {{{
# Measured data pairs: per-axis cell count of a Sedov run (n_sedov) and the
# corresponding GPU memory footprint in MiB (MiB_gpu) -- presumably collected
# on a P100 (see the `p100` dict below); TODO confirm.
n_sedov = [
    278,
    288,
    297,
    306,
    314,
    318,
    322,
    330,
    337,
    344,
    350,
    363,
    369,
    375,
    380,
    386,
    391,
    396,
    401,
    406,
    411,
    416,
    420,
]
MiB_gpu = [
    4759,
    5291,
    5731,
    6271,
    6759,
    6979,
    7247,
    7785,
    8275,
    8767,
    9247,
    10277,
    10767,
    11305,
    11741,
    12285,
    12769,
    13257,
    13747,
    14239,
    14777,
    15317,
    15751,
]
# }}}
p100 = {'n_sedov': n_sedov, 'MiB_gpu': MiB_gpu}
mydata = pd.DataFrame(data=p100)
x = mydata.n_sedov
# Fit GPU memory against n**3 -- memory presumably scales with the total
# cell count of the 3-D grid (n per axis); TODO confirm.
xx = [i**3 for i in x]
y = mydata.MiB_gpu
# plt.scatter(x,y)
# Degree-1 (linear) fit of MiB vs n**3: slope and intercept.
model = np.polyfit(xx, y, 1) # array([2.50256443e-03, 2.54777987e+02])
model
# y = model[0] * x + model[1]
predict = np.poly1d(model)
# hours_studied = 20
# predict(hours_studied)
from sklearn.metrics import r2_score
r2_score(y, predict(xx)) # 0.9999902500986138
# NOTE(review): the scatter below is plotted against n while the fitted line
# spans the n**3 range -- the two use different x scales; confirm intent.
x_lin_reg = range(1800000, 6200000)
y_lin_reg = predict(x_lin_reg)
plt.scatter(x, y)
plt.plot(x_lin_reg, y_lin_reg, c = 'r')
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.template import defaultfilters as filters
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard import api
class DeleteFlavor(tables.DeleteAction):
    """Table action that deletes one or more flavors via the nova API."""
    data_type_singular = _("Flavor")
    data_type_plural = _("Flavors")

    def delete(self, request, obj_id):
        # Delegate to the nova API wrapper; obj_id is the flavor id.
        api.nova.flavor_delete(request, obj_id)
class CreateFlavor(tables.LinkAction):
    """Link action opening the create-flavor modal form."""
    name = "create"
    verbose_name = _("Create Flavor")
    url = "horizon:admin:flavors:create"
    classes = ("ajax-modal",)
    icon = "plus"
class UpdateFlavor(tables.LinkAction):
    """Link action opening the edit-flavor modal form."""
    name = "update"
    verbose_name = _("Edit Flavor")
    url = "horizon:admin:flavors:update"
    classes = ("ajax-modal",)
    icon = "pencil"
class ViewFlavorExtras(tables.LinkAction):
    """Link action navigating to the flavor's extra-specs page."""
    name = "extras"
    verbose_name = _("View Extra Specs")
    url = "horizon:admin:flavors:extras:index"
    icon = "pencil"
class ModifyAccess(tables.LinkAction):
    """Link action opening the update workflow at the access-control step."""
    name = "projects"
    verbose_name = _("Modify Access")
    url = "horizon:admin:flavors:update"
    classes = ("ajax-modal",)
    icon = "pencil"

    def get_link_url(self, flavor):
        # Jump straight to the flavor-access step of the update workflow by
        # appending ?step=update_flavor_access to the update URL.
        step = 'update_flavor_access'
        base_url = reverse(self.url, args=[flavor.id])
        param = urlencode({"step": step})
        return "?".join([base_url, param])
class FlavorFilterAction(tables.FilterAction):
    """Case-insensitive name filter for the flavors table."""

    def filter(self, table, flavors, filter_string):
        """Really naive case-insensitive search.

        Returns the flavors whose name contains ``filter_string`` as a list.
        """
        q = filter_string.lower()
        # Materialize a list so the result behaves the same on Python 2 and
        # Python 3 (the filter() builtin returns a lazy iterator on py3).
        return [flavor for flavor in flavors if q in flavor.name.lower()]
def get_size(flavor):
    # RAM is reported by nova in megabytes.
    return _("%sMB") % flavor.ram
def get_swap_size(flavor):
    # swap may be '' or None for flavors with no swap disk; show 0 then.
    return _("%sMB") % (flavor.swap or 0)
def get_disk_size(flavor):
    # Root disk size in gigabytes; 0 when unset.
    return _("%sGB") % (flavor.disk or 0)
def get_ephemeral_size(flavor):
    # The ephemeral disk size lives under a nova API extension attribute.
    return _("%sGB") % getattr(flavor, 'OS-FLV-EXT-DATA:ephemeral', 0)
def get_extra_specs(flavor):
    # Extra specs are fetched lazily from the flavor object.
    return flavor.get_keys()
class FlavorsTable(tables.DataTable):
    """Admin data table listing nova flavors with size, visibility and
    extra-spec columns."""
    name = tables.Column('name', verbose_name=_('Flavor Name'))
    vcpus = tables.Column('vcpus', verbose_name=_('VCPUs'))
    # Size columns use the helper formatters above; data-type 'size' enables
    # size-aware client-side sorting.
    ram = tables.Column(get_size,
                        verbose_name=_('RAM'),
                        attrs={'data-type': 'size'})
    disk = tables.Column(get_disk_size,
                         verbose_name=_('Root Disk'),
                         attrs={'data-type': 'size'})
    ephemeral = tables.Column(get_ephemeral_size,
                              verbose_name=_('Ephemeral Disk'),
                              attrs={'data-type': 'size'})
    swap = tables.Column(get_swap_size,
                         verbose_name=_('Swap Disk'),
                         attrs={'data-type': 'size'})
    flavor_id = tables.Column('id', verbose_name=_('ID'))
    # Boolean flags rendered as "Yes"/"No".
    public = tables.Column("is_public",
                           verbose_name=_("Public"),
                           empty_value=False,
                           filters=(filters.yesno, filters.capfirst))
    extra_specs = tables.Column(get_extra_specs,
                                verbose_name=_("Extra Specs"),
                                link=("horizon:admin:flavors:extras:index"),
                                empty_value=False,
                                filters=(filters.yesno, filters.capfirst))

    class Meta:
        name = "flavors"
        verbose_name = _("Flavors")
        table_actions = (FlavorFilterAction, CreateFlavor, DeleteFlavor)
        row_actions = (UpdateFlavor,
                       ModifyAccess,
                       ViewFlavorExtras,
                       DeleteFlavor)
#%%
#%%
import os
import time
import shutil
import numpy as np
import tensorflow as tf
from PIL import Image
import random
import matplotlib.pyplot as plt
import cv2
from cv2 import cv2

# Exploratory notebook-style script: build a multi-scale feature extractor
# from a frozen ResNet50V2 backbone and poke at its outputs.
scal = 224
sampleModel = tf.keras.applications.ResNet50V2(weights='imagenet',
                                               include_top=False,
                                               input_shape=(scal, scal, 3))
# Freeze the backbone.  Fixed typo: the original "trianable" silently set an
# unused attribute instead of freezing the model.
sampleModel.trainable = False
for l in sampleModel.layers:
    print(l.name)
    if l.name == 'conv4_block5_out':
        print(l)
#%%
# Collect the outputs of four stage-end blocks to build a multi-scale
# (FPN/RPN-style) feature extractor.
c = []
name = ['conv2_block2_out', 'conv3_block3_out', 'conv4_block5_out', 'conv5_block3_out']
i = 0
for l in sampleModel.layers:
    if l.name == name[i]:
        i += 1
        print(l.name)
        c.append(l.output)
    if i == 4:
        break
print(c)
model = tf.keras.models.Model(inputs=sampleModel.input, outputs=c)
tf.keras.utils.plot_model(model, to_file='rennetRpn.png', show_shapes=True, show_layer_names=True)
#%%
model.outputs
#%%
# Fixed: model.layers is a list and cannot be indexed by name; use
# get_layer() for a lookup by layer name.
sampleModel.get_layer('conv4_block5_out')
#%%
img = cv2.imread('hua.jpg')
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
img = img/255.0
img = cv2.resize(img,(224,224))
plt.imshow(img)
o = sampleModel(np.expand_dims(img,0))
# %%
# NOTE(review): sampleModel was built with include_top=False, so `o` is a
# feature map rather than class logits -- the softmax/decode_predictions
# cells below are exploratory and will not yield meaningful class labels.
probs = tf.nn.softmax(o)
probs = probs.numpy()
np.max(probs)
# %%
np.argmax(probs)
# %%
probs
# %%
print('Predicted:', tf.keras.applications.resnet_v2.decode_predictions(o, top=3)[0])
# %%
img.shape
# %%
w = sampleModel.get_weights()
w[0]
# %%
for l in sampleModel.layers:
    print(l.name)
# %%
# %%
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/server/graphql/types/misc.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from __future__ import absolute_import
import datetime
import functools
import king_phisher.geoip as geoip
import king_phisher.ipaddress as ipaddress
import geoip2.errors
import graphene.relay
import graphene.types.utils
import graphql.language.ast
import graphql_relay.connection.arrayconnection
__all__ = ('ConnectionField', 'GeoLocation', 'Plugin', 'PluginConnection', 'RelayNode')
# custom enum types
class FilterOperatorEnum(graphene.Enum):
    """Comparison operators available to GraphQL filter inputs."""
    EQ = 'eq'
    GE = 'ge'
    GT = 'gt'
    LE = 'le'
    LT = 'lt'
    NE = 'ne'
class SortDirectionEnum(graphene.Enum):
    """Sort directions for GraphQL sort inputs.

    NOTE(review): "AESC"/'aesc' looks like a misspelling of "ASC"; it is
    presumably kept for wire compatibility with existing clients -- confirm
    before renaming.
    """
    AESC = 'aesc'
    DESC = 'desc'
# misc definitions
class RelayNode(graphene.relay.Node):
    """Relay Node variant that passes IDs through unencoded.

    Standard relay base64-encodes (type, id) pairs into opaque global IDs;
    this subclass returns the raw local id in both directions instead.
    """
    @classmethod
    def from_global_id(cls, global_id):
        # Identity mapping: the "global" id is the local id.
        return global_id

    @classmethod
    def to_global_id(cls, _, local_id):
        # Identity mapping: ignore the type name, expose the local id as-is.
        return local_id
class ConnectionField(graphene.relay.ConnectionField):
    """Relay connection field resolved from an in-memory iterable.

    Builds the relay page over the full resolved iterable and exposes the
    total element count through the connection's ``total`` attribute.
    """
    @classmethod
    def connection_resolver(cls, resolver, connection, root, info, **kwargs):
        iterable = resolver(root, info, **kwargs)
        _len = len(iterable)
        # Slice bounds come from the standard relay pagination kwargs
        # (first/last/after/before); the whole iterable is the backing slice.
        connection = graphql_relay.connection.arrayconnection.connection_from_list_slice(
            iterable,
            kwargs,
            slice_start=0,
            list_length=_len,
            list_slice_length=_len,
            # partial() injects total= so the connection type receives the
            # overall count alongside the relay-standard arguments.
            connection_type=functools.partial(connection, total=_len),
            pageinfo_type=graphene.relay.connection.PageInfo,
            edge_type=connection.Edge
        )
        # Stash the raw iterable and its length on the connection instance.
        connection.iterable = iterable
        connection.length = _len
        return connection
# custom scalar types
class AnyScalar(graphene.types.Scalar):
    """Input-only scalar accepting float, int or string literals.

    Serialization (output direction) is intentionally unsupported.
    """
    @staticmethod
    def serialize(dt):
        # Output serialization is not supported for this scalar.
        raise NotImplementedError()

    @staticmethod
    def parse_literal(node):
        # Coerce AST literals to native Python numbers where applicable;
        # everything else is passed through as its raw string value.
        if isinstance(node, graphql.language.ast.FloatValue):
            return float(node.value)
        if isinstance(node, graphql.language.ast.IntValue):
            return int(node.value)
        return node.value

    @staticmethod
    def parse_value(value):
        # Variable values are accepted unchanged.
        return value
class DateTimeScalar(graphene.types.Scalar):
    """Scalar for datetimes in '%Y-%m-%dT%H:%M:%S.%f' format.

    Values are parsed into naive ``datetime`` objects; serialization
    passes the value through unchanged.
    """
    @staticmethod
    def serialize(dt):
        # Pass-through: the resolver's value is emitted as-is.
        return dt

    @staticmethod
    def parse_literal(node):
        # Only string AST literals are parsed; other node types yield None.
        if isinstance(node, graphql.language.ast.StringValue):
            return datetime.datetime.strptime(node.value, '%Y-%m-%dT%H:%M:%S.%f')

    @staticmethod
    def parse_value(value):
        return datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')
# custom compound types
class GeoLocation(graphene.ObjectType):
    # GeoIP lookup result for a public IP address.
    city = graphene.Field(graphene.String)
    continent = graphene.Field(graphene.String)
    coordinates = graphene.List(graphene.Float)
    country = graphene.Field(graphene.String)
    postal_code = graphene.Field(graphene.String)
    time_zone = graphene.Field(graphene.String)
    @classmethod
    def from_ip_address(cls, ip_address):
        # Returns None (GraphQL null) for private addresses and for
        # addresses missing from the GeoIP database.
        ip_address = ipaddress.ip_address(ip_address)
        if ip_address.is_private:
            return
        try:
            result = geoip.lookup(ip_address)
        except geoip2.errors.AddressNotFoundError:
            result = None
        if result is None:
            return
        # NOTE(review): assumes the lookup result's keys match this type's
        # field names exactly -- confirm against geoip.lookup().
        return cls(**result)
class Plugin(graphene.ObjectType):
    # GraphQL view of a server plugin's metadata.
    class Meta:
        interfaces = (RelayNode,)
    authors = graphene.List(graphene.String)
    classifiers = graphene.List(graphene.String)
    description = graphene.Field(graphene.String)
    homepage = graphene.Field(graphene.String)
    name = graphene.Field(graphene.String)
    reference_urls = graphene.List(graphene.String)
    title = graphene.Field(graphene.String)
    version = graphene.Field(graphene.String)
    @classmethod
    def from_plugin(cls, plugin):
        # Copy the plugin object's metadata attributes into this type.
        return cls(
            authors=plugin.authors,
            classifiers=plugin.classifiers,
            description=plugin.description,
            homepage=plugin.homepage,
            name=plugin.name,
            reference_urls=plugin.reference_urls,
            title=plugin.title,
            version=plugin.version
        )
    @classmethod
    def resolve(cls, info, **kwargs):
        # Look up a single plugin by its 'name' argument; returns None
        # (GraphQL null) when no plugin matches.
        # NOTE(review): assumes iterating plugin_manager yields
        # (name, plugin) pairs -- confirm against the manager type.
        plugin_manager = info.context.get('plugin_manager', {})
        for _, plugin in plugin_manager:
            if plugin.name != kwargs.get('name'):
                continue
            return cls.from_plugin(plugin)
class PluginConnection(graphene.relay.Connection):
    class Meta:
        node = Plugin
    # Connection-level field exposing the total plugin count.
    total = graphene.Int()
    @classmethod
    def resolve(cls, info, **kwargs):
        plugin_manager = info.context.get('plugin_manager', {})
        # Sort by plugin name (first tuple element) for a stable ordering.
        # NOTE(review): assumes iteration yields (name, plugin) pairs.
        return [Plugin.from_plugin(plugin) for _, plugin in sorted(plugin_manager, key=lambda i: i[0])]
# custom compound input types
class FilterInput(graphene.InputObjectType):
    # Recursive boolean filter tree; "and"/"or" hold nested filters. The
    # dotted-string type references avoid a self-referential definition at
    # class-creation time.
    and_ = graphene.List('king_phisher.server.graphql.types.misc.FilterInput', name='and')
    or_ = graphene.List('king_phisher.server.graphql.types.misc.FilterInput', name='or')
    field = graphene.String()
    value = AnyScalar()
    operator = FilterOperatorEnum()
class SortInput(graphene.InputObjectType):
    # Sort specification: which field to sort on and in which direction.
    field = graphene.String(required=True)
    direction = SortDirectionEnum()
| StarcoderdataPython |
92310 | <reponame>veeramanikandan0305/KA-UP
import zipfile
import sys
import os
import datetime
import re
from distutils.dir_util import copy_tree
def zip_folder(folder_path, output_path):
    """Zip the contents of an entire folder (with that folder included
    in the archive). Empty subfolders will be included in the archive
    as well.

    :param folder_path: directory whose contents are archived
    :param output_path: path of the ``.zip`` file to create

    Exits the process with status 1 on any I/O or zip error.
    """
    parent_folder = os.path.dirname(folder_path)
    # Retrieve the paths of the folder contents.
    contents = os.walk(folder_path)
    # Create the archive before entering the try block so the finally
    # clause can never see an unbound ``zip_file`` (the original raised
    # UnboundLocalError when ZipFile() itself failed).
    zip_file = zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED)
    try:
        for root, folders, files in contents:
            # Include all subfolders (even empty ones), then all files,
            # matching the original per-directory ordering.
            for entry_name in folders + files:
                absolute_path = os.path.join(root, entry_name)
                # os.path.relpath is portable; the original string replace
                # only handled the Windows '\\' separator.
                relative_path = os.path.relpath(absolute_path, parent_folder)
                print("Adding '%s' to archive." % absolute_path)
                zip_file.write(absolute_path, relative_path)
        print("'%s' created successfully." % output_path)
    except (IOError, OSError, zipfile.BadZipfile) as message:
        print(message)
        sys.exit(1)
    finally:
        zip_file.close()
#~ def copy_file():
def backup():
    """Read /backup.txt (lines of "src_path, dest_dir, cron_spec") and
    create zip backups on a daily (x/1), weekly (day/7) or monthly
    (day/30) schedule.
    """
    with open('/backup.txt') as config_file:
        entries = config_file.read().splitlines()
    for index in range(len(entries)):
        backup_list = entries[index].split(',')
        if '/' in backup_list[0]:
            backup_list[0] = backup_list[0].replace(" ", "")
            filename = backup_list[0].split('/')
            file_index = len(filename)
            # Archive base name: "<basename>_<day>_<month>"
            file_name = str(filename[file_index-1]) + "_" + str(datetime.datetime.now().day) + "_" + str(datetime.datetime.now().month)
            fromDirectory = backup_list[0].replace("/" + str(filename[file_index-1]), "")
            fromsrc = backup_list[0]
            file_tempname = str(filename[file_index-1])
            toDirectory = "/backup_temp/" + file_tempname
            # Stage a copy of the source tree before archiving it.
            copy_tree(fromsrc, toDirectory)
            if not re.search(fromDirectory, toDirectory, re.IGNORECASE):
                print(fromDirectory)
                print(toDirectory)
            backup_list[1] = backup_list[1].replace(" ", "")
            cron_time = backup_list[2].replace(" ", "")
            dest = backup_list[1] + file_name + ".zip"
            src = "./backup_temp/" + file_tempname
            cron_time = cron_time.split('/')
            # BUG FIX: the original compared ints with "is" (identity),
            # which only works for CPython's small-int cache; "==" is the
            # correct value comparison.
            if int(cron_time[1]) == 1:
                # NOTE(review): the daily branch archives the original
                # source while weekly/monthly archive the staged copy --
                # preserved as-is, confirm whether this is intentional.
                zip_folder(backup_list[0], dest)
            elif int(cron_time[1]) == 7:
                week_days = ['sunday', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday']
                day_name = datetime.datetime.today().strftime("%A")
                if re.search(day_name, week_days[int(cron_time[0])], re.IGNORECASE):
                    zip_folder(src, dest)
            elif int(cron_time[1]) == 30:
                month_day = datetime.datetime.today().day
                if int(cron_time[0]) == month_day:
                    zip_folder(src, dest)
            else:
                print("WARNING : Please Mention Valid Cron Time for backup on line number " + str(index+1))

if __name__ == '__main__':
    backup()
| StarcoderdataPython |
1685398 | <filename>Python3/1133.py
# Read the two endpoints and normalise their order.
first = int(input())
second = int(input())
low, high = min(first, second), max(first, second)
# Print every value strictly between them whose remainder mod 5 is 2 or 3.
for value in range(low + 1, high):
    if value % 5 in (2, 3):
        print(value)
| StarcoderdataPython |
80212 | # -*- coding: utf-8 -*-
"""
@author : <NAME>
@github : https://github.com/tianpangji
@software : PyCharm
@file : routing.py
@create : 2020/7/29 20:21
"""
import os
import django
# The settings module must be configured before any Django/Channels
# import that touches settings is executed.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'drf_admin.settings.dev')
from channels.routing import ProtocolTypeRouter, URLRouter
from django.urls import re_path
from drf_admin.utils.websocket import TokenAuthMiddleware
from drf_admin.apps.monitor.consumers import service
# Populate Django's app registry so the consumer modules can use models.
django.setup()
# ASGI entry point: websocket connections are token-authenticated and
# routed to the server-monitor resources consumer.
application = ProtocolTypeRouter({
    'websocket': TokenAuthMiddleware(
        URLRouter([
            re_path(r'^monitor/service', service.ResourcesConsumer),
        ])
    )
})
| StarcoderdataPython |
1757559 | <reponame>pk-hackerrank/python
# Importing phase and polar from cmath
# Complex form a+bj, Polar coordinates (r,phi)
from cmath import phase, polar
# Reading input String
# Read the complex number as text, e.g. "1+2j".
raw_value = input()
# Convert to a complex object and unpack its polar form (r, phi).
modulus, angle = polar(complex(raw_value))
# Print r, then phi, each on its own line.
print(modulus)
print(angle)
86672 | <reponame>3togo/skeleton-tracing
import trace_skeleton
import cv2
import random
import os
import sys
def get_fname(fname):
    """Return the first existing path found by trying *fname* relative to
    the current directory and then up to four parent directories.

    Returns None when the file cannot be found anywhere (the original
    fell off the end of the loop and returned None only implicitly).
    """
    for i in range(5):
        if os.path.exists(fname):
            return fname
        fname = os.path.join("..", fname)
        print(fname)  # show each candidate path tried
    return None
fname = get_fname("test_images/opencv-thinning-src-img.png")
if not fname:
    # get_fname() returns None when the image is not found here or in any
    # searched parent directory.
    print(f"{fname} not found")
    sys.exit()
im = cv2.imread(fname,0)
# Binarise: any pixel above 128 becomes pure white (255).
_,im = cv2.threshold(im,128,255,cv2.THRESH_BINARY);
# cv2.imshow("",im);cv2.waitKey(0)
# Trace the binary image into a set of centre-line polylines.
polys = trace_skeleton.from_numpy(im);
for l in polys:
    # Draw each polyline in a random (dark-ish) colour for inspection.
    c = (200*random.random(),200*random.random(),200*random.random())
    for i in range(0,len(l)-1):
        cv2.line(im,(l[i][0],l[i][1]),(l[i+1][0],l[i+1][1]),c)
# Show the annotated image until a key is pressed.
cv2.imshow('',im);cv2.waitKey(0)
| StarcoderdataPython |
1639694 | from os import error
from instagrapi import Client
from flask import Flask,jsonify
import json
import random
print('Login in..')
def write_file(data, filename):
    """Serialise *data* as JSON and write it to *filename*.

    Uses a context manager so the handle is always closed, replacing the
    original manual open/try/finally dance.
    """
    with open(filename, "w") as fh:
        fh.write(json.dumps(data))
def read_file(filename):
    """Load and return the JSON content of *filename*.

    BUG FIX: the original never closed the file handle; the context
    manager guarantees it is released.
    """
    with open(filename, "r") as fh:
        return json.load(fh)
ACCOUNT_USERNAME = 'maidenvaper'
ACCOUNT_PASSWORD = '<PASSWORD>'
# The cached-session file is named after the account.
IG_CREDENTIAL = ACCOUNT_USERNAME
# # IG_CREDENTIAL = random.choice(ACCOUNT_USERNAMES)
print(IG_CREDENTIAL)
cl = None
try:
    # Re-use a cached session so repeated runs do not trigger fresh
    # Instagram logins.
    cl = Client(read_file(IG_CREDENTIAL))
    print("valid credentials.json")
except:
    # Missing/corrupt cache: start with a clean client.
    print("invalid credentials.json")
    cl = Client()
    print('done')
# Log in (refreshes the session when the cache was invalid) and persist
# the resulting settings for the next run.
cl.login(ACCOUNT_USERNAME, ACCOUNT_PASSWORD)
print("valid login")
write_file(cl.get_settings(), IG_CREDENTIAL)
app = Flask(__name__)
@app.route("/")
def index():
    # Trivial liveness endpoint.
    try:
        return "Hello World!"
    except:
        # Effectively unreachable; kept from the original code.
        return 'Please Connect to internet'
@app.route("/favicon.ico")
def index2():
    # Favicon handler; returns a literal string, not an actual icon file.
    try:
        print('faviconn')
        return "favicon.ico"
    except:
        # Effectively unreachable; kept from the original code.
        return 'Please Connect to internet'
@app.route("/user/<username>")
def user(username):
    """Return the profile information for *username* as a dict."""
    info = cl.user_info(cl.user_id_from_username(username)).dict()
    print(type(info))
    return info
@app.route('/followers/<username>')
def followers(username):
    # Follower list; the instagrapi repr() is massaged into JSON text by a
    # chain of textual substitutions, then parsed and pruned.
    # NOTE(review): extremely fragile -- this depends on the exact repr of
    # UserShort/HttpUrl objects and will break if instagrapi changes it.
    # Also note `list` shadows the builtin throughout.
    user_id = cl.user_id_from_username(username)
    medias = cl.user_followers(user_id)
    medias=str(medias)
    list=medias.replace("), 'media_type'","', 'media_type'")
    list=list.replace('HttpUrl(','')
    list=list.replace("='",":'")
    list=list.replace(",'username'","','username'")
    list=list.replace("UserShort(pk","UserShort{pk")
    list=list.replace("pk=","pk:")
    list=list.replace(",username","',username")
    list=list.replace("stories=[])","stories=[]}")
    list=list.replace("UserShort","")
    list=list.replace(")","")
    list=list.replace(", stories=[]","")
    list=list.replace("full_name=","full_name:")
    list=list.replace("'",'"')
    list=list.replace(",",', "')
    list=list.replace(":",'" :')
    list=list.replace('"https" :','"https:')
    list=list.replace('{','{"')
    list=list.replace('_ ','_')
    print(list)
    data=json.loads(list)
    temp = []
    for t in data:
        # Drop unwanted keys (note the leading spaces produced by the
        # substitution chain above) from each follower record.
        data[t].pop(" full_name",None)
        data[t].pop(' scheme',None)
        data[t].pop(' host',None)
        data[t].pop(' tld',None)
        data[t].pop(' host_type',None)
        data[t].pop(' path',None)
        data[t].pop(' query',None)
        print(data[t])
        print(type(data[t]))
        print(type(data[t]))
        temp.append(data[t])
    return jsonify(temp)
@app.route('/following/<username>')
def following(username):
    # Following list serialised via repr() munging -- same fragility
    # caveat as the /followers endpoint. Note that, unlike /followers,
    # the munged string itself is returned without json.loads/pruning.
    user_id = cl.user_id_from_username(username)
    medias = cl.user_following(user_id)
    medias=str(medias)
    list=medias.replace("), 'media_type'","', 'media_type'")
    list=list.replace('HttpUrl(','')
    list=list.replace("='",":'")
    list=list.replace(",'username'","','username'")
    list=list.replace("UserShort(pk","UserShort{pk")
    list=list.replace("pk=","pk:")
    list=list.replace(",username","',username")
    list=list.replace("stories=[])","stories=[]}")
    list=list.replace("UserShort","")
    list=list.replace(")","")
    list=list.replace(", stories=[]","")
    list=list.replace("full_name=","full_name:")
    list=list.replace("{","{'")
    list=list.replace(":","':")
    list=list.replace('" ','" ')
    # NOTE(review): json.dumps result is discarded (no-op).
    json.dumps(list)
    return list
@app.route("/hastag/top/<hastag>")
def hastag_top(hastag):
    # Top 50 posts for a hashtag, with the (potentially huge) caption
    # text stripped from each entry. The parameter name is rebound to the
    # result list mid-function.
    medias = cl.hashtag_medias_top(str(hastag),50)
    hastag=[]
    for aap in medias:
        data=aap.dict()
        data.pop("caption_text",None)
        print(data)
        hastag.append(data)
    # NOTE(review): `list` below is computed but never used; the raw list
    # of dicts is returned instead.
    list=str(hastag)
    list=list.replace("), 'media_type'","', 'media_type'")
    list=list.replace('HttpUrl(','')
    json.dumps(hastag)
    return hastag
@app.route("/hastag/recent/<hastag>")
def hastag_recent(hastag):
    # Most recent 50 posts for a hashtag, each serialised to a JSON
    # string; the list repr of those strings is returned as text.
    medias = cl.hashtag_medias_recent(str(hastag),50)
    hastag=[]
    for aap in medias:
        data=aap.json()
        print(data)
        hastag.append(data)
    hastag=str(hastag)
    # NOTE(review): json.dumps result is discarded (no-op).
    json.dumps(hastag)
    return hastag
@app.route("/stories/<username>")
def user_stories(username):
    # Current stories of a user as per-item JSON strings; quote
    # characters are then stripped from the outer list repr.
    user_id = cl.user_id_from_username(username)
    data= cl.user_stories(user_id)
    story=[]
    for media in data:
        media=media.json()
        story.append(media)
    list=str(story)
    list=list.replace("'","")
    # Second replace is a no-op once quotes are gone; kept as-is.
    list=list.replace("']","]")
    json.dumps(list)
    return list
@app.route("/posts/<username>")
def user_posts(username):
    # All posts of a user, with caption text removed, returned as quote-
    # stripped list-repr text.
    user_id = cl.user_id_from_username(username)
    medias = cl.user_medias(user_id)
    list=[]
    for medis in medias:
        # print(type(medias))
        # print(type(medis))
        data=medis.json()
        # print(type(data))
        # Round-trip through a dict purely to drop the caption.
        dictiy=json.loads(data)
        # print(type(dictiy))
        dictiy.pop("caption_text", None)
        data=json.dumps(dictiy)
        list.append(data)
    list=str(list)
    list=list.replace("'","")
    list=list.replace("']","]")
    json.dumps(list)
    return list
@app.route("/media-comments/<media_id>")
def media_comment(media_id):
    """Return the comments on a media item as loosely JSON-ified text."""
    fetched = cl.media_comments(str(media_id))
    collected = []
    for comment in fetched:
        serialised = comment.json()
        print(serialised)
        collected.append(serialised)
    # Strip quote characters out of the list repr, as the original did.
    payload = str(collected).replace("'", "").replace("']", "]")
    json.dumps(payload)  # retained no-op from the original implementation
    return payload
@app.route("/media-likes/<media_id>")
def media_like(media_id):
    """Return the users who liked a media item as loosely JSON-ified text."""
    likers = cl.media_likers(str(media_id))
    collected = []
    for liker in likers:
        serialised = liker.json()
        print(serialised)
        collected.append(serialised)
    # Same textual munging as the original: drop backslashes then quotes.
    payload = str(collected).replace("\\", "").replace("'", "").replace("']", "]")
    json.dumps(payload)  # retained no-op from the original implementation
    return payload
@app.route('/me-not-following-back/<username>')
def me_not_following_back(username):
    # Followers of `username` whom the account does not follow back.
    user_id = cl.user_id_from_username(username)
    followers=cl.user_followers(user_id)
    following=cl.user_following(user_id)
    data=[]
    for items in followers:
        if items not in following:
            # One extra API call per non-mutual follower.
            users=cl.user_info(items).json()
            data.append(users)
            print(users)
    data=str(data)
    list=data.replace("'","")
    # list=list.replace("']","]")
    json.dumps(list)
    return list
@app.route('/they-not-following-back/<username>')
def they_not_following_back(username):
    # Accounts `username` follows that do not follow back.
    user_id = cl.user_id_from_username(username)
    followers=cl.user_followers(user_id)
    following=cl.user_following(user_id)
    data=[]
    for items in following:
        if items not in followers:
            users=cl.user_info(items)
            data.append(users)
            print(users)
    data=str(data)
    # NOTE(review): `list` is computed but the unstripped `data` string is
    # returned -- inconsistent with me_not_following_back; confirm intent.
    list=data.replace("'","")
    list=list.replace("']","]")
    json.dumps(data)
    return data
if __name__ == '__main__':
    app.run(debug=True,host='0.0.0.0',port=8080)
| StarcoderdataPython |
64755 | <filename>sbdata/tasks.py
import ast
import dataclasses
import json
import re
import sys
import typing
from sbdata.repo import find_item_by_name, Item
from sbdata.task import register_task, Arguments
from sbdata.wiki import get_wiki_sources_by_title
@dataclasses.dataclass
class DungeonDrop:
    """One item drop entry for a dungeon chest on a given floor."""
    item: Item
    floor: int
    chest: str
    drop_chances: dict[str, str]

    def get_drop_chance(self, has_s_plus: bool, talisman_level: int, boss_luck: int):
        """Return the drop chance string for the given run statistics, or
        None when no entry exists for that combination."""
        score_part = 'S+' if has_s_plus else 'S'
        talisman_part = 'ABCD'[talisman_level]
        # Count how many luck thresholds are at or above the boss luck.
        luck_bucket = sum(1 for threshold in (0, 1, 3, 5, 10) if threshold >= boss_luck)
        return self.drop_chances.get(score_part + talisman_part + str(luck_bucket))
@register_task("Fetch Dungeon Loot")
def fetch_dungeon_loot(args: Arguments):
    # Parse the wiki's per-floor "Dungeon Chest Table/Row" templates into
    # DungeonDrop records for all seven Catacombs floors.
    items = []
    for floor in get_wiki_sources_by_title(*[f'Template:Catacombs Floor {f} Loot Master' for f in ['I', 'II', 'III', 'IV', 'V', 'VI', 'VII']]).values():
        for template in floor.filter_templates():
            if template.name.strip() == 'Dungeon Chest Table/Row':
                item = None
                ifloor = None
                chest = None
                drop_chances = {}
                for param in template.params:
                    attr_name = param.name.nodes[0].strip()
                    attr_value = param.value.nodes[0].strip()
                    if attr_name == 'item':
                        # First 'item'/'customlink' wins; later ones ignored.
                        if item is None:
                            item = find_item_by_name(attr_value)
                    elif attr_name == 'customlink':
                        if item is None:
                            # The link target after '#' is the item name.
                            item = find_item_by_name(attr_value.split('#')[-1])
                    elif attr_name == 'chest':
                        chest = attr_value
                    elif attr_name == 'floor':
                        ifloor = int(attr_value)
                    elif attr_name.startswith("S"):
                        # Drop-chance keys such as "S+B3" (score/talisman/luck).
                        drop_chances[attr_name] = attr_value
                if item is None or ifloor is None or chest is None:
                    print('WARNING: Missing data for item: ' + str(template))
                else:
                    items.append(DungeonDrop(item, ifloor, chest, drop_chances))
    return items
| StarcoderdataPython |
63090 | # -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: parser_funs
Description :
Author : <NAME>
date:
-------------------------------------------------
Change Activity:
2019/7/28:
-------------------------------------------------
"""
import torch
import numpy as np
def sdp_decoder(semgraph_probs, sentlens):
    '''
    Decode semantic-dependency-graph label probabilities into a combined
    head/label prediction matrix.

    semhead_probs type:ndarray, shape:(n,m,m)
    semgraph_probs: (n x m x m x c) per-label probabilities.
    sentlens: real sentence lengths, used to mask padding.
    Returns an (n x m x m) int matrix: head indicator plus the predicted
    label index for each selected arc.
    '''
    # Edge probability = sum of per-label probabilities; threshold at 0.5.
    semhead_probs = semgraph_probs.sum(axis=-1)
    semhead_preds = np.where(semhead_probs >= 0.5, 1, 0)
    masked_semhead_preds = np.zeros(semhead_preds.shape, dtype=np.int32)
    # Zero out all predictions beyond each sentence's actual length.
    for i, (sem_preds, length) in enumerate(zip(semhead_preds, sentlens)):
        masked_semhead_preds[i, :length, :length] = sem_preds[:length, :length]
    # Diagnostic counters for the structural repairs performed below.
    n_counts = {'no_root': 0, 'multi_root': 0, 'no_head': 0, 'self_circle': 0}
    for i, length in enumerate(sentlens):
        # Remove self-loops: a token may not be its own head.
        for j in range(length):
            if masked_semhead_preds[i, j, j] == 1:
                n_counts['self_circle'] += 1
                masked_semhead_preds[i, j, j] = 0
        # Enforce exactly one root (an arc into column 0), choosing the
        # most probable candidate when zero or several were predicted.
        n_root = np.sum(masked_semhead_preds[i, :, 0])
        if n_root == 0:
            n_counts['no_root'] += 1
            new_root = np.argmax(semhead_probs[i, 1:, 0]) + 1
            masked_semhead_preds[i, new_root, 0] = 1
        elif n_root > 1:
            n_counts['multi_root'] += 1
            kept_root = np.argmax(semhead_probs[i, 1:, 0]) + 1
            masked_semhead_preds[i, :, 0] = 0
            masked_semhead_preds[i, kept_root, 0] = 1
        # Guarantee every token has at least one head; position 0 (the
        # virtual root token) is exempted.
        n_heads = masked_semhead_preds[i, :length, :length].sum(axis=-1)
        n_heads[0] = 1
        for j, n_head in enumerate(n_heads):
            if n_head == 0:
                n_counts['no_head'] += 1
                # Exclude the self-loop before picking the best head.
                semhead_probs[i, j, j] = 0
                new_head = np.argmax(semhead_probs[i, j, 1:length]) + 1
                masked_semhead_preds[i, j, new_head] = 1
    # (n x m x m x c) -> (n x m x m)
    semrel_preds = np.argmax(semgraph_probs, axis=-1)
    # (n x m x m) (*) (n x m x m) -> (n x m x m)
    semgraph_preds = masked_semhead_preds * semrel_preds
    result = masked_semhead_preds + semgraph_preds
    return result
def parse_semgraph(semgraph, sentlens):
    """Convert a batched head/label matrix into per-sentence arc lists.

    For each word (the virtual root at index 0 is skipped) the result
    holds a list of [head_index, label_index] pairs, where a stored value
    of 0 means "no arc" and label indices are shifted down by one.
    """
    parsed_sentences = []
    for sentence, length in zip(semgraph.tolist(), sentlens):
        word_arcs = []
        for word_row in sentence[1:length]:
            arcs = [
                [head_idx, deprel - 1]
                for head_idx, deprel in enumerate(word_row[:length])
                if deprel != 0
            ]
            word_arcs.append(arcs)
        parsed_sentences.append(word_arcs)
    return parsed_sentences
| StarcoderdataPython |
1667817 | #!/usr/bin/env python3
#
# Copyright (c) 2019, Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
import argparse
import logging
import time
from damlassistant import get_package_id, start_trigger_service_in_background, kill_process, \
add_trigger_to_service, wait_for_port, catch_signals, DEFAULT_TRIGGER_SERVICE_PORT
dar = 'target/bond-issuance-triggers.dar'
triggers_with_parties = [
("Bank1", "DA.RefApps.Bond.Triggers.InvestorSettlementTrigger:investorSettlementTrigger"),
("Bank1", "DA.RefApps.Bond.Triggers.PlaceBidTrigger:placeBidTrigger"),
("Bank2", "DA.RefApps.Bond.Triggers.InvestorSettlementTrigger:investorSettlementTrigger"),
("Bank2", "DA.RefApps.Bond.Triggers.PlaceBidTrigger:placeBidTrigger"),
("Bank3", "DA.RefApps.Bond.Triggers.InvestorSettlementTrigger:investorSettlementTrigger"),
("Bank3", "DA.RefApps.Bond.Triggers.PlaceBidTrigger:placeBidTrigger"),
("Issuer", "DA.RefApps.Bond.Triggers.CommissionTrigger:commissionTrigger"),
("Issuer", "DA.RefApps.Bond.Triggers.RedemptionFinalizeTrigger:redemptionFinalizeTrigger"),
("AuctionAgent", "DA.RefApps.Bond.Triggers.AuctionFinalizeTrigger:auctionFinalizeTrigger"),
("Csd", "DA.RefApps.Bond.Triggers.RedemptionCalculationTrigger:redemptionCalculationTrigger"),
]
parser = argparse.ArgumentParser()
parser.add_argument('ledger_port')
args = parser.parse_args()
logging.basicConfig(level=logging.DEBUG)
wait_for_port(port=args.ledger_port, timeout=30)
service = start_trigger_service_in_background(dar=dar, ledger_port=args.ledger_port)
try:
catch_signals()
package_id = get_package_id(dar)
wait_for_port(port=DEFAULT_TRIGGER_SERVICE_PORT, timeout=30)
for party, triggerName in triggers_with_parties:
add_trigger_to_service(party=party, package_id=package_id, trigger=triggerName)
def print_message_after_triggers_started(m: str):
time.sleep(3)
print(m)
print_message_after_triggers_started('\nPress Ctrl+C to stop...')
service.wait()
logging.error(f"Trigger service died unexpectedly:\n{service.stderr}")
finally:
kill_process(service)
| StarcoderdataPython |
97089 | <reponame>hoogamaphone/world-manager<filename>world_manager/blueprints/user/forms.py<gh_stars>0
from flask_wtf.form import FlaskForm
class LoginForm(FlaskForm):
    # Placeholder login form; fields to be added. A FlaskForm subclass is
    # still required even when empty (e.g. for CSRF token handling).
    pass
44575 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Contains a context manager for temporarily introducing an environment var.
import os
import contextlib
@contextlib.contextmanager
def use_environment_variable(key, value):
    """ Used to temporarily introduce a new environment variable as if it
    was set by the execution environment.

    BUG FIX: the variable is now removed even when the managed block
    raises; the original left it set on any exception inside the body.

    :param str key: key of environment variable
    :param str value: value of environment variable
    """
    # Preconditions: the value must be a string and the key must not
    # already exist (we would otherwise clobber the caller's environment).
    assert type(value) == str
    assert key not in os.environ
    os.environ[key] = value
    try:
        yield
    finally:
        # Always restore the environment; pop with a default so a body
        # that deleted the variable itself does not raise here.
        os.environ.pop(key, None)
| StarcoderdataPython |
1684815 | <reponame>reddit-pygame/minigolf-prototype
"""
The main function is defined here. It simply creates an instance of
tools.Control and adds the game states to its dictionary using
tools.setup_states. There should be no need (theoretically) to edit
the tools.Control class. All modifications should occur in this module
and in the prepare module.
"""
from . import prepare,tools
from .states import splash, putting, swinging, spectating
from .states import view_scorecard, hole_start, ball_placement
def main():
    """Build the game's state machine and hand control to the main loop."""
    control = tools.Control(prepare.ORIGINAL_CAPTION)
    control.setup_states(
        {
            "SPLASH": splash.Splash(),
            "PUTTING": putting.Putting(),
            "SWINGING": swinging.Swinging(),
            "SPECTATING": spectating.Spectating(),
            "SCORECARD": view_scorecard.ViewScorecard(),
            "HOLESTART": hole_start.HoleStart(),
            "BALLPLACEMENT": ball_placement.BallPlacement(),
        },
        "SPLASH",
    )
    control.main()
| StarcoderdataPython |
3379208 | #!/usr/bin/env python
import argparse
import datetime as dt
import os.path
import sys
import ait.core.log as log
'''
Convert MPS Seq files to AIT formatted sequence files
'''
VALID_HEADER_KEYS = [
    'gap',
    'on_board_filename',
    'on_board_path',
    'upload_type'
]

def extract_seq_header(input_file):
    ''' Extract Seq file header setting values

    Seq files can start with a header specifying the values for configurable
    attributes. This extracts those values and returns key=value pairs as
    a dictionary.

    Note, this reads lines from the open input file handle until it encounters
    a line not starting with a comment indicator ';' (consuming that line).
    Ensure the file handle points to the beginning of the SEQ file.

    Fixes over the original implementation:
    - Python 3 compatible iteration (``next(input_file)`` instead of the
      Python 2 only ``input_file.next()``).
    - Comment lines without '=' no longer raise ValueError (``str.index``
      never returns -1; the original confused it with ``str.find``).
    - Keys and values are stripped so padded headers like
      "; key = value" are recognised.

    Args:
        input_file: (file handle) The open file handle from which lines
            should be read.

    Returns:
        Dictionary containing the key=value pairs from the header
    '''
    header = {}
    while True:
        try:
            line = next(input_file)
        except StopIteration:
            # Header-only (or empty) file: nothing more to read.
            break
        if not line.startswith(';'):
            break
        line = line.split(';')[-1]
        if '=' in line:
            # partition() keeps '=' characters inside the value intact.
            key, _, value = line.partition('=')
            key = key.strip()
            if key in VALID_HEADER_KEYS:
                header[key] = value.strip()
    return header
def decode_sequence_time(time, prev_time=None):
    ''' Decode a MPS Seq time into a datetime object

    Decode an absolute ('A' prefix) or relative ('R' prefix) MPS Seq
    command time string into an absolute datetime. Relative times require
    a previous absolute time to offset from.

    Args:
        time: (string) A MPS Seq command time string to convert into
            a datetime object.
        prev_time: (datetime) A datetime object from which a relative time
            command time will be calculated. Required if `time` is a
            relative command time.

    Returns:
        A datetime object representing the time string

    Raises:
        TypeError: If prev_time is not supplied or is not a datetime object
            and time is a relative command time.
        ValueError: If time has a time code other than `A` or `R`.
    '''
    time_code, remainder = time[0], time[1:]
    # Normalise the sub-second separator ("H:M:S.mmm" -> "H:M:S:mmm"),
    # appending a zero sub-second field when none is present.
    remainder = remainder.replace('.', ':') if '.' in remainder else remainder + ':000'
    if time_code == 'A':
        # Absolute time: day-of-year timestamp.
        return dt.datetime.strptime(remainder, '%Y-%jT%H:%M:%S:%f')
    if time_code != 'R':
        msg = 'Invalid time code "{}" in sequence time'.format(time_code)
        log.error(msg)
        raise ValueError(msg)
    # Relative time: an offset from prev_time with an optional "<days>T"
    # prefix in front of the clock fields.
    if not prev_time or type(prev_time) is not type(dt.datetime.now()):
        msg = (
            'Previous time not specified or incorrect format provided '
            'when given a relative command'
        )
        log.error(msg)
        raise TypeError(msg)
    if 'T' in remainder:
        day_part, clock_part = remainder.split('T')
        days = int(day_part)
    else:
        days, clock_part = 0, remainder
    hours, mins, secs, msecs = (int(part) for part in clock_part.split(':'))
    # NOTE(review): the final field is treated as milliseconds here while
    # the absolute branch parses it as a strptime %f fraction; preserved
    # from the original implementation.
    return prev_time + dt.timedelta(
        days=days, hours=hours, minutes=mins, seconds=secs, milliseconds=msecs
    )
def convert_sequence(input_file, output_file_path):
    ''' Convert a MPS Seq file into absolute and relative time AIT sequences

    Args:
        input_file: (file object) Input MPS Seq file
        output_file_path: (string) Output file path excluding file extension.
            This path / name will be used to write an Relative Time Sequence
            (RTS) and Absolute Time Sequence (ATS) version of the sequence.
    '''
    rts_path = output_file_path + '_rts.txt'
    ats_path = output_file_path + '_ats.txt'
    rts_out = open(rts_path, 'w')
    ats_out = open(ats_path, 'w')
    prev_time = None
    for line in input_file:
        # Skip full-line comments and strip inline comments/quotes/commas.
        if line.startswith(';'): continue
        clean_line = line.split(';')[0].strip()
        clean_line = clean_line.replace('"', '')
        clean_line = clean_line.replace(',', '')
        split_line = clean_line.split(' ')
        time, command = split_line[0], split_line[1:]
        # Relative times are resolved against the previous command time;
        # the first command therefore must carry an absolute time.
        time = decode_sequence_time(time, prev_time)
        if prev_time is None: prev_time = time
        ats_out.write('{} {}\n'.format(time.strftime('%Y-%m-%dT%H:%M:%S.%f'), ' '.join(command)))
        time_delta = time - prev_time
        # NOTE(review): `microseconds * 1000` looks wrong -- it yields a
        # 9-digit "fraction" (e.g. 0.5s -> "0.500000000"); confirm the
        # intended RTS sub-second format before changing.
        second_offset = '{}.{}'.format(int(time_delta.total_seconds()), time_delta.microseconds * 1000)
        rts_out.write('{} {}\n'.format(second_offset, ' '.join(command)))
        prev_time = time
    rts_out.close()
    ats_out.close()
if __name__ == '__main__':
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter
    )

    parser.add_argument(
        'inputseq',
        help='MPS Seq formatted input file'
    )

    parser.add_argument(
        '-o', '--output-name',
        help=(
            "Output file path/name for converted sequence. This should not "
            "include a file extension. File extension and ATS/RTS "
            "identifier will be added automatically. Output name preference "
            "is in order of this argument's value, MPS Seq header's "
            "'on_board_filename' value, or default 'seq_out'."
        )
    )

    args = parser.parse_args()

    in_file = args.inputseq
    if not os.path.exists(in_file):
        log.error('Input MPS Sequence file does not exist.')
        sys.exit(1)

    # First pass: read only the header to determine the output name.
    with open(in_file, 'r') as input_file:
        seq_header = extract_seq_header(input_file)

    out_file = 'seq_out'
    seq_header_outpath = seq_header.get('on_board_filename', None)
    if args.output_name:
        # BUG FIX: the original referenced the undefined name
        # `arg_outpath`, raising NameError whenever -o was supplied.
        out_file = args.output_name.strip()
    elif seq_header_outpath:
        out_file = seq_header_outpath.split('.')[0].strip()

    # Second pass: convert the full sequence.
    with open(in_file, 'r') as input_file:
        convert_sequence(input_file, out_file)

    log.end()
| StarcoderdataPython |
64104 | # ---
# jupyter:
# jupytext:
# formats: ipynb,py
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.9.1+dev
# kernelspec:
# display_name: Python [conda env:annorxiver]
# language: python
# name: conda-env-annorxiver-py
# ---
# # Re-Run Analyses with Polka et. al. Subset
# This notebook was created in response to Polka et al. Group's inquiry on training a logistic regression model on preprints posted recently rather than preprints from 2019 and below.
# Overall their subset can be separated with a few features.
# +
from pathlib import Path
import sys
from gensim.models import Word2Vec
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
import pandas as pd
import plotnine as p9
import requests
from scipy.spatial.distance import cdist
from scipy.stats import linregress
from sklearn.model_selection import GridSearchCV
from sklearn.linear_model import LogisticRegressionCV, LogisticRegression
from sklearn.preprocessing import StandardScaler
from sklearn.tree import DecisionTreeClassifier, export_graphviz
import spacy
import tqdm
from annorxiver_modules.document_helper import generate_doc_vector
mpl.rcParams["figure.dpi"] = 250
# -
# # Random BioRxiv Sample
manual_papers_df = pd.read_csv(str(Path("output/all_pairs_2021-02-11.csv")))
manual_papers_df.head().T
# Query the bioRxiv API for how many preprints were posted in the window.
api_url = "https://api.biorxiv.org/details/biorxiv/2020-01-01/2020-04-30"
response = requests.get(api_url)
content = response.json()
total_papers = content["messages"][0]["total"]
total_papers
# Fixed seed so the random 100-paper sample is reproducible.
np.random.seed(100)
selected_biorxiv_papers = np.random.randint(0, total_papers, 100)
selected_biorxiv_papers.sort()
selected_biorxiv_papers
# The API pages results 100 at a time; group selected indices by page.
paper_cursor = {}
for paper in selected_biorxiv_papers:
    cursor = int(np.ceil(int(paper / 100)))
    if cursor not in paper_cursor:
        paper_cursor[cursor] = []
    paper_cursor[cursor].append(paper)
paper_cursor
published_doi_map = []
for paper in tqdm.tqdm(paper_cursor):
    api_url = f"https://api.biorxiv.org/details/biorxiv/2020-01-01/2020-04-30/{paper}"
    response = requests.get(api_url)
    content = response.json()
    collection = content["collection"]
    for paper_idx in paper_cursor[paper]:
        user_doi = collection[paper_idx % 100]["doi"]
        file_name = user_doi.split("/")[-1]
        # Fetch all versions of the paper; keep the latest one.
        api_url = f"https://api.biorxiv.org/details/biorxiv/{user_doi}"
        response = requests.get(api_url)
        content = response.json()
        latest_paper = content["collection"][-1]
        version_count = len(content["collection"])
        doc_url = "http://biorxiv.org/content"
        file_url = f"{doc_url}/early/{latest_paper['date'].replace('-', '/')}/{file_name}.source.xml"
        response = requests.get(file_url)
        # Save the source XML named "<doi-suffix>_v<version>.xml".
        with open(
            f"output/biorxiv_xml_files_recent/{file_name}_v{version_count}.xml", "wb"
        ) as outfile:
            outfile.write(response.content)
# # Document Embeddings
# ## Convert New biorxiv subset
biorxiv_documents = [
    Path(x.name) for x in list(Path("output/biorxiv_xml_files_recent").rglob("*xml"))
]
# XPath selecting abstract and body text for the document vectors.
biorxiv_xpath_str = "//abstract/p|//abstract/title|//body/sec//p|//body/sec//title"
word_model = Word2Vec.load(
    str(Path("../word_vector_experiment/output/word2vec_models/300/biorxiv_300.model"))
)
# Embed each downloaded XML document with the 300-d word2vec model.
biorxiv_document_map = {
    document: generate_doc_vector(
        word_model,
        document_path=str(Path("output/biorxiv_xml_files_recent") / document),
        xpath=biorxiv_xpath_str,
    )
    for document in tqdm.tqdm_notebook(biorxiv_documents)
}
# +
# Tabulate the embeddings as one row per document (feat_0..feat_299).
biorxiv_vec_df = (
    pd.DataFrame.from_dict(biorxiv_document_map, orient="index")
    .rename(columns={col: f"feat_{col}" for col in range(int(300))})
    .rename_axis("document")
    .reset_index()
)
biorxiv_vec_df.to_csv(
    "output/random_recent_biorxiv_subset_embeddings.tsv", sep="\t", index=False
)
biorxiv_vec_df.head().T
# -
# ## Load the Documents
polka_preprints_df = pd.read_csv("output/polka_et_al_biorxiv_embeddings.tsv", sep="\t")
polka_preprints_df.head()
pca_components = pd.read_csv(
    Path("../pca_association_experiment/output/word_pca_similarity/pca_components.tsv"),
    sep="\t",
)
pca_components.head()
# ## PCA Components
# This section aims to see which principal components have a high association with Polka et al's subset. Furthermore, we also aim to see if we can use linear models to explain which PCs affect preprint prediction.
# Cosine similarity between each Polka preprint and each PC direction.
document_pca_sim = 1 - cdist(
    polka_preprints_df.drop("document", axis=1).values, pca_components.values, "cosine"
)
print(document_pca_sim.shape)
document_pca_sim
document_to_pca_map = {
    document: document_pca_sim[idx, :]
    for idx, document in enumerate(polka_preprints_df.document.tolist())
}
# One row per document, columns pc1..pc300, labelled "polka".
polka_pca_sim_df = (
    pd.DataFrame.from_dict(document_to_pca_map, orient="index")
    .rename(index=str, columns={col: f"pc{col+1}" for col in range(int(300))})
    .reset_index()
    .rename(index=str, columns={"index": "document"})
)
# polka_pca_sim_df.to_csv("output/polka_pca_enrichment.tsv", sep="\t")
polka_pca_sim_df = polka_pca_sim_df.assign(label="polka")
polka_pca_sim_df.head()
# Same similarity computation for the random bioRxiv subset.
document_pca_sim = 1 - cdist(
    biorxiv_vec_df.drop("document", axis=1).values,
    pca_components.values,
    "cosine",
)
print(document_pca_sim.shape)
document_pca_sim
document_to_pca_map = {
    document: document_pca_sim[idx, :]
    for idx, document in enumerate(biorxiv_vec_df.document.tolist())
}
biorxiv_pca_sim_df = (
    pd.DataFrame.from_dict(document_to_pca_map, orient="index")
    .rename(index=str, columns={col: f"pc{col+1}" for col in range(int(300))})
    .reset_index()
    .rename(index=str, columns={"index": "document"})
    .assign(label="biorxiv")
)
biorxiv_pca_sim_df.head()
# ## PC Regression
# ### Logistic Regression
# Goal here is to determine if we can figure out which PCs separate the bioRxiv subset from Polka et al.'s subset. Given that their dataset is only 60 papers we downsampled our dataset to contain only 60 papers.
dataset_df = biorxiv_pca_sim_df.append(polka_pca_sim_df)
dataset_df.head()
# L1-penalised logistic regression with 10-fold CV over 100 C values.
model = LogisticRegressionCV(
    cv=10, Cs=100, max_iter=1000, penalty="l1", solver="liblinear"
)
# Fit on the first 50 standardised PC-similarity features.
model.fit(
    StandardScaler().fit_transform(dataset_df[[f"pc{idx+1}" for idx in range(50)]]),
    dataset_df["label"],
)
# Locate the index of the C value selected by cross-validation.
best_result = list(filter(lambda x: x[1] == model.C_, enumerate(model.Cs_)))[0]
print(best_result)
print("Best CV Fold")
print(model.scores_["polka"][:, best_result[0]])
model.scores_["polka"][:, best_result[0]].mean()
model_weights_df = pd.DataFrame.from_dict(
    {
        "weight": model.coef_[0],
        "pc": list(range(1, 51)),
    }
)
model_weights_df["pc"] = pd.Categorical(model_weights_df["pc"])
model_weights_df.head()
# Horizontal bar chart of per-PC model weights.
g = (
    p9.ggplot(model_weights_df, p9.aes(x="pc", y="weight"))
    + p9.geom_col(position=p9.position_dodge(width=5), fill="#253494")
    + p9.coord_flip()
    + p9.scale_x_discrete(limits=list(sorted(range(1, 51), reverse=True)))
    + p9.theme_seaborn(context="paper", style="ticks", font_scale=1.1, font="Arial")
    + p9.theme(figure_size=(10, 8))
    + p9.labs(
        title="Regression Model Weights", x="Princpial Component", y="Model Weight"
    )
)
# g.save("output/figures/pca_log_regression_weights.svg")
# g.save("output/figures/pca_log_regression_weights.png", dpi=250)
print(g)
# For each C: how many features were selected (non-zero in any fold)
# and the mean CV accuracy.
fold_features = model.coefs_paths_["polka"].transpose(1, 0, 2)
model_performance_df = pd.DataFrame.from_dict(
    {
        "feat_num": ((fold_features.astype(bool).sum(axis=1)) > 0).sum(axis=1),
        "C": model.Cs_,
        "score": model.scores_["polka"].mean(axis=0),
    }
)
model_performance_df.head()
# +
# Dual-axis plot: feature count (left) and accuracy (right) vs C.
fig, ax1 = plt.subplots()
ax1.set_xscale("log")
ax2 = plt.twinx()
ax1.plot(
    model_performance_df.C.tolist(),
    model_performance_df.feat_num.tolist(),
    label="Features",
    marker=".",
)
ax1.set_ylabel("# of Features")
ax1.set_xlabel("Inverse Regularization (C)")
ax1.legend(loc=0)
ax2.plot(
    model_performance_df.C.tolist(),
    model_performance_df.score.tolist(),
    label="Score",
    marker=".",
    color="green",
)
ax2.set_ylabel("Score (Accuracy %)")
ax2.legend(loc=4)
# plt.savefig("output/preprint_classifier_results.png")
# -
plot_path = list(
    zip(
        model.Cs_,
        model.scores_["polka"].transpose(),
        model.coefs_paths_["polka"].transpose(1, 0, 2),
    )
)
data_records = []
# Refit plain models over a slice of interesting C values to record
# exactly which PCs each regularisation level keeps.
for cs in plot_path[33:40]:
    model = LogisticRegression(C=cs[0], max_iter=1000, penalty="l1", solver="liblinear")
    model.fit(
        StandardScaler().fit_transform(dataset_df[[f"pc{idx+1}" for idx in range(50)]]),
        dataset_df["label"],
    )
    data_records.append(
        {
            "C": cs[0],
            "PCs": ",".join(map(str, model.coef_.nonzero()[1] + 1)),
            "feat_num": len(model.coef_.nonzero()[1]),
            "accuracy": cs[1].mean(),
        }
    )
model_coefs_df = pd.DataFrame.from_records(data_records)
model_coefs_df
| StarcoderdataPython |
1734591 | <reponame>sikienzl/Teamprojekt
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""Converts any files into txt-files"""
import sys
import getopt
import logging
import os.path
import extractTxt
import loggingModule
def main():
    """Parse command-line options and drive text extraction.

    Options: -h/--help (usage), -v (verbose: echo extracted text),
    -i/--input FILE (extract text), -o/--output FILE (write extracted text;
    only meaningful after -i has populated `text`).
    """
    NO_ARG_TXT="No argument"
    PUT_CORRECT_PARAM_TXT="Please put a correct parameter: error \n" + help()
    UNEXPECTED_ERR_TXT="Unexpected error"
    argv = sys.argv[1:]
    text = None
    if(len(sys.argv) == 1): # if no argument
        loggingModule.logger2.info(NO_ARG_TXT)
        loggingModule.logger2.info(PUT_CORRECT_PARAM_TXT)
    try:
        opts, args = getopt.getopt(
            argv, "hvi:o:", ['help', 'input=', 'output='])
    except getopt.GetoptError as e:
        loggingModule.logger2.error(e) # write into logfile
        loggingModule.logger2.info(sys.argv[1] + " is not an argument") # write into logfile
        loggingModule.logger2.info(PUT_CORRECT_PARAM_TXT)
        sys.exit(2)
    except:
        # NOTE(review): bare except falls through with `opts` unbound, which
        # would raise NameError in the loop below — confirm intent.
        loggingModule.logger2.info(UNEXPECTED_ERR_TXT)
    if len(args) != 0:
        # Leftover positional arguments are not supported.
        # NOTE(review): uses the root `logging` here but `loggingModule.logger2`
        # everywhere else — probably meant to be consistent.
        logging.info(sys.argv[1] + " is not an argument")
        loggingModule.logger2.error(PUT_CORRECT_PARAM_TXT)
    verbose = False
    for o, a in opts:
        if o == "-v":
            verbose = True
        elif o in ("-h", "--help"): # help
            loggingModule.logger2.info(help())
        elif o in ("-i", "--input"): # help
            text = extractTxt.process(a)
        elif o in ("-o", "--output"):
            # Requires that -i ran first so `text` is populated.
            extractTxt.file(text, a)
        else:
            loggingModule.logger2.info(PUT_CORRECT_PARAM_TXT)
    if verbose:
        if text is not None:
            loggingModule.logger2.info(text)
        else:
            loggingModule.logger2.info(help())
def help():
    """Return the usage/help text for this tool.

    Note: intentionally shadows the ``help`` builtin within this module;
    main() calls it by this name.
    """
    return("arguments:\n" +
           "-h, --help " +
           "show help message and exit\n" +
           "-i [path to file] --input [path to file] " +
           "to run the program\n" +
           "-o [path to output-file] --output [path to output-file] " +
           "to extract text into file \n" +
           " " +
           "(works only with the argument -i)\n" +
           "-v " +
           "verbose-Mode")
# Run the CLI only when executed as a script (not on import).
if __name__ == '__main__':
    main()
| StarcoderdataPython |
180248 | <gh_stars>1-10
import wolframalpha
WOLFRAM_ALPHA_APP_ID = "" # Put Wolfram Alpha App ID here
# Canned reply used when the query cannot be answered.
fallback_response = "Sorry, I don't understand."
# Keyword lists for trivial small-talk handling (checked before any query).
greetings = ["hello", "hi", "howdy", "hey", "hola", "sup", "aloha"]
farewells = ["bye", "goodbye", "adios"]
client = wolframalpha.Client(WOLFRAM_ALPHA_APP_ID)
def execute(input):
    """Answer *input*: reply with canned small talk for greetings/farewells,
    otherwise forward the query to Wolfram Alpha and return the first
    result pod's plain text (or the fallback response on failure)."""
    for token in input.split():
        if token in greetings:
            return "Hello!"
        if token in farewells:
            return "Bye!"
    # Query WolframAlpha
    response = client.query(input)
    if response['@success'] == 'false':
        return fallback_response
    pod_answer = response['pod'][1]['subpod']
    # A pod may contain a single subpod (dict) or several (list); use the first.
    if isinstance(pod_answer, list):
        pod_answer = pod_answer[0]
    return pod_answer['plaintext']
| StarcoderdataPython |
1785263 | import math
import time
import arcade
# Do the math to figure out our screen dimensions
SCREEN_WIDTH = 1280   # window width in pixels
SCREEN_HEIGHT = 720   # window height in pixels
SCREEN_TITLE = "Basic Renderer"
class MyGame(arcade.Window):
    """Benchmark window: renders a large grid of crate sprites and reports
    GPU query statistics every ``frame_step`` frames."""

    def __init__(self, width, height, title):
        super().__init__(width, height, title)
        # vsync must be off when measuring rendering calls
        self.set_vsync(False)
        t0 = time.time()
        self.sprites = arcade.SpriteList()
        num_sprites = 100_000
        # sprite_scale = 0.01 # All sprites covering the screen
        sprite_scale = 1.0  # default
        sprite_size = 128
        # Lay the sprites out on a near-square grid.
        grid_side = int(math.sqrt(num_sprites))
        spacing = 128 * sprite_scale
        for row in range(grid_side):
            for col in range(grid_side):
                sprite = arcade.Sprite(
                    arcade.resources.image_box_crate,
                    scale=sprite_scale,
                    center_x=spacing * col,
                    center_y=spacing * row,
                )
                self.sprites.append(sprite)
        self.sprites.draw()  # Force the list to build
        self.sprites.program = self.ctx.sprite_list_program_no_cull
        print(f"Initialization time: {time.time() -t0}")
        self.query = self.ctx.query()
        self.frames = 0
        self.frame_step = 600
        self.time_elapsed_total = 0

    def on_draw(self):
        """Render one frame inside the GPU query; print stats periodically."""
        self.clear()
        with self.query:
            self.sprites.draw()
        self.frames += 1
        self.time_elapsed_total += self.query.time_elapsed
        if self.frames % self.frame_step != 0:
            return
        print(f"--- Stats over {self.frame_step} frames")
        print(f"Time elapsed : {self.time_elapsed_total / 1_000_000_000} seconds")
        print(f"Samples passed : {self.query.samples_passed}")
        print(f"Primitives created : {self.query.primitives_generated}")
        self.time_elapsed_total = 0
# Create the window and enter the arcade event loop when run as a script.
if __name__ == "__main__":
    MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
    arcade.run()
| StarcoderdataPython |
1642721 | import os
from _common_search_paths import charm_path_search, grackle_path_search
# Enzo-E/Cello build configuration for a GNU toolchain on a TACC
# Stampede-style system: falls back to TACC_* module environment variables
# when explicit paths are not provided.
is_arch_valid = 1
#python_lt_27 = 1
flags_arch = '-O3 -Wall -g'
#flags_arch = '-Wall -g'
flags_link = '-rdynamic'
#optional fortran flag
flags_arch_fortran = '-ffixed-line-length-132'
cc = 'gcc'
f90 = 'gfortran'
#flags_prec_single = '-fdefault-real-4 -fdefault-double-8'
flags_prec_single = ''
flags_prec_double = '-fdefault-real-8 -fdefault-double-8'
libpath_fortran = ''
libs_fortran = ['gfortran']
home = os.environ['HOME']
# Prefer an explicit HDF5HOME; otherwise use the variables exported by the
# TACC hdf5 module (`module load hdf5`).
hdf5_path = os.getenv('HDF5HOME',None)
if hdf5_path is not None:
    hdf5_inc = hdf5_path + '/include'
    hdf5_lib = hdf5_path + '/lib'
else:
    # the following environment variables are set by the hdf5 module
    hdf5_inc = os.environ['TACC_HDF5_INC']
    hdf5_lib = os.environ['TACC_HDF5_LIB']
boost_path = os.environ['BOOST_ROOT']
boost_inc = boost_path + '/include'
boost_lib = boost_path + '/lib'
#--------------------------------------------------
# CHARM
#
# Change charm_path below to match where your copy is. To compile
# Charm++ on Stampede with GNU compilers, use the following:
#
#    ./build charm++ ofi-linux-x86_64 -j8 --with-production --enable-tracing
#
#--------------------------------------------------
if os.path.isdir(home + '/Charm/682/gnu/omni/charm'):
    charm_path = home + '/Charm/682/gnu/omni/charm'
else:
    charm_path = charm_path_search(home)
# PAPI (performance counters): use the TACC module paths when loaded,
# otherwise fall back to $HOME/include and $HOME/lib.
if ((os.getenv("TACC_PAPI_LIB", None) is not None) and
    (os.getenv("TACC_PAPI_INC", None) is not None)):
    papi_inc = os.environ["TACC_PAPI_INC"]
    # Fixed: this branch previously assigned `papi_lin` (typo), leaving
    # `papi_lib` undefined whenever the TACC PAPI module was loaded.
    papi_lib = os.environ["TACC_PAPI_LIB"]
else:
    papi_inc = home + '/include'
    papi_lib = home + '/lib'
png_path = '/usr/lib64'
if os.path.isdir(home + '/public/Grackle/src/clib'):
    grackle_path = home + '/public/Grackle/src/clib'
else:
    grackle_path = grackle_path_search(home)
| StarcoderdataPython |
49313 | """Report generator for the error command.
TODO: move reporting functionality out of the ErrorEstimator class.
"""
from itertools import repeat
from atropos.commands.reports import BaseReportGenerator
from atropos.io import open_output
from atropos.commands.legacy_report import Printer, TitlePrinter
class ReportGenerator(BaseReportGenerator):
    """Report generator for the error estimation command.

    Plain-text ('txt') reports are rendered directly by this module; every
    other format is delegated to the template machinery of the base class.
    """

    def generate_text_report(self, fmt, summary, outfile, **kwargs):
        # 'txt' is handled locally; other formats go through templates.
        if fmt == 'txt':
            with open_output(outfile, context_wrapper=True) as out:
                generate_reports(out, summary)
        else:
            super().generate_from_template(fmt, summary, outfile, **kwargs)
def generate_reports(outstream, summary):
    """Write a per-input error-rate report to *outstream*; when more than one
    input was processed, append a length-weighted overall error rate."""
    error_info = summary['errorrate']
    names = summary['input']['input_names'] or repeat(None)
    estimates = error_info['estimate']
    printer = Printer(outstream)
    title_printer = TitlePrinter(outstream)
    num_inputs = 0
    entries = zip(estimates, error_info['details'], names)
    for num_inputs, (estimate, details, name) in enumerate(entries, 1):
        generate_estimator_report(
            outstream, num_inputs, estimate, details, printer, title_printer,
            name)
    if num_inputs > 1:
        printer.newline()
        title_printer("Overall", level=0)
        lengths = error_info['total_len']
        # Overall rate = per-input rates weighted by total read length.
        weighted = sum(err * length for err, length in zip(estimates, lengths))
        overall_err = weighted / sum(lengths)
        print("Error rate: {:.2%}".format(overall_err), file=outstream)
def generate_estimator_report(
        outstream, input_idx, estimate, details, _print, _print_title,
        input_name=None):
    """Write the error-rate section for a single input to *outstream*,
    including per-read and per-cycle detail when available."""
    indented = Printer(outstream, indent=' ')
    _print.newline()
    _print_title("Input {}".format(input_idx), level=0)
    if input_name:
        _print("File: {}".format(input_name))
    _print("Error rate: {:.2%}".format(estimate))
    if not details:
        return
    _print("Details:\n")
    indented("StdErr: {:.2%}".format(details['per_read']['standard error']))
    indented("Per-cycle rates:")
    for cycle in details['per_cycle']:
        indented(
            "Cycle: {}, Error: {:.2%}, StdErr: {:.2%}".format(*cycle),
            indent=2)
| StarcoderdataPython |
3326120 | import mafs
import json
# Virtual filesystem that exposes the contents of a JSON file as a directory
# tree: dict values become directories, leaf values become readable files.
fs = mafs.MagicFS()
fs.add_argument('file', help='json file to read from')
# read json file
with open(fs.args.file) as f:
    items = json.load(f)
def dig(d, parts):
    """Walk *d* down the key path *parts* and return the value found there.

    Returns None when the path does not exist or the structure cannot be
    indexed further. Fixed: the previous version tested the looked-up value
    for truthiness, so present-but-falsy values (0, "", {}) were reported as
    missing (read_item printed "None"; list_item crashed on empty dicts).
    """
    if not parts:
        return d
    try:
        child = d[parts[0]]
    except (KeyError, IndexError, TypeError, AttributeError):
        # Missing key, or a leaf that cannot be indexed further.
        return None
    return dig(child, parts[1:])
@fs.read('/*item')
def read_item(path, ps):
    """Return the JSON value at path ``ps.item``, rendered as one text line."""
    return str(dig(items, ps.item)) + '\n'
@fs.list('/')
def list_root(path, ps):
    """Directory listing for the filesystem root: top-level JSON keys."""
    return items.keys()
@fs.list('/*item')
def list_item(path, ps):
    """Directory listing for a nested path: keys of the dict at that path."""
    return dig(items, ps.item).keys()
@fs.stat('/*item')
def stat_item(path, ps):
    """Stat a path: dict-valued nodes are directories, leaves are files."""
    item = dig(items, ps.item)
    if item:
        # Dict-like values (anything with .get) are exposed as directories.
        if hasattr(item, 'get'):
            return {'st_mode': 0o755 | mafs.FileType.DIRECTORY}
        else:
            return {}
    # Missing (or falsy) value: report the path as nonexistent.
    raise FileNotFoundError()
fs.run()
| StarcoderdataPython |
3316719 | import re
# Matches "%(name)" placeholders; group 1 is the whole token, group 2 the name.
# NOTE(review): consider a raw string r"..." to avoid invalid-escape warnings.
subst = re.compile("(%\((\w+)\))")
def substitute_str(text, vars):
    """Replace every "%(name)" occurrence in *text* whose name is a key of
    *vars* with str(vars[name]); unknown names are left untouched."""
    pieces = []
    cursor = 0
    for match in subst.finditer(text):
        key = match.group(2)
        if key not in vars:
            continue
        pieces.append(text[cursor:match.start(1)])
        pieces.append(str(vars[key]))
        cursor = match.end(1)
    pieces.append(text[cursor:])
    return "".join(pieces)
def substitute_list(lst, vars):
    """Return a new list with substitutions applied to every element."""
    result = []
    for element in lst:
        result.append(substitute_in(element, vars))
    return result
def substitute_dict(d, outer):
    """Substitute %(name) placeholders throughout dict *d*.

    Top-level string/int entries of *d* are added to the substitution scope
    so entries can reference each other as well as names from *outer*.
    Fixed: the final ``out.update(...)`` was missing its closing parenthesis
    (SyntaxError), and the first pass called substitute_str on int values,
    which would raise TypeError; ints are now passed through unchanged.
    """
    vars = {}
    vars.update(outer)
    # substitute top level strings only; keep ints as-is so they can still be
    # referenced by other entries
    out = {
        k: (substitute_str(v, outer) if isinstance(v, str) else v)
        for k, v in d.items() if isinstance(v, (str, int))
    }
    # use this as the substitution dictionary
    vars.update(out)
    out.update({k: substitute_in(v, vars) for k, v in d.items()})
    return out
def substitute_in(item, vars):
    """Recursively substitute %(name) placeholders in *item* (str, dict, or
    list); any other type is returned unchanged.

    Fixed: the def line was missing its trailing colon (SyntaxError).
    """
    if isinstance(item, str):
        item = substitute_str(item, vars)
    elif isinstance(item, dict):
        item = substitute_dict(item, vars)
    elif isinstance(item, list):
        item = substitute_list(item, vars)
    return item
def preprocess(s, vars={}):
    """Entry point: substitute %(name) placeholders throughout *s*.

    NOTE(review): the mutable default ``vars={}`` is safe only while it is
    never mutated downstream; prefer ``vars=None`` if that ever changes.
    """
    return substitute_in(s, vars)
| StarcoderdataPython |
1653040 | #!/bin/env python3
# author: <NAME>
#
# Excel Sheet Column Title
#
# Given a positive integer, return its corresponding column title as appear in an Excel sheet.
# For example:
# 1 -> A
# 2 -> B
# 3 -> C
# ...
# 26 -> Z
# 27 -> AA
# 28 -> AB
# Credits:Special thanks to @ifanchu for adding this problem and creating all test cases.
# Show Tags
class Solution:
    """Excel sheet column title conversion (LeetCode 168)."""

    # @param n, an integer
    # @return a string
    def convertToTitle(self, n):
        """Return the Excel-style column title for 1-based column index *n*
        (1 -> 'A', 26 -> 'Z', 27 -> 'AA', ...)."""
        letters = []
        while n != 0:
            # Bijective base-26: shift to 0-based before dividing.
            n, offset = divmod(n - 1, 26)
            letters.append(chr(ord('A') + offset))
        return ''.join(reversed(letters))
def main():
    """Ad-hoc smoke test: print the column title for a few sample numbers."""
    solver = Solution()
    tests = [1, 2, 26, 1*26+1, 1*26+2, 1*26+26, 2*26 + 1]
    for test in tests:
        print(test)
        print(' ->')
        result = solver.convertToTitle(test)
        print(result)
        print('~'*10)
    pass
if __name__ == '__main__':
    main()
    pass
| StarcoderdataPython |
3305567 | # -*- coding: utf-8 -*-
class ElementsInCurve:
    """Plots standardized trace-element curves (spider diagrams) from Excel
    data, split by the metasomatism flag (+1 vs -1)."""

    def __init__(self,filename_1, filename_2,sheet_name):
        """
        Input the file containing the elements data
        :param filename_1: Trace elements total 700 + data
        :param filename_2: trace Standardized values (ppm)
        :param sheet_name: 0 = Rare earth elements; 1 = Trace multi element
        """
        import pandas as pd
        raw_data = pd.read_excel(filename_1, sheet_name)
        # Split samples by the metasomatism flag and drop non-element columns.
        # NOTE(review): the first lookup uses " Whether or not metasomatism"
        # (leading space) while the others do not — confirm against the actual
        # spreadsheet column headers.
        self.df1 = raw_data[raw_data[" Whether or not metasomatism"] == 1].drop(["Whether or not metasomatism", "CITATION"], axis=1)
        self.df2 = raw_data[raw_data["Whether or not metasomatism"] == -1].drop(["Whether or not metasomatism", "CITATION"], axis=1)
        standard_data = pd.read_excel(filename_2, sheet_name, header=1, index="Element")
        # ppm to ppb
        self.std = standard_data.drop(["Element"], axis=1) * 1000

    def plot(self, x_index = None,
             num_data_1 = None, num_data_2 = None,
             fig_size = (12,8),
             save_png = None):
        """
        plot the elements in curve
        :param x_index: X-axis element coordinates (default all elements, uppercase)
        :param num_data_1: The amount of data used (Whether or not metasomatism =1) (all data is used by default)
        :param num_data_2: The amount of data used (Whether or not metasomatism =-1) (the default is to use all data)
        :param fig_size: Image size, default: (12,8)
        :param save_png: Save the file name of the image. By default, the image is not stored
        :return: Display picture (or save as)
        """
        import matplotlib.pyplot as plt
        from collections import OrderedDict
        if num_data_1 == None:
            num_data_1 = len(self.df1)
        if num_data_2 == None:
            num_data_2 = len(self.df2)
        if x_index == None:
            x_index = list(self.std.columns)
        # Uppercase element symbols so sample and standard columns align.
        x_index = [x.upper() for x in x_index]
        self.std.columns = [x.upper() for x in x_index]
        # Keep only elements present in both samples and the standard table.
        x_index = list(set(x_index) & set(self.df1.columns))
        data1 = self.df1[x_index]
        data2 = self.df2[x_index]
        self.std = self.std[x_index]
        # Normalize each sample against the standard values (first row).
        result1 = data1.div(self.std.iloc[0].values)
        result2 = data2.div(self.std.iloc[0].values)
        plt.figure(figsize=fig_size)
        # Log-scale curves: blue for metasomatized (+1), black for -1.
        for i in range(num_data_1):
            plt.semilogy(range(len(result1.columns.values)), result1.iloc[i], 'b.-', label=1)
            plt.xticks(range(len(result1.columns.values)), result1.columns.values)
        for j in range(num_data_2):
            plt.semilogy(range(len(result2.columns.values)), result2.iloc[j], 'k.-', label=-1)
            plt.xticks(range(len(result2.columns.values)), result2.columns.values)
        # De-duplicate legend entries (one per group, not one per sample).
        handles, labels = plt.gca().get_legend_handles_labels()
        by_label = OrderedDict(zip(labels, handles))
        plt.legend(by_label.values(), by_label.keys())
        if save_png != None:
            plt.savefig(save_png)
        plt.show()
def main():
    """Generate the standard diagrams from the bundled Excel data files."""
    # Trace multi - element standardization diagram
    EN_multi = ElementsInCurve('Trace elements total 700 + data(ppb).xlsx', 'trace Standardized values(ppm).xlsx', 1)
    EN_multi.plot(save_png = 'Trace multi - element standardization diagram.png')
    # Rare earth element spider diagram
    EN_rare = ElementsInCurve('Trace elements total 700 + data(ppb).xlsx', 'trace Standardized values(ppm).xlsx', 0)
    EN_rare.plot(save_png='Rare earth element spider diagram.png')
    # Select trace elements
    # x_index = ['LA', 'CE', 'PR', 'ND', 'SM', 'EU', 'GD', 'TB', 'DY', 'HO', 'ER', 'TM', 'YB', 'LU']
    # EN_rare.plot(x_index = x_index)
    # pick 100 samples
    # EN_rare.plot(num_data_1 = 100,num_data_2 = 100)
if __name__ == '__main__':
    main()
1606166 | from Autodesk.Revit.DB import *
from Autodesk.Revit.DB.Architecture import *
from Autodesk.Revit.DB.Analysis import *
from Autodesk.Revit import Exceptions
uidoc = __revit__.ActiveUIDocument
doc = __revit__.ActiveUIDocument.Document
# Shortcut to the ids of the elements currently selected in the Revit UI.
getselection = uidoc.Selection.GetElementIds
from Autodesk.Revit.UI import TaskDialog
from Autodesk.Revit.UI import UIApplication
def alert(msg):
    """Show *msg* in a RevitPythonShell task dialog."""
    TaskDialog.Show('RevitPythonShell', msg)
def quit():
    """Close the RevitPythonShell window (intentionally shadows the builtin)."""
    __window__.Close()
exit = quit
# Rename each selected view as "<ViewType>_<Level>[_<ScopeBox>]", appending a
# numeric suffix when the name is already taken.
try:
    t = Transaction(doc, "Renomme la vue")
    t.Start()
    for e in getselection():  # iterate over the ids of the selected elements
        view = doc.GetElement(e)  # element corresponding to this id
        vft = doc.GetElement(view.GetTypeId())  # Get ViewFamilyType Id
        vft_name = Element.Name.GetValue(vft)  # Get ViewFamilyType Name
        # If a scope box (zone de definition) is assigned, use its name.
        try:
            vzv = view.get_Parameter(BuiltInParameter.VIEWER_VOLUME_OF_INTEREST_CROP)
            vzv_name = "" if vzv.AsValueString() == 'None' else "_" + vzv.AsValueString()
        except:
            vzv_name = ""
        # Associated level name, if the view has one.
        vgl = "" if view.GenLevel == None else view.GenLevel.Name
        # Base name: view type + associated level + scope box suffix.
        base_name = "{c}_{a}{b}".format(a=vgl, b=vzv_name, c=vft_name,)
        view_name = base_name
        i = 0
        while True:
            try:
                view.Name = view_name
            except Exceptions.ArgumentException:
                # Name already taken: try "<base>1", "<base>2", ...
                # Fixed: the counter was previously appended to the already
                # suffixed name, producing "<base>1", "<base>12", "<base>123".
                i += 1
                view_name = base_name + str(i)
            except:
                break
            else:
                break
    t.Commit()
except:
    # print a stack trace and error messages for debugging
    import traceback
    traceback.print_exc()
    t.RollBack()
else:
    # no errors, so just close the window
    __window__.Close()
| StarcoderdataPython |
1726031 | ##Question 23
##Implement a stack class in Python. It should support 3 APIs:
##stack.top(): prints element at top of stack
##stack.pop(): takes out an element from top of stack
##stack.push(): adds a new element at top of stack
class Stack():
    """Simple LIFO stack backed by a Python list.

    APIs: top() prints the element at the top of the stack, pop() removes
    (and now also returns) the top element, push() adds an element on top.
    Fixed: print statements converted to the function form (works in both
    Python 2 and 3 for a single argument) and pop() now returns the removed
    element instead of discarding it (backward compatible).
    """
    def __init__(self):
        self.item = []

    def size(self):
        """Return the number of elements on the stack."""
        return len(self.item)

    def top(self):
        """Print the element at the top of the stack, or a message if empty."""
        if len(self.item) >= 1:
            print(self.item[len(self.item) - 1])
        else:
            print("Empty list")

    def pop(self):
        """Remove and return the top element; raise IndexError when empty."""
        if len(self.item) >= 1:
            return self.item.pop()
        else:
            raise IndexError

    def push(self, item):
        """Add *item* on top of the stack and print the current contents."""
        self.item.append(item)
        print(self.item)
# Manual smoke test (Python 2 print syntax): exercise push/size/top/pop,
# including popping past empty (the final pops raise IndexError).
new_stack = Stack()
new_stack.push(19)
new_stack.push(20)
new_stack.push(119)
new_stack.push(202)
new_stack.push(195)
new_stack.push(205)
new_stack.push(149)
new_stack.push(230)
print new_stack.size()
new_stack.top()
new_stack.pop()
new_stack.top()
new_stack.pop()
new_stack.pop()
new_stack.pop()
new_stack.pop()
new_stack.pop()
new_stack.pop()
new_stack.pop()
new_stack.top()
| StarcoderdataPython |
38228 | <filename>beam.py
import numpy as np
import cv2
import wall
# Tuning thresholds (pixels) for merging HoughLinesP segments.
SAME_LINE_THRESHOLD = 100   # max summed endpoint distance for two lines to merge
SAME_LEVEL_THRESHOLD = 8    # max axis deviation to treat a line as horizontal/vertical
SHORT_LINE_LENGTH = 10      # axis-aligned lines shorter than this are dropped
BLEED_THRESHOLD = 10        # overlap slack when testing segment containment
def similar_line_already_found(line, found_lines):
    """Return True if *line* duplicates (or is contained in) a line already
    present in *found_lines*; on a match the stored line is snapped onto the
    averaged axis. Entries of *found_lines* are mutated in place
    ([x1, y1, x2, y2] lists). Very short axis-aligned lines also return True
    so the caller drops them.
    """
    for fline in found_lines:
        x1, y1, x2, y2 = line
        fx1, fy1, fx2, fy2 = fline
        is_vertical_with_range = abs(x1 - x2) < SAME_LEVEL_THRESHOLD
        is_horizental_with_range = abs(y1 - y2) < SAME_LEVEL_THRESHOLD
        # Drop if short line.
        if ((is_horizental_with_range and abs(x1 - x2) < SHORT_LINE_LENGTH) or
                (is_vertical_with_range and abs(y1 - y2) < SHORT_LINE_LENGTH)):
            return True
        # Summed endpoint distances between the candidate and the stored line.
        xdiff = abs(x1 - fx1) + abs(x2 - fx2)
        ydiff = abs(y1 - fy1) + abs(y2 - fy2)
        diff = xdiff + ydiff
        if diff <= SAME_LINE_THRESHOLD:
            # Near-duplicate: snap the stored line onto the averaged axis.
            if is_horizental_with_range:
                avg_y = int((y1 + y2 + fy1 + fy2) / 4)
                fline[1] = fline[3] = avg_y
            elif is_vertical_with_range:
                avg_x = int((x1 + x2 + fx1 + fx2) / 4)
                fline[0] = fline[2] = avg_x
            return True
        # Containment: the candidate lies within a stored collinear segment.
        # Fixed: these conditions were written `abs(ydiff < T)` / `abs(xdiff < T)`
        # — abs() applied to a boolean — which happens to be truth-equivalent
        # to the intended comparison but is misleading; compare directly.
        if is_horizental_with_range and (
            (x1 > fx1 - BLEED_THRESHOLD and x2 < fx2 + BLEED_THRESHOLD) or
            (x1 > fx2 - BLEED_THRESHOLD and x2 < fx1 + BLEED_THRESHOLD)
        ) and ydiff < SAME_LINE_THRESHOLD / 2:
            avg_y = int((y1 + y2 + fy1 + fy2) / 4)
            fline[1] = fline[3] = avg_y
            return True
        elif is_vertical_with_range and (
            (y1 > fy1 - BLEED_THRESHOLD and y2 < fy2 + BLEED_THRESHOLD) or
            (y1 > fy2 - BLEED_THRESHOLD and y2 < fy1 + BLEED_THRESHOLD)
        ) and xdiff < SAME_LINE_THRESHOLD / 2:
            avg_x = int((x1 + x2 + fx1 + fx2) / 4)
            fline[0] = fline[2] = avg_x
            return True
    return False
def normalize_lines(lines):
    """Deduplicate and merge HoughLinesP output.

    *lines* is the (N, 1, 4) array from cv2.HoughLinesP; returns a list of
    merged [x1, y1, x2, y2] lists. (Removed `norm_dict`, an unused leftover
    local variable.)
    """
    normalized_lines = []
    for line in lines:
        # Merge into an existing similar line if possible; otherwise keep it.
        if not similar_line_already_found(line[0].tolist(), normalized_lines):
            normalized_lines.append(line[0].tolist())
    return normalized_lines
def find_steel_beams(img, debug=False):
    """Detect steel beams (long axis-aligned lines) in a floor-plan image.

    Draws the detected lines onto *img* in green and returns them as
    [x1, y1, x2, y2] lists. With debug=True each intermediate stage is shown
    in an OpenCV window (blocks on a key press).
    """
    # Remove walls/columns first so the Hough transform only sees beam lines.
    image = wall.remove_walls(img)
    if debug:
        cv2.imshow("Walls/Columns Removed", image)
        cv2.waitKey(0)
    thresh = 50
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    im_bw = cv2.threshold(gray, thresh, 255, cv2.THRESH_BINARY)[1]
    if debug:
        cv2.imshow("Black And White", im_bw)
        cv2.waitKey(0)
    # Erode then dilate (morphological opening) to drop speckle noise.
    kernel = np.ones((2,2),np.uint8)
    erosion = cv2.erode(im_bw, kernel, iterations=3)
    if debug:
        cv2.imshow("Erode", erosion)
        cv2.waitKey(0)
    dilation = cv2.dilate(erosion, kernel, iterations=3)
    if debug:
        cv2.imshow("Dilate", dilation)
        cv2.waitKey(0)
    minLineLength = 100
    maxLineGap = 0
    lines = cv2.HoughLinesP(dilation, rho=0.02, theta=np.pi/500, threshold=10, minLineLength=minLineLength, maxLineGap=maxLineGap)
    print("Found %d lines" % len(lines))
    lines = normalize_lines(lines)
    print("Normalized to %d lines" % len(lines))
    # Overlay the merged lines on the original image.
    for line in lines:
        x1, y1, x2, y2 = line
        cv2.line(img,(x1,y1),(x2,y2),(0,255,0),2)
    if debug:
        cv2.imshow("Beam", img)
        cv2.waitKey(0)
    return lines
class MyHashSet:
    """List-backed set of ints (LeetCode 705 style).

    Membership tests are O(n); acceptable for the exercise, where built-in
    hash containers are deliberately avoided.
    """

    def __init__(self):
        """
        Initialize your data structure here.
        """
        self.set = []

    def add(self, key: int) -> None:
        """Add *key* to the set; no-op if it is already present."""
        if key not in self.set:
            self.set.append(key)

    def remove(self, key: int) -> None:
        """Remove *key* from the set if present; no-op otherwise."""
        if key in self.set:
            self.set.remove(key)

    def contains(self, key: int) -> bool:
        """
        Returns true if this set contains the specified element
        """
        # Simplified: `in` already yields the boolean directly.
        return key in self.set
| StarcoderdataPython |
1715015 |
# Scan a fairseq dictionary file and count vocabulary tokens containing
# digits; then check whether the purely-numeric tokens form a contiguous
# 0..N integer range, printing the tail from the first gap if not.
numerical_tokens = []
fully_numerical_tokens = []
with open("../../data-bin/wikitext-103/dict.txt", "r", encoding='utf-8') as f_r:
    for line in f_r:
        token, id = line.split()
        if any(char.isdigit() for char in token):
            numerical_tokens.append(token)
        if all(char.isdigit() for char in token):
            fully_numerical_tokens.append(token)
print(f"Token numerical tokens: {len(numerical_tokens)}")
print(f"Total fully numerical tokens: {len(fully_numerical_tokens)}")
# print(fully_numerical_tokens)
int_tokens = []
for token in fully_numerical_tokens:
    try:
        int_tokens.append(int(token))
    except:
        pass
print(f"After int operation, total fully numerical tokens: {len(set(int_tokens))}")
final_list = sorted(list(set(int_tokens)))
# Report the first gap in the integer sequence, if any.
for i in range(len(final_list)):
    if final_list[i] != i:
        print(final_list[i-1:])
        break
| StarcoderdataPython |
class Solution:
    """Word pattern matching (LeetCode 290): does a sentence follow the same
    pattern as a string of characters, with a bijective mapping?"""

    def wordPatternV1(self, pattern: str, str: str) -> bool:
        """Two-dict check: every pattern char maps to exactly one word and
        every word maps back to exactly one pattern char."""
        words = str.split()
        if len(words) != len(pattern):
            return False
        char_to_word = {}
        word_to_char = {}
        for ch, word in zip(pattern, words):
            if char_to_word.setdefault(ch, word) != word:
                return False
            if word_to_char.setdefault(word, ch) != ch:
                return False
        return True

    def wordPatternV2(self, pattern: str, str: str) -> bool:
        """Signature check: map each element to the index of its first
        appearance; equal signatures mean the same pattern."""
        def signature(xs):
            first_seen = {}
            return [first_seen.setdefault(x, i) for i, x in enumerate(xs)]
        return signature(pattern) == signature(str.split())
# TESTS
# Each tuple: (pattern, sentence, expected result).
tests = [
    ("aaa", "aa aa aa aa", False),
    ("abba", "dog cat cat dog", True),
    ("abba", "dog cat cat fish", False),
    ("aaaa", "dog cat cat dog", False),
    ("abba", "dog dog dog dog", False),
]
for pattern, string, expected in tests:
    sol = Solution()
    actual = sol.wordPatternV1(pattern, string)
    print("String", string, "follows the same pattern", pattern, "->", actual)
    assert actual == expected
    # Both implementations must agree with the expected result.
    assert expected == sol.wordPatternV2(pattern, string)
| StarcoderdataPython |
3375845 | import inspect
import sys
from typing import Any, NamedTuple, Type
if sys.version_info < (3, 8):
from typing_extensions import get_args
else:
from typing import get_args
import msgpack # type: ignore[import]
from pydantic import BaseModel
from xpresso import Request
from xpresso.binders.api import SupportsExtractor
from xpresso.requests import HTTPConnection
class Extractor(NamedTuple):
    """Request-body extractor: decodes a MessagePack payload and validates it
    against a Pydantic model."""

    # Pydantic model the decoded payload is parsed into.
    model: Type[BaseModel]

    async def extract(self, connection: HTTPConnection) -> Any:
        """Read the request body, msgpack-decode it, and parse into ``model``."""
        assert isinstance(connection, Request)
        data = await connection.body()
        deserialized_obj: Any = msgpack.unpackb(data)  # type: ignore[assignment]
        # You probably want more checks and validation here
        # For example, handling empty bodies
        # This is just a tutorial!
        return self.model.parse_obj(deserialized_obj)
class ExtractorMarker:
    """Binder marker: resolves an Annotated parameter into an Extractor for
    its Pydantic model type."""

    def register_parameter(
        self, param: inspect.Parameter
    ) -> SupportsExtractor:
        # get the first parameter to Annotated, which should be our actual type
        model = next(iter(get_args(param.annotation)))
        if not issubclass(model, BaseModel):
            # You may want more rigorous checks here
            # Or you may want to accept non-Pydantic models
            # We do the easiest thing here
            raise TypeError(
                "MessagePack model must be a Pydantic model"
            )
        return Extractor(model)
| StarcoderdataPython |
1756395 |
class PDFColumn(object):
    """A column of cells in a PDF table.

    Tracks the widest non-wrapping cell and propagates that width to every
    cell in the column when the layout is finalized.
    """

    def __init__(self, parent):
        self.parent = parent
        self.cells = []
        self.max_width = 0

    def _add_cell(self, cell):
        """Append *cell* to this column."""
        self.cells.append(cell)

    def _set_max_width(self, value):
        """Force the column's maximum width to *value*."""
        self.max_width = value

    def _get_max_width(self):
        """Find the widest non-wrapping cell, then push the resulting width
        to every cell in the column."""
        for cell in self.cells:
            if cell.text_wrap is False and cell.width > self.max_width:
                self.max_width = cell.width
        for cell in self.cells:
            cell._set_max_width(self.max_width)

    def _finish(self):
        """Finalize the column layout."""
        self._get_max_width()
| StarcoderdataPython |
3242867 | # Generated by Django 3.0.4 on 2020-03-16 07:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: Auto and Owner entities plus the Ocupation (ownership
    period) and License link tables.

    NOTE(review): the model name "Ocupation" (sic) must match the models
    module — do not correct the spelling here without a rename migration.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Auto',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mark', models.CharField(max_length=50)),
                ('model', models.CharField(max_length=50)),
                ('colour', models.CharField(max_length=50)),
                ('gov_number', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='Owner',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=50)),
                ('last_name', models.CharField(max_length=50)),
                ('birth_date', models.DateField()),
            ],
        ),
        migrations.CreateModel(
            name='Ocupation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('start_date', models.DateField()),
                ('end_date', models.DateField()),
                ('auto', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_first_app.Auto')),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_first_app.Owner')),
            ],
        ),
        migrations.CreateModel(
            name='License',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('license_number', models.IntegerField()),
                ('date_of_issue', models.DateField()),
                ('type', models.CharField(choices=[('A', 'MOTO'), ('B', 'CAR'), ('C', 'TRUNK'), ('D', 'BUS'), ('M', 'MOPED')], max_length=2)),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_first_app.Owner')),
            ],
        ),
    ]
| StarcoderdataPython |
1680162 | <filename>app/mqtt_handler.py<gh_stars>1-10
import logging
import time
from queue import SimpleQueue
import paho.mqtt.client as mqtt
class MQTTHandler(object):
    """Thin wrapper around a paho-mqtt client.

    Incoming messages are buffered on ``pending_messages`` (a SimpleQueue of
    (topic, payload) tuples) for consumption by the rest of the application.
    The publish/subscribe helpers busy-wait (0.5 s polls) until the broker
    connection is established.
    """

    def __init__(self, mqtt_broker_host, mqtt_broker_port=1883):
        self.logger = logging.getLogger("mqtt.client")
        self.mqtt_broker_host = mqtt_broker_host
        self.mqtt_broker_port = mqtt_broker_port
        self.mqtt_client = mqtt.Client(
            client_id="telegram2mqtt", protocol=mqtt.MQTTv311, transport="tcp"
        )
        self.mqtt_client.on_connect = self.on_connect
        self.mqtt_client.on_disconnect = self.on_disconnect
        self.mqtt_client.on_message = self.on_message
        self.pending_messages = SimpleQueue()
        self.connected = False
        self.logger.info("MQTT-Handler is initialized.")

    def __call__(self):
        """Start the client: connect asynchronously and run the network loop
        in a background thread."""
        # Fixed: the port was hard-coded to 1883, silently ignoring the
        # mqtt_broker_port constructor argument.
        self.mqtt_client.connect_async(
            self.mqtt_broker_host, port=self.mqtt_broker_port
        )
        self.mqtt_client.loop_start()
        self.logger.info("MQTT-Client started.")

    def on_connect(self, client, userdata, flags, rc):
        """paho callback: mark the connection as established."""
        self.logger.debug(f"MQTT-Client connected. Flags: {flags}. Result code {rc}")
        self.connected = True

    def on_disconnect(self, client, userdata, rc):
        """paho callback invoked on disconnect.

        NOTE(review): ``self.connected`` is not reset to False here, so the
        wait loops below will not block again after a broker drop — confirm
        this is intended.
        """
        self.logger.debug(f"MQTT-Client disconnected. Result code {rc}")

    def on_message(self, client, userdata, msg):
        """paho callback: queue the (topic, payload) pair for later handling."""
        self.logger.debug(
            f"MQTT-Client received mesage. Topic: '{msg.topic}' Message: '{msg.payload}'"
        )
        self.pending_messages.put((msg.topic, msg.payload))

    def subscribe(self, topic):
        """Subscribe to *topic*, waiting until the client is connected."""
        while not self.connected:
            self.logger.debug("Subscribe - wait for connect...")
            time.sleep(0.5)
        self.mqtt_client.subscribe(topic)
        self.logger.debug(f"Subscribed to {topic}")

    def unsubscribe(self, topic):
        """Unsubscribe from *topic*, waiting until the client is connected."""
        while not self.connected:
            self.logger.debug("Unsubscribe - wait for connect...")
            time.sleep(0.5)
        self.mqtt_client.unsubscribe(topic)
        self.logger.debug(f"Unsubscribed from {topic}")

    def publish(self, topic, message):
        """Publish *message* on *topic*, waiting until the client is connected."""
        while not self.connected:
            self.logger.debug("Publish - wait for connect...")
            time.sleep(0.5)
        self.mqtt_client.publish(topic, payload=message)
        self.logger.debug(f"Published message '{message}' on topic '{topic}'.")

    def disconnect(self):
        """Disconnect from the broker and stop the background network loop."""
        self.mqtt_client.disconnect()
        self.mqtt_client.loop_stop()
        self.logger.info("MQTT-Client stopped.")
| StarcoderdataPython |
1684854 | '''
Created on Jan, 2017
@author: hugo
'''
from __future__ import absolute_import
import os
import re
import numpy as np
from random import shuffle
from collections import Counter
from ..preprocessing.preprocessing import init_stopwords, tiny_tokenize_xml, tiny_tokenize, get_all_files, count_words
from ..datasets.reuters import construct_corpus
from ..utils.io_utils import dump_json
# Extracts text between XML tags (any run of characters that is not markup).
pattern = r'>([^<>]+)<'
prog = re.compile(pattern)
cached_stop_words = init_stopwords()
class CorpusIterWiki10plus(object):
    """Streaming iterator over tokenized Wiki10+ training documents
    (gensim-style); yields a token list per doc, optionally with the doc name.
    (Python 2 module.)"""

    def __init__(self, corpus_dir, train_docs, stem=True, with_docname=False):
        # train_docs: container of basenames to include (a dict/set is fastest).
        self.stem = stem
        self.train_docs = train_docs
        self.with_docname = with_docname
        self.files = get_all_files(corpus_dir, False)

    def __iter__(self):
        # Re-shuffle the file order on every pass over the corpus.
        shuffle(self.files)
        count = 0
        for filename in self.files:
            doc_name = os.path.basename(filename)
            if not doc_name in self.train_docs:
                continue
            try:
                with open(filename, 'r') as fp:
                    count += 1
                    text = fp.read().lower()
                    # remove punctuations, stopwords and *unnecessary digits*, stemming
                    words = tiny_tokenize(text, self.stem, cached_stop_words)
                    if self.with_docname:
                        yield [words, [doc_name]]
                    else:
                        yield words
            except Exception as e:
                raise e
        print count
def extract_contents(text, out_file):
    """Strip XML markup from *text*, tokenize the remaining content, write
    the tokens to *out_file*, and return them. (Python 2: uses ``unicode``.)"""
    if not isinstance(text, unicode):
        text = text.decode('utf-8')
    contents = ' '.join(prog.findall(text))
    contents = tiny_tokenize_xml(contents, False, cached_stop_words)
    with open(out_file, 'w') as f:
        f.write(' '.join(contents))
    return contents
def xml2text(in_dir, out_dir, white_list=None):
    """Convert every XML doc in *in_dir* to a plain-text token file in
    *out_dir*, optionally restricted to basenames listed in *white_list*."""
    # it will be fast if white_list is a dict instead of a list
    files = get_all_files(in_dir, recursive=False)
    count = 0
    for filename in files:
        if white_list and not os.path.basename(filename) in white_list:
            continue
        try:
            with open(filename, 'r') as fp:
                text = fp.read().lower()
                extract_contents(text, os.path.join(out_dir, os.path.basename(filename)))
                count += 1
        except Exception as e:
            raise e
        if count % 500 == 0:
            print 'processed %s' % count
    print 'processed %s docs, discarded %s docs' % (count, len(files) - count)
def load_data(corpus_dir, test_split, seed=666, stem=True):
    '''Loads the Wiki10+ dataset.
    @Params
        corpus_dir : path to the corpus dir
        test_split : fraction of the dataset to be used as test data.
        seed : random seed for sample shuffling.
        stem : stem flag.
    '''
    # count the number of times a word appears in a doc
    corpus = {}
    files = get_all_files(corpus_dir, False)
    cached_stop_words = []
    # cached_stop_words = init_stopwords()
    # NOTE(review): the empty local list shadows the module-level stopword
    # list, so no stopwords are removed here — confirm this is intentional.
    count = 0
    for filename in files:
        try:
            with open(filename, 'r') as fp:
                text = fp.read().lower()
                # remove punctuations, stopwords and *unnecessary digits*, stemming
                words = tiny_tokenize(text, stem, cached_stop_words)
                corpus[os.path.basename(filename)] = dict(Counter(words)) # doc-word frequency
                count += 1
        except Exception as e:
            raise e
        if count % 500 == 0:
            print count
    # Deterministic shuffle, then split off the last `test_split` fraction.
    corpus = corpus.items()
    np.random.seed(seed)
    np.random.shuffle(corpus)
    n_docs = len(corpus)
    train_data = dict(corpus[:-int(n_docs * test_split)])
    test_data = dict(corpus[-int(n_docs * test_split):])
    return train_data, test_data
def construct_train_test_corpus(corpus_dir, test_split, output, threshold=10, topn=2000):
    """Build and dump train/test corpora: the vocabulary is fixed on the
    training split (word freq filtered by *threshold*, capped at *topn*),
    then reused unchanged for the test split."""
    train_data, test_data = load_data(corpus_dir, test_split)
    train_word_freq = count_words(train_data.values())
    train_docs, vocab_dict, train_word_freq = construct_corpus(train_data, train_word_freq, True, threshold=threshold, topn=topn)
    train_corpus = {'docs': train_docs, 'vocab': vocab_dict, 'word_freq': train_word_freq}
    dump_json(train_corpus, os.path.join(output, 'train.corpus'))
    print 'Generated training corpus'
    test_word_freq = count_words(test_data.values())
    test_docs, _, _ = construct_corpus(test_data, test_word_freq, False, vocab_dict=vocab_dict)
    test_corpus = {'docs': test_docs, 'vocab': vocab_dict}
    dump_json(test_corpus, os.path.join(output, 'test.corpus'))
    print 'Generated test corpus'
def extract_labels(docs, labels, output):
    """Collect the label entry for each doc in *docs*, dump the mapping as
    JSON to *output*, and return it.

    Fixed: removed a leftover ``import pdb;pdb.set_trace()`` debugging
    breakpoint that halted execution before the return.
    """
    # it will be fast if docs is a dict instead of a list
    doc_labels = {}
    for name in docs:
        doc_labels[name] = labels[name]
    dump_json(doc_labels, output)
    return doc_labels
| StarcoderdataPython |
3394936 | from collections import Counter
from itertools import combinations
def part_one(words):
    """AoC 2018 day 2 part 1: checksum = (# of ids containing a letter
    exactly twice) * (# of ids containing a letter exactly three times)."""
    doubles = 0
    triples = 0
    for word in words:
        counts = Counter(word).values()
        doubles += 2 in counts
        triples += 3 in counts
    return doubles * triples
def distance(words):
    """Return the number of positions at which the ids in *words* differ."""
    total = 0
    for elem in zip(*words):
        if elem[0] != elem[1]:
            total += 1
    return total
def part_two(words):
    """AoC 2018 day 2 part 2: find the pair of ids differing in exactly one
    position and return the letters they share."""
    best_pair = min(combinations(words, 2), key=distance)
    left, right = best_pair
    return ''.join(a for a, b in zip(left, right) if a == b)
def main():
    """Verify both parts against the worked examples, then solve the real
    puzzle input from inputs/day02.txt."""
    example = ['abcdef', 'bababc', 'abbcde', 'abcccd', 'aabcdd', 'abcdee', 'ababab']
    assert part_one(example) == 12
    example = ['abcde', 'fghij', 'klmno', 'pqrst', 'fguij', 'axcye', 'wvxyz']
    assert part_two(example) == 'fgij'
    with open('inputs/day02.txt') as f:
        words = [word.rstrip() for word in f]
    print('Part one:', part_one(words))
    print('Part two:', part_two(words))
if __name__ == '__main__':
    main()
| StarcoderdataPython |
1745593 | """Utility Functions"""
import functools
import logging
from collections import namedtuple # pytype: disable=pyi-error
def get_logger(logname):
    """Return the logger registered under *logname*."""
    return logging.getLogger(logname)
def log_method(method):
    """Decorator that logs entry into *method* via ``self.logger`` before delegating.

    Uses ``functools.wraps`` so the wrapper keeps the wrapped method's
    ``__name__``/``__doc__`` (the original wrapper lost them), and passes the
    method name as a lazy %-style logging argument so no string formatting
    happens when INFO is disabled.
    """
    @functools.wraps(method)
    def wrapped(self, *args, **kwargs):
        """Log the call, then invoke the wrapped method unchanged."""
        self.logger.info('Entering %s', method.__name__)
        return method(self, *args, **kwargs)
    return wrapped
class MessageParseError(Exception):
    """Raised when a message cannot be successfully parsed."""
class EapQueueMessage(namedtuple('EapQueueMessage', 'message src_mac port_mac')):
    """Queued EAP message together with its source MAC and port MAC."""
class RadiusQueueMessage(namedtuple('RadiusQueueMessage',
                                    'message src_mac identity state port_mac')):
    """Queued RADIUS message with its addressing, identity and state context."""
| StarcoderdataPython |
1698469 | <reponame>ishine/neurst
from collections import namedtuple
# Pseudo-enum of reduction modes: METRIC_REDUCTION.SUM == 0, METRIC_REDUCTION.MEAN == 1.
METRIC_REDUCTION = namedtuple(
    "metric_reduction", "SUM MEAN")(0, 1)
# Global registry mapping metric name -> reduction mode.
REGISTERED_METRICS = dict()
def register_metric(name, redution):
    """Record the reduction mode for metric *name*; duplicate names are rejected."""
    # NOTE(review): parameter is spelled "redution" (sic); kept unchanged so
    # existing keyword calls keep working.
    if name not in REGISTERED_METRICS:
        REGISTERED_METRICS[name] = redution
        return
    raise ValueError(f"Metric {name} already registered.")
def get_metric_reduction(name, default=METRIC_REDUCTION.MEAN):
    """Return the reduction registered for *name*, or *default* when unknown."""
    try:
        return REGISTERED_METRICS[name]
    except KeyError:
        return default
| StarcoderdataPython |
28504 | # (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
# metrics namespaced under 'scylla'
# Each table below maps a Prometheus metric name (as exposed by Scylla's
# metrics endpoint) to the Datadog metric name it is submitted under.
SCYLLA_ALIEN = {
    'scylla_alien_receive_batch_queue_length': 'alien.receive_batch_queue_length',
    'scylla_alien_total_received_messages': 'alien.total_received_messages',
    'scylla_alien_total_sent_messages': 'alien.total_sent_messages',
}
SCYLLA_BATCHLOG = {
    'scylla_batchlog_manager_total_write_replay_attempts': 'batchlog_manager.total_write_replay_attempts',
}
SCYLLA_CACHE = {
    'scylla_cache_active_reads': 'cache.active_reads',
    'scylla_cache_bytes_total': 'cache.bytes_total',
    'scylla_cache_bytes_used': 'cache.bytes_used',
    'scylla_cache_concurrent_misses_same_key': 'cache.concurrent_misses_same_key',
    'scylla_cache_mispopulations': 'cache.mispopulations',
    'scylla_cache_partition_evictions': 'cache.partition_evictions',
    'scylla_cache_partition_hits': 'cache.partition_hits',
    'scylla_cache_partition_insertions': 'cache.partition_insertions',
    'scylla_cache_partition_merges': 'cache.partition_merges',
    'scylla_cache_partition_misses': 'cache.partition_misses',
    'scylla_cache_partition_removals': 'cache.partition_removals',
    'scylla_cache_partitions': 'cache.partitions',
    'scylla_cache_pinned_dirty_memory_overload': 'cache.pinned_dirty_memory_overload',
    'scylla_cache_reads': 'cache.reads',
    'scylla_cache_reads_with_misses': 'cache.reads_with_misses',
    'scylla_cache_row_evictions': 'cache.row_evictions',
    'scylla_cache_row_hits': 'cache.row_hits',
    'scylla_cache_row_insertions': 'cache.row_insertions',
    'scylla_cache_row_misses': 'cache.row_misses',
    'scylla_cache_row_removals': 'cache.row_removals',
    'scylla_cache_rows': 'cache.rows',
    'scylla_cache_rows_dropped_from_memtable': 'cache.rows_dropped_from_memtable',
    'scylla_cache_rows_merged_from_memtable': 'cache.rows_merged_from_memtable',
    'scylla_cache_rows_processed_from_memtable': 'cache.rows_processed_from_memtable',
    'scylla_cache_sstable_partition_skips': 'cache.sstable_partition_skips',
    'scylla_cache_sstable_reader_recreations': 'cache.sstable_reader_recreations',
    'scylla_cache_sstable_row_skips': 'cache.sstable_row_skips',
    'scylla_cache_static_row_insertions': 'cache.static_row_insertions',
}
SCYLLA_COMMITLOG = {
    'scylla_commitlog_alloc': 'commitlog.alloc',
    'scylla_commitlog_allocating_segments': 'commitlog.allocating_segments',
    'scylla_commitlog_bytes_written': 'commitlog.bytes_written',
    'scylla_commitlog_cycle': 'commitlog.cycle',
    'scylla_commitlog_disk_total_bytes': 'commitlog.disk_total_bytes',
    'scylla_commitlog_flush': 'commitlog.flush',
    'scylla_commitlog_flush_limit_exceeded': 'commitlog.flush_limit_exceeded',
    'scylla_commitlog_memory_buffer_bytes': 'commitlog.memory_buffer_bytes',
    'scylla_commitlog_pending_allocations': 'commitlog.pending_allocations',
    'scylla_commitlog_pending_flushes': 'commitlog.pending_flushes',
    'scylla_commitlog_requests_blocked_memory': 'commitlog.requests_blocked_memory',
    'scylla_commitlog_segments': 'commitlog.segments',
    'scylla_commitlog_slack': 'commitlog.slack',
    'scylla_commitlog_unused_segments': 'commitlog.unused_segments',
}
SCYLLA_COMPACTION = {
    'scylla_compaction_manager_compactions': 'compaction_manager.compactions',
}
SCYLLA_CQL = {
    'scylla_cql_authorized_prepared_statements_cache_evictions': 'cql.authorized_prepared_statements_cache_evictions',
    'scylla_cql_authorized_prepared_statements_cache_size': 'cql.authorized_prepared_statements_cache_size',
    'scylla_cql_batches': 'cql.batches',
    'scylla_cql_batches_pure_logged': 'cql.batches_pure_logged',
    'scylla_cql_batches_pure_unlogged': 'cql.batches_pure_unlogged',
    'scylla_cql_batches_unlogged_from_logged': 'cql.batches_unlogged_from_logged',
    'scylla_cql_deletes': 'cql.deletes',
    'scylla_cql_filtered_read_requests': 'cql.filtered_read_requests',
    'scylla_cql_filtered_rows_dropped_total': 'cql.filtered_rows_dropped_total',
    'scylla_cql_filtered_rows_matched_total': 'cql.filtered_rows_matched_total',
    'scylla_cql_filtered_rows_read_total': 'cql.filtered_rows_read_total',
    'scylla_cql_inserts': 'cql.inserts',
    'scylla_cql_prepared_cache_evictions': 'cql.prepared_cache_evictions',
    'scylla_cql_prepared_cache_memory_footprint': 'cql.prepared_cache_memory_footprint',
    'scylla_cql_prepared_cache_size': 'cql.prepared_cache_size',
    'scylla_cql_reads': 'cql.reads',
    'scylla_cql_reverse_queries': 'cql.reverse_queries',
    'scylla_cql_rows_read': 'cql.rows_read',
    'scylla_cql_secondary_index_creates': 'cql.secondary_index_creates',
    'scylla_cql_secondary_index_drops': 'cql.secondary_index_drops',
    'scylla_cql_secondary_index_reads': 'cql.secondary_index_reads',
    'scylla_cql_secondary_index_rows_read': 'cql.secondary_index_rows_read',
    'scylla_cql_statements_in_batches': 'cql.statements_in_batches',
    'scylla_cql_unpaged_select_queries': 'cql.unpaged_select_queries',
    'scylla_cql_updates': 'cql.updates',
    'scylla_cql_user_prepared_auth_cache_footprint': 'cql.user_prepared_auth_cache_footprint',
}
SCYLLA_DATABASE = {
    'scylla_database_active_reads': 'database.active_reads',
    'scylla_database_active_reads_memory_consumption': 'database.active_reads_memory_consumption',
    'scylla_database_clustering_filter_count': 'database.clustering_filter_count',
    'scylla_database_clustering_filter_fast_path_count': 'database.clustering_filter_fast_path_count',
    'scylla_database_clustering_filter_sstables_checked': 'database.clustering_filter_sstables_checked',
    'scylla_database_clustering_filter_surviving_sstables': 'database.clustering_filter_surviving_sstables',
    'scylla_database_counter_cell_lock_acquisition': 'database.counter_cell_lock_acquisition',
    'scylla_database_counter_cell_lock_pending': 'database.counter_cell_lock_pending',
    'scylla_database_dropped_view_updates': 'database.dropped_view_updates',
    'scylla_database_large_partition_exceeding_threshold': 'database.large_partition_exceeding_threshold',
    'scylla_database_multishard_query_failed_reader_saves': 'database.multishard_query_failed_reader_saves',
    'scylla_database_multishard_query_failed_reader_stops': 'database.multishard_query_failed_reader_stops',
    'scylla_database_multishard_query_unpopped_bytes': 'database.multishard_query_unpopped_bytes',
    'scylla_database_multishard_query_unpopped_fragments': 'database.multishard_query_unpopped_fragments',
    'scylla_database_paused_reads': 'database.paused_reads',
    'scylla_database_paused_reads_permit_based_evictions': 'database.paused_reads_permit_based_evictions',
    'scylla_database_querier_cache_drops': 'database.querier_cache_drops',
    'scylla_database_querier_cache_lookups': 'database.querier_cache_lookups',
    'scylla_database_querier_cache_memory_based_evictions': 'database.querier_cache_memory_based_evictions',
    'scylla_database_querier_cache_misses': 'database.querier_cache_misses',
    'scylla_database_querier_cache_population': 'database.querier_cache_population',
    'scylla_database_querier_cache_resource_based_evictions': 'database.querier_cache_resource_based_evictions',
    'scylla_database_querier_cache_time_based_evictions': 'database.querier_cache_time_based_evictions',
    'scylla_database_queued_reads': 'database.queued_reads',
    'scylla_database_requests_blocked_memory': 'database.requests_blocked_memory',
    'scylla_database_requests_blocked_memory_current': 'database.requests_blocked_memory_current',
    'scylla_database_short_data_queries': 'database.short_data_queries',
    'scylla_database_short_mutation_queries': 'database.short_mutation_queries',
    'scylla_database_sstable_read_queue_overloads': 'database.sstable_read_queue_overloads',
    'scylla_database_total_reads': 'database.total_reads',
    'scylla_database_total_reads_failed': 'database.total_reads_failed',
    'scylla_database_total_result_bytes': 'database.total_result_bytes',
    'scylla_database_total_view_updates_failed_local': 'database.total_view_updates_failed_local',
    'scylla_database_total_view_updates_failed_remote': 'database.total_view_updates_failed_remote',
    'scylla_database_total_view_updates_pushed_local': 'database.total_view_updates_pushed_local',
    'scylla_database_total_view_updates_pushed_remote': 'database.total_view_updates_pushed_remote',
    'scylla_database_total_writes': 'database.total_writes',
    'scylla_database_total_writes_failed': 'database.total_writes_failed',
    'scylla_database_total_writes_timedout': 'database.total_writes_timedout',
    'scylla_database_view_building_paused': 'database.view_building_paused',
    'scylla_database_view_update_backlog': 'database.view_update_backlog',
}
SCYLLA_EXECUTION = {
    'scylla_execution_stages_function_calls_enqueued': 'execution_stages.function_calls_enqueued',
    'scylla_execution_stages_function_calls_executed': 'execution_stages.function_calls_executed',
    'scylla_execution_stages_tasks_preempted': 'execution_stages.tasks_preempted',
    'scylla_execution_stages_tasks_scheduled': 'execution_stages.tasks_scheduled',
}
SCYLLA_GOSSIP = {
    'scylla_gossip_heart_beat': 'gossip.heart_beat',
}
SCYLLA_HINTS = {
    'scylla_hints_for_views_manager_corrupted_files': 'hints.for_views_manager_corrupted_files',
    'scylla_hints_for_views_manager_discarded': 'hints.for_views_manager_discarded',
    'scylla_hints_for_views_manager_dropped': 'hints.for_views_manager_dropped',
    'scylla_hints_for_views_manager_errors': 'hints.for_views_manager_errors',
    'scylla_hints_for_views_manager_sent': 'hints.for_views_manager_sent',
    'scylla_hints_for_views_manager_size_of_hints_in_progress': 'hints.for_views_manager_size_of_hints_in_progress',
    'scylla_hints_for_views_manager_written': 'hints.for_views_manager_written',
    'scylla_hints_manager_corrupted_files': 'hints.manager_corrupted_files',
    'scylla_hints_manager_discarded': 'hints.manager_discarded',
    'scylla_hints_manager_dropped': 'hints.manager_dropped',
    'scylla_hints_manager_errors': 'hints.manager_errors',
    'scylla_hints_manager_sent': 'hints.manager_sent',
    'scylla_hints_manager_size_of_hints_in_progress': 'hints.manager_size_of_hints_in_progress',
    'scylla_hints_manager_written': 'hints.manager_written',
}
SCYLLA_HTTPD = {
    'scylla_httpd_connections_current': 'httpd.connections_current',
    'scylla_httpd_connections_total': 'httpd.connections_total',
    'scylla_httpd_read_errors': 'httpd.read_errors',
    'scylla_httpd_reply_errors': 'httpd.reply_errors',
    'scylla_httpd_requests_served': 'httpd.requests_served',
}
SCYLLA_IO = {
    'scylla_io_queue_delay': 'io_queue.delay',
    'scylla_io_queue_queue_length': 'io_queue.queue_length',
    'scylla_io_queue_shares': 'io_queue.shares',
    'scylla_io_queue_total_bytes': 'io_queue.total_bytes',
    'scylla_io_queue_total_operations': 'io_queue.total_operations',
}
SCYLLA_LSA = {
    'scylla_lsa_free_space': 'lsa.free_space',
    'scylla_lsa_large_objects_total_space_bytes': 'lsa.large_objects_total_space_bytes',
    'scylla_lsa_memory_allocated': 'lsa.memory_allocated',
    'scylla_lsa_memory_compacted': 'lsa.memory_compacted',
    'scylla_lsa_non_lsa_used_space_bytes': 'lsa.non_lsa_used_space_bytes',
    'scylla_lsa_occupancy': 'lsa.occupancy',
    'scylla_lsa_segments_compacted': 'lsa.segments_compacted',
    'scylla_lsa_segments_migrated': 'lsa.segments_migrated',
    'scylla_lsa_small_objects_total_space_bytes': 'lsa.small_objects_total_space_bytes',
    'scylla_lsa_small_objects_used_space_bytes': 'lsa.small_objects_used_space_bytes',
    'scylla_lsa_total_space_bytes': 'lsa.total_space_bytes',
    'scylla_lsa_used_space_bytes': 'lsa.used_space_bytes',
}
SCYLLA_MEMORY = {
    'scylla_memory_allocated_memory': 'memory.allocated_memory',
    'scylla_memory_cross_cpu_free_operations': 'memory.cross_cpu_free_operations',
    'scylla_memory_dirty_bytes': 'memory.dirty_bytes',
    'scylla_memory_free_memory': 'memory.free_memory',
    'scylla_memory_free_operations': 'memory.free_operations',
    'scylla_memory_malloc_live_objects': 'memory.malloc_live_objects',
    'scylla_memory_malloc_operations': 'memory.malloc_operations',
    'scylla_memory_reclaims_operations': 'memory.reclaims_operations',
    'scylla_memory_regular_dirty_bytes': 'memory.regular_dirty_bytes',
    'scylla_memory_regular_virtual_dirty_bytes': 'memory.regular_virtual_dirty_bytes',
    'scylla_memory_streaming_dirty_bytes': 'memory.streaming_dirty_bytes',
    'scylla_memory_streaming_virtual_dirty_bytes': 'memory.streaming_virtual_dirty_bytes',
    'scylla_memory_system_dirty_bytes': 'memory.system_dirty_bytes',
    'scylla_memory_system_virtual_dirty_bytes': 'memory.system_virtual_dirty_bytes',
    'scylla_memory_total_memory': 'memory.total_memory',
    'scylla_memory_virtual_dirty_bytes': 'memory.virtual_dirty_bytes',
}
SCYLLA_MEMTABLES = {
    'scylla_memtables_pending_flushes': 'memtables.pending_flushes',
    'scylla_memtables_pending_flushes_bytes': 'memtables.pending_flushes_bytes',
}
SCYLLA_NODE = {
    'scylla_node_operation_mode': 'node.operation_mode',
}
SCYLLA_QUERY = {
    'scylla_query_processor_queries': 'query_processor.queries',
    'scylla_query_processor_statements_prepared': 'query_processor.statements_prepared',
}
SCYLLA_REACTOR = {
    'scylla_reactor_aio_bytes_read': 'reactor.aio_bytes_read',
    'scylla_reactor_aio_bytes_write': 'reactor.aio_bytes_write',
    'scylla_reactor_aio_errors': 'reactor.aio_errors',
    'scylla_reactor_aio_reads': 'reactor.aio_reads',
    'scylla_reactor_aio_writes': 'reactor.aio_writes',
    'scylla_reactor_cpp_exceptions': 'reactor.cpp_exceptions',
    'scylla_reactor_cpu_busy_ms': 'reactor.cpu_busy_ms',
    'scylla_reactor_cpu_steal_time_ms': 'reactor.cpu_steal_time_ms',
    'scylla_reactor_fstream_read_bytes': 'reactor.fstream_read_bytes',
    'scylla_reactor_fstream_read_bytes_blocked': 'reactor.fstream_read_bytes_blocked',
    'scylla_reactor_fstream_reads': 'reactor.fstream_reads',
    'scylla_reactor_fstream_reads_ahead_bytes_discarded': 'reactor.fstream_reads_ahead_bytes_discarded',
    'scylla_reactor_fstream_reads_aheads_discarded': 'reactor.fstream_reads_aheads_discarded',
    'scylla_reactor_fstream_reads_blocked': 'reactor.fstream_reads_blocked',
    'scylla_reactor_fsyncs': 'reactor.fsyncs',
    'scylla_reactor_io_queue_requests': 'reactor.io_queue_requests',
    'scylla_reactor_io_threaded_fallbacks': 'reactor.io_threaded_fallbacks',
    'scylla_reactor_logging_failures': 'reactor.logging_failures',
    'scylla_reactor_polls': 'reactor.polls',
    'scylla_reactor_tasks_pending': 'reactor.tasks_pending',
    'scylla_reactor_tasks_processed': 'reactor.tasks_processed',
    'scylla_reactor_timers_pending': 'reactor.timers_pending',
    'scylla_reactor_utilization': 'reactor.utilization',
}
SCYLLA_SCHEDULER = {
    'scylla_scheduler_queue_length': 'scheduler.queue_length',
    'scylla_scheduler_runtime_ms': 'scheduler.runtime_ms',
    'scylla_scheduler_shares': 'scheduler.shares',
    'scylla_scheduler_tasks_processed': 'scheduler.tasks_processed',
    'scylla_scheduler_time_spent_on_task_quota_violations_ms': 'scheduler.time_spent_on_task_quota_violations_ms',
}
SCYLLA_SSTABLES = {
    'scylla_sstables_capped_local_deletion_time': 'sstables.capped_local_deletion_time',
    'scylla_sstables_capped_tombstone_deletion_time': 'sstables.capped_tombstone_deletion_time',
    'scylla_sstables_cell_tombstone_writes': 'sstables.cell_tombstone_writes',
    'scylla_sstables_cell_writes': 'sstables.cell_writes',
    'scylla_sstables_index_page_blocks': 'sstables.index_page_blocks',
    'scylla_sstables_index_page_hits': 'sstables.index_page_hits',
    'scylla_sstables_index_page_misses': 'sstables.index_page_misses',
    'scylla_sstables_partition_reads': 'sstables.partition_reads',
    'scylla_sstables_partition_seeks': 'sstables.partition_seeks',
    'scylla_sstables_partition_writes': 'sstables.partition_writes',
    'scylla_sstables_range_partition_reads': 'sstables.range_partition_reads',
    'scylla_sstables_range_tombstone_writes': 'sstables.range_tombstone_writes',
    'scylla_sstables_row_reads': 'sstables.row_reads',
    'scylla_sstables_row_writes': 'sstables.row_writes',
    'scylla_sstables_single_partition_reads': 'sstables.single_partition_reads',
    'scylla_sstables_sstable_partition_reads': 'sstables.sstable_partition_reads',
    'scylla_sstables_static_row_writes': 'sstables.static_row_writes',
    'scylla_sstables_tombstone_writes': 'sstables.tombstone_writes',
}
SCYLLA_STORAGE = {
    # Scylla 3.1
    'scylla_storage_proxy_coordinator_background_read_repairs': 'storage.proxy.coordinator_background_read_repairs',
    'scylla_storage_proxy_coordinator_background_reads': 'storage.proxy.coordinator_background_reads',
    'scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node': 'storage.proxy.coordinator_background_replica_writes_failed_local_node', # noqa E501
    'scylla_storage_proxy_coordinator_background_write_bytes': 'storage.proxy.coordinator_background_write_bytes',
    'scylla_storage_proxy_coordinator_background_writes': 'storage.proxy.coordinator_background_writes',
    'scylla_storage_proxy_coordinator_background_writes_failed': 'storage.proxy.coordinator_background_writes_failed',
    'scylla_storage_proxy_coordinator_canceled_read_repairs': 'storage.proxy.coordinator_canceled_read_repairs',
    'scylla_storage_proxy_coordinator_completed_reads_local_node': 'storage.proxy.coordinator_completed_reads_local_node', # noqa E501
    'scylla_storage_proxy_coordinator_current_throttled_base_writes': 'storage.proxy.coordinator_current_throttled_base_writes', # noqa E501
    'scylla_storage_proxy_coordinator_current_throttled_writes': 'storage.proxy.coordinator_current_throttled_writes',
    'scylla_storage_proxy_coordinator_foreground_read_repair': 'storage.proxy.coordinator_foreground_read_repair',
    'scylla_storage_proxy_coordinator_foreground_reads': 'storage.proxy.coordinator_foreground_reads',
    'scylla_storage_proxy_coordinator_foreground_writes': 'storage.proxy.coordinator_foreground_writes',
    'scylla_storage_proxy_coordinator_last_mv_flow_control_delay': 'storage.proxy.coordinator_last_mv_flow_control_delay', # noqa E501
    'scylla_storage_proxy_coordinator_queued_write_bytes': 'storage.proxy.coordinator_queued_write_bytes',
    'scylla_storage_proxy_coordinator_range_timeouts': 'storage.proxy.coordinator_range_timeouts',
    'scylla_storage_proxy_coordinator_range_unavailable': 'storage.proxy.coordinator_range_unavailable',
    'scylla_storage_proxy_coordinator_read_errors_local_node': 'storage.proxy.coordinator_read_errors_local_node',
    'scylla_storage_proxy_coordinator_read_latency': 'storage.proxy.coordinator_read_latency',
    'scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node': 'storage.proxy.coordinator_read_repair_write_attempts_local_node', # noqa E501
    'scylla_storage_proxy_coordinator_read_retries': 'storage.proxy.coordinator_read_retries',
    'scylla_storage_proxy_coordinator_read_timeouts': 'storage.proxy.coordinator_read_timeouts',
    'scylla_storage_proxy_coordinator_read_unavailable': 'storage.proxy.coordinator_read_unavailable',
    'scylla_storage_proxy_coordinator_reads_local_node': 'storage.proxy.coordinator_reads_local_node',
    'scylla_storage_proxy_coordinator_speculative_data_reads': 'storage.proxy.coordinator_speculative_data_reads',
    'scylla_storage_proxy_coordinator_speculative_digest_reads': 'storage.proxy.coordinator_speculative_digest_reads',
    'scylla_storage_proxy_coordinator_throttled_writes': 'storage.proxy.coordinator_throttled_writes',
    'scylla_storage_proxy_coordinator_total_write_attempts_local_node': 'storage.proxy.coordinator_total_write_attempts_local_node', # noqa E501
    'scylla_storage_proxy_coordinator_write_errors_local_node': 'storage.proxy.coordinator_write_errors_local_node',
    'scylla_storage_proxy_coordinator_write_latency': 'storage.proxy.coordinator_write_latency',
    'scylla_storage_proxy_coordinator_write_timeouts': 'storage.proxy.coordinator_write_timeouts',
    'scylla_storage_proxy_coordinator_write_unavailable': 'storage.proxy.coordinator_write_unavailable',
    'scylla_storage_proxy_replica_cross_shard_ops': 'storage.proxy.replica_cross_shard_ops',
    'scylla_storage_proxy_replica_forwarded_mutations': 'storage.proxy.replica_forwarded_mutations',
    'scylla_storage_proxy_replica_forwarding_errors': 'storage.proxy.replica_forwarding_errors',
    'scylla_storage_proxy_replica_reads': 'storage.proxy.replica_reads',
    'scylla_storage_proxy_replica_received_counter_updates': 'storage.proxy.replica_received_counter_updates',
    'scylla_storage_proxy_replica_received_mutations': 'storage.proxy.replica_received_mutations',
    # Scylla 3.2 - renamed
    'scylla_storage_proxy_coordinator_foreground_read_repairs': 'storage.proxy.coordinator_foreground_read_repair',
}
SCYLLA_STREAMING = {
    'scylla_streaming_total_incoming_bytes': 'streaming.total_incoming_bytes',
    'scylla_streaming_total_outgoing_bytes': 'streaming.total_outgoing_bytes',
}
SCYLLA_THRIFT = {
    'scylla_thrift_current_connections': 'thrift.current_connections',
    'scylla_thrift_served': 'thrift.served',
    'scylla_thrift_thrift_connections': 'thrift.thrift_connections',
}
SCYLLA_TRACING = {
    'scylla_tracing_active_sessions': 'tracing.active_sessions',
    'scylla_tracing_cached_records': 'tracing.cached_records',
    'scylla_tracing_dropped_records': 'tracing.dropped_records',
    'scylla_tracing_dropped_sessions': 'tracing.dropped_sessions',
    'scylla_tracing_flushing_records': 'tracing.flushing_records',
    'scylla_tracing_keyspace_helper_bad_column_family_errors': 'tracing.keyspace_helper_bad_column_family_errors',
    'scylla_tracing_keyspace_helper_tracing_errors': 'tracing.keyspace_helper_tracing_errors',
    'scylla_tracing_pending_for_write_records': 'tracing.pending_for_write_records',
    'scylla_tracing_trace_errors': 'tracing.trace_errors',
    'scylla_tracing_trace_records_count': 'tracing.trace_records_count',
}
SCYLLA_TRANSPORT = {
    'scylla_transport_cql_connections': 'transport.cql_connections',
    'scylla_transport_current_connections': 'transport.current_connections',
    'scylla_transport_requests_blocked_memory': 'transport.requests_blocked_memory',
    'scylla_transport_requests_blocked_memory_current': 'transport.requests_blocked_memory_current',
    'scylla_transport_requests_served': 'transport.requests_served',
    'scylla_transport_requests_serving': 'transport.requests_serving',
}
# Metric groups collected by default for every monitored instance.
INSTANCE_DEFAULT_METRICS = [
    SCYLLA_CACHE,
    SCYLLA_COMPACTION,
    SCYLLA_GOSSIP,
    SCYLLA_NODE,
    SCYLLA_REACTOR,
    SCYLLA_STORAGE,
    SCYLLA_STREAMING,
    SCYLLA_TRANSPORT,
]
# Optional metric groups that can be enabled in addition to the defaults,
# keyed by the group name users reference in configuration.
ADDITIONAL_METRICS_MAP = {
    'scylla.alien': SCYLLA_ALIEN,
    'scylla.batchlog': SCYLLA_BATCHLOG,
    'scylla.commitlog': SCYLLA_COMMITLOG,
    'scylla.cql': SCYLLA_CQL,
    'scylla.database': SCYLLA_DATABASE,
    'scylla.execution': SCYLLA_EXECUTION,
    'scylla.hints': SCYLLA_HINTS,
    'scylla.httpd': SCYLLA_HTTPD,
    'scylla.io': SCYLLA_IO,
    'scylla.lsa': SCYLLA_LSA,
    'scylla.memory': SCYLLA_MEMORY,
    'scylla.memtables': SCYLLA_MEMTABLES,
    'scylla.query': SCYLLA_QUERY,
    'scylla.scheduler': SCYLLA_SCHEDULER,
    'scylla.sstables': SCYLLA_SSTABLES,
    'scylla.thrift': SCYLLA_THRIFT,
    'scylla.tracing': SCYLLA_TRACING,
}
| StarcoderdataPython |
167686 | from milight import MiLight, LightBulb, color_from_hex
from . import LightController
class MiLightController(LightController):
    """LightController backed by a MiLight (LimitlessLED) bridge.

    All operations delegate to the `milight` library: commands are built from
    the configured bulb group and sent through the bridge connection.
    """
    # Vendor identifier used to select this controller implementation.
    VENDOR = "milight"
    def __init__(self, host, port, bulbs, *args, **kwargs):
        # host/port locate the MiLight bridge; *bulbs* describes the bulb
        # types controlled (passed straight to LightBulb).
        super(MiLightController, self).__init__(*args, **kwargs)
        # wait_duration=0: send commands back-to-back without pacing delays.
        self._milight = MiLight({'host': host, 'port': int(port)},
                                wait_duration=0)
        self._bulbs = LightBulb(bulbs)
    def switch_on(self, light_id):
        # Turn on the bulb/group addressed by *light_id*.
        self._milight.send(self._bulbs.on(light_id))
    def switch_all_on(self):
        # Turn on every bulb the bridge controls.
        self._milight.send(self._bulbs.all_on())
    def switch_off(self, light_id):
        # Turn off the bulb/group addressed by *light_id*.
        self._milight.send(self._bulbs.off(light_id))
    def switch_all_off(self):
        # Turn off every bulb the bridge controls.
        self._milight.send(self._bulbs.all_off())
    def change_color(self, light_id, color_code):
        # *color_code* is a hex string (e.g. "ff0000") converted by the
        # milight helper before sending.
        self._milight.send(
            self._bulbs.color(color_from_hex(color_code), light_id))
| StarcoderdataPython |
182987 | # Generated by Django 3.1.2 on 2020-10-19 10:26
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import insta.models
class Migration(migrations.Migration):
    # Auto-generated schema migration: drops Profile.date, re-parents Stream
    # from Profile to Post, and links Post to its authoring user.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('insta', '0002_remove_profile_pictures'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='profile',
            name='date',
        ),
        migrations.RemoveField(
            model_name='stream',
            name='profile',
        ),
        # default=1 backfills existing rows; preserve_default=False means the
        # default is only used during this migration, not kept on the field.
        migrations.AddField(
            model_name='post',
            name='user',
            field=models.OneToOneField(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='post_user', to='auth.user'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='profile',
            name='pictures',
            field=models.ImageField(null=True, upload_to=insta.models.user_directory_path, verbose_name='Picture'),
        ),
        migrations.AddField(
            model_name='stream',
            name='post',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='insta.post'),
            preserve_default=False,
        ),
    ]
| StarcoderdataPython |
1721845 | <filename>leetcode/stamping_the_sequence/stamping_the_sequence.py
class Solution:
    def movesToStamp(self, stamp: str, target: str) -> List[int]:
        """LeetCode 936. Work backwards: repeatedly find a window of *target*
        that could have been the LAST stamp applied, wipe it to wildcards
        ('*'), and record its index. Reversing the recorded indices yields a
        valid forward stamping order; returns [] if *target* is unreachable.
        """
        ans = []
        N = len(target)
        M = len(stamp)
        stamp = list(stamp)
        # Keep the original string for the final sanity check below.
        old_target = target
        target = list(target)
        def match(offset):
            # A window matches if every non-wildcard char equals the stamp.
            # `count` tracks how many real (non-'*') chars the window still
            # has: requiring count > 0 prevents re-stamping an all-wildcard
            # window, which would loop forever without progress.
            count = 0
            for i in range(M):
                if stamp[i] != target[i + offset] and target[i + offset] != "*":
                    return False
                if target[i + offset] != "*":
                    count +=1
            return count > 0
        while True:
            found = False
            for start in range(N - M + 1):
                if match(start):
                    found = True
                    # Wipe the matched window to wildcards ("un-stamp" it).
                    for i in range(start, start + M):
                        target[i] = "*"
                    ans.append(start)
                    break
            if all(x == "*" for x in target):
                break
            if not found:
                # No window can be un-stamped and wildcards remain: unreachable.
                return []
        # Indices were collected in reverse (last stamp first).
        ans.reverse()
        # Sanity check: replay the stamps forward and confirm they reproduce
        # the original target exactly.
        constructed = [""] * N
        for index in ans:
            for k in range(M):
                constructed[index + k] = stamp[k]
        if "".join(constructed) == old_target:
            return ans
        return []
| StarcoderdataPython |
140602 | import unittest
from tree import TreeNode
# O(n). Recursive DFS.
class Solution:
    # O(n) recursive DFS. In this special tree the root always holds the
    # global minimum, so the answer is the smallest value differing from it.
    def findSecondMinimumValue(self, root):
        """
        :type root: TreeNode
        :rtype: int
        """
        if not root:
            return -1
        smallest = root.val
        INF = 0x7FFFFFFF

        def smallest_other(node):
            # Smallest value in *node*'s subtree that differs from the root value.
            if not node:
                return INF
            if node.val != smallest:
                return node.val
            return min(smallest_other(node.left), smallest_other(node.right))

        candidate = smallest_other(root)
        return candidate if candidate != INF else -1
class Test(unittest.TestCase):
    """Unit tests for Solution.findSecondMinimumValue (LeetCode 671 examples)."""
    def test(self):
        self._test([2, 2, 5, None, None, 5, 7], 5)
        self._test([2, 2, 2], -1)
    def _test(self, root, expected):
        # Build the tree from its level-order array encoding, then compare.
        root = TreeNode.from_array(root)
        actual = Solution().findSecondMinimumValue(root)
        self.assertEqual(expected, actual)
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
63318 | from twilio.rest import Client
def sendotp(otpreci):
    """Send a WhatsApp message containing the login OTP *otpreci* via Twilio.

    SECURITY: the Twilio account SID and auth token were previously hard-coded
    in source; they are now read from the TWILIO_ACCOUNT_SID and
    TWILIO_AUTH_TOKEN environment variables so credentials never live in the
    repository.
    """
    import os
    account_sid = os.environ.get('TWILIO_ACCOUNT_SID')
    auth_token = os.environ.get('TWILIO_AUTH_TOKEN')
    client = Client(account_sid, auth_token)
    msg = 'OTP for Login : ' + str(otpreci)
    # TODO(review): sender/recipient numbers are hard-coded; parameterize the
    # recipient if this should serve more than one user.
    message = client.messages.create(
        from_='whatsapp:+14155238886',
        body=msg,
        to='whatsapp:+918421296860'
    )
| StarcoderdataPython |
3359894 | <gh_stars>0
from .....messaging.base_handler import (
BaseHandler,
BaseResponder,
HandlerException,
RequestContext,
)
from ..messages.credential_issue import CredentialIssue
from ..messages.credential_request import CredentialRequest
from aries_cloudagent.holder.base import BaseHolder, HolderError
from aries_cloudagent.issuer.base import BaseIssuer, IssuerError
from aries_cloudagent.connections.models.connection_record import ConnectionRecord
import json
from aries_cloudagent.protocols.issue_credential.v1_1.models.credential_exchange import (
CredentialExchangeRecord,
)
from aries_cloudagent.aathcf.utils import debug_handler
class CredentialRequestHandler(BaseHandler):
    """
    Message handler logic for incoming credential requests.
    """
    async def handle(self, context: RequestContext, responder: BaseResponder):
        """Record an incoming credential request and notify the controller.

        Persists the request as a new exchange record (this agent acting as
        issuer, exchange initiated by the other party) and emits a webhook so
        the admin controller can decide whether to issue.
        """
        debug_handler(self._logger.debug, context, CredentialRequest)
        message: CredentialRequest = context.message
        credential = context.message.credential
        exchange_record: CredentialExchangeRecord = CredentialExchangeRecord(
            connection_id=responder.connection_id,
            initiator=CredentialExchangeRecord.INITIATOR_EXTERNAL,
            role=CredentialExchangeRecord.ROLE_ISSUER,
            state=CredentialExchangeRecord.STATE_REQUEST_RECEIVED,
            thread_id=message._thread_id,
            credential_request=credential,
            their_public_did=context.message.did,
        )
        credential_exchange_id = await exchange_record.save(context)
        # Surface the new exchange to the controller via webhook.
        await responder.send_webhook(
            CredentialExchangeRecord.webhook_topic,
            {
                "credential_exchange_id": credential_exchange_id,
                "connection_id": responder.connection_id,
            },
        )
| StarcoderdataPython |
141593 | <filename>dreamcoder/domains/logo/makeLogoTasks.py<gh_stars>1-10
# coding: utf8
import os
import random
import sys
from dreamcoder.domains.logo.logoPrimitives import primitives, turtle
from dreamcoder.task import Task
from dreamcoder.program import Abstraction, Application, Index, Program
from dreamcoder.type import arrow
from dreamcoder.utilities import eprint, jsonBinaryInvoke, random_seed, montage
from dreamcoder.grammar import Grammar
def drawLogo(*programs,
             timeout=None,
             resolution=None,
             pretty=False, smoothPretty=False,
             filenames=None,
             animate=False,
             cost=False):
    """Render LOGO *programs* by invoking the external ``logoDrawString`` binary.

    Arguments:
        programs: one or more LOGO programs (anything with a sensible str()).
        timeout: optional evaluation timeout forwarded to the binary.
        resolution: required; either a single int applied to every program,
            or a list with one size per program.
        pretty, smoothPretty: rendering-style flags forwarded to the binary.
        filenames: optional iterable of export paths, consumed one per
            program in order. (Was a mutable default ``[]``; now None.)
        animate: request animated rendering for every job.
        cost: when True, non-string responses become (pixels, cost) tuples.

    Returns the single response when exactly one program was given,
    otherwise a list of responses in program order.
    """
    # Normalize to a fresh list: avoids the mutable-default-argument trap and
    # accepts any iterable of export paths.
    filenames = list(filenames) if filenames else []
    message = {}
    if pretty: message["pretty"] = pretty
    if smoothPretty: message["smoothPretty"] = smoothPretty
    if timeout: message["timeout"] = timeout
    # Explicit raises instead of asserts: input validation must survive
    # running under ``python -O``.
    if resolution is None:
        raise ValueError("resolution not provided in drawLogo")
    if isinstance(resolution, list):
        if len(resolution) != len(programs):
            raise ValueError("must provide a resolution for each program")
    elif isinstance(resolution, int):
        resolution = [resolution]*len(programs)
    else:
        raise TypeError("resolution must be an int or a list of ints")
    jobs = []
    for p, size in zip(programs, resolution):
        entry = {"program": str(p),
                 "size": size}
        if animate: entry["animate"] = True
        if filenames:
            # Hand out export paths one at a time, in program order.
            entry["export"] = filenames.pop(0)
        jobs.append(entry)
    message["jobs"] = jobs
    response = jsonBinaryInvoke("./logoDrawString", message)
    if cost:
        # Include the cost: non-string entries become (pixels, cost) tuples.
        response = [r if isinstance(r, str) else (r["pixels"], r["cost"])
                    for r in response]
    else:
        response = [r if isinstance(r, str) else r["pixels"]
                    for r in response]
    if len(programs) == 1:
        return response[0]
    return response
def makeTasks(subfolders, proto):
    """Return the LOGO task set.

    NOTE: *subfolders* and *proto* are accepted for API compatibility but
    ignored; the curated manual task set is always returned.
    """
    return manualLogoTasks()
def parseLogo(s):
    """Parse an s-expression LOGO surface-syntax string into a Program.

    The surface syntax supports commands (``move``, ``for``/``loop``,
    ``embed``, ``p``), angle/length arithmetic, and special constants
    (``infinity``, epsilon sizes). Parsing is continuation-passing style:
    each command is built around the continuation compiled from the rest of
    its block.
    """
    # Primitive programs referenced by the surface syntax.
    _ua = Program.parse("logo_UA")
    _ul = Program.parse("logo_UL")
    _za = Program.parse("logo_ZA")
    _zl = Program.parse("logo_ZL")
    _da = Program.parse("logo_DIVA")
    _ma = Program.parse("logo_MULA")
    _dl = Program.parse("logo_DIVL")
    _ml = Program.parse("logo_MULL")
    _aa = Program.parse("logo_ADDA")
    _sa = Program.parse("logo_SUBA")
    _al = None#Program.parse("logo_ADDL")
    _sl = None#Program.parse("logo_SUBL")
    _pu = None#Program.parse("logo_PU")
    _pd = None#Program.parse("logo_PD")
    _p = Program.parse("logo_PT")
    _move = Program.parse("logo_FWRT")
    _embed = Program.parse("logo_GETSET")
    _addition = Program.parse("+")
    _infinity = Program.parse("logo_IFTY")
    _ea = Program.parse("logo_epsA")
    _el = Program.parse("logo_epsL")
    _loop = Program.parse("logo_forLoop")

    from sexpdata import loads, Symbol
    s = loads(s)

    def command(k, environment, continuation):
        # Compile a single command s-expression, threading *continuation*.
        assert isinstance(k,list)
        if k[0] == Symbol("move"):
            return Application(Application(Application(_move,
                                                       expression(k[1],environment)),
                                           expression(k[2],environment)),
                               continuation)
        if k[0] == Symbol("for") or k[0] == Symbol("loop"):
            v = k[1]
            b = expression(k[2], environment)
            # Loop body gets two extra bindings: the continuation slot and
            # the loop variable *v*.
            newEnvironment = [None, v] + environment
            body = block(k[3:], newEnvironment, Index(0))
            return Application(Application(Application(_loop,b),
                                           Abstraction(Abstraction(body))),
                               continuation)
        if k[0] == Symbol("embed"):
            body = block(k[1:], [None] + environment, Index(0))
            return Application(Application(_embed,Abstraction(body)),continuation)
        if k[0] == Symbol("p"):
            body = block(k[1:], [None] + environment, Index(0))
            return Application(Application(_p,Abstraction(body)),continuation)
        assert False
    def expression(e, environment):
        # Compile an arithmetic/constant expression; loop variables resolve
        # to de Bruijn indices via *environment*.
        for n, v in enumerate(environment):
            if e == v: return Index(n)
        if isinstance(e,int): return Program.parse(str(e))
        mapping = {"1a": _ua,
                   "1d": _ul, "1l": _ul,
                   "0a": _za,
                   "0d": _zl, "0l": _zl,
                   "/a": _da,
                   "/l": _dl, "/d": _dl,
                   "*a": _ma,
                   "*l": _ml, "*d": _ml,
                   "+a": _aa,
                   "+d": _al, "+l": _al,
                   "-a": _sa,
                   "-d": _sl, "-l": _sl,
                   "+": _addition,
                   "infinity": _infinity,
                   "epsilonAngle": _ea,
                   "epsilonDistance": _el,
                   "epsilonLength": _el}
        if e == float('inf'): return _infinity
        for name, value in mapping.items():
            if e == Symbol(name): return value
        assert isinstance(e,list), "not a list %s"%e
        for name, value in mapping.items():
            if e[0] == Symbol(name):
                f = value
                for argument in e[1:]:
                    f = Application(f, expression(argument, environment))
                return f
        assert False
    def block(b, environment, continuation):
        # Compile a sequence of commands right-to-left onto *continuation*.
        if len(b) == 0: return continuation
        return command(b[0], environment, block(b[1:], environment, continuation))
    # Try the input as a single command first, falling back to a block of
    # commands. Narrowed from a bare ``except:`` so KeyboardInterrupt and
    # SystemExit are no longer swallowed (parse failures raise
    # AssertionError, still an Exception subclass).
    try: return Abstraction(command(s, [], Index(0)))
    except Exception: return Abstraction(block(s, [], Index(0)))
def manualLogoTask(name, expression, proto=False, needToTrain=False,
                   supervise=False, lambdaCalculus=False):
    """Construct a single LOGO Task from a program source string.

    ``expression`` is either raw lambda calculus (``lambdaCalculus=True``)
    or the s-expression surface language understood by ``parseLogo``.
    The program is rendered at 28x28 (the task example) and 128x128
    (stored on ``t.highresolution``); the rendering cost (plus 5% slack)
    is recorded on the task so the solver can prune costlier programs.
    """
    p = Program.parse(expression) if lambdaCalculus else parseLogo(expression)
    from dreamcoder.domains.logo.logoPrimitives import primitives
    from dreamcoder.grammar import Grammar
    g = Grammar.uniform(primitives, continuationType=turtle)
    gp = Grammar.uniform(primitives)
    try:
        l = g.logLikelihood(arrow(turtle,turtle),p)
        lp = gp.logLikelihood(arrow(turtle,turtle),p)
        # The continuation-typed grammar should never assign a program a
        # lower likelihood than the plain uniform grammar.
        assert l >= lp
        eprint(name,-l,"nats")
    except: eprint("WARNING: could not calculate likelihood of manual logo",p)

    attempts = 0
    # Retry rendering until both resolutions finish within the timeout.
    # NOTE(review): this loop never gives up -- a program that always
    # times out would hang here forever.
    while True:
        [output, highresolution] = drawLogo(p, p, resolution=[28,128], cost=True)
        if output == "timeout" or highresolution == "timeout":
            attempts += 1
        else:
            break
    if attempts > 0:
        eprint(f"WARNING: Took {attempts} attempts to render task {name} within timeout")
    # With cost=True, drawLogo returns (pixels, cost) pairs.
    cost = output[1]
    output = output[0]
    assert highresolution[1] == cost
    highresolution = highresolution[0]

    shape = list(map(int, output))
    highresolution = list(map(float, highresolution))
    t = Task(name, arrow(turtle,turtle),
             [(([0]), shape)])
    t.mustTrain = needToTrain
    t.proto = proto
    t.specialTask = ("LOGO", {"proto": proto})
    # 5% slack on the rendering cost budget.
    t.specialTask[1]["cost"] = cost*1.05

    t.highresolution = highresolution
    if supervise:
        t.supervisedSolution = p

    return t
def dSLDemo():
    """Build the hand-written DSL demonstration tasks.

    Returns a list of Tasks, each named by its index in the list.
    Sources containing "lambda" are raw lambda calculus; everything else
    is the s-expression surface language compiled by parseLogo.
    """
    # (Removed an unused local counter `n = 0` left over from an earlier
    # revision; nothing in this function read it.)
    demos = []
    def T(source):
        demos.append(manualLogoTask(str(len(demos)), source,
                                    lambdaCalculus="lambda" in source))
    # Regular polygons of various side lengths / side counts.
    # this looks like polygons - verify and include
    T("(#(lambda (lambda (#(lambda (lambda (#(lambda (lambda (lambda (logo_forLoop $0 (lambda (lambda (logo_FWRT $4 $3 $0))))))) $1 $0 logo_IFTY))) $1 (logo_DIVA logo_UA $0)))) (logo_MULL logo_UL 4) 3)")
    T("(#(lambda (lambda (#(lambda (lambda (#(lambda (lambda (lambda (logo_forLoop $0 (lambda (lambda (logo_FWRT $4 $3 $0))))))) $1 $0 logo_IFTY))) $1 (logo_DIVA logo_UA $0)))) (logo_MULL logo_UL 6) 4)")
    T("(#(lambda (lambda (#(lambda (lambda (#(lambda (lambda (lambda (logo_forLoop $0 (lambda (lambda (logo_FWRT $4 $3 $0))))))) $1 $0 logo_IFTY))) $1 (logo_DIVA logo_UA $0)))) (logo_MULL logo_UL 5) 5)")
    T("(#(lambda (lambda (#(lambda (lambda (#(lambda (lambda (lambda (logo_forLoop $0 (lambda (lambda (logo_FWRT $4 $3 $0))))))) $1 $0 logo_IFTY))) $1 (logo_DIVA logo_UA $0)))) (logo_MULL logo_UL 3) 6)")
    T("(#(lambda (lambda (#(lambda (lambda (#(lambda (lambda (lambda (logo_forLoop $0 (lambda (lambda (logo_FWRT $4 $3 $0))))))) $1 $0 logo_IFTY))) $1 (logo_DIVA logo_UA $0)))) (logo_MULL logo_UL 2) 7)")
    # Spirals!
    for spiralSize in [1,2,3,4,5]:
        T(f"((lambda (logo_forLoop logo_IFTY (lambda (lambda (logo_FWRT (logo_MULL logo_epsL $1) (logo_MULA logo_epsA $2) $0))))) {spiralSize})")
    for spiralSize in [5,6,7,8,9]:
        #T(f"(lambda (#(lambda (logo_forLoop $0 (lambda (lambda (#(lambda (logo_FWRT (logo_MULL logo_UL $0) (logo_DIVA logo_UA 4))) $1 $0))))) {spiralSize} $0))")
        T("(loop i " + str(spiralSize) + " (move (*d 1l i) (/a 1a 4)))")# (#(lambda (logo_forLoop $0 (lambda (lambda (#(lambda (logo_FWRT (logo_MULL logo_UL $0) (logo_DIVA logo_UA 4))) $1 $0))))) {spiralSize} $0))
    # CIRCLES
    #(lambda (#(lambda (logo_forLoop 6 (lambda (lambda (#(lambda (lambda (logo_forLoop logo_IFTY (lambda (lambda (logo_FWRT $2 $3 $0)))))) logo_epsA (logo_MULL logo_epsL $2) $0))))) 6 $0))
    for circleSize in [1,3,5,7,9]:
        T(f"(lambda (#(lambda (logo_forLoop 6 (lambda (lambda (#(lambda (lambda (logo_forLoop logo_IFTY (lambda (lambda (logo_FWRT $2 $3 $0)))))) logo_epsA (logo_MULL logo_epsL $2) $0))))) {circleSize} $0))")
    # Assorted polygons, arcs, spirals, dashes and staircases.
    T("(loop i 3 (move (*d 1l 3) (/a 1a 4)))")
    T("(loop i 5 (move (*d 1l 5) (/a 1a 5)))")
    T("(loop i infinity (move (*d epsilonDistance 5) (/a epsilonAngle 3)))")
    T("(loop i infinity (move (*d epsilonDistance 9) (/a epsilonAngle 2)))")
    T("(loop i infinity (move (*d epsilonLength i) (*a epsilonAngle 3)))")
    T("(loop i 9 (move (*d 1l i) (/a 1a 4)))")
    T("(move 1d 0a)")
    T("(loop i infinity (move (*d epsilonLength 6) epsilonAngle))")
    T("(loop i infinity (move (*d epsilonLength 8) epsilonAngle))")
    T("(loop k 2 (loop i infinity (move (*d epsilonLength 4) epsilonAngle)))")
    T("(loop k 2 (loop i infinity (move (*d epsilonLength 8) epsilonAngle)))")
    T("(loop s 4 (move (*d 1d 3) (/a 1a 4)))")
    T("(loop s 4 (move (*d 1d 6) (/a 1a 4)))")
    # Rotationally repeated circles / squares.
    T("""
      (loop j 5
            (move 0d (/a 1a 5))
            (embed (loop i infinity
                         (move (*d epsilonLength 6) epsilonAngle))
                   (loop i infinity
                         (move (*d epsilonLength 6) epsilonAngle))))""")
    T("""
      (loop j 5
            (embed (loop s 4 (move (*d 1d 3) (/a 1a 4))))
            (move 0d (/a 1a 5)))""")
    return demos
def rotationalSymmetryDemo():
    """Demo tasks: five base motifs repeated with 3- to 7-fold rotational
    symmetry (the motif is embedded, then the turtle rotates by 2pi/n)."""
    demoTasks = []
    def T(source):
        demoTasks.append(manualLogoTask(str(len(demoTasks)), source))
    motifs = {"dashed": "(p (move 1d 0a)) (move 1d 0a) (p (move 1d 0a)) (move 1d 0a)",
              "lonely circle": "(p (move (*d 1d 2) 0a)) (loop k 2 (loop i infinity (move (*d epsilonLength 2) epsilonAngle)))",
              "square dashed": "(p (move 1d 0a)) (loop s 4 (move 1d (/a 1a 4)))",
              "square": "(loop s 4 (move (*d 1d 2) (/a 1a 4)))",
              "semicircle": "(loop i infinity (move (*d epsilonLength 4) epsilonAngle))"}
    for motifName, motifBody in motifs.items():
        for n in [3,4,5,6,7]:
            T("""
            (loop j %d
            (embed %s)
            (move 0d (/a 1a %d)))"""%(n,motifBody,n))
    return demoTasks
def manualLogoTasks():
    """Build the full hand-curated LOGO task set (training + testing).

    Each call to the local helper ``T`` constructs one task from an
    s-expression program; ``needToTrain`` marks which tasks go in the
    training split.  Task families: n-gons, spirals, stars, leaves,
    flowers, staircases, zigzags, (semi)circles, compositions of shapes,
    snowflakes, grids and concentric squares.
    """
    tasks = []
    def T(name, source, needToTrain=False, supervise=False):
        tasks.append(manualLogoTask(name, source, supervise=supervise,
                                    needToTrain=needToTrain))
    # Dead experimental pen-up tasks, kept for reference.
    if False:
        for d,a,s in [('1l','0a','(loop i infinity (move epsilonLength epsilonAngle))'),
                      ('epsilonLength','0a','(loop i infinity (move epsilonLength epsilonAngle))'),
                      ('(*d 1l 3)','0a','(move 1l 0a)'),
                      ('epsilonLength','0a','(move (*d 1l 2) 0a)'),
                      ('(*d epsilonLength 9)','0a','(move epsilonLength 0a)'),
                      ('(/d 1l 2)','0a','(move 1l 0a)')]:
            # 'epsilonLength']:
            # for a in ['epsilonAngle','0a']:
            #     for s in ['(move 1l 0a)',
            #               '(move epsilonLength 0a)',
            #               '(loop i infinity (move epsilonLength epsilonAngle))']:
            #         if d == 'epsilonLength' and s == '(move epsilonLength 0a)': continue
            T("pu: %s/%s/%s"%(d,a,s),
              """
              (pu (move %s %s) pd %s)
              """%(d,a,s))
        return tasks

    # Helper: rotate the turtle by 2pi/n before drawing (a "slant").
    def slant(n):
        return f"(move 0d (/a 1a {n}))"
    # Regular n-gons: first batch trains, second batch tests.
    for n,l,s in [(3,"1l",8),
                  (4,"(*d 1d 3)",None),
                  (5,"1l",None),
                  (6,"(*d 1d 2)",5),
                  (7,"1l",None),
                  (8,"(/d 1d 2)",None)]:
        T(f"{n}-gon {l}{'' if s is None else ' slanted '+str(s)}",
          f"""
          ({'' if s is None else slant(s)}
           (loop i {n}
                 (move {l} (/a 1a {n}))))
          """,
          needToTrain=True)
    for n,l,s in [(3,"(*d 1l 2)",None),
                  (4,"(*d 1d 4)",None),
                  (5,"(*d 1d 2)",None),
                  (6,"1l",None),
                  (7,"(*d 1d 3)",None),
                  (8,"1l",3)]:
        T(f"{n}-gon {l}{'' if s is None else ' slanted '+str(s)}",
          f"""
          ({'' if s is None else slant(s)}
           (loop i {n}
                 (move {l} (/a 1a {n}))))
          """,
          needToTrain=False)
    # Simple lines, angles and slanted shapes (all training).
    T("upwards", "((move 0d (/a 1a 4)) (move 1d 0a))",
      needToTrain=True)
    T("right angle", "((move (*d 1d 2) (/a 1a 4)) (move 1d 0a))",
      needToTrain=True)
    T("right angle epsilon", "((move epsilonLength (/a 1a 4)) (move epsilonLength 0a))",
      needToTrain=True)
    T("line segment", "(move 1d 0a)",
      needToTrain=True)
    T("square slanted by 2pi/3",
      """((move 0d (/a 1a 3))
          (loop k 4 (move 1d (/a 1a 4))))""",
      needToTrain=True)
    T("semicircle slanted by 2pi/5",
      """((move 0d (/a 1a 5))
          (loop i infinity
                (move (*d epsilonLength 4) epsilonAngle)))""",
      needToTrain=True)
    T("Greek spiral slanted by 2pi/6",
      """((move 0d (/a 1a 6))
          (loop i 7 (move (*l 1l i) (/a 1a 4))))""",
      needToTrain=True)
    T("Hook slanted by 2pi/7",
      """((move 0d (/a 1a 7))
          (move 1d 0a)
          (loop i infinity
                (move (*d epsilonLength 4) epsilonAngle)))""",
      needToTrain=True)
    T("""slanted line""",
      """((move 0d (/a 1a 8))
          (move (*d 1l 3) 0a))""",
      needToTrain=True)
    # Greek (rectangular) spirals and smooth spirals.
    for i in [6,7,8,9]:
        T("Greek spiral %d"%i,
          """
          (loop i %d
                (move (*l 1l i) (/a 1a 4)))
          """%i,
          needToTrain=i in [7,8])
    for i in [2,3,4,5]:
        T("smooth spiral %d"%i,
          """
          (loop i infinity
                (move (*d epsilonLength i) (*a epsilonAngle %d)))
          """%i,
          needToTrain=i in [3,5])
    T("smooth spiral 4 slanted by 2pi/2",
      """
      ((move 0d (/a 1a 2))
       (loop i infinity
             (move (*d epsilonLength i) (*a epsilonAngle 4))))
      """,
      needToTrain=True)
    # Stars.
    for i in [3,5,7,9]:
        T("star %d"%i,
          """
          (loop i %d (move (*d 1d 4) (-a (/a 1a 2) (/a (/a 1a 2) %s))))
          """%(i,i),
          needToTrain=i in [5,9])
    # Leaf shapes and flowers built from them.
    T("leaf iteration 1.1",
      """
      (loop i infinity (move epsilonDistance (/a epsilonAngle 2)))
      """,
      needToTrain=True)
    T("leaf iteration 1.2",
      """
      ((move 0d (/a 1a 2))
       (loop i infinity (move epsilonDistance (/a epsilonAngle 2))))
      """,
      needToTrain=True)
    T("leaf iteration 2.1",
      """
      (loop n 2
            (loop i infinity (move epsilonDistance (/a epsilonAngle 2)))
            (move 0d (/a 1a 4)))
      """,
      needToTrain=True)
    T("leaf iteration 2.2",
      """
      ((move 0d (/a 1a 2))
       (loop n 2
             (loop i infinity (move epsilonDistance (/a epsilonAngle 2)))
             (move 0d (/a 1a 4))))
      """,
      needToTrain=True)
    for n in range(3,8):
        T("flower %d"%n,
          """
          (loop j %d
                (loop n 2
                      (loop i infinity (move epsilonDistance (/a epsilonAngle 2)))
                      (move 0d (/a 1a 4)))
                (move 0d (/a 1a %d)))
          """%(n,n),
          needToTrain=n in range(3,5))
    # Staircases and zigzags.
    for n in [5,6]:
        T("staircase %d"%n,
          """
          (loop i %d
                (move 1d (/a 1a 4))
                (move 1d (/a 1a 4))
                (move 0d (/a 1a 2)))
          """%n,
          needToTrain=n in [5])
    for n in range(1,6):
        T("blocks zigzag %d"%n,
          """
          (loop i %d
                (move 1d (/a 1a 4)) (move 1d (/a 1a 4))
                (move 1d (+a (/a 1a 2) (/a 1a 4))) (move 1d (+a (/a 1a 2) (/a 1a 4))))
          """%n,
          needToTrain=n in [1,2,3])
    for n in [3,4]:#range(1,5):
        T("diagonal zigzag %d"%n,
          """
          ((move 0d (/a 1a 8))
           (loop i %d
                 (move 1d (/a 1a 4))
                 (move 1d (+a (/a 1a 2) (/a 1a 4)))))
          """%n,
          needToTrain=n == 4)
    # Semicircles and full circles at several radii.
    for n in [1,2,3,4,5,6]:
        T("right semicircle of size %d"%n,
          """
          (loop i infinity
                (move (*d epsilonLength %d) (-a 0a epsilonAngle)))
          """%n,
          needToTrain=n%2 == 0)
        T("left semicircle of size %d"%n,
          f"""
          ({'' if n != 1 else slant(8)}
           (loop i infinity
                 (move (*d epsilonLength {n}) epsilonAngle)))
          """,
          needToTrain=n%2 == 1)
        T("circle of size %d"%n,
          """
          ((loop i infinity
                 (move (*d epsilonLength %d) epsilonAngle))
           (loop i infinity
                 (move (*d epsilonLength %d) epsilonAngle)))
          """%(n,n),
          needToTrain=n in [1,4,3,5,6])
    # Concentric circles; radius grows with the loop variable j.
    for n in [5,6]:
        T("%d enclosed circles"%n,
          """
          (loop j %d
                (loop i infinity
                      (move (*d epsilonLength j) epsilonAngle))
                (loop i infinity
                      (move (*d epsilonLength j) epsilonAngle)))"""%n,
          needToTrain=n == 5)
    # Flowers made of full circles.
    for n,l in [(4,2),
                (5,3),
                (6,4),
                (3,1)]:
        T("%d-circle flower l=%d"%(n,l),
          """
          (loop j %d
                (move 0d (/a 1a %d))
                (embed (loop i infinity
                             (move (*d epsilonLength %d) epsilonAngle))
                       (loop i infinity
                             (move (*d epsilonLength %d) epsilonAngle))))"""%(n,n,l,l),
          needToTrain=(n,l) in [(6,4),(3,1)])
    # Alternating left/right semicircle chains.
    for n,l in [(3,1),(2,2),(1,3),
                (2,1),(1,2),(1,1)]:
        T("%d-semicircle sequence L=%d"%(n,l),
          """
          (loop j %d
                (loop i infinity
                      (move (*d epsilonLength %d) epsilonAngle))
                (loop i infinity
                      (move (*d epsilonLength %d) (-a 0a epsilonAngle))))
          """%(n,l,l),
          needToTrain=(n,l) in [(3,1),(2,2),(1,3)])
    # Rows of shapes separated by pen-up moves.
    for n,l in [(2,"1d"),
                (3,"1d")]:
        T("row of %d circles"%n,
          """
          (loop j %d
                (embed (loop k 2 (loop i infinity (move epsilonLength epsilonAngle))))
                (p (move %s 0a)))"""%(n,l),
          needToTrain=n == 2)
    for n,l in [(2,"1d"),
                (3,"1d")]:
        T("row of %d lines"%n,
          """
          (loop j %d
                (move 1d 0a)
                (p (move %s 0a)))"""%(n,l),
          needToTrain=n == 2)
    # Compositions of two shapes separated by a pen-up gap.
    T("line next to semicircle",
      """
      ((move 1d 0a) (p (move 1d 0a)) (loop i infinity (move epsilonLength epsilonAngle)))
      """,
      needToTrain=True)
    for n,l in [(3,"(/d 1d 2)"),
                (4,"(/d 1d 3)")]:
        T("%d dashed lines of size %s"%(n,l),
          """(loop i %d (p (move 1d 0a)) (move %s 0a))"""%(n,l),
          needToTrain=n == 3)
    T("broken circle",
      """
      ((loop i infinity (move epsilonLength epsilonAngle)) (p (move 1d 0a)) (loop i infinity (move epsilonLength epsilonAngle)))
      """,
      needToTrain=True)
    T("circle next to semicircle",
      """
      ((loop i infinity (move epsilonLength epsilonAngle))
       (loop i infinity (move epsilonLength epsilonAngle))
       (p (move 1d 0a))
       (loop i infinity (move epsilonLength epsilonAngle)))
      """,
      needToTrain=True)
    T("semicircle next to square",
      """
      ((loop i infinity (move epsilonLength epsilonAngle))
       (p (move 1d 0a))
       (loop i infinity (move 1d (/a 1a 4))))
      """,
      needToTrain=False)
    T("circle next to square",
      """
      ((loop i infinity (move epsilonLength epsilonAngle))
       (loop i infinity (move epsilonLength epsilonAngle))
       (p (move 1d 0a))
       (loop i infinity (move 1d (/a 1a 4))))
      """,
      needToTrain=False)
    T("circle next to line",
      """
      ((loop i infinity (move epsilonLength epsilonAngle))
       (loop i infinity (move epsilonLength epsilonAngle))
       (p (move 1d 0a))
       (move 1d 0a))
      """,
      needToTrain=True)
    T("line next to circle",
      """
      ((move 1d 0a)
       (p (move 1d 0a))
       (loop i infinity (move epsilonLength epsilonAngle))
       (loop i infinity (move epsilonLength epsilonAngle))
       (move 1d 0a))
      """,
      needToTrain=True)
    for n,l in [(4,"1d"),
                (5,"1d")]:
        T("row of %d dashes"%n,
          """
          (loop j %d
                (embed (move 0d (/a 1a 4)) (move 1d 0a))
                (p (move %s 0a)))"""%(n,l),
          needToTrain=n == 4)
    for n,l in [(5,"1d"),(6,"1d")]:
        T("row of %d semicircles"%n,
          """
          (loop j %d
                (embed (loop i infinity (move epsilonLength epsilonAngle)))
                (p (move %s 0a)))"""%(n,l),
          needToTrain=n == 5)
    # Snowflakes: a motif repeated with n-fold rotational symmetry.  The
    # train/test split below was chosen by hand (see hard-coded mustTrain
    # clauses); the seeded RNG kept the original random selection stable.
    with random_seed(42): # carefully selected for maximum entropy
        for n in [3,4,5,6,7]:
            body = {"empty": "(move 1d 0a)",
                    "spiral": "(loop i infinity (move (*d epsilonLength i) (*a epsilonAngle 2)))",
                    "dashed": "(p (move 1d 0a)) (move 1d 0a)",
                    "circle": "(move 1d 0a) (loop k 2 (loop i infinity (move epsilonLength epsilonAngle)))",
                    "lonely circle": "(p (move 1d 0a)) (loop k 2 (loop i infinity (move epsilonLength epsilonAngle)))",
                    "square dashed": "(p (move 1d 0a)) (loop s 4 (move 1d (/a 1a 4)))",
                    "square": "(move 1d 0a) (loop s 4 (move 1d (/a 1a 4)))",
                    "close large semicircle": "(loop i infinity (move (*d epsilonLength 2) epsilonAngle))",
                    "close semicircle": "(loop i infinity (move epsilonLength epsilonAngle))",
                    "semicircle": "(move 1d 0a) (loop i infinity (move epsilonLength epsilonAngle))",
                    "double dashed": "(p (move 1d 0a)) (move 1d 0a) (p (move 1d 0a)) (move 1d 0a)",
                    "Greek": "(loop i 3 (move (*l 1l i) (/a 1a 4)))"}
            for name in body:
                # Some (motif, n) combinations are excluded entirely.
                if name == "spiral" and n not in [3,5]: continue
                if name == "square" and n not in [5,3,6,7]: continue
                if name == "semicircle" and n not in [5,3,4,6]: continue
                if name == "Greek" and n not in [3,5]: continue
                if name == "double dashed" and n not in [6,4,3]: continue
                mustTrain = False
                mustTrain = mustTrain or (n == 3 and name == "Greek")
                mustTrain = mustTrain or (n == 7 and name == "empty")
                mustTrain = mustTrain or (n == 5 and name == "dashed")
                mustTrain = mustTrain or (n == 7 and name == "circle")
                mustTrain = mustTrain or (n == 6 and name == "circle")
                mustTrain = mustTrain or (n == 6 and name == "lonely circle")
                mustTrain = mustTrain or (n == 5 and name == "square")
                mustTrain = mustTrain or (n == 7 and name == "square")
                mustTrain = mustTrain or (n == 5 and name == "semicircle")
                mustTrain = mustTrain or (n == 3 and name == "square dashed")
                mustTrain = mustTrain or (n == 6 and name == "close semicircle")
                mustTrain = mustTrain or (n == 5 and name == "close large semicircle")
                mustTrain = mustTrain or (n == 3 and name == "spiral")
                mustTrain = mustTrain or (n == 6 and name == "double dashed")
                mustTrain = mustTrain or (n == 3 and name == "double dashed")
                #mustTrain = mustTrain or (n == 6 and name == "empty")
                #mustTrain = mustTrain or (random.random() < 0.07) # calibrated to give 70 training tasks
                # # cap number of super easy snowflakes
                # if name == "empty" and n not in [7]: mustTrain = False
                # if name == "dashed" and n not in [4]: mustTrain = False
                T("%d-%s snowflake"%(n,name),
                  """
                  (loop j %d
                        (embed %s)
                        (move 0d (/a 1a %d)))"""%(n,body[name],n),
                  needToTrain=mustTrain)
    # Rows/grids of squares.
    for n in [3,4]:#2,3,4]:
        T("%d-row of squares"%n,
          """
          (loop i %d
                (embed (loop k 4 (move 1d (/a 1a 4))))
                (move 1d 0a))
          """%n,
          needToTrain=n == 4)
    T("2x2 grid",
      """
      (for x 2 (embed (for y 2
                           (embed (loop k 4 (move 1d (/a 1a 4))))
                           (move 1d 0a)))
               (move 0d (/a 1a 4)) (move 1d (-a 0a (/a 1a 4))))
      """)
    T("slanted squares",
      """
      ((embed (loop k 4 (move 1d (/a 1a 4))))
       (move 0d (/a 1a 8))
       (loop k 4 (move 1d (/a 1a 4))))
      """)
    # Squares of increasing size, and concentric squares.
    for l in range(1,6):
        T("square of size %d"%l,
          """
          (for i 4
               (move (*d 1d %d) (/a 1a 4)))
          """%l,
          needToTrain=l in range(4))
    for n in [5,7]:
        T("%d-concentric squares"%n,
          """
          (for i %d
               (embed (loop j 4 (move (*d 1d i) (/a 1a 4)))))
          """%n,
          needToTrain=n == 5)
    return tasks
def montageTasks(tasks, prefix="", columns=None, testTrain=False):
    """Render every task's 128x128 image into montage PNGs under /tmp.

    Writes /tmp/<prefix>montage.png (ordered) and
    /tmp/<prefix>randomMontage.png (shuffled).  With ``testTrain=True``
    the ordered montage places all training tasks before all testing
    tasks, and the "random" montage only shuffles within those groups.

    NOTE(review): scipy.misc.imsave was removed in SciPy >= 1.2, so this
    requires a legacy SciPy (with Pillow) to run.
    """
    import numpy as np
    side = 128
    flat = [t.highresolution for t in tasks]
    for pixels in flat:
        assert len(pixels) == side*side
    if testTrain:
        # Stable partition: training tasks first, then testing tasks.
        training = [p for p, t in zip(flat, tasks) if t.mustTrain]
        testing = [p for p, t in zip(flat, tasks) if not t.mustTrain]
        flat = training + testing
    images = [np.array(pixels).reshape((side, side)) for pixels in flat]
    import scipy.misc
    scipy.misc.imsave('/tmp/%smontage.png'%prefix, montage(images, columns=columns))
    if testTrain:
        numberOfTraining = sum(t.mustTrain for t in tasks)
        trainingImages = images[:numberOfTraining]
        testingImages = images[numberOfTraining:]
        random.shuffle(trainingImages)
        random.shuffle(testingImages)
        images = trainingImages + testingImages
    else:
        random.shuffle(images)
    scipy.misc.imsave('/tmp/%srandomMontage.png'%prefix, montage(images, columns=columns))
def demoLogoTasks():
    """Debug/visualization entry point.

    Samples dreams from the uniform grammar and renders them into
    /tmp/dreams_0, then builds the task set (optionally filtered by
    command-line arguments), renders every task (and the DSL demos) as
    PNGs under /tmp, and produces several montages.
    """
    import scipy.misc
    import numpy as np

    # Sample N programs from the uniform continuation-typed grammar and
    # write each dream's source plus a pretty 512x512 rendering.
    g0 = Grammar.uniform(primitives, continuationType=turtle)
    eprint("dreaming into /tmp/dreams_0...")
    N = 1000
    programs = [ p
                 for _ in range(N)
                 for p in [g0.sample(arrow(turtle,turtle),
                                     maximumDepth=50)]
                 if p is not None]
    os.system("mkdir -p /tmp/dreams_0")
    for n,p in enumerate(programs):
        with open(f"/tmp/dreams_0/{n}.dream","w") as handle:
            handle.write(str(p))
    drawLogo(*programs, pretty=True, smoothPretty=False,
             resolution=512,
             filenames=[f"/tmp/dreams_0/{n}_pretty.png"
                        for n in range(len(programs)) ],
             timeout=1)

    # Build the task set; extra argv entries select task subfolders.
    if len(sys.argv) > 1:
        tasks = makeTasks(sys.argv[1:],proto=False)
    else:
        tasks = makeTasks(['all'],proto=False)
    montageTasks(tasks,columns=16,testTrain=True)
    for n,t in enumerate(tasks):
        a = t.highresolution
        # highresolution is a flat square image; recover its side length.
        w = int(len(a)**0.5)
        scipy.misc.imsave('/tmp/logo%d.png'%n, np.array([a[i:i+w]
                                                         for i in range(0,len(a),w) ]))
        # Sanitize the task name into something filesystem-safe.
        logo_safe_name = t.name.replace("=","_").replace(' ','_').replace('/','_').replace("-","_") + ".png"
        #os.system(f"convert /tmp/logo{n}.png -morphology Dilate Octagon /tmp/{logo_safe_name}")
        os.system(f"convert /tmp/logo{n}.png -channel RGB -negate /tmp/{logo_safe_name}")
    eprint(len(tasks),"tasks")
    eprint(sum(t.mustTrain for t in tasks),"need to be trained on")
    # Render the DSL demo tasks with a dilated variant for visibility.
    for t in dSLDemo():
        a = t.highresolution
        w = int(len(a)**0.5)
        scipy.misc.imsave('/tmp/logoDemo%s.png'%t.name, np.array([a[i:i+w]
                                                                  for i in range(0,len(a),w) ]))
        os.system(f"convert /tmp/logoDemo{t.name}.png -morphology Dilate Octagon /tmp/logoDemo{t.name}_dilated.png")

    # Smaller montages: a random subset of training tasks, plus the
    # rotational-symmetry demo.
    tasks = [t for t in tasks if t.mustTrain ]
    random.shuffle(tasks)
    montageTasks(tasks[:16*3],"subset",columns=16)
    montageTasks(rotationalSymmetryDemo(),"rotational")
| StarcoderdataPython |
1678801 | <reponame>compressore/metabolism-of-cities-platform<filename>src/core/migrations/0018_auto_20210107_1533.py
# Generated by Django 3.1.2 on 2021-01-07 15:33
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration (2021-01-07).

    Adds the LocalBusinessDependency, NaceCode and LocalBusinessLink
    models, and gives Organization an ``updated_at`` timestamp plus an
    optional NACE classification code.  Auto-generated: do not hand-edit
    field definitions.
    """

    dependencies = [
        ('core', '0017_auto_20201222_0711'),
    ]

    operations = [
        migrations.CreateModel(
            name='LocalBusinessDependency',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
            options={
                'verbose_name_plural': 'Local Business Dependencies',
                'ordering': ['id'],
            },
        ),
        migrations.CreateModel(
            name='NaceCode',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
            options={
                'ordering': ['id'],
            },
        ),
        migrations.AddField(
            model_name='organization',
            name='updated_at',
            field=models.DateField(auto_now=True, null=True),
        ),
        # Junction model linking a business to an organization through a
        # named dependency type.
        migrations.CreateModel(
            name='LocalBusinessLink',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('business', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.organization')),
                ('dependence', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.localbusinessdependency')),
                ('organization', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='organization', to='core.organization')),
            ],
        ),
        migrations.AddField(
            model_name='organization',
            name='nace_code',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.nacecode'),
        ),
    ]
| StarcoderdataPython |
45199 | #!_PYTHONLOC
#
# (C) COPYRIGHT 2014-2021 Ahasuerus
# ALL RIGHTS RESERVED
#
# The copyright notice above does not evidence any actual or
# intended publication of such source code.
#
# Version: $Revision$
# Date: $Date$
from isfdb import *
from common import *
from isfdblib import *
from SQLparsing import *
from library import *
def PrintTableHeaders():
    """Emit the opening table tag and the four-column header row."""
    print('<table class="generic_table">')
    print('<tr class="generic_table_header">')
    headers = ('#', 'Publication', 'Suspect URL', 'Click Once Resolved')
    for header in headers:
        print('<th>%s</th>' % header)
    print('</tr>')
def PrintPubRecord(count, pub_id, url, pub_title, bgcolor):
    """Print one report row for a publication with a suspect image URL.

    ``bgcolor`` toggles between the two alternating row styles so the
    table rows are striped.
    """
    row_class = "table1" if bgcolor else "table2"
    print('<tr align=left class="%s">' % row_class)
    print('<td>%d</td>' % (count))
    print('<td>%s</td>' % ISFDBLink('pl.cgi', pub_id, pub_title))
    print('<td>%s</td>' % (url))
    print('<td>%s</td>' % ISFDBLink('mod/resolve_bad_url.cgi', pub_id, 'Click Once Resolved'))
    print('</tr>')
if __name__ == '__main__':
    # Moderator report: list publications whose cover image URL has been
    # flagged as suspect (rows in the bad_images table).
    PrintPreMod('Publications with Suspect Images')
    PrintNavBar()
    # Join bad_images against pubs to get a human-readable title per row.
    query = """select bad_images.pub_id, bad_images.image_url, pubs.pub_title
               from bad_images, pubs
               where pubs.pub_id=bad_images.pub_id
               order by pubs.pub_title"""
    db.query(query)
    result = db.store_result()
    num = result.num_rows()
    if num:
        PrintTableHeaders()
        record = result.fetch_row()
        # bgcolor alternates 1/0 to stripe the table rows.
        bgcolor = 1
        count = 1
        while record:
            pub_id = record[0][0]
            url = record[0][1]
            pub_title = record[0][2]
            PrintPubRecord(count, pub_id, url, pub_title, bgcolor)
            record = result.fetch_row()
            bgcolor ^= 1
            count += 1
        print '</table>'
    else:
        print '<h2>No publications with bad images found</h2>'
    PrintPostMod(0)
| StarcoderdataPython |
1687722 | <reponame>arthurlewisbrown/altcoin_max_price_prediction<gh_stars>1-10
import shutil
import pandas as pd
import traceback
import datetime
import os
from lib.model import model_info, predict_simulation
from lib import mysql_helper
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras import callbacks
"""
###################################################################################
###################################################################################
### if you want to execute this in terminal, you should put this command first. ###
export PYTHONPATH=..
###################################################################################
###################################################################################
Main purpose of this class is training the NN model.
"""
class Trainer():
    """Trains the price-prediction neural network on pre-chunked ticker CSVs."""
    # Folder holding the raw per-market ticker CSVs, and the derived
    # pre-chunked training files (0.csv, 1.csv, ...).
    ticker_data_folder = './tools/ticker_data'
    training_data_folder = f'{ticker_data_folder}_training/'
    model_title_name = "TrainerA"
    # Feature columns used as-is vs. expanded into per-step history
    # columns ("<name>_old_1" .. "<name>_old_<history_size>").
    input_without_split = ['last', 'usdt_btc_last']
    input_with_split = ['last', 'usdt_btc_last']
    # Columns always excluded from training.
    # NOTE(review): 'high' appears twice in this list -- harmless because
    # it is only used in a set difference, but probably unintended.
    input_ban = ['low', 'high', 'ask', 'bid', 'volume', 'base_volume', 'open_sell_orders', 'market_name', 'high',
                 'open_buy_orders', 'time_stamp', 'prev_day', 'created']
def __init__(self,
lr,
dense,
deep,
dropout,
batchsize,
epoch,
testing=False,
output_name=["last_max","last_max_index"],
history_size=180,
model_version ="a1",
model_group ="split",
input_group ="a1",
activation= 'relu',
last_activation='sigmoid',
prediction_result=False
):
if history_size is not None:
self.history_size=history_size
self.output_name = output_name
self.output_columns_count = len(self.output_name)
self.oneHotColumns = {}
self.output_name_prediction = [
x + "_prediction" for x in self.output_name
]
self.start_datetime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
self.column_standard_values = {}
self.input_column = []
self.min_standard = []
self.max_standard = []
self.input_all = []
self.input_training = []
self.input_without_training = []
self.model = None
self.lr = lr
self.dense = dense
self.deep = deep
self.dropout = dropout
self.batchsize = batchsize
self.epoch = epoch
self.activation = activation
self.last_activation = last_activation
self.input_group = input_group
self.model_group = model_group
self.model_version = model_version
self.testing = testing
self.prediction_result = prediction_result
output_name_str = ""
for output_name_each in output_name:
output_name_str = output_name_str + output_name_each
self.model_name = f"{self.model_title_name}_{model_version}_{lr}_{dense}_{deep}_{dropout}_{batchsize}_{epoch}_{activation}_{last_activation}_{history_size}_{input_group}_{output_name_str}"
self.root_folder_name = f"./tools/result"
self.root_folder_name_output = f"{self.root_folder_name}/{self.model_title_name}_{model_version}/"
self.output_folder_name = f"{self.root_folder_name_output}{self.model_name}/"
if testing:
if os.path.exists(self.output_folder_name):
shutil.rmtree(self.output_folder_name)
self.input_all = self.input_all + self.input_without_split
for column_raw in self.input_with_split:
for num in range(1, self.history_size+1):
self.input_all.append(column_raw + "_old_" + str(num))
self.input_training = list(set(self.input_all) - set(self.input_ban))
self.input_training.sort()
    def start(self):
        """Run the full training pipeline.

        Registers the model, then iterates over the pre-chunked training
        CSVs (0.csv, 1.csv, ...) until one is missing, continuing to fit
        the same model on each chunk.  Afterwards the model and its
        metrics are saved, and (optionally) validation predictions are
        produced.  Any failure is recorded as an 'error' row containing
        the traceback.
        """
        try:
            print("start : ", self.start_datetime)
            print("start : ", self.model_name)
            self.session = mysql_helper.MysqlHelper.session
            self.create_folders()
            self.register_model_info()
            offset = 0
            try:
                while True:
                    data = pd.read_csv(self.training_data_folder + f"{str(offset)}.csv")
                    x, y = self.load(data)
                    training_x = x[self.input_training]
                    self.model, training_result, training_history = self.train(training_x, y, model=self.model)
                    if self.testing:
                        # Test runs only consume the first chunk.
                        break
                    offset += 1
            except FileNotFoundError:
                # Ran out of training chunks -- the normal loop exit.
                pass
            self.save_model(self.model, training_result, training_history)
            if self.prediction_result:
                self.predict_for_validation(self.model)
            return
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit; consider `except Exception`.
            print(traceback.format_exc())
            # Record the failure with the full hyperparameter context.
            modelResult = {
                'status': 'error',
                'model_group': self.model_group,
                'comment': str(traceback.format_exc()),
                'learning_rate': self.lr,
                'dense': self.dense,
                'deep': self.deep,
                'dropout': self.dropout,
                'batchsize': self.batchsize,
                'epoch': self.epoch,
                'activation': self.activation,
                'last_activation': self.last_activation,
                'input_group': self.input_group,
                'output_name': str(self.output_name),
                'input_without_split': str(self.input_without_split),
                'input_with_split': str(self.input_with_split),
                'input_ban': str(self.input_ban),
                'model_name': self.model_name,
                'history_size' : self.history_size,
                'updated_at': self.start_datetime,
                'model_version' : self.model_version,
                'training_accuracy' : None,
                'validation_accuracy' : None,
                'training_loss' : None,
                'validation_loss' : None,
                'training_accuracy_last_5' : None,
                'validation_accuracy_last_5' : None,
                'training_loss_last_5' : None,
                'validation_loss_last_5' : None,
                'last_max_similar_acc' : None,
                'last_max_index_similar_acc' : None,
            }
            self.save_model_info(modelResult)
            pass
def create_folders(self):
if not os.path.exists(self.root_folder_name):
os.makedirs(self.root_folder_name)
if not os.path.exists(self.root_folder_name_output):
os.makedirs(self.root_folder_name_output)
if os.path.exists(self.output_folder_name):
self.model = self.load_model()
else:
os.makedirs(self.output_folder_name)
def register_model_info(self):
model_info = {
'id': None,
'status': 'submitted',
'model_group': self.model_group,
'comment': None,
'learning_rate': self.lr,
'dense': self.dense,
'deep': self.deep,
'dropout': self.dropout,
'batchsize': self.batchsize,
'epoch': self.epoch,
'activation': self.activation,
'last_activation': self.last_activation,
'input_group': self.input_group,
'output_name': str(self.output_name),
'input_without_split': str(self.input_without_split),
'input_with_split': str(self.input_with_split),
'input_ban': str(self.input_ban),
'model_name': self.model_name,
'history_size': self.history_size,
'updated_at': self.start_datetime,
'model_version': self.model_version,
'training_accuracy': None,
'validation_accuracy': None,
'training_loss': None,
'validation_loss': None,
'training_accuracy_last_5': None,
'validation_accuracy_last_5': None,
'training_loss_last_5': None,
'validation_loss_last_5': None,
'last_max_similar_acc': None,
'last_max_index_similar_acc': None,
}
self.save_model_info(model_info)
    def get_training_x(self, data):
        # Expand each comma-separated history column ("old_<name>") into
        # additional columns concatenated onto the frame.
        # NOTE(review): a pandas Series has no .split method -- this only
        # works if data["old_..."] yields a plain string (e.g. a
        # single-row lookup); otherwise it would need .str.split(",").
        # Verify against the callers before relying on this helper.
        for columnRaw in self.input_with_split:
            oldData = data["old_" + columnRaw].split(",")
            data = pd.concat([data, oldData], axis=1)
        return data
def load(self, allData):
x = allData.iloc[:,:-2]
y = allData[self.output_name]
return x, y
    def train(self, X, y, model=None):
        """Fit (or continue fitting) the feed-forward network on one chunk.

        When ``model`` is None a fresh architecture is built from the
        configured hyperparameters; otherwise training continues on the
        passed model.  Returns (model, training_result, history), where
        training_result concatenates inputs, targets and per-row
        predictions column-wise.

        NOTE(review): nb_epoch and predict_proba are Keras 1.x-era APIs;
        modern Keras uses epochs= and predict().
        """
        if not model:
            # First chunk: build input layer, `deep` hidden blocks
            # (Dense -> Activation -> Dropout), then the output layer.
            model = Sequential()
            model.add(Dense(self.dense, input_dim=X.shape[1]))
            model.add(Activation(self.activation))
            for _ in range(0, self.deep):
                model.add(Dense(self.dense))
                model.add(Activation(self.activation))
                model.add(Dropout(self.dropout))
            model.add(Dense(self.output_columns_count))
            model.add(Activation(self.last_activation))
            sgd = SGD(lr=self.lr)
            model.compile(
                loss='binary_crossentropy',
                optimizer=sgd,
                metrics=['accuracy'])
        # Log training curves to TensorBoard under the run's folder.
        tb_call_back = callbacks.TensorBoard(log_dir=self.output_folder_name + 'graph',
                                             write_graph=True,
                                             write_grads=True,
                                             )
        training_history = model.fit(
            X.values,
            y.values,
            batch_size=self.batchsize,
            nb_epoch=self.epoch,
            validation_split=0.2,
            callbacks=[tb_call_back])
        predict_y = pd.DataFrame(
            model.predict_proba(X.values), columns=self.output_name_prediction)
        training_result = pd.concat(
            (X, y, predict_y), axis=1)
        return model, training_result, training_history
    def predict_for_validation(self, model):
        """Run the model over a fixed validation ticker and save the result.

        Reads the BTC-1ST market CSV, predicts with the trained model,
        and saves a frame containing the original inputs, targets and
        per-column predictions.  Returns the combined frame.
        """
        data = pd.read_csv(self.ticker_data_folder + "/BTC-1ST.csv")
        predict_x_origin, y = self.load(data)
        one_hot_predict_x = predict_x_origin[self.input_training]
        prediction_raw = model.predict(one_hot_predict_x.values)
        # Keep original inputs and targets alongside the predictions so
        # the saved file is self-describing.
        prediction = pd.concat(
            [
                predict_x_origin,
                y,
                pd.DataFrame(
                    prediction_raw, columns=self.output_name_prediction)
            ],
            axis=1)
        self.save_prediction(prediction)
        return prediction
def load_model(self):
from keras.models import load_model
model = load_model(self.output_folder_name + 'model.h5')
model.load_weights(self.output_folder_name + 'model_weights.h5')
return model
    def save_model(self, model, training_result, training_history):
        """Persist the model, its weights, and a summary row of metrics.

        Saves the model/weights to ``self.output_folder_name``, derives
        last-epoch and last-5-epoch metric averages from *training_history*,
        computes similarity accuracies over *training_result*, and writes
        everything via ``save_model_info``.
        """
        model.save(self.output_folder_name + 'model.h5')
        model.save_weights(self.output_folder_name + 'model_weights.h5', overwrite=True)
        training_history_dict = training_history.history
        # 'val_acc'/'acc' are legacy Keras metric keys; missing keys fall
        # back to [None] — NOTE(review): round(None, 4) below would then
        # raise, so these keys are effectively assumed present.
        val_loss = training_history_dict.get("val_loss", [None])
        val_acc = training_history_dict.get("val_acc", [None])
        acc = training_history_dict.get("acc", [None])
        loss = training_history_dict.get("loss", [None])
        # Average of the final 5 epochs when available, else last value.
        val_loss_avg = sum(val_loss[-5:]) / 5 if len(val_loss) > 4 else val_loss[-1]
        val_acc_avg = sum(val_acc[-5:]) / 5 if len(val_acc) > 4 else val_acc[-1]
        acc_avg = sum(acc[-5:]) / 5 if len(acc) > 4 else acc[-1]
        loss_avg = sum(loss[-5:]) / 5 if len(loss) > 4 else loss[-1]
        model_info = {
            'status': 'finished',
            'model_group': self.model_group,
            'comment': None,
            'learning_rate': self.lr,
            'dense': self.dense,
            'deep': self.deep,
            'dropout': self.dropout,
            'batchsize': self.batchsize,
            'epoch': self.epoch,
            'activation': self.activation,
            'last_activation': self.last_activation,
            'input_group': self.input_group,
            'output_name': str(self.output_name),
            'input_without_split': str(self.input_without_split),
            'input_with_split': str(self.input_with_split),
            'input_ban': str(self.input_ban),
            'model_name': self.model_name,
            'history_size' : self.history_size,
            'updated_at': self.start_datetime,
            'training_accuracy': float(round(acc[-1], 4)),
            'validation_accuracy': float(round(val_acc[-1], 4)),
            'training_loss': float(round(loss[-1], 4)),
            'validation_loss': float(round(val_loss[-1], 4)),
            'training_accuracy_last_5': float(round(acc_avg, 4)),
            'validation_accuracy_last_5': float(round(val_acc_avg, 4)),
            'training_loss_last_5': float(round(loss_avg, 4)),
            'validation_loss_last_5': float(round(val_loss_avg, 4)),
            'model_version': self.model_version,
        }
        # The "not similar" counters start at 1 (not 0) to avoid a division
        # by zero below; this slightly biases the ratios downward.
        last_max_index_prediction_similar_count = 0
        last_max_index_prediction_not_similar_count = 1
        last_max_prediction_similar_count = 0
        last_max_prediction_not_similar_count = 1
        for index, row in training_result.iterrows():
            # A prediction counts as "similar" when it lands within a fixed
            # absolute tolerance of the target (0.01 resp. 0.001).
            if "last_max" in self.output_name:
                if abs(row["last_max_prediction"] - row["last_max"]) < 0.01:
                    last_max_prediction_similar_count += 1
                else:
                    last_max_prediction_not_similar_count += 1
            if "last_max_index" in self.output_name:
                if abs(row["last_max_index_prediction"] - row["last_max_index"]) < 0.001:
                    last_max_index_prediction_similar_count += 1
                else:
                    last_max_index_prediction_not_similar_count += 1
        model_info['last_max_similar_acc'] = round(last_max_prediction_similar_count / (last_max_prediction_similar_count + last_max_prediction_not_similar_count), 4)
        model_info['last_max_index_similar_acc'] = round(last_max_index_prediction_similar_count / (last_max_index_prediction_similar_count + last_max_index_prediction_not_similar_count), 4)
        self.save_model_info(model_info)
def save_model_info(self, data):
try:
ad = model_info(**data)
self.session.add(ad)
self.session.commit()
except:
print(traceback.format_exc())
pass
    def save_prediction(self, predictions):
        """Persist each prediction row as a ``predict_simulation`` record.

        For every row, keeps only the columns that exist on the
        ``predict_simulation`` table (minus 'id'), derives the 0/1
        "prediction similar" flags from fixed absolute tolerances, and
        commits one record per row.
        """
        for index, prediction in predictions.iterrows():
            # Select only the table's columns; set() makes the column order
            # non-deterministic, which is fine for keyword construction below.
            data = prediction[list(set(predict_simulation.__table__.columns.keys()) - set(['id']))]
            if "last_max" in self.output_name:
                if abs(prediction["last_max_prediction"] - prediction["last_max"]) < 0.01:
                    data["last_max_prediction_similar"] = 1
                else:
                    data["last_max_prediction_similar"] = 0
            else:
                # Output not trained: blank out the related fields.
                data["last_max_prediction_similar"] = None
                data["last_max_prediction"] = None
                data["last_max"] = None
            if "last_max_index" in self.output_name:
                if abs(prediction["last_max_index_prediction"] - prediction["last_max_index"]) < 0.001:
                    data["last_max_index_prediction_similar"] = 1
                else:
                    data["last_max_index_prediction_similar"] = 0
            else:
                data["last_max_index_prediction_similar"] = None
                data["last_max_index_prediction"] = None
                data["last_max_index"] = None
            data['model_name'] = self.model_name
            try:
                ad = predict_simulation(**data)
                self.session.add(ad)
                self.session.commit()
            # NOTE(review): bare except swallows everything and leaves the
            # session un-rolled-back after a failed commit — consider
            # `except Exception:` plus self.session.rollback().
            except:
                print(traceback.format_exc())
                pass
if __name__ == "__main__":
    import sys
    # CLI layout (14 positional args):
    #   lr dense deep dropout batchsize epoch activation last_activation
    #   history_size model_version input_group model_group predict_flag output_names
    args = sys.argv[1:]
    if len(args) > 1:
        lr, dense, deep, dropout, batchsize, epoch, activation, last_activation, history_size, model_version, inputGroup, model_group, prediction_result_raw, output_name_raw = args
        # Any flag value containing "predict" enables prediction after training.
        if "predict" in prediction_result_raw:
            prediction_result = True
        else:
            prediction_result = False
        print("output_name_raw : ", output_name_raw)
        output_name = []
        # "index" selects last_max_index, "last_max" selects last_max
        # ("last_max_index" contains "last_max", so it enables both);
        # neither keyword present defaults to both outputs.
        if "index" in output_name_raw:
            output_name.append("last_max_index")
        if "last_max" in output_name_raw:
            output_name.append("last_max")
        if len(output_name) == 0:
            output_name = ["last_max_index","last_max"]
        # NOTE(review): inputGroup is parsed but never forwarded to Trainer —
        # confirm whether it should be passed through.
        Trainer(float(lr), int(dense), int(deep), float(dropout), int(batchsize), int(epoch),
                model_version=model_version,
                model_group=model_group, output_name=output_name, history_size=int(history_size),
                activation=activation, last_activation=last_activation, prediction_result=prediction_result).start()
        # Trainer(0.02, 50, 10, 0.2, 32, 300, testing=True).start()
| StarcoderdataPython |
3249021 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
__author__ = 'andyguo'
from dayu_file_format.curve.data_structure import Point2D
import pytest
class TestPoint2D(object):
    """Unit tests for the Point2D value type.

    Covers construction/float coercion, equality, binary and in-place
    arithmetic operators (with scalar and Point2D operands), unary negation,
    dot product, Euclidean length, normalisation and list conversion.
    Invalid operand types must raise TypeError; division by zero (scalar or
    any zero component) must raise ZeroDivisionError.
    """

    def test___init__(self):
        # Coordinates are coerced to float regardless of the input type.
        p = Point2D(0, 1)
        assert p.x == 0
        assert p.y == 1
        assert type(p.x) is float
        assert type(p.y) is float
        p = Point2D(0.0, 0.0)
        assert p.x == 0
        assert p.y == 0
        assert type(p.x) is float
        assert type(p.y) is float
        # Non-numeric coordinates are rejected.
        with pytest.raises(ValueError) as e:
            Point2D('12', 12)

    def test___eq__(self):
        assert Point2D(100, 100) == Point2D(100, 100)
        assert Point2D(100, 100) == Point2D(100.0, 100.0)
        assert Point2D(100.0, 100.9) != Point2D(100.0, 100.0)
        # Comparison with non-Point2D values is never equal.
        assert Point2D(0,0) != [0,0]

    def test___add__(self):
        assert Point2D(0, 0) + Point2D(1, 2) == Point2D(1, 2)
        assert Point2D(1, 1) + 2 == Point2D(3, 3)
        assert Point2D(1, 1) + 2.0 == Point2D(3, 3)
        assert Point2D(1, 1) + (-2.0) == Point2D(-1, -1)
        with pytest.raises(TypeError) as e:
            Point2D(1, 1) + [1, 2]

    def test___iadd__(self):
        p = Point2D(0, 0)
        p += 1
        assert p == Point2D(1, 1)
        p += Point2D(2, 3)
        assert p == Point2D(3, 4)
        p += (-4.0)
        assert p == Point2D(-1, 0)
        with pytest.raises(TypeError) as e:
            p += [1, 2]

    def test___sub__(self):
        assert Point2D(0, 0) - Point2D(1, 2) == Point2D(-1, -2)
        assert Point2D(1, 1) - 2 == Point2D(-1, -1)
        assert Point2D(1, 1) - 2.0 == Point2D(-1, -1)
        assert Point2D(1, 1) - (-2.0) == Point2D(3, 3)
        with pytest.raises(TypeError) as e:
            Point2D(1, 1) - [1, 2]

    def test___isub__(self):
        p = Point2D(0, 0)
        p -= 1
        assert p == Point2D(-1, -1)
        p -= Point2D(2, 3)
        assert p == Point2D(-3, -4)
        p -= (-4.0)
        assert p == Point2D(1, 0)
        with pytest.raises(TypeError) as e:
            p -= [1, 2]

    def test___neg__(self):
        assert -Point2D(1, 1) == Point2D(-1, -1)
        assert -Point2D(-3, 4) == Point2D(3, -4)
        assert -Point2D(0, 0) == Point2D(0, 0)

    def test___mul__(self):
        # Multiplication by a Point2D is component-wise (Hadamard).
        assert Point2D(0, 0) * Point2D(1, 2) == Point2D(0, 0)
        assert Point2D(1, 1) * 2 == Point2D(2, 2)
        assert Point2D(1, 1) * 2.0 == Point2D(2, 2)
        assert Point2D(1, 1) * (-2.0) == Point2D(-2, -2)
        with pytest.raises(TypeError) as e:
            Point2D(1, 1) * [1, 2]

    def test___imul__(self):
        p = Point2D(1, 2)
        p *= 1
        assert p == Point2D(1, 2)
        p *= Point2D(2, 3)
        assert p == Point2D(2, 6)
        p *= (-4.0)
        assert p == Point2D(-8, -24)
        with pytest.raises(TypeError) as e:
            p *= [1, 2]

    def test___div__(self):
        # Division is component-wise; any zero divisor component raises.
        assert Point2D(0, 0) / Point2D(1, 2) == Point2D(0, 0)
        assert Point2D(1, 1) / 2 == Point2D(0.5, 0.5)
        assert Point2D(1, 1) / 2.0 == Point2D(0.5, 0.5)
        assert Point2D(1, 1) / (-2.0) == Point2D(-0.5, -0.5)
        with pytest.raises(TypeError) as e:
            Point2D(1, 1) / [1, 2]
        with pytest.raises(ZeroDivisionError) as e:
            Point2D(100, 24) / 0
        with pytest.raises(ZeroDivisionError) as e:
            Point2D(100, 24) / Point2D(0, 2)
        with pytest.raises(ZeroDivisionError) as e:
            Point2D(100, 24) / Point2D(2, 0)
        with pytest.raises(ZeroDivisionError) as e:
            Point2D(100, 24) / Point2D(0, 0)

    def test___idiv__(self):
        p = Point2D(1, 2)
        p /= 1
        assert p == Point2D(1, 2)
        p /= Point2D(2, 4)
        assert p == Point2D(0.5, 0.5)
        p /= (-0.25)
        assert p == Point2D(-2, -2)
        with pytest.raises(TypeError) as e:
            p /= [1, 2]
        with pytest.raises(ZeroDivisionError) as e:
            p /= 0
        with pytest.raises(ZeroDivisionError) as e:
            p /= Point2D(0, 2)
        with pytest.raises(ZeroDivisionError) as e:
            p /= Point2D(2, 0)
        with pytest.raises(ZeroDivisionError) as e:
            p /= Point2D(0, 0)

    def test_dot(self):
        assert Point2D(0, 0).dot(Point2D(1, 2)) == 0
        assert Point2D(0, 0).dot(Point2D(0, 0)) == 0
        assert Point2D(1, 3).dot(Point2D(1, 2)) == 7
        assert Point2D(-2, -3).dot(Point2D(1, 2)) == -8
        assert Point2D(-2, -3).dot(Point2D(-1, -2)) == 8
        assert Point2D(-2, 3).dot(Point2D(1, -2)) == -8
        with pytest.raises(TypeError) as e:
            Point2D(1, 2).dot([1, 2])

    def test_length(self):
        # Euclidean norm; sign of components must not matter.
        assert Point2D(0, 0).length == 0
        assert Point2D(1, 2).length == 5 ** 0.5
        assert Point2D(3, 4).length == 5
        assert Point2D(-3, 4).length == 5
        assert Point2D(-3, -4).length == 5

    def test_normalize(self):
        assert Point2D(1, 0).normalize() == Point2D(1, 0)
        assert Point2D(1, 1).normalize() == Point2D(1 / 2 ** 0.5, 1 / 2 ** 0.5)
        assert Point2D(-1, 1).normalize() == Point2D(-1 / 2 ** 0.5, 1 / 2 ** 0.5)
        assert Point2D(-1, -1).normalize() == Point2D(-1 / 2 ** 0.5, -1 / 2 ** 0.5)

    def test_to_list(self):
        assert Point2D(0, 0).to_list() == [0, 0]
        assert Point2D(1, 1).to_list() == [1, 1]
        assert Point2D(-1, 1).to_list() == [-1, 1]
| StarcoderdataPython |
1667743 | <gh_stars>10-100
import torch
from torch.nn import Parameter, ParameterList
import tntorch as tn
class LinearNorm(torch.nn.Module):
    """Fully-connected layer with Xavier-uniform weight initialisation."""

    def __init__(self, in_dim, out_dim, bias=True, w_init_gain='linear'):
        super(LinearNorm, self).__init__()
        linear = torch.nn.Linear(in_dim, out_dim, bias=bias)
        gain = torch.nn.init.calculate_gain(w_init_gain)
        torch.nn.init.xavier_uniform_(linear.weight, gain=gain)
        self.linear_layer = linear

    def forward(self, x):
        # Delegate straight to the wrapped nn.Linear.
        return self.linear_layer(x)
class ConvNorm(torch.nn.Module):
    """1-D convolution with 'same'-style default padding and Xavier init."""

    def __init__(self, in_channels, out_channels, kernel_size=1, stride=1,
                 padding=None, dilation=1, bias=True, w_init_gain='linear'):
        super(ConvNorm, self).__init__()
        if padding is None:
            # Auto-padding preserves sequence length for odd kernel sizes.
            assert(kernel_size % 2 == 1)
            padding = int(dilation * (kernel_size - 1) / 2)
        conv = torch.nn.Conv1d(in_channels, out_channels,
                               kernel_size=kernel_size, stride=stride,
                               padding=padding, dilation=dilation,
                               bias=bias)
        gain = torch.nn.init.calculate_gain(w_init_gain)
        torch.nn.init.xavier_uniform_(conv.weight, gain=gain)
        self.conv = conv

    def forward(self, signal):
        return self.conv(signal)
class TruncatedSVDLinear(torch.nn.Module):
    """Low-rank replacement for a trained ``nn.Linear`` layer.

    The weight matrix is factorised with an SVD and truncated to the
    smallest rank whose cumulative singular-value mass reaches
    ``explained_variance``, reducing parameters from O(in*out) to
    O(rank*(in+out)).
    """

    def __init__(self, layer, explained_variance=0.7):
        """Applies SVD to the trained layer given as an input for class constructor."""
        super(TruncatedSVDLinear, self).__init__()
        self.bias = layer.bias
        W = layer.weight.data
        U, s, V = torch.svd(W)
        # Keep enough components to explain the requested variance.
        # Bug fix: clamp to at least rank 1 — with a single dominant
        # singular value the cumulative test can select rank 0, which
        # silently degenerates the layer to (bias-only) output.
        rank = int((torch.cumsum(s / s.sum(), dim=-1) < explained_variance).int().sum())
        self.rank = max(rank, 1)
        U, s, V = U[:, :self.rank], s[:self.rank], V[:, :self.rank]
        self.US = torch.nn.Parameter((U @ torch.diag(s)).transpose(1, 0))
        self.V = torch.nn.Parameter(V)

    def forward(self, x):
        # x @ V @ US reconstructs x @ W.T at reduced rank.
        out = x @ self.V @ self.US
        if self.bias is not None:  # idiomatic None check (was a __name__ test)
            out = out + self.bias
        return out
from enum import IntEnum
class DIMS(IntEnum):
    """Dimension indices for tensors laid out as (batch, sequence, feature)."""
    batch = 0
    seq = 1
    feature = 2
class TTLSTMCell(torch.nn.Module):
    def __init__(self, lstm_cell_layer, ranks_tt=70):
        """LSTMCell class wrapper with tensor-trained weights.

        Args:
            lstm_cell_layer: trained ``torch.nn.LSTMCell`` whose weight
                matrices are compressed into tensor-train (TT) cores.
            ranks_tt: maximal TT-rank used for the decomposition.
        """
        super(TTLSTMCell, self).__init__()
        self.input_size = lstm_cell_layer.input_size
        self.hidden_size = lstm_cell_layer.hidden_size
        self.bias = lstm_cell_layer.bias
        # Decompose the (transposed) gate weights into TT cores so they can
        # keep training in factorised form.
        self.weight_ih = ParameterList([
            Parameter(core) \
            for core in tn.Tensor(lstm_cell_layer.weight_ih.data.T,
                                  ranks_tt=ranks_tt).cores
        ])
        self.weight_hh = ParameterList([
            Parameter(core) \
            for core in tn.Tensor(lstm_cell_layer.weight_hh.data.T,
                                  ranks_tt=ranks_tt).cores
        ])
        if self.bias:
            self.bias_ih = Parameter(lstm_cell_layer.bias_ih.data)
            self.bias_hh = Parameter(lstm_cell_layer.bias_hh.data)

    def __restore_weight(self, cores):
        # Contract the TT cores back into a dense (in, 4*hidden) matrix.
        return torch.einsum('amc,cna->mn', *cores)

    def forward(self, x_t, states=None):
        if states is None:
            h_t, c_t = (torch.zeros(self.hidden_size).to(x_t.device),
                        torch.zeros(self.hidden_size).to(x_t.device))
        else:
            h_t, c_t = states
        # Bug fix: the original read self.bias_ih/self.bias_hh
        # unconditionally, but __init__ only defines them when the wrapped
        # cell was built with bias=True; use 0 otherwise.
        bias_ih = self.bias_ih if self.bias else 0
        bias_hh = self.bias_hh if self.bias else 0
        # batch the computations into a single matrix multiplication
        gates = x_t @ self.__restore_weight(self.weight_ih) + bias_ih \
            + h_t @ self.__restore_weight(self.weight_hh) + bias_hh
        # Gate order follows the PyTorch LSTM layout: input, forget, cell, output.
        i_t, f_t, g_t, o_t = (
            torch.sigmoid(gates[:, :self.hidden_size]),  # input
            torch.sigmoid(gates[:, self.hidden_size:self.hidden_size*2]),  # forget
            torch.tanh(gates[:, self.hidden_size*2:self.hidden_size*3]),
            torch.sigmoid(gates[:, self.hidden_size*3:]),  # output
        )
        c_t = f_t * c_t + i_t * g_t
        h_t = o_t * torch.tanh(c_t)
        return h_t, c_t
class TTLSTM(torch.nn.Module):
    def __init__(self, lstm_layer, ranks_tt=70):
        """LSTM class wrapper with tensor-trained weights.

        Wraps a (single-layer) trained ``torch.nn.LSTM``, compressing its
        weight matrices into tensor-train cores; forward/backward directions
        are handled separately when the wrapped LSTM is bidirectional.
        """
        super(TTLSTM, self).__init__()
        self.input_size = lstm_layer.input_size
        self.hidden_size = lstm_layer.hidden_size
        self.num_layers = lstm_layer.num_layers
        self.bias = lstm_layer.bias
        self.bidirectional = lstm_layer.bidirectional
        # Only layer 0 weights are wrapped — NOTE(review): num_layers > 1 is
        # stored but additional layers are not decomposed; confirm callers
        # only pass single-layer LSTMs.
        self.weight_ih = ParameterList([
            Parameter(core) \
            for core in tn.Tensor(lstm_layer.weight_ih_l0.data.T,
                                  ranks_tt=ranks_tt).cores
        ])
        self.weight_hh = ParameterList([
            Parameter(core) \
            for core in tn.Tensor(lstm_layer.weight_hh_l0.data.T,
                                  ranks_tt=ranks_tt).cores
        ])
        if self.bias:
            self.bias_ih = Parameter(lstm_layer.bias_ih_l0.data)
            self.bias_hh = Parameter(lstm_layer.bias_hh_l0.data)
        if self.bidirectional:
            self.weight_ih_reverse = ParameterList([
                Parameter(core) \
                for core in tn.Tensor(lstm_layer.weight_ih_l0_reverse.data.T,
                                      ranks_tt=ranks_tt).cores
            ])
            self.weight_hh_reverse = ParameterList([
                Parameter(core) \
                for core in tn.Tensor(lstm_layer.weight_hh_l0_reverse.data.T,
                                      ranks_tt=ranks_tt).cores
            ])
            if self.bias:
                self.bias_ih_reverse = Parameter(lstm_layer.bias_ih_l0_reverse.data)
                self.bias_hh_reverse = Parameter(lstm_layer.bias_hh_l0_reverse.data)

    def nparams(self):
        """Number of trainable parameters (post-compression)."""
        return sum(p.numel() for p in self.parameters() if p.requires_grad)

    def flatten_parameters(self):
        # No-op kept for drop-in API compatibility with torch.nn.LSTM.
        pass

    def __restore_weight(self, cores):
        # Contract the two TT cores back into a dense (in, 4*hidden) matrix.
        return torch.einsum('amc,cna->mn', *cores)

    def __impl(self, x, init_states=None, backward=False):
        """Assumes x is of shape (batch, sequence, feature)"""
        bs, seq_sz, _ = x.size()
        hidden_seq = []
        if init_states is None:
            h_t, c_t = (torch.zeros(self.hidden_size).to(x.device),
                        torch.zeros(self.hidden_size).to(x.device))
        else:
            h_t, c_t = init_states
        for t in range(seq_sz):
            x_t = x[:, t, :]
            # Select direction-specific parameters.
            # NOTE(review): bias_ih/bias_hh only exist when self.bias is
            # True — a bias=False LSTM would raise AttributeError here.
            weight_ih = self.weight_ih if not backward else self.weight_ih_reverse
            weight_hh = self.weight_hh if not backward else self.weight_hh_reverse
            bias_ih = self.bias_ih if not backward else self.bias_ih_reverse
            bias_hh = self.bias_hh if not backward else self.bias_hh_reverse
            # batch the computations into a single matrix multiplication
            gates = x_t @ self.__restore_weight(weight_ih) + bias_ih \
                + h_t @ self.__restore_weight(weight_hh) + bias_hh
            # Gate order follows the PyTorch layout: input, forget, cell, output.
            i_t, f_t, g_t, o_t = (
                torch.sigmoid(gates[:, :self.hidden_size]),  # input
                torch.sigmoid(gates[:, self.hidden_size:self.hidden_size*2]),  # forget
                torch.tanh(gates[:, self.hidden_size*2:self.hidden_size*3]),
                torch.sigmoid(gates[:, self.hidden_size*3:]),  # output
            )
            c_t = f_t * c_t + i_t * g_t
            h_t = o_t * torch.tanh(c_t)
            hidden_seq.append(h_t.unsqueeze(DIMS.batch))
        hidden_seq = torch.cat(hidden_seq, dim=DIMS.batch)
        # reshape from shape (sequence, batch, feature) to (batch, sequence, feature)
        hidden_seq = hidden_seq.transpose(DIMS.batch, DIMS.seq).contiguous()
        return hidden_seq, (h_t, c_t)

    def forward(self, x, init_states=None):
        """Run the (optionally bidirectional) TT-LSTM over *x*.

        Returns (outputs, (h_n, c_n)); for the bidirectional case the two
        directions are concatenated on the feature axis.
        """
        outputs_forward, (h_n_forward, c_n_forward) = self.__impl(
            x, init_states=init_states, backward=False
        )
        if self.bidirectional:
            # Feed the time-reversed sequence through the reverse weights.
            # NOTE(review): the backward outputs are concatenated without
            # being re-reversed along time, unlike torch.nn.LSTM — confirm
            # this is intentional.
            outputs_backward, (h_n_backward, c_n_backward) = self.__impl(
                x[:, range(x.shape[1] - 1, -1, -1), :], init_states=init_states, backward=True
            )
            return torch.cat([outputs_forward, outputs_backward], -1), (
                torch.cat([h_n_forward, h_n_backward], 0),
                torch.cat([c_n_forward, c_n_backward], 0)
            )
        return outputs_forward, (h_n_forward, c_n_forward)
| StarcoderdataPython |
3268234 | <gh_stars>1-10
from django.apps import AppConfig
class FetcherConfig(AppConfig):
    """Django application configuration for the ``fetcher`` app."""
    # Dotted module path Django uses to register the app (INSTALLED_APPS).
    name = 'fetcher'
import numpy as np

# Static-typing fixture: `reveal_type` exists only for type checkers
# (mypy/pyright), which print the inferred type of `a` here. Executing this
# file with the Python interpreter would raise NameError on reveal_type.
a = np.zeros((2,2,2))
reveal_type(a)
| StarcoderdataPython |
1621987 | <reponame>Anchovee/dash_TA<filename>app/dashapp3/callbacks.py
from datetime import datetime as dt
import pandas_datareader as pdr
from dash.dependencies import Input
from dash.dependencies import Output
from dash.exceptions import PreventUpdate
def register_callbacks(dashapp):
    """Attach the data-selection callback to *dashapp*.

    The single callback maps the dropdown selection onto four outputs:
    a bar chart figure, the data-table rows, its column spec, and the
    container's style.
    """
    # Bug fix: `go` was referenced below but never imported anywhere in
    # this module, so firing the callback raised NameError.
    import plotly.graph_objs as go

    @dashapp.callback([
        Output('graph', 'figure'),
        Output('data-table', 'data'),
        Output('data-table', 'columns'),
        Output('container', 'style')
    ], [Input('data-dropdown', 'value')])
    def update_graph(value):
        # No selection yet: keep all outputs unchanged (checked first so we
        # don't rebuild the sample data for nothing).
        if value is None:
            raise PreventUpdate
        sample_data = {
            'series': {
                'data': [
                    {'title': 'Game of Thrones', 'score': 9.5},
                    {'title': 'Stranger Things', 'score': 8.9},
                    {'title': 'Vikings', 'score': 8.6}
                ],
                'style': {
                    'backgroundColor': '#ff998a'
                }
            },
            'movies': {
                'data': [
                    {'title': 'Rambo', 'score': 7.7},
                    {'title': 'The Terminator', 'score': 8.0},
                    {'title': 'Alien', 'score': 8.5}
                ],
                'style': {
                    'backgroundColor': '#fff289'
                }
            }
        }
        selected = sample_data[value]
        data = selected['data']
        # One table column per key of the first record.
        columns = [
            {'name': k.capitalize(), 'id': k}
            for k in data[0].keys()
        ]
        figure = go.Figure(
            data=[
                go.Bar(x=[x['score']], text=x['title'], name=x['title'])
                for x in data
            ]
        )
        return figure, data, columns, selected['style']
1624111 |
__all__ = ['Word', 'Line', 'Block', 'SourceFile']
from .component import Component
class Word(Component):
    """A leaf component wrapping an immutable string.

    Provides content for: words.
    """

    templates = dict(
        string = '%(word)s'
    )
    template_options = dict()

    def __init__(self, word):
        Component.__init__(self)
        self.word = word

    def add(self, other):
        # Words are leaves: they never accept children.
        raise TypeError('%s instance is immutable' % (type(self).__name__))

    def request_data(self, subs_name, parents):
        # Any substitution other than 'words' implicitly yields None.
        if subs_name == 'words':
            return self.word

    def realize(self):
        self.save()
        rendered = self.get_view('string')
        self.restore()
        return rendered
class Line(Component):
    """A sequence of words rendered as one string (no separator).

    Components must provide: words.
    Provides content for: strings, blocks.
    """

    templates = dict(
        string = '%(words)s',
    )
    template_options = dict(
        words = dict(separator='')
    )

    def add(self, other):
        # Promote bare strings to Word leaves before attaching them.
        item = Word(other) if isinstance(other, str) else other
        Component.add(self, item)

    def request_data(self, subs_name, parents):
        # Both 'strings' and 'blocks' substitutions render the same view.
        if subs_name in ('strings', 'blocks'):
            return self.get_view('string')

    def realize(self):
        self.save()
        rendered = self.get_view('string')
        self.restore()
        return rendered
class Block(Component):
    """An optionally named group of lines, joined with newlines.

    Components must provide: strings.
    Provides content for: block, named_block.
    """

    templates = dict(
        string = '%(strings)s'
    )
    template_options = dict(
        strings = dict(separator='\n',
                       ignore_empty_content = True
                       )
    )

    def __init__(self, name = None, *leafs):
        Component.__init__(self, *leafs)
        self.name = name

    def add(self, other):
        # Promote bare strings to Line components before attaching them.
        item = Line(other) if isinstance(other, str) else other
        Component.add(self, item)

    def request_data(self, subs_name, parents):
        if subs_name == 'block':
            return self.get_view('string')
        if subs_name == 'named_block':
            return self.name, self.get_view('string')

    def realize(self):
        self.save()
        rendered = self.get_view('string')
        self.restore()
        return rendered
class SourceFile(Block):
    """A block that renders into a file on disk.

    Components must provide: strings.
    Provides content for: block, named_block, filename.
    """

    templates = dict(
        filename = '%(name)s',
    )

    def request_data(self, subs_name, parents):
        if subs_name == 'filename':
            # Bug fix: `os` was referenced without being imported anywhere
            # in this module (NameError at runtime).
            import os
            d = parents.get_attr('path', '.')
            return os.path.join(d, self.name)
        return Block.request_data(self, subs_name, parents)

    def realize(self):
        """Render the block content and write it to ``self.name``.

        NOTE(review): this writes to the bare name and ignores the 'path'
        attribute used by the 'filename' substitution — confirm whether the
        resolved path should be used instead.
        """
        content = Block.realize(self)
        # `with` guarantees the handle is closed even if write() fails.
        with open(self.name, 'w') as f:
            f.write(content)
        return self.name
| StarcoderdataPython |
95814 | <filename>src/sfcparse/__xml/xmlbuildmanual.py<gh_stars>0
# xmlbuildmanual
#########################################################################################################
# Imports
import xml.etree.ElementTree as __xml_etree
#########################################################################################################
# Build manual xml data
def xmlbuildmanual() -> __xml_etree:
    """Return the ``xml.etree.ElementTree`` module for manual XML building.

    Assign the result to a variable and use the standard-library
    ElementTree API on it to construct or manipulate XML data. API docs:
    https://docs.python.org/3/library/xml.etree.elementtree.html
    """
    return __xml_etree
| StarcoderdataPython |
1764916 | from collections import OrderedDict, abc
from enum import Enum
from functools import reduce, wraps
from typing import Callable, List, Optional, Sequence, TypeVar, Union, overload
import torch
from torch import Tensor, nn
from .delay import Delay
from .logging import getLogger
from .module import CallMode, CoModule, PaddingMode, TensorPlaceholder
from .utils import (
function_repr,
load_state_dict,
num_from,
state_dict,
temporary_parameter,
)
logger = getLogger(__name__)
__all__ = [
"Sequential",
"BroadcastReduce",
"Residual",
"Broadcast",
"Parallel",
"Reduce",
]
T = TypeVar("T")
class Reduction(Enum):
    """Types of parallel tensor reduce operation.

    Supported types are:
    - SUM: Element-wise summation
    - CONCAT: Channel-wise concatenation
    - MUL: Hadamard product
    """

    SUM = "sum"
    CONCAT = "concat"
    MUL = "mul"
ReductionFunc = Callable[[Sequence[Tensor]], Tensor]
ReductionFuncOrEnum = Union[Reduction, ReductionFunc]
def reduce_sum(inputs: Sequence[Tensor]) -> Tensor:
    """Element-wise sum of all inputs (requires at least two tensors)."""
    assert len(inputs) >= 2
    total = inputs[0]
    for tensor in inputs[1:]:
        total = total + tensor  # out-of-place: inputs stay untouched
    return total
def reduce_concat(inputs: Sequence[Tensor]) -> Tensor:
    """Channel-wise concatenation of input

    Args:
        inputs (Sequence[Tensor]): Inputs with broadcastable shapes.

    Returns:
        Tensor: Inputs concatenated in the channel dimension
    """
    channel_dim = 1  # channel dim for inputs of shape (B, C, T, H, W)
    return torch.cat(inputs, dim=channel_dim)
def reduce_mul(inputs: Sequence[Tensor]) -> Tensor:
    """Hadamard product between inputs

    Args:
        inputs (Sequence[Tensor]): Inputs with broadcastable shapes.

    Returns:
        Tensor: Hadamard product of inputs
    """
    assert len(inputs) >= 2
    product = inputs[0]
    for tensor in inputs[1:]:
        product = product * tensor  # out-of-place: inputs stay untouched
    return product
def nonempty(fn: ReductionFunc) -> ReductionFunc:
    """Guard a reduction so empty step inputs yield a TensorPlaceholder
    instead of an error."""

    @wraps(fn)
    def guarded(inputs: Sequence[Tensor]) -> Tensor:
        if any(len(tensor) == 0 for tensor in inputs):
            return TensorPlaceholder(inputs[0].shape)  # pragma: no cover
        return fn(inputs)

    return guarded
class FlattenableStateDict:
    """Mixes in the ability to flatten state dicts.

    It is assumed that classes that inherit this module also inherit from nn.Module
    """

    # When True, state_dict/load_state_dict always use the flattened layout.
    flatten_state_dict = False

    def __init__(self, *args, **kwargs):
        ...  # pragma: no cover

    def state_dict(
        self, destination=None, prefix="", keep_vars=False, flatten=False
    ) -> "OrderedDict[str, Tensor]":
        # Delegate to the module-level `state_dict` helper; flattening can be
        # requested per call or forced via the class attribute above.
        flatten = flatten or self.flatten_state_dict
        return state_dict(self, destination, prefix, keep_vars, flatten)

    def load_state_dict(
        self,
        state_dict: "OrderedDict[str, Tensor]",
        strict: bool = True,
        flatten=False,
    ):
        # Mirror image of state_dict(): delegate to the module-level helper.
        flatten = flatten or self.flatten_state_dict
        return load_state_dict(self, state_dict, strict, flatten)
def co_add_module(self, name: str, module: Optional["nn.Module"]) -> None:
    """Register *module* on *self*, converting plain nn.Modules to
    continual ones first.

    Shared ``add_module`` implementation for the container classes below.
    """
    if not CoModule.is_valid(module):
        # Attempt automatic conversion
        from continual.convert import continual  # break cyclical import

        module = continual(module)
    nn.Module.add_module(self, name, module)
class Broadcast(CoModule, nn.Module):
    """Replicate a single input into ``num_streams`` parallel streams."""

    def __init__(
        self,
        num_streams: int = None,
    ):
        nn.Module.__init__(self)
        # May be None at construction time; co.Sequential fills it in when
        # this Broadcast is followed by a Parallel container.
        self.num_streams = num_streams

    def forward(self, input: T) -> List[T]:
        assert isinstance(
            self.num_streams, int
        ), "Unknown number of target streams in Broadcast."
        # All streams share the same object reference (no copies).
        return [input] * self.num_streams

    def forward_step(self, input: T, update_state=True) -> List[T]:
        return self.forward(input)

    def forward_steps(self, input: T, pad_end=False, update_state=True) -> List[T]:
        return self.forward(input)
class Parallel(FlattenableStateDict, CoModule, nn.Sequential):
    """Container for parallel modules.

    Args:
        *args: Either vargs of modules or an OrderedDict.
        auto_delay (bool, optional):
            Automatically add delay to modules in order to match the longest delay.
            Defaults to True.
    """

    @overload
    def __init__(
        self,
        *args: CoModule,
        auto_delay=True,
    ) -> None:
        ...  # pragma: no cover
    @overload
    def __init__(
        self,
        arg: "OrderedDict[str, CoModule]",
        auto_delay=True,
    ) -> None:
        ...  # pragma: no cover
    def __init__(
        self,
        *args,
        auto_delay=True,
    ):
        nn.Module.__init__(self)
        # Normalise both call styles to a list of (name, module) pairs.
        if len(args) == 1 and isinstance(args[0], OrderedDict):
            modules = [(key, module) for key, module in args[0].items()]
        else:
            modules = [(str(idx), module) for idx, module in enumerate(args)]
        if auto_delay:
            # If there is a delay mismatch, automatically add delay to match the longest
            max_delay = max([m.delay for _, m in modules])
            modules = [
                (
                    key,
                    (
                        Sequential(module, Delay(max_delay - module.delay))
                        if module.delay < max_delay
                        else module
                    ),
                )
                for key, module in modules
            ]
        # Parallel branches must consume steps at the same rate.
        assert (
            len(set(num_from(getattr(m, "stride", 1)) for _, m in modules)) == 1
        ), f"Expected all modules to have the same stride, but got strides {[(num_from(getattr(m, 'stride', 1))) for _, m in modules]}"
        for key, module in modules:
            self.add_module(key, module)
        delays = set(m.delay for m in self)
        if len(delays) != 1:  # pragma: no cover
            logger.warning(
                f"It recommended that parallel modules have the same delay, but found delays {delays}. "
                "Temporal consistency cannot be guaranteed."
            )
        # Aggregate properties exposed by the container.
        self._delay = max(delays)
        receptive_fields = set(m.receptive_field for m in self)
        self._receptive_field = max(receptive_fields)

    def add_module(self, name: str, module: Optional["nn.Module"]) -> None:
        # Delegates to the shared helper, auto-converting plain nn.Modules.
        co_add_module(self, name, module)

    def forward_step(self, inputs: List[T], update_state=True) -> List[T]:
        # Feed input i to branch i; temporary call_mode makes plain __call__
        # dispatch to forward_step (needed e.g. for ptflops hooks).
        outs = []
        for i, m in enumerate(self):
            with temporary_parameter(m, "call_mode", CallMode.FORWARD_STEP):
                outs.append(m(inputs[i], update_state=update_state))
        return outs

    def forward_steps(
        self, inputs: List[T], pad_end=False, update_state=True
    ) -> List[T]:
        outs = []
        for i, m in enumerate(self):
            with temporary_parameter(m, "call_mode", CallMode.FORWARD_STEPS):
                outs.append(m(inputs[i], pad_end=pad_end, update_state=update_state))
        return outs

    def forward(self, inputs: List[T]) -> List[T]:
        outs = []
        for i, m in enumerate(self):
            with temporary_parameter(m, "call_mode", CallMode.FORWARD):
                outs.append(m(inputs[i]))
        return outs

    @property
    def receptive_field(self) -> int:
        # Maximum over branches, computed once in __init__.
        return self._receptive_field

    @property
    def delay(self) -> int:
        return self._delay

    @property
    def stride(self) -> int:
        # All branches share one stride (asserted in __init__).
        return num_from(getattr(next(iter(self)), "stride", 1))

    @property
    def padding(self) -> int:
        return max(num_from(getattr(m, "padding", 0)) for m in self)

    def clean_state(self):
        # Reset any temporal state held by the branches.
        for m in self:
            if hasattr(m, "clean_state"):
                m.clean_state()
class ParallelDispatch(CoModule, nn.Module):
    """Parallel dispatch of many input streams to many output streams"""

    def __init__(
        self,
        dispatch_mapping: Sequence[Union[int, Sequence[int]]],
    ):
        """Initialise ParallelDispatch

        Args:
            dispatch_mapping (Sequence[Union[int, Sequence[int]]]):
                input-to-output mapping, where the integers signify the input stream ordering
                and the positions denote corresponding output ordering.

        Examples:
            [1,0] to shuffle order of streams.
            [0,1,1] to copy stream 1 onto a new stream.
            [[0,1],2] to collect stream 0 and 1 while keeping stream 2 separate.
        """
        nn.Module.__init__(self)

        # Recursively validate: every element is an int (stream id) or a
        # nested sequence of stream ids.
        def is_int_or_valid_list(x):
            if isinstance(x, int):
                return True
            elif isinstance(x, abc.Sequence):
                return all(is_int_or_valid_list(z) for z in x)
            else:
                return False

        assert isinstance(dispatch_mapping, abc.Sequence) and is_int_or_valid_list(
            dispatch_mapping
        ), "The dispatch_mapping should be of type Sequence[Union[StreamId, Sequence[StreamId]]]"
        self.dispatch_mapping = dispatch_mapping

    def forward(self, input: List[T]) -> List[Union[T, List[T]]]:
        # Resolve the (possibly nested) mapping against the input streams;
        # output streams reference the same objects (no copies).
        def dispatch(mapping):
            nonlocal input
            if isinstance(mapping, abc.Sequence):
                return [dispatch(m) for m in mapping]
            else:
                return input[mapping]

        return dispatch(self.dispatch_mapping)

    def forward_step(
        self, input: List[T], update_state=True
    ) -> List[Union[T, List[T]]]:
        # Dispatch is stateless: step-wise behaviour equals forward.
        return self.forward(input)

    def forward_steps(
        self, input: List[T], pad_end=False, update_state=True
    ) -> List[Union[T, List[T]]]:
        return self.forward(input)
class Reduce(CoModule, nn.Module):
    """Combine a list of parallel input streams into one output tensor."""

    def __init__(
        self,
        reduce: ReductionFuncOrEnum = Reduction.SUM,
    ):
        nn.Module.__init__(self)
        if callable(reduce):
            chosen = reduce
        else:
            lookup = {
                Reduction.SUM: reduce_sum,
                Reduction.CONCAT: reduce_concat,
                Reduction.MUL: reduce_mul,
            }
            chosen = lookup[Reduction(reduce)]
        # Guard against empty step inputs (placeholder passthrough).
        self.reduce = nonempty(chosen)

    def forward(self, inputs: List[T]) -> T:
        return self.reduce(inputs)

    def forward_step(self, inputs: List[T], update_state=True) -> T:
        # Only reduce when every stream produced a real tensor this step.
        if all(isinstance(i, Tensor) for i in inputs):
            return self.reduce(inputs)
        return TensorPlaceholder()  # pragma: no cover

    def forward_steps(self, inputs: List[T], pad_end=False, update_state=True) -> T:
        return self.reduce(inputs)
class Sequential(FlattenableStateDict, CoModule, nn.Sequential):
    """Continual Sequential module

    This module is a drop-in replacement for `torch.nn.Sequential`
    which adds the `forward_step`, `forward_steps`, and `like` methods
    as well as a `delay` property
    """

    @overload
    def __init__(self, *args: nn.Module) -> None:
        ...  # pragma: no cover
    @overload
    def __init__(self, arg: "OrderedDict[str, nn.Module]") -> None:
        ...  # pragma: no cover
    def __init__(self, *args):
        nn.Module.__init__(self)
        modules = []
        # Normalise both call styles to (name, module) pairs.
        if len(args) == 1 and isinstance(args[0], OrderedDict):
            for key, module in args[0].items():
                modules.append((key, module))
        else:
            for idx, module in enumerate(args):
                modules.append((str(idx), module))
        # If a co.Broadcast is followed by a co.Parallel, automatically infer num_streams
        for i in range(len(modules)):
            if isinstance(modules[i][1], Broadcast) and i < len(modules) - 1:
                if isinstance(modules[i + 1][1], Parallel):
                    modules[i][1].num_streams = modules[i][1].num_streams or len(
                        modules[i + 1][1]
                    )
        for n, m in modules:
            self.add_module(n, m)

    def add_module(self, name: str, module: Optional["nn.Module"]) -> None:
        # Delegates to the shared helper, auto-converting plain nn.Modules.
        co_add_module(self, name, module)

    def forward(self, input):
        # Temporary call_mode makes plain __call__ dispatch to the chosen
        # mode (needed e.g. for ptflops hooks).
        for m in self:
            with temporary_parameter(m, "call_mode", CallMode.FORWARD):
                input = m(input)  # == module.forward
        return input

    def forward_step(self, input, update_state=True):
        for module in self:
            # ptflops only works when __call__ is triggered
            with temporary_parameter(module, "call_mode", CallMode.FORWARD_STEP):
                input = module(  # == module.forward_step
                    input, update_state=update_state
                )
            # A module without output this step short-circuits the chain.
            if not type(input) in {Tensor, list}:
                return TensorPlaceholder()
        return input

    def forward_steps(self, input: Tensor, pad_end=False, update_state=True):
        for m in self:
            if not type(input) in {Tensor, list} or len(input) == 0:
                return TensorPlaceholder()  # pragma: no cover
            # ptflops only works when __call__ is triggered
            with temporary_parameter(m, "call_mode", CallMode.FORWARD_STEPS):
                # == m.forward_steps
                input = m(input, pad_end=pad_end, update_state=update_state)
        return input

    @property
    def receptive_field(self) -> int:
        # Compose back-to-front: rf = stride_m * rf_downstream + rf_m - stride_m.
        reverse_modules = [m for m in self][::-1]
        rf = reverse_modules[0].receptive_field
        for m in reverse_modules[1:]:
            s = num_from(getattr(m, "stride", 1))
            rf = s * rf + m.receptive_field - s
        return rf

    @property
    def stride(self) -> int:
        # Total stride is the product of the per-module strides.
        tot = 1
        for m in self:
            tot *= num_from(m.stride)
        return tot

    @property
    def padding(self) -> int:
        # Each module's padding counts scaled by the cumulative stride of
        # the modules before it.
        m = [m for m in self]
        p = num_from(m[0].padding)
        s = num_from(m[0].stride)
        for i in range(1, len(m)):
            p += num_from(m[i].padding) * s
            s = s * num_from(m[i].stride)
        return p

    @staticmethod
    def build_from(module: nn.Sequential) -> "Sequential":
        """Create a continual Sequential by converting each child of a
        regular ``torch.nn.Sequential``."""
        from .convert import continual  # import here due to circular import

        return Sequential(
            OrderedDict([(k, continual(m)) for k, m in module._modules.items()])
        )

    def clean_state(self):
        # Reset any temporal state held by the children.
        for m in self:
            if hasattr(m, "clean_state"):
                m.clean_state()
class BroadcastReduce(FlattenableStateDict, CoModule, nn.Sequential):
    """Broadcast an input to parallel modules and reduce.
    This module is a shorthand for
    >>> co.Sequential(co.Broadcast(),co.Parallel(*args),co.Reduce(reduce))
    Args:
        *args: Either vargs of modules or an OrderedDict.
        reduce (ReductionFuncOrEnum, optional):
            Function used to reduce the parallel outputs.
            Sum or concatenation can be specified by passing Reduction.SUM or Reduction.CONCAT respectively.
            Custom reduce functions can also be passed.
            Defaults to Reduction.SUM.
        auto_delay (bool, optional):
            Automatically add delay to modules in order to match the longest delay.
            Defaults to True.
    """

    @overload
    def __init__(
        self,
        *args: CoModule,
        reduce: ReductionFuncOrEnum = Reduction.SUM,
        auto_delay=True,
    ) -> None:
        ...  # pragma: no cover

    @overload
    def __init__(
        self,
        arg: "OrderedDict[str, CoModule]",
        reduce: ReductionFuncOrEnum = Reduction.SUM,
        auto_delay=True,
    ) -> None:
        ...  # pragma: no cover

    def __init__(
        self,
        *args,
        reduce: ReductionFuncOrEnum = Reduction.SUM,
        auto_delay=True,
    ):
        nn.Module.__init__(self)

        # Accept either a single OrderedDict of named modules or varargs.
        if len(args) == 1 and isinstance(args[0], OrderedDict):
            modules = [(key, module) for key, module in args[0].items()]
        else:
            modules = [(str(idx), module) for idx, module in enumerate(args)]

        assert (
            len(modules) > 1
        ), "You should pass at least two modules for the map-reduce operation to make sense."

        if auto_delay:
            # If there is a delay mismatch, automatically add delay to match the longest
            max_delay = max([m.delay for _, m in modules])
            modules = [
                (
                    key,
                    (
                        Sequential(module, Delay(max_delay - module.delay))
                        if module.delay < max_delay
                        else module
                    ),
                )
                for key, module in modules
            ]

        # Parallel branches must advance in lock-step, so strides must agree.
        assert (
            len(set(num_from(getattr(m, "stride", 1)) for _, m in modules)) == 1
        ), f"Expected all modules to have the same stride, but got strides {[(num_from(getattr(m, 'stride', 1))) for _, m in modules]}"

        for key, module in modules:
            self.add_module(key, module)

        # Resolve the reduce function: a custom callable is used as-is,
        # otherwise the Reduction enum value is mapped to a built-in reducer.
        self.reduce = nonempty(
            reduce
            if callable(reduce)
            else {
                Reduction.SUM: reduce_sum,
                Reduction.CONCAT: reduce_concat,
                Reduction.MUL: reduce_mul,
            }[Reduction(reduce)]
        )

        # Cache the aggregate delay and receptive field over all branches.
        delays = set(m.delay for m in self)
        self._delay = max(delays)

        receptive_fields = set(m.receptive_field for m in self)
        self._receptive_field = max(receptive_fields)

    def add_module(self, name: str, module: Optional["nn.Module"]) -> None:
        """Register a parallel branch, delegating to ``co_add_module``."""
        co_add_module(self, name, module)

    def forward_step(self, input: Tensor, update_state=True) -> Tensor:
        """Broadcast one step to every branch and reduce the outputs.

        If any branch yields a non-Tensor (no output ready yet), a
        ``TensorPlaceholder`` is returned instead of the reduction.
        """
        outs = []
        for m in self:
            with temporary_parameter(m, "call_mode", CallMode.FORWARD_STEP):
                outs.append(m(input, update_state=update_state))  # == m.forward_step

        if all(isinstance(o, Tensor) for o in outs):
            return self.reduce(outs)

        # Try to infer shape
        shape = tuple()
        for o in outs:
            if isinstance(o, Tensor):  # pragma: no cover
                shape = o.shape
                break
        return TensorPlaceholder(shape)

    def forward_steps(self, input: Tensor, pad_end=False, update_state=True) -> Tensor:
        """Broadcast multiple steps to every branch and reduce the outputs."""
        outs = []
        for m in self:
            with temporary_parameter(m, "call_mode", CallMode.FORWARD_STEPS):
                # m.forward_steps
                outs.append(m(input, pad_end=pad_end, update_state=update_state))

        return self.reduce(outs)

    def forward(self, input: Tensor) -> Tensor:
        """Broadcast a regular (clip-wise) input to every branch and reduce."""
        outs = []
        for m in self:
            with temporary_parameter(m, "call_mode", CallMode.FORWARD):
                outs.append(m(input))  # == m.forward

        return self.reduce(outs)

    @property
    def receptive_field(self) -> int:
        """Maximum receptive field among the parallel branches."""
        return self._receptive_field

    @property
    def delay(self) -> int:
        """Maximum delay among the parallel branches."""
        return self._delay

    @property
    def stride(self) -> int:
        """Common stride of the branches (asserted equal in ``__init__``)."""
        return num_from(getattr(next(iter(self)), "stride", 1))

    @property
    def padding(self) -> int:
        """Maximum temporal padding among the parallel branches."""
        return max(num_from(getattr(m, "padding", 0)) for m in self)

    def clean_state(self):
        """Reset the temporal state of every branch that keeps one."""
        for m in self:
            if hasattr(m, "clean_state"):
                m.clean_state()

    def extra_repr(self):
        return f"reduce={self.reduce.__name__}"
def Residual(
    module: CoModule,
    temporal_fill: PaddingMode = None,
    reduce: Reduction = "sum",
    phantom_padding: bool = False,
) -> BroadcastReduce:
    """Add a residual (skip) connection around ``module``.

    The input is broadcast to a ``Delay`` branch and to ``module``, and the
    two branch outputs are combined with ``reduce``.

    Args:
        module (CoModule): module to which a residual should be added.
        temporal_fill (PaddingMode, optional): temporal fill type in delay. Defaults to None.
        reduce (Reduction, optional): Reduction function. Defaults to "sum".
        phantom_padding (bool, optional):
            Set residual delay to operate as if equal padding was used for the module. Defaults to False.
    Returns:
        BroadcastReduce: BroadcastReduce module with residual.
    """
    assert num_from(getattr(module, "stride", 1)) == 1, (
        "The simple `Residual` only works for modules with temporal stride=1. "
        "Complex residuals can be achieved using `BroadcastReduce` or the `Broadcast`, `Parallel`, and `Reduce` modules."
    )
    temporal_fill = temporal_fill or getattr(
        module, "temporal_fill", PaddingMode.REPLICATE.value
    )
    # "Equal padding": the module's padding fully compensates its receptive
    # field, i.e. rf - 2 * padding == 1.
    equal_padding = module.receptive_field - num_from(module.padding) * 2 == 1
    do_phantom_padding = phantom_padding and not equal_padding
    delay = module.delay
    if do_phantom_padding:
        # Halving the delay emulates equal padding on the residual branch.
        assert delay % 2 == 0, "Auto-shrink only works for even-number delays."
        delay = delay // 2

    return BroadcastReduce(
        # Residual first yields easier broadcasting in reduce functions
        Delay(delay, temporal_fill, auto_shrink=do_phantom_padding),
        module,
        reduce=reduce,
        auto_delay=False,
    )
class Conditional(FlattenableStateDict, CoModule, nn.Module):
    """Module wrapper for conditional invocations at runtime.

    On every call, ``predicate(self, input)`` decides whether ``on_true``
    (or ``on_false``, when given) handles the input; when no branch applies,
    the input is passed through unchanged. The branches are delay-matched by
    prepending a ``Delay`` to the shorter one.
    """

    def __init__(
        self,
        predicate: Callable[[CoModule, Tensor], bool],
        on_true: CoModule,
        on_false: CoModule = None,
    ):
        from continual.convert import continual  # Break cyclical import

        assert callable(predicate), "The passed function should be callable."
        # Convert plain nn.Modules into continual modules where necessary.
        if not isinstance(on_true, CoModule):
            on_true = continual(on_true)
        if not (isinstance(on_false, CoModule) or on_false is None):
            on_false = continual(on_false)

        nn.Module.__init__(self)
        self.predicate = predicate

        # Ensure modules have the same delay
        self._delay = max(on_true.delay, getattr(on_false, "delay", 0))
        self._receptive_field = max(
            on_true.receptive_field, getattr(on_false, "receptive_field", 1)
        )

        self.add_module(
            "0",
            on_true
            if on_true.delay == self._delay
            else Sequential(Delay(self._delay - on_true.delay), on_true),
        )

        if on_false is not None:
            self.add_module(
                "1",
                on_false
                if on_false.delay == self._delay
                else Sequential(Delay(self._delay - on_false.delay), on_false),
            )

    def forward(self, input: Tensor) -> Tensor:
        """Forward the input through the branch selected by the predicate."""
        if self.predicate(self, input):
            return self._modules["0"](input)
        elif "1" in self._modules:
            return self._modules["1"](input)
        return input

    def forward_step(self, input: Tensor, update_state=True) -> Tensor:
        """Step-wise forward through the selected branch.

        Fix: ``update_state`` is now forwarded to the branch instead of being
        silently ignored.
        """
        if self.predicate(self, input):
            return self._modules["0"].forward_step(input, update_state=update_state)
        elif "1" in self._modules:
            return self._modules["1"].forward_step(input, update_state=update_state)
        return input

    def forward_steps(self, input: Tensor, pad_end=False, update_state=True) -> Tensor:
        """Multi-step forward through the selected branch.

        Fix: ``pad_end`` and ``update_state`` are now forwarded to the branch
        instead of being silently ignored.
        """
        if self.predicate(self, input):
            return self._modules["0"].forward_steps(
                input, pad_end=pad_end, update_state=update_state
            )
        elif "1" in self._modules:
            return self._modules["1"].forward_steps(
                input, pad_end=pad_end, update_state=update_state
            )
        return input

    @property
    def delay(self) -> int:
        """Aggregate delay (the maximum of the branch delays)."""
        return self._delay

    @property
    def receptive_field(self) -> int:
        """Aggregate receptive field (the maximum over the branches)."""
        return self._receptive_field

    def extra_repr(self):
        return f"predicate={function_repr(self.predicate)}"
| StarcoderdataPython |
3217532 | import sys
import colorama
class HascalError:
    """Report a fatal compiler error and terminate.

    Instantiating this class IS the reporting mechanism: construction writes
    a red "Error : " prefix plus the message to stderr and exits the
    interpreter with status 1.
    """

    def __init__(self, exception_message):
        colorama.init()
        text = "".join(
            [
                colorama.Fore.RED,
                "Error : ",
                colorama.Style.RESET_ALL,
                exception_message,
                "\n",
            ]
        )
        sys.stderr.write(text)
        sys.exit(1)
class HascalWarning:
    """Report a non-fatal warning.

    Construction prints a yellow "Warning : " prefix followed by the message
    to stdout; execution continues.
    """

    def __init__(self, warning_message):
        colorama.init()
        message = "%sWarning : %s%s" % (
            colorama.Fore.YELLOW,
            colorama.Style.RESET_ALL,
            warning_message,
        )
        print(message)
| StarcoderdataPython |
1646116 | <reponame>jonrzhang/MegEngine<gh_stars>1-10
from functools import partial
from megengine.quantization import QConfig, tqt_qconfig
from megengine.quantization.fake_quant import TQT
def test_equal():
    """tqt_qconfig must equal a manually assembled TQT-based QConfig."""
    qconfig = QConfig(
        weight_observer=None,
        act_observer=None,
        weight_fake_quant=partial(TQT, dtype="qint8", narrow_range=True),
        act_fake_quant=partial(TQT, dtype="qint8", narrow_range=False),
    )
    assert qconfig == tqt_qconfig
| StarcoderdataPython |
153697 | from functools import singledispatch
from functools import update_wrapper
class singledispatchmethod:
    """Single-dispatch generic method descriptor.

    Backport of :class:`functools.singledispatchmethod`: dispatches on the
    class of the first positional argument after ``self``/``cls``. Supports
    wrapping existing descriptors and handles non-descriptor callables as
    instance methods.
    """

    def __init__(self, func):
        # Accept plain callables as well as descriptors (e.g. classmethod).
        if not (callable(func) or hasattr(func, "__get__")):
            raise TypeError(f"{func!r} is not callable or a descriptor")

        self.func = func
        self.dispatcher = singledispatch(func)

    def register(self, cls, method=None):
        """generic_method.register(cls, func) -> func

        Registers a new implementation for the given *cls* on a
        *generic_method*.
        """
        return self.dispatcher.register(cls, func=method)

    def __get__(self, obj, cls=None):
        dispatch = self.dispatcher.dispatch

        def bound(*args, **kwargs):
            # Dispatch on the class of the first plain argument, then bind the
            # chosen implementation to the instance before calling it.
            impl = dispatch(args[0].__class__)
            return impl.__get__(obj, cls)(*args, **kwargs)

        bound.__isabstractmethod__ = self.__isabstractmethod__
        bound.register = self.register
        update_wrapper(bound, self.func)
        return bound

    @property
    def __isabstractmethod__(self):
        # Mirror the wrapped function's abstractness so ABCs work correctly.
        return getattr(self.func, "__isabstractmethod__", False)
| StarcoderdataPython |
1676540 | <filename>ontask/action/tests/test_serializers.py
# -*- coding: utf-8 -*-
"""Test the views for the scheduler pages."""
import os
import test
from django.conf import settings
from ontask.action.serializers import ActionSelfcontainedSerializer
class ActionTestSerializers(test.OnTaskTestCase):
    """Tests for importing an action through ActionSelfcontainedSerializer."""

    fixtures = ['simple_workflow_two_actions']
    # SQL dump that backs the fixture's data frame.
    filename = os.path.join(
        settings.BASE_DIR(),
        'ontask',
        'fixtures',
        'simple_workflow_two_actions.sql',
    )

    # Credentials and workflow used by OnTaskTestCase when setting up.
    user_email = '<EMAIL>'
    user_pwd = '<PASSWORD>'
    workflow_name = 'wflow2'

    # Serialized action payload: two conditions (one of them the filter),
    # two referenced columns, and a personalized-text body.
    action_obj = {
        "conditions": [
            {
                "columns": [],
                "name": "old",
                "description_text": "",
                "formula": {
                    "not": False, "rules": [{
                        "id": "age",
                        "type": "double",
                        "field": "age",
                        "input": "number",
                        "value": "12",
                        "operator": "greater"}],
                    "valid": True,
                    "condition": "AND"},
                "n_rows_selected": 2,
                "is_filter": True},
            {
                "columns": [],
                "name": "Registered",
                "description_text": "",
                "formula": {
                    "not": False, "rules": [{
                        "id": "registered",
                        "type": "boolean",
                        "field": "registered",
                        "input": "radio", "value": "1",
                        "operator": "equal"}],
                    "valid": True,
                    "condition": "AND"},
                "n_rows_selected": 1,
                "is_filter": False}],
        "column_condition_pair": [],
        "is_out": True,
        "used_columns": [
            {
                "name": "age-2",
                "description_text": "",
                "data_type": "double",
                "is_key": False,
                "position": 1,
                "in_viz": True,
                "categories": [],
                "active_from": None,
                "active_to": None},
            {
                "name": "registered-2",
                "description_text": "",
                "data_type": "boolean",
                "is_key": False,
                "position": 5,
                "in_viz": True,
                "categories": [],
                "active_from": None,
                "active_to": None}],
        "name": "Detecting age",
        "description_text": "",
        "action_type": "personalized_text",
        "serve_enabled": False,
        "active_from": None,
        "active_to": None,
        "rows_all_False": [2],
        "text_content": "<p>Hi {{ name }}</p><p>ATT: {{ attribute name }}</p><p>COL: {{ registered }}</p><p>{% if Registered %}Thank you for registering{% else %}Remember to register{% endif %}</p>",
        "target_url": "", "shuffle": False}

    def test_serializer(self):
        """Test the self-contained action serializer."""
        # Deserialize the payload into a new action attached to the workflow.
        action_data = ActionSelfcontainedSerializer(
            data=self.action_obj,
            many=False,
            context={
                'user': self.workflow.user,
                'name': 'NEW ACTION',
                'workflow': self.workflow,
                'columns': self.workflow.columns.all()
            },
        )

        self.assertTrue(action_data.is_valid())
        action = action_data.save(user=self.workflow.user, name='NEW ACTION')

        # Re-fetch the action and verify the key fields survived the round trip.
        self.workflow.refresh_from_db()
        action = self.workflow.actions.get(name='NEW ACTION')
        self.assertEqual(action.is_out, True)
        self.assertEqual(
            action.description_text,
            self.action_obj['description_text'])
        self.assertEqual(
            action.conditions.count(),
            len(self.action_obj['conditions']))
182100 | <gh_stars>1000+
# Copyright 2019 Google LLC.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Tests for third_party.nucleus.io.gfile."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys

# If a bare `google` package was imported before `google.protobuf`, drop it
# from sys.modules so the protobuf-provided `google` namespace package can
# be imported cleanly (workaround for namespace-package clashes).
if 'google' in sys.modules and 'google.protobuf' not in sys.modules:
  del sys.modules['google']
from absl.testing import absltest
from third_party.nucleus.io import gfile
from third_party.nucleus.testing import test_utils
class GfileTest(absltest.TestCase):
  """Smoke tests for the gfile wrappers (Exists, Glob, Open)."""

  def test_exists(self):
    # Exists: true for a known testdata file, false for a missing one.
    self.assertTrue(gfile.Exists(
        test_utils.genomics_core_testdata('test_regions.bedgraph')))
    self.assertFalse(gfile.Exists(
        test_utils.genomics_core_testdata('does_not_exist')))

  def test_glob(self):
    # A pattern with matches returns them all; no matches returns [].
    g1 = gfile.Glob(test_utils.genomics_core_testdata('test*'))
    self.assertGreater(len(g1), 1)
    self.assertIn(
        test_utils.genomics_core_testdata('test.bam'), g1)
    g2 = gfile.Glob(test_utils.genomics_core_testdata('does_not_exist*'))
    self.assertEqual([], g2)

  def test_reading(self):
    # Every line of the headerless SAM file starts with the read-name prefix.
    with gfile.Open(test_utils.genomics_core_testdata('headerless.sam')) as f:
      for line in f:
        self.assertTrue(line.startswith('SRR3656745'))

  def test_writing(self):
    # Round trip: lines written through gfile.Open are read back verbatim.
    path = test_utils.test_tmpfile('test_gfile')
    with gfile.Open(path, 'w') as f:
      f.write('test\n')
      f.write('end\n')
    with gfile.Open(path, 'r') as f2:
      lines = f2.readlines()
    self.assertEqual(['test\n', 'end\n'], lines)
if __name__ == '__main__':
absltest.main()
| StarcoderdataPython |
3384290 | <gh_stars>0
from app import create_app
# Creating app instance
app = create_app('development')
def test():
    """Discover and run the project's unit tests from the ``tests`` package."""
    import unittest

    suite = unittest.TestLoader().discover('tests')
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite)
if __name__ == '__main__':
app.run() | StarcoderdataPython |
3240218 | <gh_stars>0
from django.contrib import admin
from .models import Book, Borrower
# Register your models here.
# Expose Book and Borrower in the Django admin using the default ModelAdmin.
admin.site.register(Book)
admin.site.register(Borrower)
1701490 | import lab9
from lab9 import Matrix

# Two 3x3 operand matrices (element values come from the Matrix constructor).
A = Matrix(3, 3)
B = Matrix(3, 3)

# NOTE(review): lab9's operators appear to return the integer 1 when the
# operand dimensions are incompatible; the `!= 1` checks below rely on that
# convention — confirm against lab9.Matrix.
C = A + B
D = A - B
E = A * 2
F = A * B

print('Матрица А = ')
A.outMat()
print('Матрица В = ')
B.outMat()

# Element-wise sum.
print('C = A + B: ')
if C != 1:
    C.outMat()
else:
    print('Матрицы разных размеров!')

# Element-wise difference.
print('D = A - B: ')
if D != 1:
    D.outMat()
else:
    print('Матрицы разных размеров!')

# Scalar multiplication (always defined, no size check needed).
print('E = A * 2: ')
E.outMat()

# Matrix product.
print('F = A * B: ')
if F != 1:
    F.outMat()
else:
    print('Матрицы разных размеров!')

# Determinant and transpose of A. NOTE(review): transporate() appears to
# transpose A in place before outMat() prints it — confirm in lab9.
print('Определитель матрицы А: ')
A.determinant()
print('Матрица А транспонированная: ')
A.transporate()
A.outMat()

input("\n\nНажмите Enter чтобы выйти ...")
183418 | <reponame>AutomataRaven/azaharTEA<filename>editorcontainer/rightclickmenu/__init__.py
# NOTE(review): entries in __all__ should be bare names importable from this
# package; a dotted path like 'rightclickmenu.RightClickMenu' will make
# `from <package> import *` raise AttributeError — confirm intended name.
__all__ = ['rightclickmenu.RightClickMenu']
| StarcoderdataPython |
43093 | import pandas as pd
import matplotlib.pyplot as plt
from src.utils.function_libraries import *
from src.utils.data_utils import *
from src.utils.identification.PI_Identifier import PI_Identifier
from src.utils.solution_processing import *
from differentiation.spectral_derivative import compute_spectral_derivative
from differentiation.finite_diff import compute_finite_diff
from filtering.SpectralFilter import SpectralFilter
from filtering.KernelFilter import KernelFilter
from tools import halve, mirror, add_noise, downsample
from src.utils.theta_processing.single_pend import *
from sklearn.model_selection import train_test_split
import matplotlib as mpl
import os
import pickle
from containers.DynaFrame import DynaFrame, create_df
from definitions import ROOT_DIR
import sympy as sp
from sympy.utilities.codegen import codegen
style_path = os.path.join(ROOT_DIR, 'src', 'utils', 'visualization', 'BystrickyK.mplstyle')
print(style_path)
plt.style.use({'seaborn', style_path})
mpl.use('Qt5Agg')
datafile = 'singlePend.csv'
data_path = os.path.join(ROOT_DIR,'data','singlePend','simulated',datafile)
cache_path = os.path.join(ROOT_DIR,'src', 'singlePendulumCart', 'cache')
# Get training dataset
def load_data(data_path):
    """Load the simulated single-pendulum-on-cart CSV as a DynaFrame.

    Columns are renamed to the generic ``x_i`` / ``dx_i`` scheme expected by
    the identification code downstream.

    :param data_path: path to the simulation CSV file.
    :return: tuple ``(DynaFrame, dt)`` where the frame holds t, x_1..x_4,
        dx_1..dx_4 and u, and dt is the sample time reported by the frame.
    """
    sim_data = pd.read_csv(data_path)

    # States: position, angle and their first derivatives.
    sim_data_x = sim_data.loc[:, ['s', 'phi1', 'Ds', 'Dphi']]
    sim_data_x.columns = ['x_' + str(i) for i in [1, 2, 3, 4]]

    # State derivatives: first and second derivatives of s and phi.
    sim_data_dx = sim_data.loc[:, ['Ds', 'Dphi', 'DDs', 'DDphi']]
    sim_data_dx.columns = ['dx_' + str(i) for i in [1, 2, 3, 4]]

    sim_data_u = sim_data.loc[:, 'u']
    sim_data_t = sim_data.loc[:, 't']

    sim_data = pd.concat([sim_data_t, sim_data_x, sim_data_dx, sim_data_u], axis=1)
    sim_data = DynaFrame(sim_data)
    dt = sim_data.get_dt()
    sim_data = sim_data.reset_index(drop=True)
    return DynaFrame(sim_data), dt
sim_data, dt = load_data(data_path)
sim_data, sim_data_test = train_test_split(sim_data, test_size=0.2,
shuffle=False, random_state=42)
#%%
# dx = compute_spectral_derivative(x, dt, mirroring=True)
# dx = create_df(dx, 'dx')
# filter = KernelFilter(kernel_size=51)
# dx = filter.filter(dx)
# compare_signals(DXt, dx, ['Clean', 'Filtered'], ylabels=['$\dot{x}_1 \; [m\; s^{-2}]$',
# '$\dot{x}_2 \; [rad\; s^{-2}]$'])
#
# ddx = compute_spectral_derivative(dx, dt)
# ddx = create_df(ddx, 'ddx')
# compare_signals(DDXt, ddx, ['Clean', 'Filtered'], ylabels=['$\ddot{x}_1 \; [m\; s^{-2}]$',
# '$\ddot{x}_2 \; [rad\; s^{-2}]$'])
#%% Downsample training data
sim_data = downsample(sim_data, 10).reset_index(drop=True)
sim_data = DynaFrame(sim_data)
sim_data_test = DynaFrame(sim_data_test)
# compare_signals(DX.iloc[:,[2,3]], downsample(DDXt.iloc[:,[0,1]], step),
# legend_str=['Filt','Clean'], ylabels=['a', 'b'])
#%%
def data_dict(sim_data):
    """Bundle the state, state-derivative and input variables of a DynaFrame.

    :param sim_data: object exposing ``get_state_vars``,
        ``get_state_derivative_vars`` and ``get_input_vars``.
    :return: dict with keys 'X', 'DX' and 'u'.
    """
    bundle = {}
    bundle['X'] = sim_data.get_state_vars()
    bundle['DX'] = sim_data.get_state_derivative_vars()
    bundle['u'] = sim_data.get_input_vars()
    return bundle
data = data_dict(sim_data)
data_val = data_dict(sim_data_test)
theta_basis = create_basis(data)
theta_basis_val = create_basis(data_val)
theta_train = poly_library(theta_basis, (1,2,3,4))
theta_val = poly_library(theta_basis_val, (1,2,3,4))
#%%
theta_train = drop_bad_terms(theta_train)
theta_val = drop_bad_terms(theta_val)
theta_train.iloc[:,0] = 1
theta_train.iloc[0,0] = 1.00001
theta_val.iloc[:,0] = 1
theta_val.iloc[0,0] = 1.00001
# %% Compute the solution or retrieve it from cache
rewrite = True # Should the cache be rewritten
rewrite = False
eqns_to_identify = ['dx_3', 'dx_4'] # State derivatives whose equation we want to identify
cache_str = 'SPFinalDense'
eqns_models = {}
for eqn in eqns_to_identify:
# find cols with other state derivatives than the one currently being identified
idx = np.array([('d' in col and eqn not in col) for col in theta_train.columns])
print(f'ii {np.sum(idx)}')
# Construct a library for identifying the desired equation
theta_hat_train = theta_train.loc[:, ~idx]
eqns_models[eqn] = {}
eqns_models[eqn]['theta_train'] = theta_hat_train
# corr = theta_hat_train.corr()
# plot_corr(corr, theta_hat_train.columns, labels=False, ticks=True)
cachename = cache_str + '_' + eqn
cachename = os.path.join(cache_path, cachename)
if os.path.exists(cachename) and not rewrite:
print("Retrieving solution from cache.")
with open(cachename, 'rb') as f:
eqns_models[eqn] = pickle.load(f)
else:
print("No solution in cache, calculating solution from scratch.")
EqnIdentifier = PI_Identifier(theta_hat_train)
EqnIdentifier.set_thresh_range(lims=(0.000001, 0.01), n=5)
EqnIdentifier.set_target(eqn)
EqnIdentifier.create_models(n_models=theta_hat_train.shape[1], iters=8, shuffle=False)
eqns_models[eqn]['models'] = EqnIdentifier.models
with open(cachename, 'wb') as f:
pickle.dump(eqns_models[eqn], f)
# %%
sim_data_xu = pd.concat([sim_data_test.get_state_vars(),
sim_data_test.get_input_vars()],
axis=1).reset_index(drop=True)
sim_data_dx = sim_data_test.get_state_derivative_vars().reset_index(drop=True)
dynamic_model = {}
for target_models_str, eqn_model in eqns_models.items():
theta_train = eqn_model['theta_train']
col_names = theta_train.columns
theta_sub_val = theta_val.loc[:, col_names]
models = eqn_model['models']
dynamic_model[target_models_str] = {}
# %% Remove duplicate models
models = model_unique(models)
models = model_activations(models)
models = model_val_rmse(models, theta_sub_val)
# plot_implicit_sols(models, col_names, show_labels=False, axislabels=False)
# Calculate AIC for each model
models = model_aic(models, theta_sub_val)
#%%
# model_metrics = models.loc[:, ['n_terms', 'train_metric', 'validation_metric', 'aic']]
# model_metrics = model_metrics.sort_values('n_terms')
# fig, axs = plt.subplots(ncols=2, tight_layout=True, sharex=True)
# axs[0].plot(model_metrics['n_terms'], model_metrics['train_metric'], 'o',
# color='tab:blue', alpha=0.7)
# axs[0].set_yscale('log')
# axs[0].set_xlabel("$Number\ of\ terms$")
# axs[0].set_ylabel("$Training\ RMSE$")
#
# axs[1].plot(model_metrics['n_terms'], model_metrics['validation_metric'],
# 'o', color='tab:red', alpha=0.7)
# axs[1].set_yscale('log')
# axs[1].set_xlabel("$Number\ of\ terms$")
# axs[1].set_ylabel("$Validation\ RMSE$")
# %% Look for consistent models by finding clusters in the term activation space
models = model_consistent(models, min_cluster_size=2)
# Discard non-sparse models
models = model_sparse(models, threshold=10)
# plot_implicit_sols(models, col_names, show_labels=False, axislabels=True)
models = model_equation_strings(models, col_names)
vars = ['x_1', 'x_2', 'x_3', 'x_4', 'u']
lhsvar = target_models_str
# Create symbolic implicit equations column
models = model_symbolic_implicit_eqns(models, lhsvar)
#%%
# Drop bad models
aic_thresh = models['aic'].max() * 0.1
models = models[ models['aic'] < aic_thresh ] # Keep models under the threshold
models = model_symbolic_eqn(models, lhsvar)
models = model_lambdify_eqn(models, vars)
models = models.reset_index(drop=True)
# %%
plot_implicit_sols(models, col_names, show_labels=True)
plt.show()
# %% Decompose one of the models
# choice = int(input("Choose model index:"))
choice = models['aic'].argmin()
best_model = models.loc[choice]
# %%
dynamic_model[target_models_str]['symeqn'] = best_model['eqn_sym']
dynamic_model[target_models_str]['str'] = best_model['eqn_sym_implicit']
dynamic_model[target_models_str]['models'] = models
dynamic_model[target_models_str]['choice'] = best_model
derivative_trajectory_model = np.apply_along_axis(best_model['eqn_lambda'], axis=1, arr=sim_data_xu)
derivative_trajectory_real = sim_data_dx.loc[:, target_models_str]
dynamic_model[target_models_str]['model_val_traj'] = derivative_trajectory_model
dynamic_model[target_models_str]['real_val_traj'] = derivative_trajectory_real
#%%
derivative_trajectory_real = []
derivative_trajectory_model = []
for eqn in eqns_to_identify:
dx_traj_model = dynamic_model[eqn]['model_val_traj']
dx_traj_real = dynamic_model[eqn]['real_val_traj']
derivative_trajectory_model.append(dx_traj_model)
derivative_trajectory_real.append(dx_traj_real)
derivative_trajectory_model = np.array(derivative_trajectory_model).T
derivative_trajectory_real = np.array(derivative_trajectory_real).T
# fig = plt.figure(tight_layout=True, figsize=(9,8))
compare_signals(derivative_trajectory_real, derivative_trajectory_model,
['Real', 'Model'], ['$\\dot{x_3}$', '$\\dot{x_4}$'])
#%%
def round_expr(expr, num_digits):
    """Return *expr* with every numeric atom rounded to ``num_digits`` digits."""
    replacements = {}
    for number in expr.atoms(sp.Number):
        replacements[number] = round(number, num_digits)
    return expr.xreplace(replacements)
symeqns = [dynamic_model[eqn]['symeqn'] for eqn in eqns_to_identify]
symeqns = [round_expr(sp.simplify(sp.factor(eqn)), 5) for eqn in symeqns]
latex_output = ' \\\\ \n '.join([sp.latex(eqn) for eqn in symeqns])
latex_output_file = 'model_latex.txt'
with open(latex_output_file, 'w') as file:
file.write(latex_output)
os.chdir('models')
codegen(('identified_model_clean', symeqns),
language='octave', to_files=True)
#%%
sim_data = DynaFrame(sim_data)
plot_signals(sim_data.get_state_vars(),
# ['$\\dot{x_3}$', '$\\dot{x_4}$']
['$x_1\ [m]$',
'$x_2\ [rad]$',
'$x_3=\\dot{x}_1\ [\\frac{m}{s}]$',
'$x_4=\\dot{x}_2\ [\\frac{rad}{s}]$'])
#%% Save good guess columns
good_guesses = []
for eqn, results in dynamic_model.items():
print(eqn)
models = results['models']
active_cols = models['active'].values
active_cols = np.vstack(active_cols)
active_cols = active_cols.any(axis=0)
good_guesses.append(active_cols)
good_guesses = np.array(good_guesses)
# good_guesses = good_guesses.any(axis=0)
# good_guesses = np.argwhere(good_guesses).T[0]
good_guesses = [np.argwhere(g).T[0] for g in good_guesses]
cache_path = os.path.join(ROOT_DIR,'src', 'singlePendulumCart', 'cache')
guess_cache_name = 'guessColumnsReal'
guess_cache_path = os.path.join(cache_path, guess_cache_name)
with open(guess_cache_path, 'wb') as f:
pickle.dump(good_guesses, f)
| StarcoderdataPython |
3231699 | from flask import Blueprint
# Blueprint collecting all API endpoints; registered by the app factory.
api = Blueprint('api', __name__)

# View modules are imported after the blueprint is created so their route
# decorators can attach to `api` (deliberate bottom imports that avoid a
# circular dependency).
from . import authentication
from . import posts
from . import users
from . import comments
from . import errors
| StarcoderdataPython |
1620696 | import unittest
from io import StringIO
from unittest import mock, TestCase
def get_test_dataframe():
    """Build a small mixed-dtype DataFrame (float/str/bool/category) for tests."""
    import numpy as np
    import pandas as pd

    columns = {
        "one": [-1, np.nan, 2.5],
        "two": ["foo", "bar", "baz"],
        "three": [True, False, True],
        "four": ["foo", "bar", "baz"],
    }
    frame = pd.DataFrame(columns, index=["a", "b", "c"])
    # Exercise the categorical dtype as well.
    frame["four"] = frame["four"].astype("category")
    return frame
class Test(TestCase):
    """Tests for encryption2fa password prompting, encryption and file I/O."""

    @mock.patch("sys.stdin")
    def test_uses_stdin_as_default_input(self, mock_input):
        # getpass._raw_input should read exactly one line from stdin.
        import getpass

        mock_input.readline.return_value = "input_string"
        getpass._raw_input(stream=StringIO())
        mock_input.readline.assert_called_once_with()

    @mock.patch("getpass.getpass")
    @mock.patch.dict(
        "os.environ",
        {"SALT_FOR_PASSWORD_HASH": "<PASSWORD>="},
    )
    def test_get_hashed_input_password(self, mock_getpass):
        # The derived key for a fixed password + salt must be stable.
        from encryption2fa.encryption import get_user_key

        mock_getpass.return_value = "dfdfdfdf"
        out = get_user_key()
        self.assertEqual(out, b"<PASSWORD>3bmsC6yr1TY0P8G-m-mws6rnOqQVSgpukVGwP-gs=")

    @mock.patch("getpass.getpass")
    @mock.patch.dict("os.environ", {"SALT_FOR_PASSWORD_HASH": "some <PASSWORD>"})
    def test_decrypt_data_pickle(self, mock_getpass):
        # Round trip a string through the pickle serializer.
        from encryption2fa.encryption import encrypt_data, decrypt_data
        from encryption2fa.serializer import serializer_pickle, deserializer_pickle

        mock_getpass.return_value = "dfdfdfdf"
        salt = "some salt"
        data = "secret data as a string"
        token = encrypt_data(data=data, serializer=serializer_pickle, salt=salt)
        out = decrypt_data(token=token, deserializer=deserializer_pickle, salt=salt)
        self.assertEqual(data, out)

    @mock.patch("getpass.getpass")
    @mock.patch.dict("os.environ", {"SALT_FOR_PASSWORD_HASH": "some salt"})
    def test_decrypt_data_dataframe(self, mock_getpass):
        # Round trip a DataFrame through the parquet serializer.
        from encryption2fa.encryption import encrypt_data, decrypt_data
        from encryption2fa.serializer import serializer_parquet, deserializer_parquet

        df = get_test_dataframe()
        mock_getpass.return_value = "dfdfdfdf"
        salt = "some salt"
        token = encrypt_data(data=df, serializer=serializer_parquet, salt=salt)
        df_out = decrypt_data(token=token, deserializer=deserializer_parquet, salt=salt)
        self.assertTrue(df.equals(df_out))

    @mock.patch("getpass.getpass")
    @mock.patch.dict("os.environ", {"SALT_FOR_PASSWORD_HASH": "short salt"})
    def test_decrypt_data_fail(self, mock_getpass):
        # A malformed token must raise InvalidToken, not decode silently.
        from encryption2fa.encryption import decrypt_data
        from encryption2fa.serializer import serializer_pickle
        from cryptography.fernet import InvalidToken

        mock_getpass.return_value = "dfdfdfdf"
        salt = "some salt"
        self.assertRaises(
            InvalidToken, decrypt_data, b"invalid token", serializer_pickle, salt
        )

    @mock.patch("getpass.getpass")
    @mock.patch.dict("os.environ", {"SALT_FOR_PASSWORD_HASH": "short salt"})
    def test_read_encrypted(self, mock_getpass):
        # Round trip a DataFrame through the encrypted-file helpers.
        from encryption2fa.encryption import save_encrypted, read_encrypted

        mock_getpass.return_value = "dfdfdfdf"
        testfile = "testfile.encrypted"
        data = get_test_dataframe()
        save_encrypted(data=data, file=testfile)
        data_out = read_encrypted(file=testfile)
        self.assertTrue(data.equals(data_out))
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
21660 | """
Methods for user login
"""
from cgi import escape
from google.appengine.ext import ndb
def login_fields_complete(post_data):
    """Validate that both login fields were submitted and HTML-escape them.

    Replaces the removed ``cgi.escape`` (deprecated since 3.2, dropped with
    the ``cgi`` module) with an exact ``html.escape``-based equivalent:
    ``&``, ``<``, ``>`` and double quotes are escaped, single quotes are not.

    :param post_data: mapping of POSTed form fields.
    :return: ``{'complete': True, 'user_id': ..., 'password': ...}`` with both
        values escaped when both fields are present and non-empty, otherwise
        ``{'complete': False}``.
    """
    import html

    def _escape_field(name):
        # Missing field -> False (matches the original KeyError handling);
        # present field -> cgi.escape(value, quote=True) equivalent.
        try:
            raw = post_data[name]
        except KeyError:
            return False
        return html.escape(raw, quote=False).replace('"', '&quot;')

    user_id = _escape_field('user_id')
    password = _escape_field('password')

    if user_id and password:
        return {'complete': True, 'user_id': user_id, 'password': password}
    return {'complete': False}
def valid_user_id_check(user_id):
    """Return True when a User entity with the given id exists in the datastore.

    :param user_id: datastore id of the User entity to look up.
    :return: bool
    """
    user_entity = ndb.Key('User', user_id).get()
    return user_entity is not None
| StarcoderdataPython |
182929 | <reponame>lauramv1832/ship_it
# coding=utf-8
from __future__ import unicode_literals
import collections
import os.path
import mock
import pytest
from ship_it import fpm, cli, get_version_from_setup_py
from ship_it.manifest import Manifest
# not a fixture to make sure this is never passed down a module level
__here__ = os.path.dirname(__file__)
def test_invoke_with_command_line(mock_local):
    """invoke_fpm should shell out to fpm with the fixed dir->rpm arguments."""
    assert not mock_local.called
    cli.invoke_fpm('test')
    mock_local.assert_called_once_with('fpm -f -s dir -t rpm test')
class TestGettingTheCommandLine(object):
    """Tests for cli.format_flags and cli.get_command_line."""

    # Using ordered dictionaries for an expected order in tests.
    @pytest.mark.parametrize('flag_list, expected', [
        ([('-f', 1)], ['-f 1']),
        ([('--flag', 1)], ['--flag 1']),
        # We shortcut some flag rules for you and do some normalization.
        ([('--f', 1)], ['-f 1']),
        ([('f', 1)], ['-f 1']),
        ([('flag', 1)], ['--flag 1']),
        ([('-flag', 1)], ['--flag 1']),
        # Multiple flags are fine
        ([('--f1', 'a'), ('--f2', 'b')], ['--f1 a', '--f2 b']),
        # Values can be blank and we don't have a trailing space.
        ([('-f', '')], ['-f']),
        # Values are quoted using normal cli rules
        ([('-f', 'some string')], ["-f 'some string'"]),
        # We raise errors too.
        ([('--', 'a')], ValueError),
        ([('-', 'a')], ValueError),
        ([('', 'a')], ValueError),
    ])
    def test_flags_from_dict(self, flag_list, expected):
        # When `expected` is a list, compare formatted output; otherwise it is
        # an exception type that format_flags must raise.
        if isinstance(expected, collections.Sequence):
            flag_list = list(cli.format_flags(flag_list))
            assert flag_list == expected
        else:
            with pytest.raises(expected):
                list(cli.format_flags(flag_list))

    @pytest.mark.parametrize('args,flags,expected', [
        (['arg1', 'arg2'], [('--flag', 'value')], '--flag value arg1 arg2')
    ])
    def test_flags_then_args_in_command_line(self, args, flags, expected):
        # Flags come before positional arguments in the assembled command line.
        assert cli.get_command_line(args, flags) == expected
@pytest.fixture
def manifest_path():
    """Path to the manifest.yaml stored next to this test module."""
    return os.path.join(__here__, 'manifest.yaml')
@pytest.fixture()
def setup_path():
    """Path to the setup.py stored next to this test module."""
    return os.path.join(__here__, 'setup.py')
@pytest.yield_fixture
def get_manifest_with_no_version(manifest_path):
    """Patch ship_it.get_manifest_from_path to return a version-less Manifest."""
    no_version = Manifest(manifest_path, manifest_contents=dict(name='ship_it'))
    with mock.patch('ship_it.validate_path'),\
            mock.patch('ship_it.get_manifest_from_path',
                       return_value=no_version) as mocked:
        yield mocked
class TestHandleNoVersionSpecified(object):
    """fpm() reads the version from setup.py only when the manifest lacks one."""

    @pytest.fixture
    def manifest_with_no_version(self, manifest_path):
        return Manifest(manifest_path, manifest_contents=dict(name='ship_it'))

    def _run_test(self, manifest_path, manifest):
        # Patch all side-effecting collaborators, run fpm(), and return the
        # mocked get_version_from_setup_py for call inspection.
        with mock.patch('ship_it.validate_path'), \
                mock.patch('ship_it.cli'), \
                mock.patch('ship_it._package_virtualenv_with_manifest'), \
                mock.patch('ship_it.get_version_from_setup_py') as mocked_get, \
                mock.patch('ship_it.get_manifest_from_path',
                           return_value=manifest) as mocked:
            assert not mocked_get.called
            fpm(manifest_path)
        return mocked_get

    def test_no_version_in_the_manifest_will_cause_us_to_check(self, manifest_path, setup_path,
                                                               manifest_with_no_version):
        mocked_get = self._run_test(manifest_path, manifest_with_no_version)
        mocked_get.assert_called_once_with(setup_path)

    def test_version_in_the_manifest_will_not_cause_us_to_check(self, manifest_path, setup_path,
                                                                manifest):
        mocked_get = self._run_test(manifest_path, manifest)
        assert not mocked_get.called
def test_getting_a_version_number_from_the_manifest(setup_path):
    """The example setup.py next to this module declares version 0.0.1."""
    version = get_version_from_setup_py(setup_path)
    assert version == '0.0.1'
| StarcoderdataPython |
89395 | import pygame
from pygame.locals import *
from sys import exit
from random import *
# Open a 640x480, 32-bit window and paint ten random rectangles on it.
pygame.init()
screen = pygame.display.set_mode((640, 480), 0, 32)

# Lock the surface once around all the draw calls, then release it.
screen.lock()
for _ in range(10):
    # Random draws happen in the same order as before: color, position, size.
    color = (randint(0, 255), randint(0, 255), randint(0, 255))
    pos = (randint(0, 639), randint(0, 479))
    # Size is chosen so the rectangle never extends past the window edge.
    size = (639 - randint(pos[0], 639), 479 - randint(pos[1], 479))
    pygame.draw.rect(screen, color, Rect(pos, size))
screen.unlock()

pygame.display.update()

# Keep the window alive until the user closes it.
while True:
    for event in pygame.event.get():
        if event.type != QUIT:
            continue
        pygame.quit()
        exit()
| StarcoderdataPython |
38064 |
#Step 1 :- Importing dependancies and train test data generated
from config import *
# Step 1 :- Load the train/test feature sets produced by feature engineering.
train_data = pd.read_csv("data/train_data/train_feature.csv")
test_data = pd.read_csv("data/test_data/test_feature.csv")

# Identifier / date-bookkeeping / engineered-categorical columns the model
# must not see.  Kept in ONE list so the train and test pipelines cannot
# drift apart (the original duplicated this list in two places).
_DROP_COLUMNS = ["id", "campaign_id", "c_freq_category", "c_rare_category",
                 "start_date", "end_date", "duration", "age_range",
                 "overall_freq_category", "overall_rare_category"]

# Step 2 :- Class-balance insight, then split into inputs X and target y.
# NOTE: X aliases train_data, so the in-place dropna keeps y row-aligned
# because y is extracted only afterwards.
print(list(train_data['redemption_status']).count(0) * 100 / len(train_data['redemption_status']), "% coupons not redeemed in training data ")
X = train_data
X.dropna(inplace=True)
X.drop(_DROP_COLUMNS, axis=1, inplace=True)
y = train_data['redemption_status']
X.drop('redemption_status', axis=1, inplace=True)

# Step 3 :- Hold out a third of the training data for evaluation.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=42)

# Step 4 :- Fit a Gaussian Naive Bayes classifier.
model = GaussianNB()
model.fit(X_train, y_train)

# Step 5 :- Predict on the held-out split.
y_pred = model.predict(X_test)

# Step 6 :- Persist the model for the inference engine.
# BUGFIX: the original `pickle.dump(model, open(filename, 'wb'))` never
# closed the file handle; a context manager guarantees the file is flushed
# and closed even if dumping raises.
filename = 'model/finalized_model_2.sav'
with open(filename, 'wb') as model_file:
    pickle.dump(model, model_file)

# Step 7 :- Held-out accuracy of the model.
print("Accuracy:", metrics.accuracy_score(y_test, y_pred))

# Step 8 :- Score the real test data.
Y = test_data
Y.drop(_DROP_COLUMNS, axis=1, inplace=True)
# NOTE(review): unlike training, NaN rows are dropped AFTER the columns here,
# so rows whose only NaNs were in dropped columns survive.  Preserved as-is
# because changing it would change the reported numbers -- confirm intent.
Y.dropna(inplace=True)
Predictions = model.predict(Y)

# Print results
print(list(Predictions).count(0) * 100 / len(Predictions), "% Coupans not redeemed in Test Data")
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.