hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
e76480471e6bea18ea40a9e4037a74ba815e2226
1,756
py
Python
bot/sign_api.py
rombintu/project12
a127b8c697cb6e7cc07ff9528a79888f72c4c4ca
[ "MIT" ]
null
null
null
bot/sign_api.py
rombintu/project12
a127b8c697cb6e7cc07ff9528a79888f72c4c4ca
[ "MIT" ]
null
null
null
bot/sign_api.py
rombintu/project12
a127b8c697cb6e7cc07ff9528a79888f72c4c4ca
[ "MIT" ]
null
null
null
import os from Crypto.Hash import SHA256 from Crypto.PublicKey import RSA from Crypto.Signature import pkcs1_15 def create_sign_of_file(file_name): # Генерируем новый ключ key = RSA.generate(1024, os.urandom) # Получаем хэш файла hesh = SHA256.new() with open(file_name, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): hesh.update(chunk) # Подписываем хэш signature = pkcs1_15.new(key).sign(hesh) # Получаем открытый ключ из закрытого pubkey = key.publickey() return pubkey, signature def check_of_file(file_name, pubkey, signature): hesh = SHA256.new() with open(file_name, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): hesh.update(chunk) # Переменная для проверки подписи check_sign = False try: pkcs1_15.new(pubkey).verify(hesh, signature) # Отличающийся хэш не должен проходить проверку # pkcs1_15.new(pubkey).verify(SHA256.new(b'test'), signature) # raise ValueError("Invalid signature") check_sign = True return check_sign except Exception as e: print(e) return check_sign def create_sign(content): key = RSA.generate(1024, os.urandom) hesh = SHA256.new(content.encode()) signature = pkcs1_15.new(key).sign(hesh) pubkey = key.publickey() return pubkey, signature def check(content, pubkey, signature): hesh = SHA256.new(content.encode()) check_sign = False try: pkcs1_15.new(pubkey).verify(hesh, signature) check_sign = True return check_sign except Exception as e: print(e) return check_sign # pubkey, sign = create_sign('README.md') # print(check('README.md', pubkey, sign))
28.786885
109
0.656606
235
1,756
4.8
0.319149
0.06383
0.044326
0.042553
0.635638
0.54344
0.495567
0.442376
0.359043
0.359043
0
0.038922
0.23918
1,756
61
110
28.786885
0.805389
0.198747
0
0.809524
0
0
0.002863
0
0
0
0
0
0
1
0.095238
false
0
0.095238
0
0.333333
0.047619
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e764cbf89763dd805ae8cb69ca387c4c6438236e
38,685
py
Python
vyperlogix/decorators/pyanno.py
raychorn/chrome_gui
f1fade70b61af12ee43c55c075aa9cfd32caa962
[ "CC0-1.0" ]
1
2020-09-29T01:36:33.000Z
2020-09-29T01:36:33.000Z
vyperlogix/decorators/pyanno.py
raychorn/chrome_gui
f1fade70b61af12ee43c55c075aa9cfd32caa962
[ "CC0-1.0" ]
null
null
null
vyperlogix/decorators/pyanno.py
raychorn/chrome_gui
f1fade70b61af12ee43c55c075aa9cfd32caa962
[ "CC0-1.0" ]
null
null
null
""" Pyanno Python Annotations version 0.76 Uses the new python decorators feature. Do not apply these annotations within this module. charlesmchen@gmail.com for documentation, see the index.html file included in the distribution. http://fightingquaker.com/pyanno/ """ #from __future__ import with_statement import types, inspect, sys try: from sip import wrappertype USE_SIP = True except: wrappertype = None USE_SIP = False ''' Runtime checking can be disabled with this global. ''' DO_RUNTIME_VALIDATION = True """ The constants can be used in place of the type constants defined in the python "types" module. """ selfType = 'selfType' # singleton placeholder constant classType = 'classType' # singleton placeholder constant ignoreType = 'ignoreType' callableType = 'callableType' """ ClassName can be used to avoid circular imports and other illegal references. See documentation. """ class ClassName: def __init__(self, classname): self.classname = classname def __str__(self): return self.classname """ Exceptions thrown by the Pyanno annotations """ class AnnotationException(Exception): def __init__(self, description): description = __getCallerDescription__() + description Exception.__init__(self, description) class AnnotationMethodError(AnnotationException): def __init__(self, description): AnnotationException.__init__(self, description) class AbstractMethodError(AnnotationException): def __init__(self, description): AnnotationException.__init__(self, description) class PrivateMethodError(AnnotationException): def __init__(self, description): AnnotationException.__init__(self, description) class ProtectedMethodError(AnnotationException): def __init__(self, description): AnnotationException.__init__(self, description) class ReturnTypeError(AnnotationException): def __init__(self, description): AnnotationException.__init__(self, description) class ParamTypeError(AnnotationException): def __init__(self, description): AnnotationException.__init__(self, description) 
PYANNO_ERRORS = (ParamTypeError, ReturnTypeError, AbstractMethodError, AnnotationMethodError, PrivateMethodError, ProtectedMethodError, ) """ ----------------------------------------------------- """ def __annotationHasArguments__(positionalParameters, keywordParameters): if len(keywordParameters) == 0 and len(positionalParameters) == 1 and type(positionalParameters[0]) is types.FunctionType: return False return True def __copyPropertiesToWrapper__(func, wrapper): wrapper.__name__ = func.__name__ wrapper.__dict__ = func.__dict__ wrapper.__doc__ = func.__doc__ wrapper.__module__ = func.__module__ ''' This annotation does no checking; strictly for commenting purposes ''' def noopAnnotation(*positionalParameters, **keywordParameters): if not __annotationHasArguments__(positionalParameters, keywordParameters): func = positionalParameters[0] __addAnnotationDoc__(func, '@noopAnnotation', '@noopAnnotation ' + str(positionalParameters) \ + ', ' + str(keywordParameters) + '') return func if len(positionalParameters) > 0 or len(keywordParameters) > 0: raise AnnotationMethodError('noop method annotation doesn\'t accept arguments.') def decorator(func): __addAnnotationDoc__(func, '@noopAnnotation', '@noopAnnotation ' + str(positionalParameters) \ + ', ' + str(keywordParameters) + '') return func return decorator class __privateMethodDecorator__: def __init__(self, funcModule): self.__funcModule__ = funcModule def __call__(self, func): if not DO_RUNTIME_VALIDATION: return func def wrapper(*positionalValues, **keywordValues): stack = inspect.stack() callerFrame = stack[1] callerModule = callerFrame[1] if callerModule != self.__funcModule__: raise PrivateMethodError("Private method called from another module: " + callerModule) return func(*positionalValues, **keywordValues) wrapper.__privateMethod__ = True __copyPropertiesToWrapper__(func, wrapper) __addAnnotationDoc__(func, '@privateMethod', '@privateMethod') wrapper.__wrappedFunction__ = func argspec = 
__getFunctionArgumentsRecursive__(func) wrapper.__func_argspec__ = argspec return wrapper ''' This annotation throws an error if the decorated function is called from another module. ''' def privateMethod(*positionalParameters, **keywordParameters): stack = inspect.stack() if len(stack) < 2: raise PrivateMethodError("Couldn\'t retrieve stack.") callerFrame = stack[1] callerModule = callerFrame[1] if not __annotationHasArguments__(positionalParameters, keywordParameters): func = positionalParameters[0] return __privateMethodDecorator__(callerModule)(func) if len(positionalParameters) > 0 or len(keywordParameters) > 0: raise AnnotationMethodError('private method annotation doesn\'t accept arguments.') return __privateMethodDecorator__(callerModule) class __protectedMethodDecorator__: def __init__(self, funcModule): self.__funcModule__ = funcModule def __call__(self, func): if not DO_RUNTIME_VALIDATION: return func def wrapper(*positionalValues, **keywordValues): stack = inspect.stack() callerFrame = stack[1] callerModule = callerFrame[1] if callerModule != self.__funcModule__: import os.path if os.path.dirname( callerModule ) != os.path.dirname( self.__funcModule__ ): # print 'funcPackage ', funcPackage # print 'callerModule', callerModule, self.__funcModule__ raise ProtectedMethodError("Protected method called from another module: " + callerModule) return func(*positionalValues, **keywordValues) # raise ProtectedMethodError("Abstract Method called.") wrapper.__protectedMethod__ = True __copyPropertiesToWrapper__(func, wrapper) __addAnnotationDoc__(func, '@protectedMethod', '@protectedMethod') wrapper.__wrappedFunction__ = func argspec = __getFunctionArgumentsRecursive__(func) wrapper.__func_argspec__ = argspec return wrapper ''' This annotation throws an error if the decorated function is called from a module in another package. 
''' def protectedMethod(*positionalParameters, **keywordParameters): stack = inspect.stack() if len(stack) < 2: raise ProtectedMethodError("Couldn\'t retrieve stack.") callerFrame = stack[1] callerModule = callerFrame[1] if not __annotationHasArguments__(positionalParameters, keywordParameters): func = positionalParameters[0] return __protectedMethodDecorator__(callerModule)(func) if len(positionalParameters) > 0 or len(keywordParameters) > 0: raise AnnotationMethodError('protected method annotation doesn\'t accept arguments.') return __protectedMethodDecorator__(callerModule) ''' This annotation does no checking; strictly for commenting purposes. This decorator expects its arguments to be a list of exceptions. ''' def raises(*positionalParameters, **keywordParameters): if not __annotationHasArguments__(positionalParameters, keywordParameters): func = positionalParameters[0] __addAnnotationDoc__(func, '@raises', '@raises ' + str(positionalParameters) \ + ', ' + str(keywordParameters) + '') return func if len(keywordParameters) > 0: raise AnnotationMethodError('raises method annotation doesn\'t accept keyword arguments.') exceptions = [] for positionalParameter in positionalParameters: if not issubclass(positionalParameter, BaseException): raise AnnotationMethodError('arguments to raises method annotation must be Exceptions (a subclass of BaseException).' 
) if positionalParameter in exceptions: raise AnnotationMethodError('Exception appears twice in arguments to @raises annotation: ' + positionalParameter.__name__) exceptions.append( positionalParameter ) def decorator(func): __addAnnotationDoc__(func, '@raises', '@raises ' + str(positionalParameters) \ + ', ' + str(keywordParameters) + '') return func return decorator def __addAnnotationDoc__(func, key, value): if not hasattr(func, '__annotation_docs__'): func.__annotation_docs__ = {} func.__annotation_docs__[key] = value def __abstractMethodDecorator__(func): if not DO_RUNTIME_VALIDATION: return func def wrapper(*positionalValues, **keywordValues): raise AbstractMethodError("Abstract Method called.") wrapper.__abstractMethod__ = True __copyPropertiesToWrapper__(func, wrapper) __addAnnotationDoc__(func, '@abstractMethod', '@abstractMethod') wrapper.__wrappedFunction__ = func argspec = __getFunctionArgumentsRecursive__(func) wrapper.__func_argspec__ = argspec return wrapper ''' This annotation raises an exception if the decorated function is ever called. ''' def abstractMethod(*positionalParameters, **keywordParameters): if not __annotationHasArguments__(positionalParameters, keywordParameters): func = positionalParameters[0] return __abstractMethodDecorator__(func) if len(positionalParameters) > 0 or len(keywordParameters) > 0: raise AnnotationMethodError('abstract method annotation doesn\'t accept arguments.') return __abstractMethodDecorator__ def __deprecatedMethodDecorator__(func): if not DO_RUNTIME_VALIDATION: return func def wrapper(*positionalValues, **keywordValues): print str(func.__name__) + ' is deprecated.' 
func(*positionalValues, **keywordValues) wrapper.__deprecatedMethod__ = True __copyPropertiesToWrapper__(func, wrapper) __addAnnotationDoc__(func, '@deprecatedMethod', '@deprecatedMethod') wrapper.__wrappedFunction__ = func argspec = __getFunctionArgumentsRecursive__(func) wrapper.__func_argspec__ = argspec return wrapper ''' This annotation prints a warning if the decorated function is ever called. ''' def deprecatedMethod(*positionalParameters, **keywordParameters): if not __annotationHasArguments__(positionalParameters, keywordParameters): func = positionalParameters[0] return __deprecatedMethodDecorator__(func) if len(positionalParameters) > 0 or len(keywordParameters) > 0: raise AnnotationMethodError('deprecated method annotation doesn\'t accept arguments.') return __deprecatedMethodDecorator__ def __dumpFunc__(func, prefix = ''): print if len(prefix) > 0: prefix += ' ' print prefix + "__dumpFunc__ " + str(func) + " " + str(type(func)) print prefix + '\t' + "__call__" + str(func.__call__) + " " + str(type(func.__call__)) for name in dir(func): if hasattr(func, name): print prefix + "\t" + str(name) + ": " + str(getattr(func, name)) else: print prefix + "\t" + str(name) print def __ParamErrorFactory__(funcName, msg): return ParamTypeError(funcName + " received " + msg) def __noParamsDecorator__(func): if not DO_RUNTIME_VALIDATION: return func def wrapper(*positionalValues, **keywordValues): if len(positionalValues) > 0 or len(keywordValues) > 0: raise ParamTypeError(func.__name__ + ' has no arguments: ' + str(positionalValues) + \ ', ' + str(keywordValues)) return func(*positionalValues, **keywordValues) __copyPropertiesToWrapper__(func, wrapper) __addAnnotationDoc__(wrapper, '@parameterTypes', '@parameterTypes None') argspec = __getFunctionArgumentsRecursive__(func) wrapper.__func_argspec__ = argspec wrapper.__wrappedFunction__ = func return wrapper def __getFunctionArgumentsRecursive__(func): if hasattr(func, '__func_argspec__'): argspec = getattr(func, 
'__func_argspec__') else: argspec = inspect.getargspec(func) return argspec ''' This annotation does runtime type-checking on the arguments passed to the decorated function. ''' def parameterTypes(*positionalParameters, **keywordParameters): if keywordParameters: raise AnnotationMethodError('Don\'t annotate parameter types with keywords.') if not __annotationHasArguments__(positionalParameters, keywordParameters): func = positionalParameters[0] return __noParamsDecorator__(func) if not positionalParameters and not keywordParameters: return __noParamsDecorator__ def decorator(func): if not DO_RUNTIME_VALIDATION: return func argspec = __getFunctionArgumentsRecursive__(func) #__dumpFunc__(func) #print "noResultDecorator: " + str(func) + " " + str(type(func)) def wrapper(*positionalValues, **keywordValues): try: # charles, we want more unique names than __parsedParamTypes__ and __unparsedParamTypes__ if not hasattr(func, '__parsedParamTypes__'): #print 'parsing params' #__dumpFunc__(func) func.__parsedParamTypes__ = __parseParamTypes__(func.__name__, func.func_globals, argspec, func.__unparsedParamTypes__) positionalTypes, keywordTypes = func.__parsedParamTypes__ ''' print func.__name__ + ' param ' + "__unparsedParamTypes__: " + str(func.__unparsedParamTypes__) + " " + str(type(func.__unparsedParamTypes__)) print func.__name__ + ' param ' + "correctTypes: " + str(correctTypes) + " " + str(type(correctTypes)) ''' __checkParamTypes__(func.__name__, __ParamErrorFactory__, positionalValues, keywordValues, positionalTypes, keywordTypes, argspec, False) return func(*positionalValues, **keywordValues) except BaseException, e: raise e wrapper.__func_argspec__ = argspec __copyPropertiesToWrapper__(func, wrapper) func.__unparsedParamTypes__ = positionalParameters __addAnnotationDoc__(wrapper, '@parameterTypes', '@parameterTypes ' + str(positionalParameters) \ + ', ' + str(keywordParameters) + '') return wrapper return decorator def __checkParamType__(funcName, errorFactory, 
values, correctTypes, i, value, correctType, debug = False): # is none always okay? if type(value) is types.NoneType: return errorMsg = "unexpected value["+str(i)+"]: " # + str(value) if type(value) == types.InstanceType: errorMsg += ' (' + str(value.__class__) + ')' else: errorMsg += ' (' + str(type(value)) + ')' errorMsg += ", expected: " + str(correctType) \ + " (" + str(values) + "), expected: " + str(correctTypes) \ + " " errorFactoryArgs = (funcName, errorMsg, ) # can we validate this assertoin more narrowly and check the class type? if correctType is selfType: # correctType = types.InstanceType return elif correctType is ignoreType: return elif correctType is classType: correctType = types.ClassType global USE_SIP if USE_SIP and type(correctType) is wrappertype: if type(type(value)) is wrappertype: if not isinstance(value, correctType): raise errorFactory(*errorFactoryArgs) return raise errorFactory(*errorFactoryArgs) if isinstance(correctType, ClassName): if USE_SIP and type(type(value)) is wrappertype: pass elif type(value) is types.InstanceType: pass else: raise errorFactory(*errorFactoryArgs) # declared classname must match name of class or superclass. 
mro = inspect.getmro(value.__class__) for item in mro: #print 'item.__name__', item.__name__ if item.__name__ == correctType.classname: return raise errorFactory(*errorFactoryArgs) #if type(value) == types.InstanceType and type(correctType) == types.ClassType: if type(correctType) == types.ClassType: if not isinstance(value, correctType): raise errorFactory(*errorFactoryArgs) return if type(correctType) is dict: keyType = correctType.keys()[0] valueType = correctType[keyType] if type(value) is not dict: raise errorFactory(*errorFactoryArgs) for key in value.keys(): __checkParamType__(funcName, errorFactory, values, correctTypes, i, key, keyType, debug) # print 'value[key]', value, type(value), key, type(key) subvalue = value[key] __checkParamType__(funcName, errorFactory, values, correctTypes, i, subvalue, valueType, debug) return elif type(correctType) in (tuple, list): if type(value) is not type(correctType): raise errorFactory(*errorFactoryArgs) elemType = correctType[0] for elem in value: __checkParamType__(funcName, errorFactory, values, correctTypes, i, elem, elemType, debug) return elif correctType is str: if type(value) in (str, unicode): return raise errorFactory(*errorFactoryArgs) elif correctType is float: if type(value) in (int, float): return raise errorFactory(*errorFactoryArgs) elif correctType is callableType: if callable(value): return # if type(value) in (types.BuiltinFunctionType, types.BuiltinMethodType, types.FunctionType, \ # types.GeneratorType, types.LambdaType, types.MethodType, \ # types.UnboundMethodType): # return raise errorFactory(*errorFactoryArgs) elif type(value) == correctType: return #be more specific about tuple index #print 'problem: ' + funcName +" correctTypes: " + str(correctTypes) raise errorFactory(*errorFactoryArgs) def __normalizeValues__(funcName, errorFactory, positionalValues, keywordValues, \ positionalTypes, keywordTypes, argspec, debug = False): # debug = True if debug: print "__normalizeValues__ funcName: " + 
funcName print "__normalizeValues__ argspec: " + str(argspec) + " " + str(type(argspec)) args = argspec[0] varargs = argspec[1] varkw = argspec[2] defaults = argspec[3] totalValues = len(positionalValues) if keywordValues: totalValues += len(keywordValues) if totalValues > len(args): raise ParamTypeError( funcName + ': function too many arguments: ' + str(positionalValues) + ', ' + str(keywordValues)) #charles: TODO: not handling varargs, varkw if debug: print '\t', 'args', funcName, args print '\t', 'varargs', funcName, varargs print '\t', 'varkw', funcName, varkw print '\t', 'defaults', funcName, defaults if not defaults: requiredParamCount = len(args) else: requiredParamCount = len(args) - len(defaults) if debug: print '\t', 'requiredParamCount', funcName, requiredParamCount requiredValues = [] optionalValues = {} if len(positionalValues) > requiredParamCount: for index in xrange(len(positionalValues)): value = positionalValues[index] if index < requiredParamCount: requiredValues.append(value) else: #if index >= len(args): argname = args[index] optionalValues[argname] = value for keyword in keywordValues: if keyword in optionalValues: raise ParamTypeError('more than one value for paramter: ' + keyword) optionalValues[keyword] = keywordValues[keyword] else: requiredValues.extend(positionalValues) keywords = keywordValues.keys() if debug: print '\t', 'keywords', funcName, keywords, type(keywords) while len(requiredValues) < requiredParamCount: index = len(requiredValues) argname = args[index] if debug: print '\t', 'argname', funcName, argname if argname not in keywords: raise ParamTypeError('function missing required argument: ' + argname) value = keywordValues[argname] keywords.remove(argname) requiredValues.append(value) for keyword in keywords: optionalValues[keyword] = keywordValues[keyword] if debug: print 'requiredValues', requiredValues print 'optionalValues', optionalValues return requiredValues, optionalValues def __checkParamTypes__(funcName, 
errorFactory, positionalValues, keywordValues, \ positionalTypes, keywordTypes, argspec, debug = False): #debug = True if debug: print "checkTypes positionalValues: " + str(positionalValues) + " " + str(type(positionalValues)) print "checkTypes keywordValues: " + str(keywordValues) + " " + str(type(keywordValues)) print "checkTypes positionalTypes: " + str(positionalTypes) + " " + str(type(positionalTypes)) print "checkTypes keywordTypes: " + str(keywordTypes) + " " + str(type(keywordTypes)) positionalValues, keywordValues = __normalizeValues__(funcName, errorFactory, positionalValues, keywordValues, \ positionalTypes, keywordTypes, argspec, debug) if debug: print "checkTypes positionalValues: " + str(positionalValues) + " " + str(type(positionalValues)) print "checkTypes keywordValues: " + str(keywordValues) + " " + str(type(keywordValues)) debug = False __checkValueTypes__(funcName, errorFactory, positionalValues, keywordValues, \ positionalTypes, keywordTypes, debug) def __checkValueTypes__(funcName, errorFactory, positionalValues, keywordValues, \ positionalTypes, keywordTypes, debug = False): if debug: print "checkTypes positionalValues: " + str(positionalValues) + " " + str(type(positionalValues)) print "checkTypes keywordValues: " + str(keywordValues) + " " + str(type(keywordValues)) print "checkTypes positionalTypes: " + str(positionalTypes) + " " + str(type(positionalTypes)) print "checkTypes keywordTypes: " + str(keywordTypes) + " " + str(type(keywordTypes)) if not positionalTypes: if positionalValues: raise errorFactory(funcName, "unexpected positional arguments (" + str(positionalValues) + ")") else: if not positionalValues: raise errorFactory(funcName, "missing positional arguments (" + str(positionalValues) + ")") if len(positionalValues) != len(positionalTypes): print "checkTypes positionalValues: " + str(positionalValues) + " " + str(type(positionalValues)) print "checkTypes keywordValues: " + str(keywordValues) + " " + str(type(keywordValues)) 
print "checkTypes positionalTypes: " + str(positionalTypes) + " " + str(type(positionalTypes)) print "checkTypes keywordTypes: " + str(keywordTypes) + " " + str(type(keywordTypes)) if len(positionalValues) < len(positionalTypes): raise errorFactory(funcName, "missing positional arguments (" + str(positionalValues) + ")") else: raise errorFactory(funcName, "unexpected positional arguments (" + str(positionalValues) + ")") for index in range(len(positionalValues)): positionalValue = positionalValues[index] positionalType = positionalTypes[index] __checkParamType__(funcName, errorFactory, positionalValues, positionalTypes, index, \ positionalValue, positionalType, debug) if keywordValues: if not keywordTypes: raise errorFactory(funcName, "unexpected keyword arguments (" + str(keywordValues) + ")") for keyword in keywordValues: keywordValue = keywordValues[keyword] if keyword not in keywordTypes: raise errorFactory(funcName, "unexpected keyword argument (" + str(keyword) + ": " + \ str(keywordValue) + ")") keywordType = keywordTypes[keyword] __checkParamType__(funcName, errorFactory, keywordValues, keywordTypes, keyword, \ keywordValue, keywordType, debug) def __checkResultTypes__(funcName, errorFactory, values, positionalTypes, debug = False): if len(positionalTypes) == 1: # special case. returned results not always in a tuple values = [ values ] keywordTypes = None # return values don't use keywords. 
__checkValueTypes__(funcName, errorFactory, values, None, positionalTypes, keywordTypes, debug) def __ReturnErrorFactory__(funcName, msg): return ReturnTypeError(funcName + " returned " + msg) def __parseStringType__(func_name, func_globals, typeString, checkForSelfType): result = [] if checkForSelfType: typeString = typeString.strip() #print "checkForSelfType: " + typeString selfTypeName = 'selfType' if typeString.startswith(selfTypeName): result.append(selfType) typeString = typeString[len(selfTypeName):] #print "checkForSelfType: " + typeString typeString = typeString.strip() canBeEmpty = True if len(typeString)>0: if typeString[0] != ',': raise AnnotationMethodError(func_name + ': Missing comma after selfType: ' + str(typeStrings)) typeString = typeString[1:] typeString = typeString.strip() #print '\t\t'+"__parseStringType__: " + str(typeString) + " " + str(type(typeString)) if len(typeString) < 1: return result #print '\t\t'+"__parseStringType__: " + str(typeString) + " " + str(type(typeString)) evals = eval('[' + typeString + ']', func_globals) for evaled in evals: #print '\t\t\t'+"evaled.1: " + str(evaled) + " " + str(type(evaled)) result.append(__parseType__(func_name, evaled)) return result def __parseType__(func_name, arg): #print '\t'+"arg: " + str(arg) + " " + str(type(arg)) if arg in (selfType, ignoreType, classType, callableType) : return arg elif isinstance(arg, ClassName): return arg elif type(arg) is types.TypeType: return arg elif type(arg) is types.ClassType: return arg elif USE_SIP and type(arg) is wrappertype: return arg elif type(arg) is dict: keys = arg.keys() if len(keys) == 0: return dict if len(keys) > 1: raise AnnotationMethodError(func_name + ': Unknown annotation argument: ' + str(arg) + " " + str(type(arg))) key = keys[0] __parseType__(func_name, key) value = arg[key] __parseType__(func_name, value) return arg elif type(arg) in (tuple, list,): if len(arg) == 0: return type(arg) if len(arg) > 1: raise AnnotationMethodError(func_name + 
': Unknown annotation argument: ' + str(arg) + " " + str(type(arg))) __parseType__(func_name, arg[0]) return arg else: raise AnnotationMethodError(func_name + ': Unknown annotation argument: ' + str(arg) + " " + str(type(arg))) def __parseReturnTypes__(func_name, func_globals, rawTypes): # return __parseReturnTypes__(func_name, func_globals, args, False) checkForSelfType = False # if True: # print '\t'+"__parseReturnTypes__ rawTypes: " + str(rawTypes) + " " + str(type(rawTypes)) parsedTypes = __evaluateTypes__(func_name, func_globals, rawTypes, checkForSelfType) # if True: # print '\t'+"__parseReturnTypes__ parsedTypes: " + str(parsedTypes) + " " + str(type(parsedTypes)) requiredTypes = parsedTypes optionalTypes = {} return requiredTypes, optionalTypes def __parseExceptionTypes__(func_name, func_globals, rawTypes): checkForSelfType = False parsedTypes = __evaluateTypes__(func_name, func_globals, rawTypes, checkForSelfType) requiredTypes = parsedTypes optionalTypes = {} return requiredTypes, optionalTypes def __parseParamTypes__(func_name, func_globals, argspec, rawTypes): checkForSelfType = True # print 'argspec', argspec, type(argspec) argumentNames = argspec[0] varargs = argspec[1] varkw = argspec[2] defaults = argspec[3] # print '\t', 'argumentNames', argumentNames # print '\t', 'varargs', varargs # print '\t', 'varkw', varkw # print '\t', 'defaults', defaults # annotation argument: \'' + str(rawPositionalType) + "'") # if True: # print '\t'+"__parseParamTypes__ rawTypes: " + str(rawTypes) + " " + str(type(rawTypes)) parsedTypes = __evaluateTypes__(func_name, func_globals, rawTypes, checkForSelfType) # if True: # print '\t'+"__parseParamTypes__ parsedTypes: " + str(parsedTypes) + " " + str(type(parsedTypes)) if len(parsedTypes) < len(argumentNames): raise AnnotationMethodError(func_name + ': Missing param types (' + str(len(rawTypes)) + ' < ' + str(len(argumentNames)) + ')') elif len(parsedTypes) > len(argumentNames): raise AnnotationMethodError(func_name + ': 
Missing param types (' + str(len(rawTypes)) + ' > ' + str(len(argumentNames)) + ')') if not defaults: requiredParamCount = len(argumentNames) else: requiredParamCount = len(argumentNames) - len(defaults) requiredParams = [] for index in xrange(requiredParamCount): requiredParams.append(parsedTypes[index]) optionalParams = {} if defaults: for index in xrange(len(defaults)): optionalParams[argumentNames[requiredParamCount+index]] = parsedTypes[requiredParamCount+index] # if False: # print '\t'+"__parseParamTypes__ requiredParams: " + str(requiredParams) + " " + str(type(requiredParams)) # print '\t'+"__parseParamTypes__ optionalParams: " + str(optionalParams) + " " + str(type(optionalParams)) return requiredParams, optionalParams def __evaluateTypes__(func_name, func_globals, rawTypes, checkForSelfType): parsedTypes = [] isFirstParsedType = True for rawType in rawTypes: if type(rawType) is str: parsed = __parseStringType__(func_name, func_globals, rawType, \ checkForSelfType and isFirstParsedType) if parsed is None: #if not canBeEmpty: if len(rawTypes) > 1: raise AnnotationMethodError(func_name + ': Unknown annotation argument: \'' + str(rawParsedType) + "'") # okay to pass empty string if only arg. means no types. 
continue parsedTypes.extend(parsed) else: parsedTypes.append(__parseType__(func_name, rawType)) isFirstParsedType = False if False: print '\t'+"__evaluateTypes__ parsedTypes: " + str(parsedTypes) + " " + str(type(parsedTypes)) return parsedTypes def __noResultDecorator__(func): if not DO_RUNTIME_VALIDATION: return func #__dumpFunc__(func) #print "noResultDecorator: " + str(func) + " " + str(type(func)) def wrapper(*positionalValues, **keywordValues): result = func(*positionalValues, **keywordValues) if result is not None: raise ReturnTypeError(func.__name__ + ' should not return a value: ' + str(result)) return result __copyPropertiesToWrapper__(func, wrapper) __addAnnotationDoc__(wrapper, '@returnType', '@returnType None') wrapper.__wrappedFunction__ = func argspec = __getFunctionArgumentsRecursive__(func) wrapper.__func_argspec__ = argspec return wrapper def __getCallerDescription__(): stack = inspect.stack() callerFrame = None for index in xrange(len(stack)): frame = stack[index] module = frame[1] if __file__ != module: callerFrame = frame break if not callerFrame: print 'missing callerFrame!' callerFrame = stack[0] # print'frame', frame callerModule = callerFrame[1] # print'callerModule', callerModule import os.path callerModuleName = os.path.basename(callerModule) # print'callerModuleName', callerModuleName callerLine = callerFrame[2] # print'callerLine', callerLine callerDescription = callerModuleName + '(' + str(callerLine) + '): ' # print 'callerDescription', callerDescription return callerDescription ''' This annotation does runtime type-checking on the values returned by the decorated function. 
''' def returnType(*positionalParameters, **keywordParameters): ''' use like this: @returnType def aMethodThatReturnsNothing(self): pass @returnType () def aMethodThatReturnsNothing(self): pass @returnType ( int ) def aMethodThatReturnsAnInt(self): pass ''' if True: # try: if keywordParameters: raise AnnotationMethodError( 'return values can\'t have keywords.') if not __annotationHasArguments__(positionalParameters, keywordParameters): func = positionalParameters[0] return __noResultDecorator__(func) if not positionalParameters and not keywordParameters: return __noResultDecorator__ #unparsedReturnTypes = args def decorator(func): if not DO_RUNTIME_VALIDATION: return func #__dumpFunc__(func) #print "noResultDecorator: " + str(func) + " " + str(type(func)) def wrapper(*positionalValues, **keywordValues): try: values = func(*positionalValues, **keywordValues) # charles, we want more unique names than __parsedReturnTypes__ and __unparsedReturnTypes__ if not hasattr(func, '__parsedReturnTypes__'): #print 'parsing' #__dumpFunc__(func) func.__parsedReturnTypes__ = __parseReturnTypes__(func.__name__, func.func_globals, func.__unparsedReturnTypes__) positionalTypes, keywordTypes = func.__parsedReturnTypes__ ''' print "__unparsedReturnTypes__: " + str(func.__unparsedReturnTypes__) + " " + str(type(func.__unparsedReturnTypes__)) print "correctTypes: " + str(correctTypes) + " " + str(type(correctTypes)) ''' __checkResultTypes__(func.__name__, __ReturnErrorFactory__, values, positionalTypes, False) return values except BaseException, e: raise e # raise e, None, sys.exc_info()[2] __copyPropertiesToWrapper__(func, wrapper) func.__unparsedReturnTypes__ = positionalParameters __addAnnotationDoc__(wrapper, '@returnType', '@returnType ' + str(positionalParameters) \ + ', ' + str(keywordParameters) + '') argspec = __getFunctionArgumentsRecursive__(func) wrapper.__func_argspec__ = argspec return wrapper return decorator # except AnnotationException, e: # # # frame = 
inspect.currentframe() # from utils.Debug import dirDebug # # dirDebug('frame', frame) # # frameinfo = inspect.getframeinfo(frame) # ## dirDebug('frameinfo', frameinfo) # # print'frameinfo', frameinfo # # stack = inspect.stack() # print'stack', stack # frame = stack[1] # # dirDebug('frame', frame) # print'frame', frame # callerModule = frame[1] # print'callerModule', callerModule # callerLine = frame[1] # print'callerLine', callerLine # ## frameinfo = inspect.getframeinfo(frame) ### dirDebug('frameinfo', frameinfo) ## print'frameinfo', frameinfo # ## raise Exception('') # tb = sys.exc_info()[2] # while True: # tbframe = tb.tb_frame # print'tbframe', tbframe # tbframeinfo = inspect.getframeinfo(tbframe) # print'tbframeinfo', tbframeinfo # tbframeModule = tbframeinfo[0] ## tbframeModule', tbframeModule # import os.path # modulename = os.path.basename(tbframeModule) # print'modulename', modulename # print 'dir', dir() # print '__file__', __file__ # print __file__ == tbframeModule # break # # print 'tb', tb # dirDebug('tb', tb) # raise e, None, sys.exc_info()[2]
36.323944
158
0.632338
3,248
38,685
7.242303
0.115456
0.011606
0.011308
0.008077
0.474217
0.413298
0.369468
0.339327
0.303023
0.273435
0
0.002567
0.274939
38,685
1,064
159
36.358083
0.836073
0.123588
0
0.446228
0
0.004815
0.073774
0.000666
0
0
0
0.00094
0
0
null
null
0.00321
0.006421
null
null
0.054575
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
e765459a83dd0305141cdf26d6934ff99915432e
495
py
Python
apps/batchResize.py
mshafiei/cvutils
5805229d8822a9ee4a3c63e060358aca96fe5338
[ "MIT" ]
2
2021-07-19T12:38:14.000Z
2021-11-01T21:30:36.000Z
apps/batchResize.py
mshafiei/cvgutils
ea93b3cb70a969c3b814f5a9f7672c6e00de5c61
[ "MIT" ]
null
null
null
apps/batchResize.py
mshafiei/cvgutils
ea93b3cb70a969c3b814f5a9f7672c6e00de5c61
[ "MIT" ]
null
null
null
import numpy as np import glob import cvgutils.Image as cvgim import os import tqdm indir = '/home/mohammad/Projects/NRV/dataset/envmaps/*.exr' outdir = '/home/mohammad/Projects/NRV/dataset/envmaps_512_1024' inimgs = glob.glob(indir) max16 = (2**16-1) for img in tqdm.tqdm(inimgs): im = cvgim.imread(img) im = cvgim.resize(im,dx=1024,dy=512) fn = os.path.join(outdir,os.path.basename(img).replace('4k','1024x512')) # im = (im * max16).astype(np.uint16) cvgim.imwrite(fn, im)
30.9375
76
0.705051
80
495
4.3375
0.55
0.069164
0.115274
0.132565
0.213256
0.213256
0
0
0
0
0
0.074766
0.135354
495
15
77
33
0.735981
0.070707
0
0
0
0
0.242358
0.220524
0
0
0
0
0
1
0
false
0
0.357143
0
0.357143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
e765541ab914b48a4f43b07b88c06ff9c0e6465a
44
py
Python
cli/__init__.py
charlesreid1/dahak-taco
b7ab6d510f4989e495667c745c4dd818d1732823
[ "BSD-3-Clause" ]
null
null
null
cli/__init__.py
charlesreid1/dahak-taco
b7ab6d510f4989e495667c745c4dd818d1732823
[ "BSD-3-Clause" ]
null
null
null
cli/__init__.py
charlesreid1/dahak-taco
b7ab6d510f4989e495667c745c4dd818d1732823
[ "BSD-3-Clause" ]
1
2018-03-16T21:27:28.000Z
2018-03-16T21:27:28.000Z
_program = "taco" __version__ = "1.0.1beta"
14.666667
25
0.681818
6
44
4.166667
1
0
0
0
0
0
0
0
0
0
0
0.078947
0.136364
44
2
26
22
0.578947
0
0
0
0
0
0.295455
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
e765b119fb0c58069efcdb5bcbcb88dc12c714c8
257
py
Python
telegramtoken/settings/sample_config.py
kahihia/community
46d55d1029764b8d66278c930a9d574e7d1421d8
[ "MIT" ]
null
null
null
telegramtoken/settings/sample_config.py
kahihia/community
46d55d1029764b8d66278c930a9d574e7d1421d8
[ "MIT" ]
null
null
null
telegramtoken/settings/sample_config.py
kahihia/community
46d55d1029764b8d66278c930a9d574e7d1421d8
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- NODE_IP = '127.0.0.1' NODE_PORT = '9718' NODE_USER = 'testuser' NODE_PWD = 'testpassword' STREAM_SMART_LICENSE = 'Telegram-license' STREAM_SMART_LICENSE_ATTESTATION = 'Telegram-license-attestation' STREAM_ISCC = 'telegramTokenJar'
23.363636
65
0.750973
33
257
5.545455
0.636364
0.120219
0.196721
0
0
0
0
0
0
0
0
0.048035
0.108949
257
10
66
25.7
0.751092
0.081712
0
0
0
0
0.397436
0.119658
0
0
0
0
0
1
0
false
0.142857
0
0
0
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
e765fb3f3635f387b5b8188b7acfcdc41c6bffec
894
py
Python
test/test_substitution.py
corneliusroemer/pyro-cov
54e89d128293f9ff9e995c442f72fa73f5f99b76
[ "Apache-2.0" ]
22
2021-09-14T04:33:11.000Z
2022-02-01T21:33:05.000Z
test/test_substitution.py
corneliusroemer/pyro-cov
54e89d128293f9ff9e995c442f72fa73f5f99b76
[ "Apache-2.0" ]
7
2021-11-02T13:48:35.000Z
2022-03-23T18:08:35.000Z
test/test_substitution.py
corneliusroemer/pyro-cov
54e89d128293f9ff9e995c442f72fa73f5f99b76
[ "Apache-2.0" ]
6
2021-09-18T01:06:51.000Z
2022-01-10T02:22:06.000Z
# Copyright Contributors to the Pyro-Cov project. # SPDX-License-Identifier: Apache-2.0 import pyro.poutine as poutine import pytest import torch from pyro.infer.autoguide import AutoDelta from pyrocov.substitution import GeneralizedTimeReversible, JukesCantor69 @pytest.mark.parametrize("Model", [JukesCantor69, GeneralizedTimeReversible]) def test_matrix_exp(Model): model = Model() guide = AutoDelta(model) guide() trace = poutine.trace(guide).get_trace() t = torch.randn(10).exp() with poutine.replay(trace=trace): m = model() assert torch.allclose(model(), m) exp_mt = (m * t[:, None, None]).matrix_exp() actual = model.matrix_exp(t) assert torch.allclose(actual, exp_mt, atol=1e-6) actual = model.log_matrix_exp(t) log_exp_mt = exp_mt.log() assert torch.allclose(actual, log_exp_mt, atol=1e-6)
29.8
77
0.694631
118
894
5.144068
0.432203
0.041186
0.093904
0.082372
0.039539
0
0
0
0
0
0
0.016598
0.191275
894
29
78
30.827586
0.82296
0.092841
0
0
0
0
0.006188
0
0
0
0
0
0.142857
1
0.047619
false
0
0.238095
0
0.285714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7666c41475df3a201f3e9500fe80142589cab4b
438
py
Python
angr/engines/vex/expressions/unsupported.py
aeflores/angr
ac85a3f168375ed0ee20551b1b716c1bff4ac02b
[ "BSD-2-Clause" ]
1
2020-11-18T16:39:11.000Z
2020-11-18T16:39:11.000Z
angr/engines/vex/expressions/unsupported.py
aeflores/angr
ac85a3f168375ed0ee20551b1b716c1bff4ac02b
[ "BSD-2-Clause" ]
1
2019-04-08T12:10:07.000Z
2019-04-08T12:10:07.000Z
angr/engines/vex/expressions/unsupported.py
aeflores/angr
ac85a3f168375ed0ee20551b1b716c1bff4ac02b
[ "BSD-2-Clause" ]
1
2020-11-18T16:39:13.000Z
2020-11-18T16:39:13.000Z
import logging l = logging.getLogger(name=__name__) def SimIRExpr_Unsupported(_engine, state, expr): l.error("Unsupported IRExpr %s. Please implement.", type(expr).__name__) size = expr.result_size(state.scratch.tyenv) result = state.solver.Unconstrained(type(expr).__name__, size) state.history.add_event('resilience', resilience_type='irexpr', expr=type(expr).__name__, message='unsupported irexpr') return result
39.818182
123
0.755708
55
438
5.636364
0.527273
0.077419
0.116129
0.103226
0
0
0
0
0
0
0
0
0.116438
438
10
124
43.8
0.801034
0
0
0
0
0
0.16895
0
0
0
0
0
0
1
0.125
false
0
0.125
0
0.375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e766e365a4ab93717384f44001cba425202a3102
837
py
Python
modulo-1/exercicios/atividade_2_operadores_aritmeticos.py
giselemanuel/programa-Ifood-backend
d12544c30e2a26f7e2e2cd85df38a3f2c8860fe7
[ "MIT" ]
3
2021-04-25T23:31:13.000Z
2021-04-26T16:59:12.000Z
modulo-1/exercicios/atividade_2_operadores_aritmeticos.py
giselemanuel/programa-Ifood-backend
d12544c30e2a26f7e2e2cd85df38a3f2c8860fe7
[ "MIT" ]
null
null
null
modulo-1/exercicios/atividade_2_operadores_aritmeticos.py
giselemanuel/programa-Ifood-backend
d12544c30e2a26f7e2e2cd85df38a3f2c8860fe7
[ "MIT" ]
null
null
null
""" Programa VamoAI: Aluna: Gisele Rodrigues Manuel Atividade 1.3 Descrição do Execício 2: Criar um programa que: Some : 1024 por 2048, Multiplique: 1024 por 2048, Divida 2048 por 1024,Subtraia 1024 por 2048, executar print dos operadores aritméticos. """ #Definição das variáveis num1 = 1024 num2 = 2048 soma = num1 + num2 multiplica = num1 * num2 divide = num2 / num1 subtrai = num2 - num1 #Saída do programa print('\n') print('\033[32m') print('-' * 50) print(f'{"Atividade 2 - Operadores Aritméticos":^50}') print('-' * 50) print('\033[m') print(f'Operação de Soma de {num1} + {num2} = {soma}') print(f'Operação de Multiplicação {num1} * {num2} = {multiplica}') print(f'Operação de Divisão {num2} / {num1} = {divide}') print(f'Operação de Subtração {num2} - {num1} = {subtrai}') print('\033[32m') print('-' * 50) print('\033[m')
27.9
162
0.685783
120
837
4.783333
0.408333
0.052265
0.097561
0.111498
0.114983
0.080139
0
0
0
0
0
0.121127
0.151732
837
30
163
27.9
0.687324
0.346476
0
0.368421
0
0
0.504638
0
0
0
0
0
0
1
0
false
0
0
0
0
0.684211
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
e76743f69fad47d5de760eeb8b2c1a2dca1ce8fe
2,354
py
Python
scripts/sfuzz_data_retr.py
sunbeam891/Smart_contract_fuzzing
327873f562028fb3ea241fb0c1dc0f039e8c005d
[ "MIT" ]
1
2021-04-01T09:33:58.000Z
2021-04-01T09:33:58.000Z
scripts/sfuzz_data_retr.py
sunbeam891/Smart_contract_fuzzing
327873f562028fb3ea241fb0c1dc0f039e8c005d
[ "MIT" ]
2
2021-11-09T03:25:40.000Z
2021-11-09T06:23:45.000Z
scripts/sfuzz_data_retr.py
sunbeam891/Smart_contract_fuzzing
327873f562028fb3ea241fb0c1dc0f039e8c005d
[ "MIT" ]
null
null
null
#Script_sFuzz_data_retriever_v0.6 #format = python3 sfuzz_data_retr.py <filename> <contractname> <contracts_folder> import json import os from decimal import Decimal import sys from openpyxl import load_workbook import pandas as pd import coverage_json import vulnerabilities_json filename = sys.argv[1] Contractname = sys.argv[2] contracts_fold = sys.argv[3] for root, dirs, files in os.walk(contracts_fold): for file in files: if file == "stats.json": File = os.path.join(root, file) if os.path.isfile(File) == False or os.stat(File).st_size == 0 : sys.exit() with open(File, 'r',encoding="utf-8") as f: vuln_json = json.load(f) dur = float(vuln_json["duration"]) #time = "{:.2f}".format(*100) time_taken = "{:.2f} secs".format(dur) total_execs = vuln_json["totalExecs"] vulnerabilities = vuln_json["vulnerabilities"] branches = vuln_json["branches"] Branch_coverage = "{} % ({})".format(vuln_json["coverage"],vuln_json["branches"]) for key,value in vulnerabilities.items(): if value != "0": if key == "gasless send": vulnerabilities_json.Vulnerabilities_detected.append("gasless") elif key == "dangerous delegatecall": vulnerabilities_json.Vulnerabilities_detected.append("DangerousDelegatecall") elif key == "exception disorder": vulnerabilities_json.Vulnerabilities_detected.append("UnhandledException") elif key == "freezing ether": vulnerabilities_json.Vulnerabilities_detected.append("Locking") elif key == "reentrancy": vulnerabilities_json.Vulnerabilities_detected.append("Reentrancy") elif key == "integer overflow": vulnerabilities_json.Vulnerabilities_detected.append("Overflow") elif key == "timestamp dependency": vulnerabilities_json.Vulnerabilities_detected.append("BlockStateDep") elif key == "integer underflow": vulnerabilities_json.Vulnerabilities_detected.append("Overflow") elif key == "block number dependency": vulnerabilities_json.Vulnerabilities_detected.append("BlockStateDep") coverage_json.Branchcov = Branch_coverage coverage_json.Transactions = total_execs 
coverage_json.timetaken = time_taken coverage_json.coverage_json_maker() vulnerabilities_json.vuln_Jsonmaker()
39.898305
89
0.708581
266
2,354
6.075188
0.390977
0.129332
0.189356
0.233911
0.314356
0.165842
0.165842
0.07797
0
0
0
0.007261
0.180969
2,354
59
90
39.898305
0.830913
0.059473
0
0.08
0
0
0.15868
0.009494
0
0
0
0
0
1
0
false
0
0.16
0
0.16
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e76781753c0e4a869e70caddd34d8e8a1557bef1
5,090
py
Python
bifrost_whats_my_species/datadump.py
ssi-dk/bifrost_whats_my_species
fe59e8cf096b8622747278959d53a95c80bed9ad
[ "MIT" ]
null
null
null
bifrost_whats_my_species/datadump.py
ssi-dk/bifrost_whats_my_species
fe59e8cf096b8622747278959d53a95c80bed9ad
[ "MIT" ]
2
2020-11-13T13:46:11.000Z
2020-11-20T08:36:55.000Z
bifrost_whats_my_species/datadump.py
ssi-dk/bifrost-whats_my_species
fe59e8cf096b8622747278959d53a95c80bed9ad
[ "MIT" ]
null
null
null
from bifrostlib import common from bifrostlib.datahandling import Sample from bifrostlib.datahandling import SampleComponentReference from bifrostlib.datahandling import SampleComponent from bifrostlib.datahandling import Category from typing import Dict import os def extract_bracken_txt(species_detection: Category, results: Dict, component_name: str) -> None: file_name = "bracken.txt" file_key = common.json_key_cleaner(file_name) file_path = os.path.join(component_name, file_name) results[file_key] = {} with open(file_path, "r") as fh: buffer = fh.readlines() number_of_entries = min(len(buffer) - 1, 2) if number_of_entries > 0: # skip first line as it's header for i in range(1, 1 + number_of_entries): # skip first line as it's header results[file_key]["species_" + str(i) + "_name"] = buffer[i].split("\t")[0] results[file_key]["species_" + str(i) + "_kraken_assigned_reads"] = buffer[i].split("\t")[3] results[file_key]["species_" + str(i) + "_added_reads"] = buffer[i].split("\t")[4] results[file_key]["species_" + str(i) + "_count"] = int(buffer[i].split("\t")[5].strip()) def extract_kraken_report_bracken_txt(species_detection: Category, results: Dict, component_name: str) -> None: file_name = "kraken_report_bracken.txt" file_key = common.json_key_cleaner(file_name) file_path = os.path.join(component_name, file_name) results[file_key] = {} with open(file_path, "r") as fh: buffer = fh.readlines() if len(buffer) > 2: results[file_key]["unclassified_count"] = int(buffer[0].split("\t")[1]) results[file_key]["root"] = int(buffer[1].split("\t")[1]) def species_math(species_detection: Category, results: Dict, component_name: str) -> None: kraken_report_bracken_key = common.json_key_cleaner("kraken_report_bracken.txt") bracken_key = common.json_key_cleaner("bracken.txt") if ("status" not in results[kraken_report_bracken_key] and "status" not in results[bracken_key] and "species_1_count" in results[bracken_key] and "species_2_count" in results[bracken_key]): 
species_detection["summary"]["percent_unclassified"] = results[kraken_report_bracken_key]["unclassified_count"] / (results[kraken_report_bracken_key]["unclassified_count"] + results[kraken_report_bracken_key]["root"]) species_detection["summary"]["percent_classified_species_1"] = results[bracken_key]["species_1_count"] / (results[kraken_report_bracken_key]["unclassified_count"] + results[kraken_report_bracken_key]["root"]) species_detection["summary"]["name_classified_species_1"] = results[bracken_key]["species_1_name"] species_detection["summary"]["percent_classified_species_2"] = results[bracken_key]["species_2_count"] / (results[kraken_report_bracken_key]["unclassified_count"] + results[kraken_report_bracken_key]["root"]) species_detection["summary"]["name_classified_species_2"] = results[bracken_key]["species_2_name"] species_detection["summary"]["detected_species"] = species_detection["summary"]["name_classified_species_1"] def set_sample_species(species_detection: Category, sample: Sample) -> None: sample_info = sample.get_category("sample_info") if sample_info is not None and sample_info.get("summary", {}).get("provided_species", None) is not None: species_detection["summary"]["species"] = sample_info["summary"]["provided_species"] else: species_detection["summary"]["species"] = species_detection["summary"].get("detected_species", None) def datadump(samplecomponent_ref_json: Dict): samplecomponent_ref = SampleComponentReference(value=samplecomponent_ref_json) samplecomponent = SampleComponent.load(samplecomponent_ref) sample = Sample.load(samplecomponent.sample) species_detection = samplecomponent.get_category("species_detection") if species_detection is None: species_detection = Category(value={ "name": "species_detection", "component": {"id": samplecomponent["component"]["_id"], "name": samplecomponent["component"]["name"]}, "summary": {}, "report": {} } ) extract_bracken_txt(species_detection, samplecomponent["results"], 
samplecomponent["component"]["name"]) extract_kraken_report_bracken_txt(species_detection, samplecomponent["results"], samplecomponent["component"]["name"]) species_math(species_detection, samplecomponent["results"], samplecomponent["component"]["name"]) set_sample_species(species_detection, sample) samplecomponent.set_category(species_detection) sample.set_category(species_detection) samplecomponent.save_files() common.set_status_and_save(sample, samplecomponent, "Success") with open(os.path.join(samplecomponent["component"]["name"], "datadump_complete"), "w+") as fh: fh.write("done") datadump( snakemake.params.samplecomponent_ref_json, )
57.840909
226
0.711591
610
5,090
5.622951
0.160656
0.116618
0.072012
0.057726
0.522157
0.472012
0.397668
0.340233
0.3
0.23965
0
0.005834
0.158153
5,090
87
227
58.505747
0.794632
0.011984
0
0.131579
0
0
0.179591
0.041101
0
0
0
0
0
1
0.065789
false
0
0.092105
0
0.157895
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e76785635d525e1ea987b9fb10498fdb21db674e
627
py
Python
ex6-8.py
yiyidhuang/PythonCrashCrouse2nd
3512f9ab8fcf32c6145604a37e2a62feddf174d1
[ "MIT" ]
null
null
null
ex6-8.py
yiyidhuang/PythonCrashCrouse2nd
3512f9ab8fcf32c6145604a37e2a62feddf174d1
[ "MIT" ]
null
null
null
ex6-8.py
yiyidhuang/PythonCrashCrouse2nd
3512f9ab8fcf32c6145604a37e2a62feddf174d1
[ "MIT" ]
null
null
null
cristiano = { 'type': 'dog', 'owner': 'wei', } rose = { 'type': 'cat', 'owner': 'yan', } cloud = { 'type': 'pig', 'owner': 'luo', } pets = [cristiano, rose, cloud] for pet in pets: if pet == cristiano: print('\nCristiano: ' + '\n\ttype: ' + pet['type'] + '\n\towner: ' + pet['owner']) elif pet == rose: print('\nRose: ' + '\n\ttype: ' + pet['type'] + '\n\towner: ' + pet['owner']) elif pet == cloud: print('\nCould: ' + '\n\ttype: ' + pet['type'] + '\n\towner: ' + pet['owner'])
20.225806
45
0.405104
62
627
4.096774
0.387097
0.070866
0.106299
0.153543
0.385827
0.385827
0.385827
0.385827
0.275591
0.275591
0
0
0.365231
627
30
46
20.9
0.638191
0
0
0.230769
0
0
0.263158
0
0
0
0
0
0
1
0
false
0
0
0
0
0.115385
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e769251e473b5f4b32970f5dbac6d06da53753e2
4,766
py
Python
dexy/filters/matrix.py
dexy/dexy
323c1806e51f75435e11d2265703e68f46c8aef3
[ "MIT" ]
136
2015-01-06T15:04:47.000Z
2021-12-21T22:52:41.000Z
dexy/filters/matrix.py
dexy/dexy
323c1806e51f75435e11d2265703e68f46c8aef3
[ "MIT" ]
13
2015-01-26T14:06:58.000Z
2020-03-27T21:16:10.000Z
dexy/filters/matrix.py
dexy/dexy
323c1806e51f75435e11d2265703e68f46c8aef3
[ "MIT" ]
34
2015-01-02T16:24:53.000Z
2021-11-27T05:38:30.000Z
from bs4 import BeautifulSoup from dexy.filters.api import ApiFilter import asyncio import json import mimetypes import markdown try: from nio import AsyncClient AVAILABLE = True except ImportError: AVAILABLE = False async def main_nio(homeserver, user, password, room_id, ext, mimetype, data_provider, content, log_fn): client = AsyncClient(homeserver, user) await client.login(password) upload_response, decrypt_info = None, None if data_provider: upload_response, decrypt_info = await client.upload( data_provider, mimetype ) content['url'] = upload_response.content_uri log_fn("uploading message to room %s: %s" % (room_id, str(content))) response = await client.room_send( room_id=room_id, message_type="m.room.message", content=content ) await client.close() return { "event_id" : response.event_id, "room_id" : response.room_id } class MatrixFilter(ApiFilter): """ Filter for posting text, files, or images to a matrix room. Uses matrix-nio Create a .dexyapis JSON file in your HOME dir with format: { "matrix": { "homeserver" : "https://example.org", "username" : "@example:example.org", "password" : "sekret1!" } } """ aliases = ['matrix'] _settings = { 'room-id' : ("The room id (NOT the room name!) to post to.", "!yMPKbtdRlqJWpwCcvg:matrix.org"), 'api-key-name' : 'matrix', 'input-extensions' : ['.*'], 'output-extensions' : ['.json'] } def is_active(self): return AVAILABLE def data_provider(self, a, b): # FIXME currently ignoring params a, b return self.input_data.storage.data_file() def process(self): if self.input_data.ext in ('.html'): text = str(self.input_data) soup = BeautifulSoup(text, 'html.parser') # https://matrix.org/docs/spec/client_server/r0.6.0#m-room-message-msgtypes # "should" do this in bs4 but this works # FIXME? 
bg-color is ignored in riot modified_html = text.replace("style=\"color: ", "data-mx-color=\"").replace("style=\"background: ", "data-mx-bg-color=\"") content = { 'msgtype' : 'm.text', 'format' : 'org.matrix.custom.html', 'body' : soup.get_text(), 'formatted_body' : modified_html } ### "matrix-markdown" elif self.input_data.ext in ('.md'): text = str(self.input_data) html = markdown.markdown(text, extensions=['fenced_code']) soup = BeautifulSoup(html, 'html.parser') for code_block in soup.find_all("code"): code_block['class'] = "language-%s" % code_block['class'][0] code_block.string = code_block.string.lstrip() content = { 'msgtype' : 'm.text', 'format' : 'org.matrix.custom.html', 'body' : soup.get_text(), 'formatted_body' : str(soup) } ### @end elif self.input_data.ext in ('.txt'): text = str(self.input_data) content = { 'msgtype' : "m.text", 'body' : text } elif self.input_data.ext in ('.png', '.jpeg', '.jpg', '.bmp'): if hasattr(self.doc, 'created_by_doc'): description = "image %s generated by script %s" % (self.input_data.name, self.doc.created_by_doc.name) else: description = "automatically generated image %s" % self.input_data.name content = { 'msgtype' : 'm.image', 'body' : description } else: content = { 'msgtype' : 'm.file', 'filename' : self.input_data.name, 'body' : self.input_data.name } loop = asyncio.get_event_loop() response = loop.run_until_complete(main_nio( homeserver=self.read_param('homeserver'), user=self.read_param('username'), password=self.read_param('password'), room_id=self.setting('room-id'), ext=self.input_data.ext, mimetype=mimetypes.guess_type(self.input_data.name)[0], data_provider=self.data_provider, content=content, log_fn=self.log_debug )) self.output_data.set_data(json.dumps(response))
32.868966
134
0.536299
511
4,766
4.857143
0.334638
0.050766
0.073328
0.032232
0.14585
0.084609
0.058018
0.058018
0.058018
0.058018
0
0.002553
0.342426
4,766
144
135
33.097222
0.789407
0.103651
0
0.156863
0
0
0.142891
0.017565
0
0
0
0.006944
0
1
0.029412
false
0.029412
0.078431
0.019608
0.166667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e76a39929d3dba1cca55b2346b00be6b52fb4b66
880
py
Python
vera molnar/random_grids.py
jkocontreras/drawbotscripts
6688e65e057f25901ac1adb93c3108ab889de49f
[ "MIT" ]
null
null
null
vera molnar/random_grids.py
jkocontreras/drawbotscripts
6688e65e057f25901ac1adb93c3108ab889de49f
[ "MIT" ]
null
null
null
vera molnar/random_grids.py
jkocontreras/drawbotscripts
6688e65e057f25901ac1adb93c3108ab889de49f
[ "MIT" ]
null
null
null
import random # ---------------------- # settings pw = ph = 500 cell_a = 10 # amount of cells sbdvs = 3 # subdivisions gap = pw /(cell_a * sbdvs + cell_a + 1) cell_s = sbdvs * gap points = [(x * gap, y * gap) for x in range(sbdvs+1) for y in range(sbdvs+1) ] # ---------------------- # function(s) def a_grid_cell(pos, s, points, amount = len(points)): random.shuffle(points) points = random.sample( points, amount ) with savedState(): translate(x * (cell_s + gap), y * (cell_s + gap)) polygon(*points, close=False) # ---------------------- # drawing newPage(pw, ph) rect(0, 0, pw, ph) translate(gap, gap) fill(None) strokeWidth(1) stroke(1) lineCap('round') lineJoin('round') for x in range( cell_a ): for y in range( cell_a ): a_grid_cell((x * cell_s, y * cell_s), cell_s, points, y + 3) # saveImage('random_grids.jpg')
19.555556
79
0.575
131
880
3.740458
0.389313
0.061224
0.02449
0.044898
0
0
0
0
0
0
0
0.020202
0.2125
880
45
80
19.555556
0.686869
0.181818
0
0
0
0
0.014065
0
0
0
0
0
0
1
0.041667
false
0
0.041667
0
0.083333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e76a3be58b3e2fbe5929631298e3ee8bfa3c6f30
1,997
py
Python
tests/exercise5/lesson_5/exercise_6/hangman_clean.py
PONSASIKALA/signal-interpreter-server
2a0fad375768e0dca99b8e8cb4ef19e863d99007
[ "MIT" ]
null
null
null
tests/exercise5/lesson_5/exercise_6/hangman_clean.py
PONSASIKALA/signal-interpreter-server
2a0fad375768e0dca99b8e8cb4ef19e863d99007
[ "MIT" ]
null
null
null
tests/exercise5/lesson_5/exercise_6/hangman_clean.py
PONSASIKALA/signal-interpreter-server
2a0fad375768e0dca99b8e8cb4ef19e863d99007
[ "MIT" ]
null
null
null
# pylint: disable=missing-function-docstring ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" def get_display_word(secret_word): display_word = [] for i in range(len(secret_word)): if secret_word[i] in ALPHABET: display_word.append("_") else: display_word.append(secret_word[i]) return display_word def get_valid_guess(): input_is_invalid = True guess = None while input_is_invalid: guess = input("Player 2, please guess a letter: ").upper() if len(guess) > 1 or guess not in ALPHABET: print("You did not enter a valid guess. Please try again.") else: input_is_invalid = False return guess def set_display_word(guess, secret_word, display_word): for i, letter in enumerate(secret_word): if letter == guess: display_word[i] = guess return display_word def print_display_word(display_word): word = "" for i in range(len(display_word)): word += display_word[i] print() print("Current Progress: ", word) def play_hangman(secret_word): guesses_left = 6 display_word = get_display_word(secret_word) while '_' in display_word and guesses_left: guess = get_valid_guess() if guess in secret_word: display_word = set_display_word(guess, secret_word, display_word) else: print("Your guess is incorrect! Please try again.") guesses_left -= 1 print(f"Number of guesses left: {guesses_left}") print_display_word(display_word) if "_" not in display_word: print("Player 2 wins!") else: print("You died!") def initialize_game(): secret_word = input("Player one, please enter your secret word: ").upper() for i in range(50): print() print("Player two must guess Player one's word") play_hangman(secret_word) initialize_game()
27.356164
79
0.620431
254
1,997
4.637795
0.271654
0.205433
0.089134
0.071307
0.194397
0.116299
0.067912
0.067912
0
0
0
0.004944
0.290936
1,997
72
80
27.736111
0.826977
0.021032
0
0.150943
0
0
0.167464
0.013822
0
0
0
0
0
1
0.113208
false
0
0
0
0.169811
0.207547
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e76c2081bccf726a610c27def463e58a09b8c81d
21,136
py
Python
utlz/__init__.py
theno/utlz
bf7d2b53f3e0d35c6f8ded81f3f774a74fcd3389
[ "MIT" ]
2
2018-10-29T18:35:12.000Z
2019-01-14T12:56:15.000Z
utlz/__init__.py
Bhanditz/utlz
bf7d2b53f3e0d35c6f8ded81f3f774a74fcd3389
[ "MIT" ]
null
null
null
utlz/__init__.py
Bhanditz/utlz
bf7d2b53f3e0d35c6f8ded81f3f774a74fcd3389
[ "MIT" ]
1
2019-01-14T11:11:32.000Z
2019-01-14T11:11:32.000Z
# -*- coding: utf-8 -*- import collections import functools import gzip import json import inspect import os.path import shutil import sys import struct import time from functools import wraps from utlz._version import __version__ # inspired by: http://stackoverflow.com/a/6618825 def flo(string): '''Return the string given by param formatted with the callers locals.''' callers_locals = {} frame = inspect.currentframe() try: outerframe = frame.f_back callers_locals = outerframe.f_locals finally: del frame return string.format(**callers_locals) # does not work if called from another package (with other globals) # TODO: unit tests def doc1(): '''Return the first line of the (callers) docstring.''' return globals()[inspect.stack()[1][3]].__doc__.splitlines()[0] # TODO: unit tests def _wrap_with(color_code): '''Color wrapper. Example: >>> blue = _wrap_with('34') >>> print(blue('text')) \033[34mtext\033[0m ''' def inner(text, bold=False): '''Inner color function.''' code = color_code if bold: code = flo("1;{code}") return flo('\033[{code}m{text}\033[0m') return inner black = _wrap_with('30') red = _wrap_with('31') green = _wrap_with('32') yellow = _wrap_with('33') blue = _wrap_with('34') magenta = _wrap_with('35') cyan = _wrap_with('36') white = _wrap_with('37') default_color = _wrap_with('0') # TODO: unit tests def first_paragraph(multiline_str, without_trailing_dot=True, maxlength=None): '''Return first paragraph of multiline_str as a oneliner. When without_trailing_dot is True, the last char of the first paragraph will be removed, if it is a dot ('.'). 
Examples: >>> multiline_str = 'first line\\nsecond line\\n\\nnext paragraph' >>> print(first_paragraph(multiline_str)) first line second line >>> multiline_str = 'first \\n second \\n \\n next paragraph ' >>> print(first_paragraph(multiline_str)) first second >>> multiline_str = 'first line\\nsecond line\\n\\nnext paragraph' >>> print(first_paragraph(multiline_str, maxlength=3)) fir >>> multiline_str = 'first line\\nsecond line\\n\\nnext paragraph' >>> print(first_paragraph(multiline_str, maxlength=78)) first line second line >>> multiline_str = 'first line.' >>> print(first_paragraph(multiline_str)) first line >>> multiline_str = 'first line.' >>> print(first_paragraph(multiline_str, without_trailing_dot=False)) first line. >>> multiline_str = '' >>> print(first_paragraph(multiline_str)) <BLANKLINE> ''' stripped = '\n'.join([line.strip() for line in multiline_str.splitlines()]) paragraph = stripped.split('\n\n')[0] res = paragraph.replace('\n', ' ') if without_trailing_dot: res = res.rsplit('.', 1)[0] if maxlength: res = res[0:maxlength] return res # for decorator with arguments see: http://stackoverflow.com/a/5929165 # TODO: unit tests def print_doc1(*args, **kwargs): '''Print the first paragraph of the docstring of the decorated function. The paragraph will be printed as a oneliner. May be invoked as a simple, argument-less decorator (i.e. ``@print_doc1``) or with named arguments ``color``, ``bold``, ``prefix`` of ``tail`` (eg. ``@print_doc1(color=utils.red, bold=True, prefix=' ')``). Examples: # >>> @print_doc1 # ... def foo(): # ... """First line of docstring. # ... # ... another line. # ... """ # ... pass # ... # >>> foo() # \033[34mFirst line of docstring\033[0m # >>> @print_doc1 # ... def foo(): # ... """First paragraph of docstring which contains more than one # ... line. # ... # ... Another paragraph. # ... """ # ... pass # ... 
# >>> foo() # \033[34mFirst paragraph of docstring which contains more than one line\033[0m ''' # output settings from kwargs or take defaults color = kwargs.get('color', blue) bold = kwargs.get('bold', False) prefix = kwargs.get('prefix', '') tail = kwargs.get('tail', '\n') def real_decorator(func): '''real decorator function''' @wraps(func) def wrapper(*args, **kwargs): '''the wrapper function''' try: prgf = first_paragraph(func.__doc__) print(color(prefix + prgf + tail, bold)) except AttributeError as exc: name = func.__name__ print(red(flo('{name}() has no docstring'))) raise(exc) return func(*args, **kwargs) return wrapper invoked = bool(not args or kwargs) if not invoked: # invoke decorator function which returns the wrapper function return real_decorator(func=args[0]) return real_decorator # TODO: unit tests def print_full_name(*args, **kwargs): '''Decorator, print the full name of the decorated function. May be invoked as a simple, argument-less decorator (i.e. ``@print_doc1``) or with named arguments ``color``, ``bold``, or ``prefix`` (eg. ``@print_doc1(color=utils.red, bold=True, prefix=' ')``). ''' color = kwargs.get('color', default_color) bold = kwargs.get('bold', False) prefix = kwargs.get('prefix', '') tail = kwargs.get('tail', '') def real_decorator(func): '''real decorator function''' @wraps(func) def wrapper(*args, **kwargs): '''the wrapper function''' first_line = '' try: first_line = func.__module__ + '.' 
+ func.__qualname__ except AttributeError as exc: first_line = func.__name__ print(color(prefix + first_line + tail, bold)) return func(*args, **kwargs) return wrapper invoked = bool(not args or kwargs) if not invoked: # invoke decorator function which returns the wrapper function return real_decorator(func=args[0]) return real_decorator def _get_input(): try: return raw_input() # Python-2.* except NameError: return input() # Python-3.* # taken from: http://stackoverflow.com/a/3041990 def query_yes_no(question, default="yes"): """Ask a yes/no question and return their answer. "question" is a string that is presented to the user. "default" is the presumed answer if the user just hits <Enter>. It must be "yes" (the default), "no", or None (which means an answer of the user is required). The "answer" return value is True for "yes" or False for "no". """ valid = {"yes": True, "y": True, "ye": True, '1': True, "no": False, "n": False, '0': False, } if default is None: prompt = " [y/n] " elif default == "yes": prompt = " [Y/n] " elif default == "no": prompt = " [y/N] " else: raise ValueError("invalid default answer: '%s'" % default) while True: sys.stdout.write(question + prompt) choice = _get_input().lower() if default is not None and choice == '': return valid[default] elif choice in valid: return valid[choice] else: sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") def query_input(question, default=None, color=default_color): """Ask a question for input and return their answer. "question" is a string that is presented to the user. "default" is the presumed answer if the user just hits <Enter>. The "answer" return value is a str. 
""" if default is None or default == '': prompt = ' ' elif type(default) == str: prompt = flo(' [{default}] ') else: raise ValueError("invalid default answer: '%s'" % default) while True: sys.stdout.write(color(question + prompt)) choice = _get_input() if default is not None and choice == '': return default if choice != '': return choice # TODO: unit tests def filled_out_template_str(template, **substitutions): '''Return str template with applied substitutions. Example: >>> template = 'Asyl for {{name}} {{surname}}!' >>> filled_out_template_str(template, name='Edward', surname='Snowden') 'Asyl for Edward Snowden!' >>> template = '[[[foo]]] was substituted by {{foo}}' >>> filled_out_template_str(template, foo='bar') '{{foo}} was substituted by bar' >>> template = 'names wrapped by {single} {curly} {braces} {{curly}}' >>> filled_out_template_str(template, curly='remains unchanged') 'names wrapped by {single} {curly} {braces} remains unchanged' ''' template = template.replace('{', '{{') template = template.replace('}', '}}') template = template.replace('{{{{', '{') template = template.replace('}}}}', '}') template = template.format(**substitutions) template = template.replace('{{', '{') template = template.replace('}}', '}') template = template.replace('[[[', '{{') template = template.replace(']]]', '}}') return template # TODO: unit tests def filled_out_template(filename, **substitutions): '''Return content of file filename with applied substitutions.''' res = None with open(filename, 'r') as fp: template = fp.read() res = filled_out_template_str(template, **substitutions) return res # cf. http://stackoverflow.com/a/126389 # TODO: unit tests def update_or_append_line(filename, prefix, new_line, keep_backup=True, append=True): '''Search in file 'filename' for a line starting with 'prefix' and replace the line by 'new_line'. If a line starting with 'prefix' not exists 'new_line' will be appended. If the file not exists, it will be created. 
Return False if new_line was appended, else True (i.e. if the prefix was found within of the file). ''' same_line_exists, line_updated = False, False filename = os.path.expanduser(filename) if os.path.isfile(filename): backup = filename + '~' shutil.move(filename, backup) # with open(filename, 'w') as dest, open(backup, 'r') as source: with open(filename, 'w') as dest: with open(backup, 'r') as source: # try update.. for line in source: if line == new_line: same_line_exists = True if line.startswith(prefix): dest.write(new_line + '\n') line_updated = True else: dest.write(line) # ..or append if not (same_line_exists or line_updated) and append: dest.write(new_line + '\n') if not keep_backup: os.remove(backup) else: with open(filename, 'w') as dest: dest.write(new_line + '\n') return same_line_exists or line_updated # TODO: unit tests def comment_out_line(filename, line, comment='#', update_or_append_line=update_or_append_line): '''Comment line out by putting a comment sign in front of the line. If the file does not contain the line, the files content will not be changed (but the file will be touched in every case). ''' update_or_append_line(filename, prefix=line, new_line=comment+line, append=False) # TODO: unit tests def uncomment_or_update_or_append_line(filename, prefix, new_line, comment='#', keep_backup=True, update_or_append_line=update_or_append_line): '''Remove the comment of an commented out line and make the line "active". If such an commented out line not exists it would be appended. ''' uncommented = update_or_append_line(filename, prefix=comment+prefix, new_line=new_line, keep_backup=keep_backup, append=False) if not uncommented: update_or_append_line(filename, prefix, new_line, keep_backup=keep_backup, append=True) # idea comes from http://stackoverflow.com/a/13105359 # TODO: unit tests def convert_unicode_2_utf8(input): '''Return a copy of `input` with every str component encoded from unicode to utf-8. 
''' if isinstance(input, dict): try: # python-2.6 return dict((convert_unicode_2_utf8(key), convert_unicode_2_utf8(value)) for key, value in input.iteritems()) except AttributeError: # since python-2.7 cf. http://stackoverflow.com/a/1747827 # [the ugly eval('...') is required for a valid syntax on # python-2.6, cf. http://stackoverflow.com/a/25049535] return eval('''{convert_unicode_2_utf8(key): convert_unicode_2_utf8(value) for key, value in input.items()}''') elif isinstance(input, list): return [convert_unicode_2_utf8(element) for element in input] # elif order relevant: python2 vs. python3 # cf. http://stackoverflow.com/a/19877309 elif isinstance(input, str): return input else: try: if eval('''isinstance(input, unicode)'''): return input.encode('utf-8') except NameError: # unicode does not exist in python-3.x pass return input def load_json(filename, gzip_mode=False): '''Return the json-file data, with all strings utf-8 encoded.''' open_file = open if gzip_mode: open_file = gzip.open try: with open_file(filename, 'rt') as fh: data = json.load(fh) data = convert_unicode_2_utf8(data) return data except AttributeError: # Python-2.6 fh = open_file(filename, 'rt') data = json.load(fh) fh.close() data = convert_unicode_2_utf8(data) return data def write_json(data, filename, gzip_mode=False): '''Write the python data structure as a json-Object to filename.''' open_file = open if gzip_mode: open_file = gzip.open try: with open_file(filename, 'wt') as fh: json.dump(obj=data, fp=fh, sort_keys=True) except AttributeError: # Python-2.6 fh = open_file(filename, 'wt') json.dump(obj=data, fp=fh, sort_keys=True) fh.close() def create_dir_if_not_exists(path): if not os.path.exists(path): os.makedirs(path) def flat_list(list_of_lists): '''Return a simple list out of a list of lists.''' return [item for sublist in list_of_lists for item in sublist] def text_with_newlines(text, line_length=78, newline='\n'): '''Return text with a `newline` inserted after each `line_length` char. 
Return `text` unchanged if line_length == 0. ''' if line_length > 0: if len(text) <= line_length: return text else: return newline.join([text[idx:idx+line_length] for idx in range(0, len(text), line_length)]) else: return text def func_has_arg(func, arg): '''Return True if an argument `arg` exists for function `func`, else False. ''' return arg in inspect.getargspec(func).args # originally written by Giampaolo Rodolà and Ken Seehof # https://code.activestate.com/recipes/576563-cached-property/#c3 def lazy_val(func, with_del_hook=False): '''A memoize decorator for class properties. Return a cached property that is calculated by function `func` on first access. ''' def hook_for(that): try: orig_del = that.__del__ except AttributeError: orig_del = None def del_hook(*args, **kwargs): del that._cache[id(that)] del that._del_hook_cache[id(that)] if orig_del is not None: orig_del(that, *args, **kwargs) try: if orig_del is not None: that.__del__ = del_hook except AttributeError: # that.__del__ is a class property and cannot be changed by instance orig_del = None return del_hook def add_to_del_hook_cache(that): if with_del_hook: try: that._del_hook_cache[id(that)] = hook_for(that) except AttributeError: # when that._del_hook_cache not exists, it means it is not a # class property. Then, we don't need a del_hook(). 
pass @functools.wraps(func) def get(self): try: return self._cache[id(self)][func] except AttributeError: self._cache = {id(self): {}, } add_to_del_hook_cache(self) except KeyError: try: self._cache[id(self)] except KeyError: self._cache[id(self)] = {} add_to_del_hook_cache(self) val = self._cache[id(self)][func] = func(self) return val return property(get) # namedtuple with defaults and lazy_vals def namedtuple(typename, field_names, lazy_vals=None, **kwargs): if isinstance(field_names, str): field_names = field_names.replace(',', ' ').split() field_names = list(map(str, field_names)) field_names_without_defaults = [] defaults = [] for name in field_names: list_ = name.split('=', 1) if len(list_) > 1: name, default = list_ defaults.append(eval(default)) elif len(defaults) != 0: raise ValueError('non-keyword arg after keyword arg in field_names') field_names_without_defaults.append(name) _class = collections.namedtuple(typename, field_names_without_defaults, **kwargs) _class.__new__.__defaults__ = tuple(defaults) if lazy_vals is not None: # namedtuple instances are tuples and so they are immutable. We cannot # add an instance property _cache. So we create one global _cache dict # and one _del_hook_cache dict as class properties for storing the lazy # vals and the del-hooks and enable the del_hook-functionality by # adding a __del__ attribute function wich calls the del-hook. _class._cache = {} _class._del_hook_cache = {} def noop(): pass _class.__del__ = lambda self: self._del_hook_cache.get(id(self), noop)() for attr_name, func in lazy_vals.items(): setattr(_class, attr_name, lazy_val(func, with_del_hook=True)) return _class # TODO unit test class StructContext(object): '''An instance of this is a file like object which enables access of an (data) struct. 
''' def __init__(self, data_struct): self.data_struct = data_struct self.offset = 0 def __enter__(self): self.seek(0) return self def __exit__(self, exc_type, exc_value, exc_traceback): self.seek(0) def seek(self, offset): self.offset = offset def read(self, fmt): data = struct.unpack_from(fmt, self.data_struct, self.offset) self.offset += struct.calcsize(fmt) if len(data) == 1: return data[0] return data @lazy_val def length(self): return len(self.data_struct) # https://stackoverflow.com/a/15190306 # TODO: unit tests class timeout(object): '''timeout context. Usage example: >>> with timeout(0.1) as t: ... while True: ... if t.timed_out: ... break ... print('.') ... time.sleep(0.02) . . . . . For more usage, see https://stackoverflow.com/a/15190306 ''' def __init__(self, seconds): self.seconds = seconds def __enter__(self): self.die_after = time.time() + self.seconds return self def __exit__(self, type, value, traceback): pass @property def timed_out(self): return time.time() > self.die_after if __name__ == '__main__': import doctest doctest.testmod() # Repo = namedtuple('Repo', "url, name=None, basedir='~/repos'") Repo = namedtuple('Repo', "url, name=None, basedir='~/repos'") assert Repo.__new__.__defaults__ == (None, '~/repos') r = Repo(url='https://github.com/theno/fabsetup.git') assert r.__repr__() == 'Repo(' \ 'url=\'https://github.com/theno/fabsetup.git\', ' \ 'name=None, basedir=\'~/repos\')'
31.879336
86
0.58909
2,594
21,136
4.629144
0.178874
0.010493
0.012991
0.014657
0.379247
0.307878
0.273651
0.23959
0.205696
0.178548
0
0.014065
0.293575
21,136
662
87
31.927492
0.790168
0.33909
0
0.308782
0
0
0.052207
0.006253
0
0
0
0.016616
0.005666
1
0.121813
false
0.011331
0.036827
0.005666
0.294618
0.014164
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
e76c656e6c74af8f2d56825e23cd387e36aeef31
401
py
Python
dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGeometryDataResponse.py
srcarter3/python-awips
d981062662968cf3fb105e8e23d955950ae2497e
[ "BSD-3-Clause" ]
33
2016-03-17T01:21:18.000Z
2022-02-08T10:41:06.000Z
dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGeometryDataResponse.py
srcarter3/python-awips
d981062662968cf3fb105e8e23d955950ae2497e
[ "BSD-3-Clause" ]
15
2016-04-19T16:34:08.000Z
2020-09-09T19:57:54.000Z
dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGeometryDataResponse.py
Unidata/python-awips
8459aa756816e5a45d2e5bea534d23d5b1dd1690
[ "BSD-3-Clause" ]
20
2016-03-12T01:46:58.000Z
2022-02-08T06:53:22.000Z
class GetGeometryDataResponse(object): def __init__(self): self.geometryWKBs = None self.geoData = None def getGeometryWKBs(self): return self.geometryWKBs def setGeometryWKBs(self, geometryWKBs): self.geometryWKBs = geometryWKBs def getGeoData(self): return self.geoData def setGeoData(self, geoData): self.geoData = geoData
20.05
44
0.663342
38
401
6.894737
0.368421
0.244275
0.10687
0
0
0
0
0
0
0
0
0
0.261845
401
19
45
21.105263
0.885135
0
0
0
0
0
0
0
0
0
0
0
0
1
0.416667
false
0
0
0.166667
0.666667
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
3
e76c666397b985650186328fae42e70cb9a10b72
1,835
py
Python
distiller/core/Distiller.py
darkclouder/distiller
a8efbfd807d781b90daba6023e3f966a52836b42
[ "BSD-2-Clause" ]
3
2018-07-18T14:41:00.000Z
2020-10-30T13:26:26.000Z
distiller/core/Distiller.py
darkclouder/distiller
a8efbfd807d781b90daba6023e3f966a52836b42
[ "BSD-2-Clause" ]
1
2018-07-19T08:23:09.000Z
2018-07-19T08:23:09.000Z
distiller/core/Distiller.py
darkclouder/distiller
a8efbfd807d781b90daba6023e3f966a52836b42
[ "BSD-2-Clause" ]
null
null
null
import os from distiller.core.impl.HttpServer import HttpServer from distiller.core.impl.CoreHandler import CoreHandler class Distiller: def __init__(self, env): self.env = env self.logger = self.env.logger.claim("Core") self.shutdown = False self.srv = HttpServer(CoreHandler(), self.env) self.pidfile = self.env.config.get("distiller.pidfile", path=True) def is_running(self): # Check if pid file already exists # and if the pid is still running if os.path.isfile(self.pidfile): with open(self.pidfile, "r") as f: try: pid = int(f.readline()) except ValueError: self.logger.warning("Corrupt pid file") os.remove(self.pidfile) return False # Check if process still running try: os.kill(pid, 0) except OSError: self.logger.notice("Daemon not running, but pid file exists") os.remove(self.pidfile) return False else: return True return False def run(self): self.logger.notice("Daemon start-up") # Write pid to pidfile pid = str(os.getpid()) with open(self.pidfile, "w") as f: f.write(pid) # Start watchdog (non-blocking) self.env.watchdog.run() # Start web server (blocking) self.srv.run() def stop(self): self.logger.notice("Daemon shutdown initiated") # Stop web server self.srv.stop() # Stop watchdog (non-blocking) self.env.watchdog.stop() os.remove(self.pidfile) self.logger.notice("Daemon shutdown done")
27.38806
81
0.541144
208
1,835
4.75
0.355769
0.049595
0.064777
0.089069
0.220648
0.129555
0
0
0
0
0
0.000856
0.363488
1,835
66
82
27.80303
0.845034
0.119346
0
0.195122
0
0
0.085874
0
0
0
0
0
0
1
0.097561
false
0
0.073171
0
0.292683
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e76ddf3376b76bafc19cbf52b3ad8126a2980c57
2,491
py
Python
nora10/mylakeinitpar.py
kojitominaga/scratch
5eaf4de30c89ff1e855a6be493105d1201f07f74
[ "FSFAP" ]
null
null
null
nora10/mylakeinitpar.py
kojitominaga/scratch
5eaf4de30c89ff1e855a6be493105d1201f07f74
[ "FSFAP" ]
null
null
null
nora10/mylakeinitpar.py
kojitominaga/scratch
5eaf4de30c89ff1e855a6be493105d1201f07f74
[ "FSFAP" ]
null
null
null
import numpy as np import os depth_resolution = 0.1 # metre def mylakeinit(max_depth, area): '''max_depth: m, area: m2. returns string to be written to an init file of MyLake assumes a cone shaped bathymetry curve''' depth_levels = np.arange(0, max_depth, depth_resolution) if not max_depth in depth_levels: depth_levels = np.concatenate((depth_levels, np.array([max_depth]))) areas = area * (depth_levels - max_depth) ** 2 / max_depth ** 2 lines = ['\t'.join([('%.2f' % d), ('%.0f' % a)] + ['4'] + ['0'] * 9) for d, a in zip(depth_levels, areas)] lines[0] = lines[0] + '\t0\t0' # snow and ice firstlines = '''-999 "MyLake init" Z (m) Az (m2) Tz (deg C) Cz Sz (kg/m3) TPz (mg/m3) DOPz (mg/m3) Chlaz (mg/m3) DOCz (mg/m3) TPz_sed (mg/m3) Chlaz_sed (mg/m3) "Fvol_IM (m3/m3 dry w.)" Hice (m) Hsnow (m)''' lines = [firstlines] + lines return '\n'.join(lines) def mylakepar(atten_coeff, longitude, latitude): '''atten_coeff: m-1 uses the Minesota area and BV parameters -> sets NaNs returns string to be written to a file''' out = '''-999 "MyLake parameters" Parameter Value Min Max Unit dz 1 0.5 2 m Kz_ak NaN NaN NaN (-) Kz_ak_ice 0.000898 NaN NaN (-) Kz_N0 7.00E-05 NaN NaN s-2 C_shelter NaN NaN NaN (-) latitude %.5f NaN NaN dec.deg longitude %.5f NaN NaN dec.deg alb_melt_ice 0.3 NaN NaN (-) alb_melt_snow 0.77 NaN NaN (-) PAR_sat 3.00E-05 1.00E-05 1.00E-04 mol m-2 s-1 f_par 0.45 NaN NaN (-) beta_chl 0.015 0.005 0.045 m2 mg-1 lambda_I 5 NaN NaN m-1 lambda_s 15 NaN NaN m-1 sed_sld 0.36 NaN NaN (m3/m3) I_scV 2.15 NaN NaN (-) I_scT 0 NaN NaN deg C I_scC 1 NaN NaN (-) I_scS 1.5 1.1 1.9 (-) I_scTP 0.59 0.4 0.8 (-) I_scDOP 1 NaN NaN (-) I_scChl 1 NaN NaN (-) I_scDOC 1 NaN NaN (-) swa_b0 2.5 NaN NaN m-1 swa_b1 %.2f 0.8 1.3 m-1 S_res_epi 3.30E-07 7.30E-08 1.82E-06 m d-1 (dry mass) S_res_hypo 3.30E-08 NaN NaN m d-1 (dry mass) H_sed 0.03 NaN NaN m Psat_Lang 2500 NaN NaN mg m-3 Fmax_Lang 8000 5000 10000 mg kg-1 Uz_Sz 0.3 0.1 1 m d-1 Uz_Chl 0.16 0.05 0.5 m d-1 Y_cp 1 NaN NaN (-) m_twty 0.2 0.1 0.3 
d-1 g_twty 1.5 1 1.5 d-1 k_sed_twty 2.00E-04 NaN NaN d-1 k_dop_twty 0 NaN NaN d-1 P_half 0.2 0.2 2 mg m-3 PAR_sat2 3.00E-05 NaN NaN mol m-2 s-1 beta_chl2 0.015 NaN NaN m2 mg-1 Uz_Chl2 0.16 NaN NaN m d-1 m_twty2 0.2 NaN NaN d-1 g_twty2 1.5 NaN NaN d-1 P_half2 0.2 NaN NaN mg m-3 oc_DOC 0.01 NaN NaN m2 mg-1 qy_DOC 0.1 NaN NaN mg mol-1 ''' % (latitude, longitude, atten_coeff) return out
31.531646
172
0.653954
573
2,491
2.719023
0.314136
0.134788
0.031451
0.020539
0.132863
0.033376
0
0
0
0
0
0.132789
0.21397
2,491
78
173
31.935897
0.662921
0.09996
0
0
0
0.045455
0.684734
0
0
0
0
0
0
1
0.030303
false
0
0.030303
0
0.090909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e76ef4520136e84bfa60de421094e1c1499594a2
5,854
py
Python
StratLearner/run_PreTrain.py
cdslabamotong/stratLearner
58f278d438eed92683a7daac2605ec39abd18c94
[ "MIT" ]
7
2020-12-02T06:58:30.000Z
2022-03-04T01:21:59.000Z
StratLearner/run_PreTrain.py
dm-ytlds/stratLearner
3ad880a5ca0472a3a5823fa27db7dd2bc8ba0f33
[ "MIT" ]
null
null
null
StratLearner/run_PreTrain.py
dm-ytlds/stratLearner
3ad880a5ca0472a3a5823fa27db7dd2bc8ba0f33
[ "MIT" ]
1
2020-12-02T06:58:32.000Z
2020-12-02T06:58:32.000Z
""" ============================== StratLearner Training ============================== """ import numpy as np from one_slack_ssvm import OneSlackSSVM from stratLearner import (StratLearn, Utils, InputInstance) import multiprocessing import argparse import os import sys from datetime import datetime class Object(object): pass parser = argparse.ArgumentParser() parser.add_argument( '--path', default="pre_train/preTrain_power768_uniform_structure0-01_100", help='the file of a pre_train model') parser.add_argument( '--testNum', type=int, default=270, help='number of testing data') parser.add_argument( '--thread', type=int, default=3, help='number of threads') parser.add_argument( '--output', action="store_true", help='if output prediction') args = parser.parse_args() utils= Utils() file = open(args.path, 'r') dataname= file.readline().split()[0] vNum=int(file.readline().split()[0]) featureGenMethod=file.readline().split()[0] featureNum=int(file.readline().split()[0]) indexes=[] w=[] line=file.readline() while line: indexes.append(int(line.split()[0])) w.append(float(line.split()[1])) line=file.readline() trainNum =0 testNum =args.testNum pairMax=2500 thread = args.thread verbose=3 #parameter used in SVM C = 0.01 tol=0.001 if featureGenMethod == "uniform_structure1-0": maxFeatureNum=1 max_iter=0 else: if featureGenMethod == "WC_Weibull_structure": maxFeatureNum=800 max_iter = 0 else: maxFeatureNum=2000 max_iter = 0 #define the one-hop loss balance_para=1000; loss_type = Object() loss_type.name="area" loss_type.weight=1 LAI_method = "fastLazy" effectAreaNum = 1 #simulation times, small number for testing infTimes = 1080 #get data path = os.getcwd() data_path=os.path.abspath(os.path.join(path, os.pardir))+"/data" pair_path = "{}/{}/{}_pair_{}".format(data_path,dataname,dataname,pairMax) graphPath = "{}/{}/{}_diffusionModel".format(data_path,dataname,dataname) featurePath = "{}/{}/feature/{}_{}/".format(data_path,dataname,featureGenMethod,maxFeatureNum) X_train, Y_train, 
_, _, X_test, Y_test, _, _ = utils.getDataTrainTestRandom(pair_path ,trainNum,testNum, pairMax) print("data fetched") instance = InputInstance(graphPath, featurePath, featureNum, vNum, effectAreaNum, balance_para, loss_type, featureRandom = True, maxFeatureNum = maxFeatureNum, thread = thread, LAI_method=LAI_method, indexes=indexes) #**************************OneSlackSSVM model = StratLearn() model.initialize(X_train, Y_train, instance) one_slack_svm = OneSlackSSVM(model, verbose=verbose, C=C, tol=tol, n_jobs=thread, max_iter = max_iter) #one_slack_svm.fit(X_train, Y_train, initialize = False) one_slack_svm.w=w print("Prediction Started") Y_pred = one_slack_svm.predict(X_test, featureNum) print("Testing Started") block_size =int (testNum/thread); p = multiprocessing.Pool(thread) influence_Xs = p.starmap(instance.testInfluence_0_block, ((X_test[i*block_size:(i+1)*block_size], infTimes) for i in range(thread)),1) p.close() p.join() p = multiprocessing.Pool(thread) influence_Ys = p.starmap(instance.testInfluence_0_block, ((X_test[i*block_size:(i+1)*block_size], infTimes, Y_test[i*block_size:(i+1)*block_size]) for i in range(thread)),1) p.close() p.join() p = multiprocessing.Pool(thread) influence_Y_preds = p.starmap(instance.testInfluence_0_block, ((X_test[i*block_size:(i+1)*block_size], infTimes, Y_pred[i*block_size:(i+1)*block_size]) for i in range(thread)),1) p.close() p.join() influence_X=[] influence_Y=[] influence_Y_pred=[] for i in range(thread): influence_X.extend(influence_Xs[i]) influence_Y.extend(influence_Ys[i]) influence_Y_pred.extend(influence_Y_preds[i]) reduce_percent_opt=[] reduce_percent_pre = [] com_to_opt = [] error_abs = [] error_ratio = [] for influence_x, influence_y, influence_y_pred in zip(influence_X, influence_Y, influence_Y_pred): #print("{} {} {} {} {}".format(influence_x,influence_y,influence_y_pred, influence_x_read, influence_y_read)) reduce_percent_opt.append((influence_x-influence_y)/influence_x) reduce_percent_pre.append( 
(influence_x-influence_y_pred)/influence_x) com_to_opt.append((influence_x-influence_y_pred)/(influence_x-influence_y+0.01)) error_abs.append((influence_y_pred-influence_y)) error_ratio.append((influence_y_pred-influence_y)/influence_y) if args.output: now = datetime.now() with open(now.strftime("%d-%m-%Y %H:%M:%S"), 'a') as the_file: for x_test, y_test, y_pred in zip(X_test,Y_test,Y_pred): for target in [x_test, y_test, y_pred]: line=''; for a in target: line += a line += ' ' line += '\n' the_file.write(line) the_file.write('\n') print(dataname) print('StratLearner') print("error_abs: {} +- {}".format(np.mean(np.array(error_abs)), np.std(np.array(error_abs)))) print("error_ratio: {} +- {}".format(np.mean(np.array(error_ratio)), np.std(np.array(error_ratio)))) print("reduce_percent_opt: {} +- {}".format(np.mean(np.array(reduce_percent_opt)), np.std(np.array(reduce_percent_opt)))) print("reduce_percent_pre: {} +- {}".format(np.mean(np.array(reduce_percent_pre)), np.std(np.array(reduce_percent_pre)))) print("com_to_opt: {} +- {}".format(np.mean(np.array(com_to_opt)), np.std(np.array(com_to_opt)))) # print("featureNum:{}, featureGenMethod: {}, c:{} balance_para: {}".format(featureNum, featureGenMethod, C,balance_para)) print("trainNum:{}, testNum:{}, infTimes:{} ".format(trainNum, testNum, infTimes)) print("loss_type:{}, LAI_method:{}, ".format(loss_type.name, LAI_method)) print("===============================================================")
29.27
178
0.686197
808
5,854
4.730198
0.221535
0.057561
0.032967
0.041863
0.333595
0.249084
0.181057
0.128728
0.10675
0.10675
0
0.013685
0.138709
5,854
199
179
29.417085
0.744347
0.064742
0
0.155039
0
0
0.124931
0.025463
0
0
0
0
0
1
0
false
0.007752
0.062016
0
0.069767
0.108527
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e76f72134d51ed0f47cafe05a038eba895df8c92
2,059
py
Python
chapter10/mech.py
NetworkRanger/python-core
f0aa2c4ce0665e559e85aef04c40c42164cd838a
[ "MIT" ]
1
2018-12-29T15:32:06.000Z
2018-12-29T15:32:06.000Z
chapter10/mech.py
NetworkRanger/python-core
f0aa2c4ce0665e559e85aef04c40c42164cd838a
[ "MIT" ]
null
null
null
chapter10/mech.py
NetworkRanger/python-core
f0aa2c4ce0665e559e85aef04c40c42164cd838a
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding:utf-8 -*- # Author: NetworkRanger # Date: 2019/8/11 9:39 AM from BeautifulSoup import BeautifulSoup, SoupStrainer from mechanize import Browser BS = lambda page: page YOUR_LOGIN = '' YOUR_PASSWD = '' br = Browser() # home page rsp = br.open('http://us.pycon.org/2011/home/') print '\n***', rsp.geturl() print "Confirm home page has 'Login in' link; click it" page = rsp.read() assert 'Log in' in page, 'Log in not in page' rsp = br.follow_link(text_regex='Log in') # login page print '\n***', rsp.geturl() print 'Error due to invalid creds; resubmit w/valid creds' assert rsp.geturl() == 'http://us.pycon.org/2011/account/login/', rsp.geturl() page = rsp.read() err = str(BS(page).find('div', {'id': 'errorMsg'}).find('ul').find('li').string) assert err == 'The username and/or password you specified are not correct.', err br.select_form(nr=0) br.form['username'] = YOUR_LOGIN br.form['password'] = YOUR_PASSWD rsp = br.submit() # login successful, home page redirect print '\n***', rsp.geturl() print 'Logged in properyly on home page; click Account link' assert rsp.geturl == 'http://us.pycon.org/2011/home/', rsp.geturl() page = rsp.read() assert 'Logout' in page, 'Logout not in page' rsp = br.follow_link(text_regex='Account') # account page print '\n**', rsp.geturl() print 'Email address parseable on Account page; go back' assert rsp.geturl() == 'http://us.pycon.org/2011/account/email/', rsp.geturl() page = rsp.read() assert 'Email Address' in page, 'Missing email address' print ' Primary e-mail: %r' % str(BS(page).find('table').find('tr').find('td').find('b').string) rsp = br.back() # back to home page print '\n***', rsp.geturl() print 'Back works, on home page again; click Logout link' rsp = br.follow_link(url_regex='logout') # logout page print '\n***', rsp.geturl() print 'Confirm on Logout page and Log in link at the top' assert rsp.geturl() == 'http://us.pycon.org/2011/account/logout/', rsp.geturl() page = rsp.read() assert 'Log in' in 
page, 'Log in not in page' print '\n*** DONE'
30.731343
99
0.685284
333
2,059
4.204204
0.33033
0.09
0.038571
0.064286
0.393571
0.365
0.21
0.21
0.186429
0.06
0
0.017978
0.135503
2,059
66
100
31.19697
0.768539
0.091306
0
0.272727
0
0
0.40914
0
0
0
0
0
0.204545
0
null
null
0.068182
0.045455
null
null
0.318182
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
e77025bb43131ecd54379f02fd5583a531b73916
581
py
Python
Assignment2/2-1/lib/session.py
Computer-Science-Lecture/ITE2038
3436ab4598e7ace334952156fa7a56b3124b5557
[ "MIT" ]
null
null
null
Assignment2/2-1/lib/session.py
Computer-Science-Lecture/ITE2038
3436ab4598e7ace334952156fa7a56b3124b5557
[ "MIT" ]
null
null
null
Assignment2/2-1/lib/session.py
Computer-Science-Lecture/ITE2038
3436ab4598e7ace334952156fa7a56b3124b5557
[ "MIT" ]
1
2018-09-07T01:47:09.000Z
2018-09-07T01:47:09.000Z
from lib.permission import Permission class Session(object): def __init__(self, user: int, permission: Permission): self.user = user self.permission = permission @property def isAdmin(self) -> bool: return self.permission == Permission.admin def require(): def wrapper(f): @wraps(f) def wrapped(self, *args, **kwargs): if self.session: return f(self, *args, **kwargs) raise Exception("Session required") return wrapped return wrapper
27.666667
58
0.567986
59
581
5.525424
0.474576
0.184049
0.147239
0
0
0
0
0
0
0
0
0
0.337349
581
20
59
29.05
0.846753
0
0
0
0
0
0.027539
0
0
0
0
0
0
1
0.294118
false
0
0.058824
0.058824
0.647059
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
e771901cac33122ea8a46bf698c48b3de96e015e
886
py
Python
nvic.py
dhylands/upy-examples
90cca32f0c6c65c33967da9ac1a998e731c60d91
[ "MIT" ]
78
2015-01-15T23:24:21.000Z
2022-02-25T09:24:58.000Z
nvic.py
dhylands/upy-examples
90cca32f0c6c65c33967da9ac1a998e731c60d91
[ "MIT" ]
1
2015-02-04T00:51:52.000Z
2015-02-04T00:51:52.000Z
nvic.py
dhylands/upy-examples
90cca32f0c6c65c33967da9ac1a998e731c60d91
[ "MIT" ]
26
2015-02-03T21:26:33.000Z
2022-02-21T02:57:46.000Z
import machine SCS = 0xE000E000 SCB = SCS + 0x0D00 NVIC = SCS + 0x0100 VTOR = SCB + 0x08 SCB_SHP = SCB + 0x18 NVIC_PRIO = NVIC + 0x300 def dump_nvic(): print('NVIC_PRIO = {:08x} @ {:08x}'.format(machine.mem32[NVIC_PRIO], NVIC_PRIO)) print('VTOR = {:08x} @ {:08x}'.format(machine.mem32[VTOR], VTOR)) print('System IRQs') for i in range(12): irq = -(16 - (i + 4)) prio = machine.mem8[SCB_SHP + i] >> 4 if prio > 0: print('{:3d}:{:d}'.format(irq, prio)) print('Regular IRQs') for irq in range(80): prio = machine.mem8[NVIC_PRIO + irq] >> 4 if prio > 0: print('{:3d}:{:d}'.format(irq, prio)) def nvic_set_prio(irq, prio): if irq < 0: idx = (irq & 0x0f) - 4 machine.mem8[SCB_SHP + idx] = prio << 4 else: machine.mem8[NVIC_PRIO + irq] = prio << 4 dump_nvic()
23.945946
84
0.546275
127
886
3.708661
0.322835
0.101911
0.050955
0.080679
0.318471
0.123142
0.123142
0.123142
0.123142
0.123142
0
0.096215
0.284424
886
36
85
24.611111
0.646688
0
0
0.142857
0
0
0.109481
0
0
0
0.044018
0
0
1
0.071429
false
0
0.035714
0
0.107143
0.214286
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e772c6aaf22ad97381e12d6d2154f737e40ff951
9,152
py
Python
trimesh/primitives.py
maganrobotics/UR3e-manipulation
ceaf650b1a811d0bfc3baf175d353fc7f4a33522
[ "MIT" ]
null
null
null
trimesh/primitives.py
maganrobotics/UR3e-manipulation
ceaf650b1a811d0bfc3baf175d353fc7f4a33522
[ "MIT" ]
null
null
null
trimesh/primitives.py
maganrobotics/UR3e-manipulation
ceaf650b1a811d0bfc3baf175d353fc7f4a33522
[ "MIT" ]
null
null
null
import numpy as np

from . import util
from . import points
from . import creation
from .base import Trimesh
from .constants import log
from .triangles import windings_aligned

class Primitive(Trimesh):
    '''
    Geometric primitives which are a subclass of Trimesh.
    Mesh is generated lazily when vertices or faces are requested.
    '''
    def __init__(self, *args, **kwargs):
        super(Primitive, self).__init__(*args, **kwargs)
        self._data.clear()
        self._validate = False

    @property
    def faces(self):
        # Serve from cache when present; otherwise build the mesh lazily.
        stored = self._cache['faces']
        if util.is_shape(stored, (-1,3)):
            return stored
        self._create_mesh()
        #self._validate_face_normals()
        return self._cache['faces']

    @faces.setter
    def faces(self, values):
        # Primitives derive faces from their parameters; ignore writes.
        log.warning('Primitive faces are immutable! Not setting!')

    @property
    def vertices(self):
        # Serve from cache when present; otherwise build the mesh lazily.
        stored = self._cache['vertices']
        if util.is_shape(stored, (-1,3)):
            return stored
        self._create_mesh()
        return self._cache['vertices']

    @vertices.setter
    def vertices(self, values):
        if values is not None:
            log.warning('Primitive vertices are immutable! Not setting!')

    @property
    def face_normals(self):
        # Serve from cache when present; otherwise build the mesh lazily.
        stored = self._cache['face_normals']
        if util.is_shape(stored, (-1,3)):
            return stored
        self._create_mesh()
        return self._cache['face_normals']

    @face_normals.setter
    def face_normals(self, values):
        if values is not None:
            log.warning('Primitive face normals are immutable! Not setting!')

    def _create_mesh(self):
        # Subclasses must populate self._cache['vertices'/'faces'/'face_normals'].
        raise ValueError('Primitive doesn\'t define mesh creation!')

class Sphere(Primitive):
    def __init__(self, *args, **kwargs):
        '''
        Create a Sphere primitive, which is a subclass of Trimesh.

        Arguments
        ----------
        sphere_radius: float, radius of sphere
        sphere_center: (3,) float, center of sphere
        subdivisions: int, number of subdivisions for icosphere. Default is 3
        '''
        super(Sphere, self).__init__(*args, **kwargs)
        if 'sphere_radius' in kwargs:
            self.sphere_radius = kwargs['sphere_radius']
        if 'sphere_center' in kwargs:
            self.sphere_center = kwargs['sphere_center']
        if 'subdivisions' in kwargs:
            self._data['subdivisions'] = int(kwargs['subdivisions'])
        else:
            self._data['subdivisions'] = 3
        # Unit icosphere, scaled/translated on demand in _create_mesh.
        self._unit_sphere = creation.icosphere(subdivisions=self._data['subdivisions'])

    @property
    def sphere_center(self):
        stored = self._data['center']
        if stored is None:
            # Default: centered at the origin.
            return np.zeros(3)
        return stored

    @sphere_center.setter
    def sphere_center(self, values):
        self._data['center'] = np.asanyarray(values, dtype=np.float64)

    @property
    def sphere_radius(self):
        stored = self._data['radius']
        if stored is None:
            # Default: unit radius.
            return 1.0
        return stored

    @sphere_radius.setter
    def sphere_radius(self, value):
        self._data['radius'] = float(value)

    def _create_mesh(self):
        # Scale and translate the cached unit icosphere.
        ico = self._unit_sphere
        self._cache['vertices'] = ((ico.vertices * self.sphere_radius) +
                                   self.sphere_center)
        self._cache['faces'] = ico.faces
        self._cache['face_normals'] = ico.face_normals

class Box(Primitive):
    def __init__(self, *args, **kwargs):
        '''
        Create a Box primitive, which is a subclass of Trimesh

        Arguments
        ----------
        box_extents: (3,) float, size of box
        box_transform: (4,4) float, transformation matrix for box
        box_center: (3,) float, convenience property which updates
                    box_transform with a translation-only matrix
        '''
        super(Box, self).__init__(*args, **kwargs)
        if 'box_extents' in kwargs:
            self.box_extents = kwargs['box_extents']
        if 'box_transform' in kwargs:
            self.box_transform = kwargs['box_transform']
        if 'box_center' in kwargs:
            self.box_center = kwargs['box_center']
        # Unit box, scaled/transformed on demand in _create_mesh.
        self._unit_box = creation.box()

    @property
    def box_center(self):
        # Translation component of the homogeneous transform.
        return self.box_transform[0:3,3]

    @box_center.setter
    def box_center(self, values):
        # Update only the translation column of the transform.
        transform = self.box_transform
        transform[0:3,3] = values
        self._data['box_transform'] = transform

    @property
    def box_extents(self):
        stored = self._data['box_extents']
        if util.is_shape(stored, (3,)):
            return stored
        # Default: unit cube.
        return np.ones(3)

    @box_extents.setter
    def box_extents(self, values):
        self._data['box_extents'] = np.asanyarray(values, dtype=np.float64)

    @property
    def box_transform(self):
        stored = self._data['box_transform']
        if util.is_shape(stored, (4,4)):
            return stored
        # Default: identity transform.
        return np.eye(4)

    @box_transform.setter
    def box_transform(self, matrix):
        matrix = np.asanyarray(matrix, dtype=np.float64)
        if matrix.shape != (4,4):
            raise ValueError('Matrix must be (4,4)!')
        self._data['box_transform'] = matrix

    @property
    def is_oriented(self):
        # True when the transform carries a non-identity rotation block.
        if util.is_shape(self.box_transform, (4,4)):
            return not np.allclose(self.box_transform[0:3,0:3], np.eye(3))
        else:
            return False

    def _create_mesh(self):
        log.debug('Creating mesh for box primitive')
        box = self._unit_box
        vertices, faces, normals = box.vertices, box.faces, box.face_normals
        # Scale the unit box, then apply the homogeneous transform.
        vertices = points.transform_points(vertices * self.box_extents,
                                           self.box_transform)
        # Rotate the normals by the transform's rotation block only.
        normals = np.dot(self.box_transform[0:3,0:3], normals.T).T
        # If the transform flipped the winding, mirror all faces to keep
        # windings consistent with the normals.
        aligned = windings_aligned(vertices[faces[:1]], normals[:1])[0]
        if not aligned:
            faces = np.fliplr(faces)
        # for a primitive the vertices and faces are derived from other information
        # so it goes in the cache, instead of the datastore
        self._cache['vertices'] = vertices
        self._cache['faces'] = faces
        self._cache['face_normals'] = normals

class Extrusion(Primitive):
    def __init__(self, *args, **kwargs):
        '''
        Create an Extrusion primitive, which subclasses Trimesh

        Arguments
        ----------
        extrude_polygon: shapely.geometry.Polygon, polygon to extrude
        extrude_transform: (4,4) float, transform to apply after extrusion
        extrude_height: float, height to extrude polygon by
        '''
        super(Extrusion, self).__init__(*args, **kwargs)
        if 'extrude_polygon' in kwargs:
            self.extrude_polygon = kwargs['extrude_polygon']
        if 'extrude_transform' in kwargs:
            self.extrude_transform = kwargs['extrude_transform']
        if 'extrude_height' in kwargs:
            self.extrude_height = kwargs['extrude_height']

    @property
    def extrude_transform(self):
        stored = self._data['extrude_transform']
        if np.shape(stored) == (4,4):
            return stored
        # Default: identity transform.
        return np.eye(4)

    @extrude_transform.setter
    def extrude_transform(self, matrix):
        matrix = np.asanyarray(matrix, dtype=np.float64)
        if matrix.shape != (4,4):
            raise ValueError('Matrix must be (4,4)!')
        self._data['extrude_transform'] = matrix

    @property
    def extrude_height(self):
        stored = self._data['extrude_height']
        if stored is None:
            raise ValueError('extrude height not specified!')
        # NOTE(review): stored was set as float(value) but is read with
        # .copy()[0] — _data presumably wraps values in arrays; confirm
        # against the DataStore implementation.
        return stored.copy()[0]

    @extrude_height.setter
    def extrude_height(self, value):
        self._data['extrude_height'] = float(value)

    @property
    def extrude_polygon(self):
        stored = self._data['extrude_polygon']
        if stored is None:
            raise ValueError('extrude polygon not specified!')
        # NOTE(review): indexed read ([0]) of a scalar store — see
        # extrude_height note above.
        return stored[0]

    @extrude_polygon.setter
    def extrude_polygon(self, value):
        polygon = creation.validate_polygon(value)
        self._data['extrude_polygon'] = polygon

    @property
    def extrude_direction(self):
        # Extrusion axis: transformed +Z unit vector.
        direction = np.dot(self.extrude_transform[:3,:3],
                           [0.0,0.0,1.0])
        return direction

    def slide(self, distance):
        # Translate the extrusion along its local Z axis by `distance`.
        distance = float(distance)
        translation = np.eye(4)
        translation[2,3] = distance
        new_transform = np.dot(self.extrude_transform.copy(),
                               translation.copy())
        self.extrude_transform = new_transform

    def _create_mesh(self):
        log.debug('Creating mesh for extrude primitive')
        mesh = creation.extrude_polygon(self.extrude_polygon,
                                        self.extrude_height)
        mesh.apply_transform(self.extrude_transform)
        self._cache['vertices'] = mesh.vertices
        self._cache['faces'] = mesh.faces
        self._cache['face_normals'] = mesh.face_normals
33.52381
87
0.605114
1,065
9,152
5.010329
0.130516
0.028486
0.026237
0.023613
0.285045
0.20521
0.192841
0.165105
0.119003
0.10401
0
0.011515
0.288352
9,152
272
88
33.647059
0.807769
0.116149
0
0.253731
0
0.004975
0.11385
0
0
0
0
0
0
1
0.164179
false
0
0.034826
0.004975
0.328358
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7742de3e4510356f7231d426f247a622c865b21
1,923
py
Python
discordbot.py
asamii0006/discordpy-startup
3a14a4155373fff96067954e85ad64658e4bbbf5
[ "MIT" ]
null
null
null
discordbot.py
asamii0006/discordpy-startup
3a14a4155373fff96067954e85ad64658e4bbbf5
[ "MIT" ]
null
null
null
discordbot.py
asamii0006/discordpy-startup
3a14a4155373fff96067954e85ad64658e4bbbf5
[ "MIT" ]
null
null
null
from discord.ext import commands import os import traceback bot = commands.Bot(command_prefix='/') token = os.environ['DISCORD_BOT_TOKEN'] @bot.event async def on_command_error(ctx, error): orig_error = getattr(error, "original", error) error_msg = ''.join(traceback.TracebackException.from_exception(orig_error).format()) await ctx.send(error_msg) @bot.command() async def hello(ctx): await ctx.send('こんちゃ~す') bot.run(token) # coding: utf-8 import random import re pattern = '\d{1,2}d\d{1,3}|\d{1,2}D\d{1,3}' split_pattern = 'd|D' # 対象の文字列かどうか def judge_nDn(src): repatter = re.compile(pattern) result = repatter.fullmatch(src) if result is not None: return True elif src == '1d114514' or src == '1D114514': return True return False # 何面ダイスを何回振るか def split_nDn(src): return re.split(split_pattern,src) # ダイスを振る def role_nDn(src): result = [] sum_dice = 0 role_index = split_nDn(src) role_count = int(role_index[0]) nDice = int(role_index[1]) for i in range(role_count): tmp = random.randint(1,nDice) result.append(tmp) sum_dice = sum_dice + tmp is1dice = True if role_count == 1 else False return result,sum_dice,is1dice def nDn(text): if judge_nDn(text): result,sum_dice,is1dice = role_nDn(text) if is1dice: return 'ダイス:' + text + '\n出目:' + str(sum_dice) else: return 'ダイス:' + text + '\n出目:' + str(result) + '\n合計:' + str(sum_dice) else: return None import discord import nDnDICE client = discord.Client() @client.event async def on_ready(): print('Botを起動しました。') @client.event async def on_message(message): msg = message.content result = nDnDICE.nDn(msg) if result is not None: await client.send_message(message.channel, result) #ここにbotのアクセストークンを入力 client.run('DISCORD_BOT_TOKEN')
22.103448
89
0.651586
272
1,923
4.474265
0.345588
0.040263
0.032046
0.036976
0.130649
0.011504
0.011504
0
0
0
0
0.021448
0.224129
1,923
86
90
22.360465
0.794236
0.031721
0
0.129032
0
0.016129
0.071659
0.016703
0
0
0
0
0
1
0.064516
false
0
0.112903
0.016129
0.306452
0.016129
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7765cf07995f7e47b792bf00a9c30793c228c4a
1,604
py
Python
filling/parse/ex.py
nvxden/flask-films
038f4bcaa7feabdfff7662fb1048bf48515e5c26
[ "MIT" ]
null
null
null
filling/parse/ex.py
nvxden/flask-films
038f4bcaa7feabdfff7662fb1048bf48515e5c26
[ "MIT" ]
null
null
null
filling/parse/ex.py
nvxden/flask-films
038f4bcaa7feabdfff7662fb1048bf48515e5c26
[ "MIT" ]
null
null
null
import asyncio as aio import os import re from aiohttp import ClientSession from pageloader import LoadPageTask, PageLoader from nvxlira import Lira from nvxaex import Executor ############################################################ # class class LoadPage(LoadPageTask): def __str__(self): return self.filename ############################################################ # lira lira = Lira('data.bin', 'head.bin') if len(lira['load-page']) == 0 and len(lira['load-page-done']) == 0: for url in [ 'http://www.world-art.ru/cinema/cinema.php?id=65021', 'http://www.world-art.ru/cinema/cinema.php?id=17190', 'http://www.world-art.ru/cinema/cinema.php?id=36896', 'http://www.world-art.ru/cinema/cinema.php?id=547', 'http://www.world-art.ru/cinema/cinema.php?id=50952' ]: task = LoadPage(url=url, filename='works/' + re.search('id=(\d+)', url).group(1) + '.html') lira.put(task, cat='load-page') print('Not done:') for task in [ lira.get(id) for id in lira['load-page'] ]: print(task) print('Done:') for task in [ lira.get(id) for id in lira['load-page-done'] ]: print(task) ############################################################ # main async def main(): async with ClientSession() as session: loader = PageLoader(session, silent=False) ex = Executor(lira, loader, silent=False) await ex.extasks('load-page', 'load-page-done') return ############################################################ # run try: os.mkdir('works') except: pass aio.run(main()) del lira ############################################################ # END
20.831169
93
0.545511
204
1,604
4.269608
0.377451
0.064294
0.068886
0.086108
0.289323
0.289323
0.289323
0.289323
0.289323
0.094145
0
0.018532
0.125312
1,604
76
94
21.105263
0.602281
0.014963
0
0.054054
0
0
0.298273
0
0
0
0
0
0
1
0.027027
false
0.027027
0.189189
0.027027
0.297297
0.108108
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e776bec5c2d6010767a894ee51a22e9c4a498c74
5,803
py
Python
Incident-Response/Tools/cyphon/cyphon/contexts/autocomplete_light_registry.py
sn0b4ll/Incident-Playbook
cf519f58fcd4255674662b3620ea97c1091c1efb
[ "MIT" ]
1
2021-07-24T17:22:50.000Z
2021-07-24T17:22:50.000Z
Incident-Response/Tools/cyphon/cyphon/contexts/autocomplete_light_registry.py
sn0b4ll/Incident-Playbook
cf519f58fcd4255674662b3620ea97c1091c1efb
[ "MIT" ]
2
2022-02-28T03:40:31.000Z
2022-02-28T03:40:52.000Z
Incident-Response/Tools/cyphon/cyphon/contexts/autocomplete_light_registry.py
sn0b4ll/Incident-Playbook
cf519f58fcd4255674662b3620ea97c1091c1efb
[ "MIT" ]
2
2022-02-25T08:34:51.000Z
2022-03-16T17:29:44.000Z
# -*- coding: utf-8 -*-
# Copyright 2017-2019 ControlScan, Inc.
#
# This file is part of Cyphon Engine.
#
# Cyphon Engine is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# Cyphon Engine is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cyphon Engine. If not, see <http://www.gnu.org/licenses/>.
"""
Defines Autocomplete models for use in admin pages for the Contexts app.
"""

# third party
import autocomplete_light.shortcuts as autocomplete_light

# local
from distilleries.models import Distillery
from utils.choices.choices import get_operator_choices, get_field_type
from .models import Context


def _distillery_fields(request, param, default):
    """Return the field list for a Distillery selected in the request.

    Looks up request.GET[param] as a Distillery pk; returns `default`
    when the parameter is absent or empty.
    """
    distillery_id = request.GET.get(param, None)
    if distillery_id:
        distillery = Distillery.objects.get(pk=distillery_id)
        return distillery.get_field_list()
    return default


def _context_distillery_fields(request, distillery_attr, default):
    """Return the field list for a distillery of the selected Context.

    Looks up request.GET['context'] as a Context pk and reads the
    Distillery named by `distillery_attr` ('primary_distillery' or
    'related_distillery'); returns `default` when the parameter is
    absent or empty.
    """
    context_id = request.GET.get('context', None)
    if context_id:
        context = Context.objects.select_related(distillery_attr)\
                                 .get(pk=context_id)
        return getattr(context, distillery_attr).get_field_list()
    return default


class FilterValueFieldsByFocalDistillery(autocomplete_light.AutocompleteListBase):
    """
    Defines autocomplete rules for the value_field on the Context admin page.
    """
    choices = ()
    attrs = {
        'data-autocomplete-minimum-characters': 0,
        'placeholder': 'select a distillery and click to see options...'
    }

    def choices_for_request(self):
        """
        Overrides the choices_for_request method of the AutocompleteListBase
        class. Filters options based on the selected primary_distillery.
        """
        choices = _distillery_fields(self.request, 'primary_distillery',
                                     self.choices)
        return self.order_choices(choices)[0:self.limit_choices]


class FilterSearchFieldsByRelatedDistillery(autocomplete_light.AutocompleteListBase):
    """
    Defines autocomplete rules for the value_field on the Context admin page.
    """
    choices = ()
    attrs = {
        'data-autocomplete-minimum-characters': 0,
        'placeholder': 'select a related distillery and click to see options...'
    }

    def choices_for_request(self):
        """
        Overrides the choices_for_request method of the AutocompleteListBase
        class. Filters options based on the selected related_distillery.
        """
        choices = _distillery_fields(self.request, 'related_distillery',
                                     self.choices)
        return self.order_choices(choices)[0:self.limit_choices]


class FilterValueFieldsByContext(autocomplete_light.AutocompleteListBase):
    """
    Defines autocomplete rules for the value_field on the ContextFilter
    admin page.
    """
    choices = ()
    attrs = {
        'data-autocomplete-minimum-characters': 0,
        'placeholder': 'select a distillery and click to see options...'
    }

    def choices_for_request(self):
        """
        Overrides the choices_for_request method of the AutocompleteListBase
        class. Filters options based on the primary_distillery of the
        selected Context.
        """
        choices = _context_distillery_fields(self.request,
                                             'primary_distillery',
                                             self.choices)
        return self.order_choices(choices)[0:self.limit_choices]


class FilterSearchFieldsByContext(autocomplete_light.AutocompleteListBase):
    """
    Defines autocomplete rules for the value_field on the ContextFilter
    admin page.
    """
    choices = ()
    attrs = {
        'data-autocomplete-minimum-characters': 0,
        'placeholder': 'select a distillery and click to see options...'
    }

    def choices_for_request(self):
        """
        Overrides the choices_for_request method of the AutocompleteListBase
        class. Filters options based on the related_distillery of the
        selected Context.
        """
        choices = _context_distillery_fields(self.request,
                                             'related_distillery',
                                             self.choices)
        return self.order_choices(choices)[0:self.limit_choices]


class FilterOperatorsBySearchField(autocomplete_light.AutocompleteChoiceListBase):
    """
    Defines autocomplete rules for the operator field on the ContextFilter
    admin page.
    """
    choices = ()
    attrs = {
        'data-autocomplete-minimum-characters': 0,
        'placeholder': 'select a search field and click to see options...'
    }

    def choices_for_request(self):
        """
        Overrides the choices_for_request method of the AutocompleteListBase
        class. Filters options based on the selected search_field.
        """
        choices = self.choices
        search_field = self.request.GET.get('search_field', None)
        if search_field:
            field_type = get_field_type(search_field)
            choices = get_operator_choices(field_type)
        return self.order_choices(choices)[0:self.limit_choices]


autocomplete_light.register(FilterValueFieldsByFocalDistillery)
autocomplete_light.register(FilterSearchFieldsByRelatedDistillery)
autocomplete_light.register(FilterValueFieldsByContext)
autocomplete_light.register(FilterSearchFieldsByContext)
autocomplete_light.register(FilterOperatorsBySearchField)
33.16
85
0.697398
652
5,803
6.064417
0.211656
0.051593
0.042994
0.034143
0.643146
0.628983
0.61482
0.595599
0.595599
0.583966
0
0.004449
0.225401
5,803
174
86
33.350575
0.875195
0.31656
0
0.592105
0
0
0.157322
0.048993
0
0
0
0
0
1
0.065789
false
0
0.052632
0
0.381579
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e77801f926d4f34148ef7757e8d2132f28e45ad4
2,228
py
Python
download snipets.py
alxcord/my_public_scripts
856decd73afe869b68095d5ab21d32ea261d872f
[ "Apache-2.0" ]
null
null
null
download snipets.py
alxcord/my_public_scripts
856decd73afe869b68095d5ab21d32ea261d872f
[ "Apache-2.0" ]
null
null
null
download snipets.py
alxcord/my_public_scripts
856decd73afe869b68095d5ab21d32ea261d872f
[ "Apache-2.0" ]
null
null
null
import urllib.request ... url = 'http://example.com/' response = urllib.request.urlopen(url) data = response.read() # a `bytes` object text = data.decode('utf-8') # a `str`; this step can't be used if data is binary import urllib.request import gzip ... # Read the first 64 bytes of the file inside the .gz archive located at `url` url = 'http://example.com/something.gz' with urllib.request.urlopen(url) as response: with gzip.GzipFile(fileobj=response) as uncompressed: file_header = uncompressed.read(64) # a `bytes` object # Or do anything shown above using `uncompressed` instead of `response`. #I download files and save it locally using the below code: import requests url = 'https://www.python.org/static/img/python-logo.png' fileName = 'D:\Python\dwnldPythonLogo.png' req = requests.get(url) file = open(fileName, 'wb') for chunk in req.iter_content(100000): file.write(chunk) file.close() # Download import urllib2 url = "http://download.thinkbroadband.com/10MB.zip" file_name = url.split('/')[-1] u = urllib2.urlopen(url) f = open(file_name, 'wb') meta = u.info() file_size = int(meta.getheaders("Content-Length")[0]) print "Downloading: %s Bytes: %s" % (file_name, file_size) file_size_dl = 0 block_sz = 8192 while True: buffer = u.read(block_sz) if not buffer: break file_size_dl += len(buffer) f.write(buffer) status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. 
/ file_size) status = status + chr(8)*(len(status)+1) print status, f.close() # Unzip import requests, zipfile, io r = requests.get(zip_file_url) z = zipfile.ZipFile(io.BytesIO(r.content)) z.extractall() # if you'd like to save the downloaded file in a different location, replace z.extractall() with z.extractall("/path/to/destination_directory") # Outro exemplo import zipfile, urllib.request, shutil url = 'http://www....myzipfile.zip' file_name = 'myzip.zip' with urllib.request.urlopen(url) as response, open(file_name, 'wb') as out_file: shutil.copyfileobj(response, out_file) with zipfile.ZipFile(file_name) as zf: zf.extractall()
25.318182
144
0.670108
323
2,228
4.541796
0.452012
0.038173
0.027267
0.047035
0.050443
0.050443
0.050443
0
0
0
0
0.017357
0.198384
2,228
87
145
25.609195
0.804031
0.206912
0
0.076923
0
0
0.163669
0.017386
0
0
0
0
0
0
null
null
0
0.134615
null
null
0.038462
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
e77966df213ba660b9ceebdaefcb943c9ce395a4
33,959
py
Python
wavespin/scattering1d/utils.py
OverLordGoldDragon/dev_tg
1e06b89c1b0b5e95d9c53fda2efd02e41f708718
[ "MIT" ]
2
2020-03-28T05:37:34.000Z
2020-09-17T20:02:21.000Z
wavespin/scattering1d/utils.py
OverLordGoldDragon/dev_tg
1e06b89c1b0b5e95d9c53fda2efd02e41f708718
[ "MIT" ]
2
2020-06-02T17:52:53.000Z
2020-09-18T00:46:34.000Z
wavespin/scattering1d/utils.py
OverLordGoldDragon/dev_tg
1e06b89c1b0b5e95d9c53fda2efd02e41f708718
[ "MIT" ]
1
2020-06-02T17:52:24.000Z
2020-06-02T17:52:24.000Z
# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright (c) 2022- John Muradeli # # Distributed under the terms of the MIT License # (see wavespin/__init__.py for details) # ----------------------------------------------------------------------------- import numpy as np import math from .filter_bank import (calibrate_scattering_filters, compute_temporal_support, compute_minimum_required_length, gauss_1d, morlet_1d) def compute_border_indices(log2_T, J, i0, i1): """ Computes border indices at all scales which correspond to the original signal boundaries after padding. At the finest resolution, original_signal = padded_signal[..., i0:i1]. This function finds the integers i0, i1 for all temporal subsamplings by 2**J, being conservative on the indices. Maximal subsampling is by `2**log2_T` if `average=True`, else by `2**max(log2_T, J)`. We compute indices up to latter to be sure. Parameters ---------- log2_T : int Maximal subsampling by low-pass filtering is `2**log2_T`. J : int / tuple[int] Maximal subsampling by band-pass filtering is `2**J`. i0 : int start index of the original signal at the finest resolution i1 : int end index (excluded) of the original signal at the finest resolution Returns ------- ind_start, ind_end: dictionaries with keys in [0, ..., log2_T] such that the original signal is in padded_signal[ind_start[j]:ind_end[j]] after subsampling by 2**j References ---------- This is a modification of https://github.com/kymatio/kymatio/blob/master/kymatio/scattering1d/utils.py Kymatio, (C) 2018-present. The Kymatio developers. """ if isinstance(J, tuple): J = max(J) ind_start = {0: i0} ind_end = {0: i1} for j in range(1, max(log2_T, J) + 1): ind_start[j] = (ind_start[j - 1] // 2) + (ind_start[j - 1] % 2) ind_end[j] = (ind_end[j - 1] // 2) + (ind_end[j - 1] % 2) return ind_start, ind_end def compute_padding(J_pad, N): """ Computes the padding to be added on the left and on the right of the signal. 
It should hold that 2**J_pad >= N Parameters ---------- J_pad : int 2**J_pad is the support of the padded signal N : int original signal support size Returns ------- pad_left: amount to pad on the left ("beginning" of the support) pad_right: amount to pad on the right ("end" of the support) References ---------- This is a modification of https://github.com/kymatio/kymatio/blob/master/kymatio/scattering1d/utils.py Kymatio, (C) 2018-present. The Kymatio developers. """ N_pad = 2**J_pad if N_pad < N: raise ValueError('Padding support should be larger than the original ' 'signal size!') to_add = 2**J_pad - N pad_right = to_add // 2 pad_left = to_add - pad_right return pad_left, pad_right def compute_minimum_support_to_pad(N, J, Q, T, criterion_amplitude=1e-3, normalize='l1', r_psi=math.sqrt(0.5), sigma0=1e-1, alpha=4., P_max=5, eps=1e-7, pad_mode='reflect'): """ Computes the support to pad given the input size and the parameters of the scattering transform. Parameters ---------- N : int temporal size of the input signal J : int scale of the scattering Q : int >= 1 The number of first-order wavelets per octave. Defaults to `1`. If tuple, sets `Q = (Q1, Q2)`, where `Q2` is the number of second-order wavelets per octave (which defaults to `1`). - If `Q1==0`, will exclude `psi1_f` from computation. - If `Q2==0`, will exclude `psi2_f` from computation. T : int temporal support of low-pass filter, controlling amount of imposed time-shift invariance and maximum subsampling normalize : string / tuple[string], optional Normalization convention for the filters (in the temporal domain). Supports 'l1', 'l2', 'l1-energy', 'l2-energy', but only 'l1' or 'l2' is used. See `help(Scattering1D)`. criterion_amplitude: float `>0` and `<1`, optional Represents the numerical error which is allowed to be lost after convolution and padding. The larger criterion_amplitude, the smaller the padding size is. Defaults to `1e-3` r_psi : float, optional Should be `>0` and `<1`. 
Controls the redundancy of the filters (the larger r_psi, the larger the overlap between adjacent wavelets). Defaults to `sqrt(0.5)`. sigma0 : float, optional parameter controlling the frequential width of the low-pass filter at J_scattering=0; at a an absolute J_scattering, it is equal to :math:`\\frac{\\sigma_0}{2^J}`. Defaults to `1e-1`. alpha : float, optional tolerance factor for the aliasing after subsampling. The larger the alpha, the more conservative the value of maximal subsampling is. Defaults to `5`. P_max : int, optional maximal number of periods to use to make sure that the Fourier transform of the filters is periodic. `P_max = 5` is more than enough for double precision. Defaults to `5`. eps : float, optional required machine precision for the periodization (single floating point is enough for deep learning applications). Defaults to `1e-7`. pad_mode : str Name of padding used. If 'zero', will halve `min_to_pad`, else no effect. Returns ------- min_to_pad: int minimal value to pad the signal on one size to avoid any boundary error. """ # compute params for calibrating, & calibrate Q1, Q2 = Q if isinstance(Q, tuple) else (Q, 1) Q_temp = (max(Q1, 1), max(Q2, 1)) # don't pass in zero N_init = N # `None` means `xi_min` is limitless. Since this method is used to compute # padding, then we can't know what it is, so we compute worst case. # If `max_pad_factor=None`, then the realized filterbank's (what's built) # `xi_min` is also limitless. Else, it'll be greater, depending on # `max_pad_factor`. 
J_pad = None sigma_low, xi1, sigma1, j1s, _, xi2, sigma2, j2s, _ = \ calibrate_scattering_filters(J, Q_temp, T, r_psi=r_psi, sigma0=sigma0, alpha=alpha, J_pad=J_pad) # split `normalize` into orders if isinstance(normalize, tuple): normalize1, normalize2 = normalize else: normalize1 = normalize2 = normalize # compute psi1_f with greatest time support, if requested if Q1 >= 1: psi1_f_fn = lambda N: morlet_1d(N, xi1[-1], sigma1[-1], normalize=normalize1, P_max=P_max, eps=eps) # compute psi2_f with greatest time support, if requested if Q2 >= 1: psi2_f_fn = lambda N: morlet_1d(N, xi2[-1], sigma2[-1], normalize=normalize2, P_max=P_max, eps=eps) # compute lowpass phi_f_fn = lambda N: gauss_1d(N, sigma_low, normalize=normalize1, P_max=P_max, eps=eps) # compute for all cases as psi's time support might exceed phi's ca = dict(criterion_amplitude=criterion_amplitude) N_min_phi = compute_minimum_required_length(phi_f_fn, N_init=N_init, **ca) phi_halfsupport = compute_temporal_support(phi_f_fn(N_min_phi)[None], **ca) if Q1 >= 1: N_min_psi1 = compute_minimum_required_length(psi1_f_fn, N_init=N_init, **ca) psi1_halfsupport = compute_temporal_support(psi1_f_fn(N_min_psi1)[None], **ca) else: psi1_halfsupport = -1 # placeholder if Q2 >= 1: N_min_psi2 = compute_minimum_required_length(psi2_f_fn, N_init=N_init, **ca) psi2_halfsupport = compute_temporal_support(psi2_f_fn(N_min_psi2)[None], **ca) else: psi2_halfsupport = -1 # set min to pad based on each pads = (phi_halfsupport, psi1_halfsupport, psi2_halfsupport) # can pad half as much if pad_mode == 'zero': pads = [p//2 for p in pads] pad_phi, pad_psi1, pad_psi2 = pads # set main quantity as the max of all min_to_pad = max(pads) # return results return min_to_pad, pad_phi, pad_psi1, pad_psi2 def compute_meta_scattering(J_pad, J, Q, T, r_psi=math.sqrt(.5), max_order=2): """Get metadata on the transform. 
This information specifies the content of each scattering coefficient, which order, which frequencies, which filters were used, and so on. Parameters ---------- J : int The maximum log-scale of the scattering transform. In other words, the maximum scale is given by `2**J`. Q : int >= 1 / tuple[int] The number of first-order wavelets per octave. Defaults to `1`. If tuple, sets `Q = (Q1, Q2)`, where `Q2` is the number of second-order wavelets per octave (which defaults to `1`). J_pad : int 2**J_pad == amount of temporal padding T : int temporal support of low-pass filter, controlling amount of imposed time-shift invariance and maximum subsampling r_psi : float Filter redundancy. See `help(wavespin.scattering1d.filter_bank.calibrate_scattering_filters)`. max_order : int, optional The maximum order of scattering coefficients to compute. Must be either equal to `1` or `2`. Defaults to `2`. Returns ------- meta : dictionary A dictionary with the following keys: - `'order`' : tensor A Tensor of length `C`, the total number of scattering coefficients, specifying the scattering order. - `'xi'` : tensor A Tensor of size `(C, max_order)`, specifying the center frequency of the filter used at each order (padded with NaNs). - `'sigma'` : tensor A Tensor of size `(C, max_order)`, specifying the frequency bandwidth of the filter used at each order (padded with NaNs). - `'j'` : tensor A Tensor of size `(C, max_order)`, specifying the dyadic scale of the filter used at each order (padded with NaNs). - `'is_cqt'` : tensor A tensor of size `(C, max_order)`, specifying whether the filter was constructed per Constant Q Transform (padded with NaNs). - `'n'` : tensor A Tensor of size `(C, max_order)`, specifying the indices of the filters used at each order (padded with NaNs). - `'key'` : list The tuples indexing the corresponding scattering coefficient in the non-vectorized output. 
References ---------- This is a modification of https://github.com/kymatio/kymatio/blob/master/kymatio/scattering1d/utils.py Kymatio, (C) 2018-present. The Kymatio developers. """ sigma_low, xi1s, sigma1s, j1s, is_cqt1s, xi2s, sigma2s, j2s, is_cqt2s = \ calibrate_scattering_filters(J, Q, T, r_psi=r_psi, J_pad=J_pad) log2_T = math.floor(math.log2(T)) meta = {} meta['order'] = [[], [], []] meta['xi'] = [[], [], []] meta['sigma'] = [[], [], []] meta['j'] = [[], [], []] meta['is_cqt'] = [[], [], []] meta['n'] = [[], [], []] meta['key'] = [[], [], []] meta['order'][0].append(0) meta['xi'][0].append((0,)) meta['sigma'][0].append((sigma_low,)) meta['j'][0].append((log2_T,)) meta['is_cqt'][0].append(()) meta['n'][0].append(()) meta['key'][0].append(()) for (n1, (xi1, sigma1, j1, is_cqt1) ) in enumerate(zip(xi1s, sigma1s, j1s, is_cqt1s)): meta['order'][1].append(1) meta['xi'][1].append((xi1,)) meta['sigma'][1].append((sigma1,)) meta['j'][1].append((j1,)) meta['is_cqt'][1].append((is_cqt1,)) meta['n'][1].append((n1,)) meta['key'][1].append((n1,)) if max_order < 2: continue for (n2, (xi2, sigma2, j2, is_cqt2) ) in enumerate(zip(xi2s, sigma2s, j2s, is_cqt2s)): if j2 > j1: meta['order'][2].append(2) meta['xi'][2].append((xi1, xi2)) meta['sigma'][2].append((sigma1, sigma2)) meta['j'][2].append((j1, j2)) meta['is_cqt'][2].append((is_cqt1, is_cqt2)) meta['n'][2].append((n1, n2)) meta['key'][2].append((n1, n2)) for field, value in meta.items(): meta[field] = value[0] + value[1] + value[2] pad_fields = ['xi', 'sigma', 'j', 'is_cqt', 'n'] pad_len = max_order for field in pad_fields: meta[field] = [x + (math.nan,) * (pad_len - len(x)) for x in meta[field]] array_fields = ['order', 'xi', 'sigma', 'j', 'is_cqt', 'n'] for field in array_fields: meta[field] = np.array(meta[field]) return meta def compute_meta_jtfs(J_pad, J, Q, T, r_psi, sigma0, average, average_global, average_global_phi, oversampling, out_exclude, paths_exclude, scf): """Get metadata on the Joint Time-Frequency 
Scattering transform. This information specifies the content of each scattering coefficient, which order, which frequencies, which filters were used, and so on. See below for more info. Parameters ---------- J_pad : int 2**J_pad == amount of temporal padding. J, Q, J_fr, T, F: int, int, int, int, int See `help(wavespin.scattering1d.TimeFrequencyScattering1D)`. Control physical meta of bandpass and lowpass filters (xi, sigma, etc). out_3D : bool - True: will reshape meta fields to match output structure: `(n_coeffs, n_freqs, meta_len)`. - False: pack flattened: `(n_coeffs * n_freqs, meta_len)`. out_type : str - `'dict:list'` or `'dict:array'`: meta is packed into respective pairs (e.g. `meta['n']['psi_t * phi_f'][1]`) - `'list'` or `'array'`: meta is flattened (e.g. `meta['n'][15]`). out_exclude : list/tuple[str] Names of coefficient pairs to exclude from meta. sampling_filters_fr : tuple[str] See `help(TimeFrequencyScattering1D)`. Affects `xi`, `sigma`, and `j`. average : bool Affects `S0`'s meta, and temporal stride meta. average_global : bool Affects `S0`'s meta, and temporal stride meta. average_global_phi : bool Affects joint temporal stride meta. oversampling : int Affects temporal stride meta. scf : `scattering1d.frontend.base_frontend._FrequencyScatteringBase` Frequential scattering object, storing pertinent attributes and filters. Returns ------- meta : dictionary A dictionary with the following keys: - `'order`' : tensor A Tensor of length `C`, the total number of scattering coefficients, specifying the scattering order. - `'xi'` : tensor A Tensor of size `(C, 3)`, specifying the center frequency of the filter used at each order (padded with NaNs). - `'sigma'` : tensor A Tensor of size `(C, 3)`, specifying the frequency bandwidth of the filter used at each order (padded with NaNs). 
- `'j'` : tensor A Tensor of size `(C, 3)`, specifying the dyadic scale of the filter used at each order (padded with NaNs), excluding lowpass filtering (unless it was the only filtering). - `'is_cqt'` : tensor A tensor of size `(C, max_order)`, specifying whether the filter was constructed per Constant Q Transform (padded with NaNs). - `'n'` : tensor A Tensor of size `(C, 3)`, specifying the indices of the filters used at each order (padded with NaNs). Lowpass filters in `phi_*` pairs are denoted via `-1`. - `'s'` : tensor A Tensor of length `C`, specifying the spin of each frequency scattering filter (+1=up, -1=down, 0=none). - `'stride'` : tensor A Tensor of size `(C, 2)`, specifying the total temporal and frequential convolutional stride (i.e. subsampling) of resulting coefficient (including lowpass filtering). - `'key'` : list The tuples indexing the corresponding scattering coefficient in the non-vectorized output. In case of `out_3D=True`, for joint pairs, will reshape each field into `(n_coeffs, C, meta_len)`, where `n_coeffs` is the number of joint slices in the pair, and `meta_len` is the existing `shape[-1]` (1, 2, or 3). Computation and Structure ------------------------- Computation replicates logic in `timefrequency_scattering1d()`. Meta values depend on: - out_3D (True only possible with `average and average_fr`) - aligned - sampling_psi_fr - sampling_phi_fr - average - average_global - average_global_phi - average_fr - average_fr_global - average_fr_global_phi - oversampling - oversampling_fr - max_pad_factor_fr (mainly via `unrestricted_pad_fr`) - max_noncqt_fr - out_exclude - paths_exclude and some of their interactions. Listed are only "unobvious" parameters; anything that controls the filterbanks will change meta (`J`, `Q`, etc). 
""" def _get_compute_params(n2, n1_fr): """Reproduce exact logic in `timefrequency_scattering1d.py`.""" # basics scale_diff = scf.scale_diffs[n2] J_pad_fr = scf.J_pad_frs[scale_diff] N_fr_padded = 2**J_pad_fr # n1_fr_subsample, lowpass_subsample_fr ############################## global_averaged_fr = (scf.average_fr_global if n1_fr != -1 else scf.average_fr_global_phi) if n2 == -1 and n1_fr == -1: lowpass_subsample_fr = 0 if scf.average_fr_global_phi: n1_fr_subsample = scf.log2_F log2_F_phi = scf.log2_F log2_F_phi_diff = 0 else: log2_F_phi = scf.log2_F_phis['phi'][scale_diff] log2_F_phi_diff = scf.log2_F_phi_diffs['phi'][scale_diff] n1_fr_subsample = max(scf.n1_fr_subsamples['phi'][scale_diff] - scf.oversampling_fr, 0) elif n1_fr == -1: lowpass_subsample_fr = 0 if scf.average_fr_global_phi: total_conv_stride_over_U1_phi = min(J_pad_fr, scf.log2_F) n1_fr_subsample = total_conv_stride_over_U1_phi log2_F_phi = scf.log2_F log2_F_phi_diff = 0 else: n1_fr_subsample = max(scf.n1_fr_subsamples['phi'][scale_diff] - scf.oversampling_fr, 0) log2_F_phi = scf.log2_F_phis['phi'][scale_diff] log2_F_phi_diff = scf.log2_F_phi_diffs['phi'][scale_diff] else: total_conv_stride_over_U1 = ( scf.total_conv_stride_over_U1s[scale_diff][n1_fr]) n1_fr_subsample = max(scf.n1_fr_subsamples['spinned' ][scale_diff][n1_fr] - scf.oversampling_fr, 0) log2_F_phi = scf.log2_F_phis['spinned'][scale_diff][n1_fr] log2_F_phi_diff = scf.log2_F_phi_diffs['spinned'][scale_diff][n1_fr] if global_averaged_fr: lowpass_subsample_fr = (total_conv_stride_over_U1 - n1_fr_subsample) elif scf.average_fr: lowpass_subsample_fr = max(total_conv_stride_over_U1 - n1_fr_subsample - scf.oversampling_fr, 0) else: lowpass_subsample_fr = 0 # total stride, unpadding ############################################ total_conv_stride_over_U1_realized = (n1_fr_subsample + lowpass_subsample_fr) if scf.out_3D: stride_ref = scf.total_conv_stride_over_U1s[0][0] stride_ref = max(stride_ref - scf.oversampling_fr, 0) ind_start_fr = 
scf.ind_start_fr_max[stride_ref] ind_end_fr = scf.ind_end_fr_max[ stride_ref] else: _stride = total_conv_stride_over_U1_realized ind_start_fr = scf.ind_start_fr[n2][_stride] ind_end_fr = scf.ind_end_fr[ n2][_stride] return (N_fr_padded, total_conv_stride_over_U1_realized, n1_fr_subsample, scale_diff, log2_F_phi_diff, log2_F_phi, ind_start_fr, ind_end_fr, global_averaged_fr) def _get_fr_params(n1_fr, scale_diff, log2_F_phi_diff, log2_F_phi): if n1_fr != -1: # spinned psi_id = scf.psi_ids[scale_diff] p = [scf.psi1_f_fr_up[field][psi_id][n1_fr] for field in ('xi', 'sigma', 'j', 'is_cqt')] else: # phi_f if not scf.average_fr_global: F_phi = scf.F / 2**log2_F_phi_diff p = (0., sigma0 / F_phi, log2_F_phi, nan) else: p = (0., sigma0 / 2**log2_F_phi, log2_F_phi, nan) xi1_fr, sigma1_fr, j1_fr, is_cqt1_fr = p return xi1_fr, sigma1_fr, j1_fr, is_cqt1_fr def _exclude_excess_scale(n2, n1_fr): scale_diff = scf.scale_diffs[n2] psi_id = scf.psi_ids[scale_diff] j1_frs = scf.psi1_f_fr_up['j'][psi_id] return bool(n1_fr > len(j1_frs) - 1) def _skip_path(n2, n1_fr): excess_scale = bool(scf.sampling_psi_fr == 'exclude' and _exclude_excess_scale(n2, n1_fr)) user_skip_path = bool(n2 in paths_exclude.get('n2', {}) or n1_fr in paths_exclude.get('n1_fr', {})) return excess_scale or user_skip_path def _fill_n1_info(pair, n2, n1_fr, spin): if _skip_path(n2, n1_fr): return # track S1 from padding to `_joint_lowpass()` (N_fr_padded, total_conv_stride_over_U1_realized, n1_fr_subsample, scale_diff, log2_F_phi_diff, log2_F_phi, ind_start_fr, ind_end_fr, global_averaged_fr) = _get_compute_params(n2, n1_fr) # fetch xi, sigma for n2, n1_fr if n2 != -1: xi2, sigma2, j2, is_cqt2 = (xi2s[n2], sigma2s[n2], j2s[n2], is_cqt2s[n2]) else: xi2, sigma2, j2, is_cqt2 = 0., sigma_low, log2_T, nan xi1_fr, sigma1_fr, j1_fr, is_cqt1_fr = _get_fr_params( n1_fr, scale_diff, log2_F_phi_diff, log2_F_phi) # get temporal stride info global_averaged = (average_global if n2 != -1 else average_global_phi) if global_averaged: 
total_conv_stride_tm = log2_T else: k1_plus_k2 = max(min(j2, log2_T) - oversampling, 0) if average: k2_tm_J = max(log2_T - k1_plus_k2 - oversampling, 0) total_conv_stride_tm = k1_plus_k2 + k2_tm_J else: total_conv_stride_tm = k1_plus_k2 stride = (total_conv_stride_over_U1_realized, total_conv_stride_tm) # distinguish between `key` and `n` n1_fr_n = n1_fr if (n1_fr != -1) else inf n1_fr_key = n1_fr if (n1_fr != -1) else 0 n2_n = n2 if (n2 != -1) else inf n2_key = n2 if (n2 != -1) else 0 # global average pooling, all S1 collapsed into single point if global_averaged_fr: meta['order' ][pair].append(2) meta['xi' ][pair].append((xi2, xi1_fr, nan)) meta['sigma' ][pair].append((sigma2, sigma1_fr, nan)) meta['j' ][pair].append((j2, j1_fr, nan)) meta['is_cqt'][pair].append((is_cqt2, is_cqt1_fr, nan)) meta['n' ][pair].append((n2_n, n1_fr_n, nan)) meta['s' ][pair].append((spin,)) meta['stride'][pair].append(stride) meta['key' ][pair].append((n2_key, n1_fr_key, 0)) return fr_max = scf.N_frs[n2] if (n2 != -1) else len(xi1s) # simulate subsampling n1_step = 2 ** total_conv_stride_over_U1_realized for n1 in range(0, N_fr_padded, n1_step): # simulate unpadding if n1 / n1_step < ind_start_fr: continue elif n1 / n1_step >= ind_end_fr: break if n1 >= fr_max: # equivalently `j1 > j2` # these are padded rows, no associated filters xi1, sigma1, j1, is_cqt1 = nan, nan, nan, nan else: xi1, sigma1, j1, is_cqt1 = (xi1s[n1], sigma1s[n1], j1s[n1], is_cqt1s[n1]) meta['order' ][pair].append(2) meta['xi' ][pair].append((xi2, xi1_fr, xi1)) meta['sigma' ][pair].append((sigma2, sigma1_fr, sigma1)) meta['j' ][pair].append((j2, j1_fr, j1)) meta['is_cqt'][pair].append((is_cqt2, is_cqt1_fr, is_cqt1)) meta['n' ][pair].append((n2_n, n1_fr_n, n1)) meta['s' ][pair].append((spin,)) meta['stride'][pair].append(stride) meta['key' ][pair].append((n2_key, n1_fr_key, n1)) # set params log2_T = math.floor(math.log2(T)) log2_F = math.floor(math.log2(scf.F)) # extract filter meta sigma_low, xi1s, sigma1s, j1s, 
is_cqt1s, xi2s, sigma2s, j2s, is_cqt2s = \ calibrate_scattering_filters(J, Q, T, J_pad=J_pad, r_psi=r_psi) j1_frs = scf.psi1_f_fr_up['j'] # fetch phi meta; must access `phi_f_fr` as `j1s_fr` requires sampling phi meta_phi = {} for field in ('xi', 'sigma', 'j'): meta_phi[field] = {} for k in scf.phi_f_fr[field]: meta_phi[field][k] = scf.phi_f_fr[field][k] xi1s_fr_phi, sigma1_fr_phi, j1s_fr_phi = list(meta_phi.values()) meta = {} inf = -1 # placeholder for infinity nan = math.nan coef_names = ( 'S0', # (time) zeroth order 'S1', # (time) first order 'phi_t * phi_f', # (joint) joint lowpass 'phi_t * psi_f', # (joint) time lowpass 'psi_t * phi_f', # (joint) freq lowpass 'psi_t * psi_f_up', # (joint) spin up 'psi_t * psi_f_dn', # (joint) spin down ) for field in ('order', 'xi', 'sigma', 'j', 'is_cqt', 'n', 's', 'stride', 'key'): meta[field] = {name: [] for name in coef_names} # Zeroth-order ########################################################### if average_global: k0 = log2_T elif average: k0 = max(log2_T - oversampling, 0) meta['order' ]['S0'].append(0) meta['xi' ]['S0'].append((nan, nan, 0. 
if average else nan)) meta['sigma' ]['S0'].append((nan, nan, sigma_low if average else nan)) meta['j' ]['S0'].append((nan, nan, log2_T if average else nan)) meta['is_cqt']['S0'].append((nan, nan, nan)) meta['n' ]['S0'].append((nan, nan, inf if average else nan)) meta['s' ]['S0'].append((nan,)) meta['stride']['S0'].append((nan, k0 if average else nan)) meta['key' ]['S0'].append((0, 0, 0)) # First-order ############################################################ def stride_S1(j1): sub1_adj = min(j1, log2_T) if average else j1 k1 = max(sub1_adj - oversampling, 0) k1_J = max(log2_T - k1 - oversampling, 0) if average_global: total_conv_stride_tm = log2_T elif average: total_conv_stride_tm = k1 + k1_J else: total_conv_stride_tm = k1 return total_conv_stride_tm for (n1, (xi1, sigma1, j1, is_cqt1) ) in enumerate(zip(xi1s, sigma1s, j1s, is_cqt1s)): meta['order' ]['S1'].append(1) meta['xi' ]['S1'].append((nan, nan, xi1)) meta['sigma' ]['S1'].append((nan, nan, sigma1)) meta['j' ]['S1'].append((nan, nan, j1)) meta['is_cqt']['S1'].append((nan, nan, is_cqt1)) meta['n' ]['S1'].append((nan, nan, n1)) meta['s' ]['S1'].append((nan,)) meta['stride']['S1'].append((nan, stride_S1(j1))) meta['key' ]['S1'].append((0, 0, n1)) S1_len = len(meta['n']['S1']) assert S1_len >= scf.N_frs_max, (S1_len, scf.N_frs_max) # Joint scattering ####################################################### # `phi_t * phi_f` coeffs _fill_n1_info('phi_t * phi_f', n2=-1, n1_fr=-1, spin=0) # `phi_t * psi_f` coeffs for n1_fr in range(len(j1_frs[0])): _fill_n1_info('phi_t * psi_f', n2=-1, n1_fr=n1_fr, spin=0) # `psi_t * phi_f` coeffs for n2, j2 in enumerate(j2s): if j2 == 0: continue _fill_n1_info('psi_t * phi_f', n2, n1_fr=-1, spin=0) # `psi_t * psi_f` coeffs for spin in (1, -1): pair = ('psi_t * psi_f_up' if spin == 1 else 'psi_t * psi_f_dn') for n2, j2 in enumerate(j2s): if j2 == 0: continue psi_id = scf.psi_ids[scf.scale_diffs[n2]] for n1_fr, j1_fr in enumerate(j1_frs[psi_id]): _fill_n1_info(pair, n2, n1_fr, 
spin=spin) array_fields = ['order', 'xi', 'sigma', 'j', 'is_cqt', 'n', 's', 'stride', 'key'] for field in array_fields: for pair, v in meta[field].items(): meta[field][pair] = np.array(v) if scf.out_3D: # reorder for 3D for field in array_fields: # meta_len if field in ('s', 'order'): meta_len = 1 elif field == 'stride': meta_len = 2 else: meta_len = 3 for pair in meta[field]: # number of n2s if pair.startswith('phi_t'): n_n2s = 1 else: n_n2s = sum((j2 != 0 and n2 not in paths_exclude.get('n2', {})) for n2, j2 in enumerate(j2s)) # number of n1_frs; n_slices n_slices = None if pair in ('S0', 'S1'): # simply expand dim for consistency, no 3D structure meta[field][pair] = meta[field][pair].reshape(-1, 1, meta_len) continue elif 'psi_f' in pair: if pair.startswith('phi_t'): n_slices = sum(not _skip_path(n2=-1, n1_fr=n1_fr) for n1_fr in range(len(j1_frs[0]))) else: n_slices = sum(not _skip_path(n2=n2, n1_fr=n1_fr) for n2, j2 in enumerate(j2s) for n1_fr in range(len(j1_frs[0])) if j2 != 0) elif 'phi_f' in pair: n_n1_frs = 1 # n_slices if n_slices is None: n_slices = n_n2s * n_n1_frs # reshape meta meta[field][pair] = meta[field][pair].reshape(n_slices, -1, meta_len) if out_exclude is not None: # drop excluded pairs for pair in out_exclude: for field in meta: del meta[field][pair] # ensure time / freq stride doesn't exceed log2_T / log2_F in averaged cases, # and J / J_fr in unaveraged smax_t_nophi = log2_T if average else max(J) if scf.average_fr: if not scf.out_3D and not scf.aligned: # see "Compute logic: stride, padding" in `core` smax_f_nophi = max(scf.log2_F, scf.J_fr) else: smax_f_nophi = scf.log2_F else: smax_f_nophi = scf.J_fr for pair in meta['stride']: if pair == 'S0' and not average: continue stride_max_t = (smax_t_nophi if ('phi_t' not in pair) else log2_T) stride_max_f = (smax_f_nophi if ('phi_f' not in pair) else log2_F) for i, s in enumerate(meta['stride'][pair][..., 1].ravel()): assert s <= stride_max_t, ("meta['stride'][{}][{}] > stride_max_t " "({} > 
{})").format(pair, i, s, stride_max_t) if pair in ('S0', 'S1'): continue for i, s in enumerate(meta['stride'][pair][..., 0].ravel()): assert s <= stride_max_f, ("meta['stride'][{}][{}] > stride_max_f " "({} > {})").format(pair, i, s, stride_max_f) if not scf.out_type.startswith('dict'): # join pairs if not scf.out_3D: meta_flat = {f: np.concatenate([v for v in meta[f].values()], axis=0) for f in meta} else: meta_flat0 = {f: np.concatenate( [v for k, v in meta[f].items() if k in ('S0', 'S1')], axis=0) for f in meta} meta_flat1 = {f: np.concatenate( [v for k, v in meta[f].items() if k not in ('S0', 'S1')], axis=0) for f in meta} meta_flat = (meta_flat0, meta_flat1) meta = meta_flat return meta
40.427381
83
0.55835
4,612
33,959
3.897225
0.116002
0.013353
0.011127
0.011684
0.430288
0.375932
0.324858
0.284077
0.261878
0.24324
0
0.033003
0.317412
33,959
839
84
40.475566
0.742407
0.367149
0
0.244706
0
0
0.040882
0.002186
0
0
0
0
0.007059
1
0.025882
false
0.014118
0.007059
0
0.061176
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e77ae9d82735c469e3ecb3a79c3b10547249e4e0
350
py
Python
Demo/Demo_gym/envs/classic_control/__init__.py
Remosy/iceHocekeyIRL
1ffeaf8a9bd9585038629be41a2da552e0a4473b
[ "MIT" ]
null
null
null
Demo/Demo_gym/envs/classic_control/__init__.py
Remosy/iceHocekeyIRL
1ffeaf8a9bd9585038629be41a2da552e0a4473b
[ "MIT" ]
3
2019-03-09T02:35:24.000Z
2019-09-27T11:05:01.000Z
Demo/Demo_gym/envs/classic_control/__init__.py
Remosy/iceHocekeyIRL
1ffeaf8a9bd9585038629be41a2da552e0a4473b
[ "MIT" ]
null
null
null
from Demo_gym.envs.classic_control.cartpole import CartPoleEnv from Demo_gym.envs.classic_control.mountain_car import MountainCarEnv from Demo_gym.envs.classic_control.continuous_mountain_car import Continuous_MountainCarEnv from Demo_gym.envs.classic_control.pendulum import PendulumEnv from Demo_gym.envs.classic_control.acrobot import AcrobotEnv
50
91
0.897143
49
350
6.122449
0.346939
0.133333
0.183333
0.25
0.576667
0.576667
0.286667
0
0
0
0
0
0.06
350
6
92
58.333333
0.911854
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
e77bc1533880b4a66753674e008ece8b99afe6f5
3,959
py
Python
google-datacatalog-kafka-connector/tests/google/datacatalog_connectors/kafka/prepare/assembled_entry_factory_test.py
bonifacyj/datacatalog-connectors-message-brokers
0f72c800ebf1e570b638a0ad930d48e9dc44a25e
[ "Apache-2.0" ]
1
2021-04-30T22:52:41.000Z
2021-04-30T22:52:41.000Z
google-datacatalog-kafka-connector/tests/google/datacatalog_connectors/kafka/prepare/assembled_entry_factory_test.py
bonifacyj/datacatalog-connectors-message-brokers
0f72c800ebf1e570b638a0ad930d48e9dc44a25e
[ "Apache-2.0" ]
2
2020-10-01T14:24:12.000Z
2020-11-12T16:40:01.000Z
google-datacatalog-kafka-connector/tests/google/datacatalog_connectors/kafka/prepare/assembled_entry_factory_test.py
bonifacyj/datacatalog-connectors-message-brokers
0f72c800ebf1e570b638a0ad930d48e9dc44a25e
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python # # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import unittest import mock from google.datacatalog_connectors.commons_test import utils from google.datacatalog_connectors.kafka import prepare from google.datacatalog_connectors.kafka.config.\ metadata_constants import MetadataConstants from .. import test_utils @mock.patch('google.cloud.datacatalog_v1beta1.DataCatalogClient.entry_path') class AssembledEntryFactoryTestCase(unittest.TestCase): __PROJECT_ID = 'test_project' __LOCATION_ID = 'location_id' __ENTRY_GROUP_ID = 'kafka' __MOCKED_ENTRY_PATH = 'mocked_entry_path' __METADATA_SERVER_HOST = 'metadata_host' __MODULE_PATH = os.path.dirname(os.path.abspath(__file__)) __PREPARE_PACKAGE = 'google.datacatalog_connectors.kafka.prepare' def setUp(self): entry_factory = test_utils.FakeDataCatalogEntryFactory( self.__PROJECT_ID, self.__LOCATION_ID, self.__METADATA_SERVER_HOST, self.__ENTRY_GROUP_ID) tag_factory = prepare.DataCatalogTagFactory() self.__assembled_entry_factory = prepare.assembled_entry_factory. 
\ AssembledEntryFactory( AssembledEntryFactoryTestCase.__ENTRY_GROUP_ID, entry_factory, tag_factory) tag_templates = { 'kafka_cluster_metadata': {}, 'kafka_topic_metadata': {} } self.__assembled_entry_factory_with_tag_template = prepare.\ assembled_entry_factory.AssembledEntryFactory( AssembledEntryFactoryTestCase.__ENTRY_GROUP_ID, entry_factory, tag_factory, tag_templates) def test_dc_entries_should_be_created_from_cluster_metadata( self, entry_path): entry_path.return_value = \ AssembledEntryFactoryTestCase.__MOCKED_ENTRY_PATH metadata = utils.Utils.convert_json_to_object(self.__MODULE_PATH, 'test_metadata.json') assembled_entries = self.__assembled_entry_factory.\ make_entries_from_cluster_metadata(metadata) num_topics = len(metadata[MetadataConstants.TOPICS]) num_clusters = 1 self.assertEqual(num_topics + num_clusters, len(assembled_entries)) @mock.patch('{}.'.format(__PREPARE_PACKAGE) + 'datacatalog_tag_factory.' + 'DataCatalogTagFactory.make_tag_for_cluster') @mock.patch('{}.datacatalog_tag_factory.'.format(__PREPARE_PACKAGE) + 'DataCatalogTagFactory.make_tag_for_topic') def test_with_tag_templates_should_be_converted_to_dc_entries_with_tags( self, make_tag_for_topic, make_tag_for_cluster, entry_path): entry_path.return_value = \ AssembledEntryFactoryTestCase.__MOCKED_ENTRY_PATH entry_factory = \ self.__assembled_entry_factory_with_tag_template cluster_metadata = utils.Utils.convert_json_to_object( self.__MODULE_PATH, 'test_metadata.json') num_topics = len(cluster_metadata[MetadataConstants.TOPICS]) prepared_entries = \ entry_factory. \ make_entries_from_cluster_metadata( cluster_metadata) for entry in prepared_entries: self.assertEqual(1, len(entry.tags)) self.assertEqual(num_topics, make_tag_for_topic.call_count) self.assertEqual(1, make_tag_for_cluster.call_count)
42.569892
79
0.717605
446
3,959
5.914798
0.309417
0.050038
0.047763
0.037908
0.289613
0.26232
0.26232
0.200152
0.200152
0.200152
0
0.004167
0.211922
3,959
92
80
43.032609
0.841346
0.142713
0
0.090909
0
0
0.111276
0.07665
0
0
0
0
0.060606
1
0.045455
false
0
0.106061
0
0.272727
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e77c7c1bc1c9242cd9b4a6f6bef09a3ab1d3884c
731
py
Python
_unittests/ut_datasource/test_geodata.py
sdpython/pyensae
ada4dbb0b9901bf481eff2ea239e74ed964d93b0
[ "MIT" ]
28
2015-07-19T21:20:51.000Z
2022-02-16T11:50:53.000Z
_unittests/ut_datasource/test_geodata.py
sdpython/pyensae
ada4dbb0b9901bf481eff2ea239e74ed964d93b0
[ "MIT" ]
34
2015-06-16T15:38:25.000Z
2021-12-29T11:04:01.000Z
_unittests/ut_datasource/test_geodata.py
sdpython/pyensae
ada4dbb0b9901bf481eff2ea239e74ed964d93b0
[ "MIT" ]
27
2015-01-13T08:24:22.000Z
2022-03-31T14:51:23.000Z
""" @brief test log(time=3s) """ import unittest from pyquickhelper.pycode import ExtTestCase, get_temp_folder from pyensae.datasource import load_french_departements class TestGeoData(ExtTestCase): def test_load_french_departements(self): temp = get_temp_folder(__file__, "temp_load_french_departements") df = load_french_departements(cache=temp) cols = set(['geometry', 'CODE_DEPT', 'CODE_REG', 'CODE_CHF', 'ID_GEOFLA', 'NOM_CHF', 'NOM_DEPT', 'NOM_REG', 'X_CENTROID', 'X_CHF_LIEU', 'Y_CENTROID', 'Y_CHF_LIEU']) self.assertEqual(df.shape, (96, 12)) self.assertEqual(cols, set(df.columns)) if __name__ == "__main__": unittest.main()
31.782609
92
0.670315
90
731
5.011111
0.533333
0.088692
0.195122
0
0
0
0
0
0
0
0
0.008591
0.20383
731
22
93
33.227273
0.766323
0.039672
0
0
0
0
0.20317
0.041787
0
0
0
0
0.142857
1
0.071429
false
0
0.214286
0
0.357143
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e77e265dab3efd07cc86c0fb9a999b719d05fb9d
1,053
py
Python
openfe/setup/methods/base.py
OpenFreeEnergy/openfe
f2423ed2e4444a0824ff9b560e23b420c4997c6f
[ "MIT" ]
14
2022-01-24T22:01:19.000Z
2022-03-31T04:58:35.000Z
openfe/setup/methods/base.py
OpenFreeEnergy/openfe
f2423ed2e4444a0824ff9b560e23b420c4997c6f
[ "MIT" ]
109
2022-01-24T18:57:05.000Z
2022-03-31T20:13:07.000Z
openfe/setup/methods/base.py
OpenFreeEnergy/openfe
f2423ed2e4444a0824ff9b560e23b420c4997c6f
[ "MIT" ]
4
2022-01-24T18:45:54.000Z
2022-02-21T06:28:24.000Z
# This code is part of OpenFE and is licensed under the MIT license. # For details, see https://github.com/OpenFreeEnergy/openfe import abc from openff.toolkit.utils.serialization import Serializable class FEMethod(abc.ABC): """Base class for defining a free energy method Child classes must implement: - the associated Settings class and a default point for this - init, taking the Settings class - run() - to_dict and from_dict for serialization TODO ---- * Serializable was removed because of an MRO, needs to be re-added? """ @classmethod @abc.abstractmethod def get_default_settings(cls): """Get the default settings for this FE Method These can be modified and passed back in to the class init """ ... @abc.abstractmethod def is_complete(self) -> bool: """Check if the results of this workload already exist""" ... @abc.abstractmethod def run(self) -> bool: """Perform this method, returning success""" ...
27
71
0.660019
137
1,053
5.036496
0.59854
0.073913
0.086957
0
0
0
0
0
0
0
0
0
0.25736
1,053
38
72
27.710526
0.882353
0.588794
0
0.461538
0
0
0
0
0
0
0
0.026316
0
1
0.230769
false
0
0.153846
0
0.461538
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
0
0
0
2
e77e4dc700080418326c39439db7328ed34301f1
3,351
py
Python
miscellaneous_server_test/time_distribution/time_distribution.py
gellens/Master_thesis_JAQ_code
034de9d7883c0d81564f975405c8985aa4b4d428
[ "MIT" ]
null
null
null
miscellaneous_server_test/time_distribution/time_distribution.py
gellens/Master_thesis_JAQ_code
034de9d7883c0d81564f975405c8985aa4b4d428
[ "MIT" ]
null
null
null
miscellaneous_server_test/time_distribution/time_distribution.py
gellens/Master_thesis_JAQ_code
034de9d7883c0d81564f975405c8985aa4b4d428
[ "MIT" ]
1
2020-03-05T14:09:01.000Z
2020-03-05T14:09:01.000Z
# import matplotlib # import statsmodels as sm # import scipy.stats as st # import pandas as pd # import warnings import json import os from scipy.stats import gamma from scipy.stats import lognorm from scipy.stats import pareto from scipy.stats import norm import numpy as np import matplotlib.pyplot as plt def warmup_filter(d): warm_up_measures = 2 return d[warm_up_measures:] def load_data(): base_path = "./data" data_files_path = [os.path.join(base_path, f) for f in os.listdir(base_path) if os.path.isfile(os.path.join(base_path, f))] data_merged = [] for f_path in data_files_path: with open(f_path) as f: data = json.load(f) # add a filter for the 2 first data_merged += warmup_filter(data) return data_merged def compute_sse(times, d, arg, loc, scale): # source: https://stackoverflow.com/questions/6620471/fitting-empirical-distribution-to-theoretical-ones-with-scipy-python BINS = 50 # number of bar in the histogram y, x = np.histogram(times, bins=BINS, density=True) x = (x + np.roll(x, -1))[:-1] / 2.0 # x is now the value in the center of the bar # Calculate fitted PDF and error with fit in distribution pdf = d.pdf(x, *arg, loc=loc, scale=scale) sse = np.sum(np.power(y - pdf, 2.0)) return sse def fit_distributions(times): cut_off = 500 distribution = { "Gamma": gamma, "Lognormal": lognorm, "Pareto": pareto, "Nomal": norm } fig, ax = plt.subplots(1, 1) best_sse = 1 # worse value possible in our case best_d = None best_d_str = None best_arg = [] best_loc = None best_scale = None for d_str, d in distribution.items(): params = d.fit(times, scale=10) # Separate parts of parameters arg = params[:-2] loc = params[-2] scale = params[-1] # check if better sse sse = compute_sse(times, d, arg, loc, scale) if sse < best_sse: best_sse = sse best_d = d best_d_str = d_str best_arg = arg best_loc = loc best_scale = scale # plot the distribution x = np.linspace(d.ppf(0.001, *arg, loc=loc, scale=scale), d.ppf(0.99, *arg, loc=loc, scale=scale), 200) ax.plot(x, d.pdf(x, *arg, loc=loc, 
scale=scale), '-', lw=2, alpha=0.6, label=d_str+' pdf') # source clip: https://stackoverflow.com/questions/26218704/matplotlib-histogram-with-collection-bin-for-high-values ax.hist(np.clip(times, 0, cut_off), 50, density=True, histtype='stepfilled', alpha=0.2) ax.legend(loc='best', frameon=False) plt.xlabel('Response time') plt.ylabel('Probability density') plt.title('Distribution of response times') plt.show() print("The best distribution is the "+best_d_str+ (" with argument: " + str(best_arg) if len(best_arg) > 0 else "")+" [loc: "+str(best_loc)+" scale: "+str(best_scale)+"]") mean = best_d.mean(*best_arg, loc=best_loc, scale=best_scale) var = best_d.var(*best_arg, loc=best_loc, scale=best_scale) print("MODEL: Mean:", mean, "Variance:", var) print("DATA : Mean:", np.mean(times), "Variance:", np.var(times)) def main(): times = load_data() fit_distributions(times) if __name__ == "__main__": main()
31.027778
175
0.63026
504
3,351
4.05754
0.311508
0.035208
0.027384
0.03912
0.117359
0.098778
0.080196
0.05379
0
0
0
0.021662
0.242316
3,351
107
176
31.317757
0.783773
0.179946
0
0
0
0
0.081625
0
0
0
0
0
0
1
0.067568
false
0
0.108108
0
0.216216
0.040541
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e77f1fd0092c945c3470861a6d81a4b28e40b56f
422
py
Python
phasma_food_v2/devices/admin.py
VizLoreLabs/phasmaFoodPlatform
02c811c9178f338e081efbeacadb37c766dcb2db
[ "Apache-1.1" ]
null
null
null
phasma_food_v2/devices/admin.py
VizLoreLabs/phasmaFoodPlatform
02c811c9178f338e081efbeacadb37c766dcb2db
[ "Apache-1.1" ]
null
null
null
phasma_food_v2/devices/admin.py
VizLoreLabs/phasmaFoodPlatform
02c811c9178f338e081efbeacadb37c766dcb2db
[ "Apache-1.1" ]
null
null
null
from django.contrib import admin from .models import PhasmaDevice @admin.register(PhasmaDevice) class PhasmaDeviceAdmin(admin.ModelAdmin): list_display = ("mac", "name", "date_added") fieldsets = ( ("Info", {"fields": ("mac", "name")}), ("Date added/updated", {"fields": ("date_added", "date_updated")}) ) readonly_fields = ("date_added", "date_updated") ordering = ("-date_added",)
28.133333
74
0.64455
44
422
6
0.522727
0.170455
0.083333
0.121212
0.19697
0
0
0
0
0
0
0
0.177725
422
14
75
30.142857
0.760807
0
0
0
0
0
0.267773
0
0
0
0
0
0
1
0
false
0
0.181818
0
0.636364
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
e77f4dac26b5f08c981a363bf893116d6ff184f3
1,400
py
Python
scripts/af_assemblyPrep.py
aaronfang/small-Scripts
890b10ab19fa9cdf2415aaf2dc08b81cc64fc79d
[ "MIT" ]
1
2018-03-08T16:34:00.000Z
2018-03-08T16:34:00.000Z
scripts/af_assemblyPrep.py
aaronfang/personal_scripts
890b10ab19fa9cdf2415aaf2dc08b81cc64fc79d
[ "MIT" ]
null
null
null
scripts/af_assemblyPrep.py
aaronfang/personal_scripts
890b10ab19fa9cdf2415aaf2dc08b81cc64fc79d
[ "MIT" ]
null
null
null
import pymel.core as pm # Step1: # Select top group node. Run the following.It will generate a locator. # Send to the current group pivot position. # You can position the locator where you want to represent the final pivot position. curSel = pm.ls(sl=True,type='transform')[0] trans = pm.xform(curSel,ws=1,piv=1,q=1) rot = pm.xform(curSel,ws=1,ro=1,q=1) scl = pm.xform(curSel,ws=1,s=1,q=1) gpNd = pm.group(n=(curSel+'_GRP'),em=1) pm.xform(gpNd,s=scl,t=trans[0:3],ro=rot) # Step2: # Manually trans/rot the locator to the place of the final pivot. import pymel.core as pm # Step3: # Parent the top group to the locator. # Add attribute to store the trans/rot/scale information. transGrp = pm.xform(gpNd,ws=1,piv=1,q=1) rotGrp = pm.xform(gpNd,ws=1,ro=1,q=1) sclGrp = pm.xform(gpNd,ws=1,s=1,q=1) transVal = pm.xform(gpNd,ws=1,t=1,q=1) pm.parent(curSel,gpNd) # Move Grp to the origin and Freeze Transform. Then move it back to where it was, # With the right xform info. pm.xform(gpNd,ws=1,t=(transVal[0]-transGrp[0],transVal[1]-transGrp[1],transVal[2]-transGrp[2])) pm.xform(gpNd,r=1,ro=(-rotGrp[0],-rotGrp[1],-rotGrp[2])) pm.xform(gpNd,r=1,s=(1/sclGrp[0],1/sclGrp[1],1/sclGrp[2])) pm.makeIdentity(apply=1,t=1,r=1,s=1,n=0,pn=0) pm.xform(gpNd,ws=1,t=(transGrp[0],transGrp[1],transGrp[2])) pm.xform(gpNd,r=1,ro=(rotGrp[0],rotGrp[1],rotGrp[2])) pm.xform(gpNd,r=1,s=(sclGrp[0],sclGrp[1],sclGrp[2]))
33.333333
95
0.702857
291
1,400
3.378007
0.281787
0.099695
0.123093
0.079349
0.326551
0.214649
0.120041
0.120041
0.120041
0.120041
0
0.051037
0.104286
1,400
41
96
34.146341
0.732855
0.341429
0
0.1
1
0
0.014286
0
0
0
0
0
0
1
0
false
0
0.1
0
0.1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e77fe1c595ad1068d9142dbaa9c92e34b2d27d6a
3,270
py
Python
util/vis_tool.py
LiderMyHand/AWR-Adaptive-Weighting-Regression
81c4c98edd98cd03d423d820ca1fe9e01dbbb242
[ "MIT" ]
90
2020-03-16T15:18:57.000Z
2022-03-16T10:02:52.000Z
util/vis_tool.py
LiderMyHand/AWR-Adaptive-Weighting-Regression
81c4c98edd98cd03d423d820ca1fe9e01dbbb242
[ "MIT" ]
16
2020-05-01T03:11:44.000Z
2021-12-14T13:03:38.000Z
util/vis_tool.py
LiderMyHand/AWR-Adaptive-Weighting-Regression
81c4c98edd98cd03d423d820ca1fe9e01dbbb242
[ "MIT" ]
16
2020-05-21T09:07:04.000Z
2022-02-22T13:00:19.000Z
import os.path as osp import matplotlib.pyplot as plt import numpy as np import cv2 class VisualUtil: def __init__(self, dataset): self.dataset = dataset # RED BGR self.color_pred = [(0,0,102), (0,0,179), (0,0,255), (77,77,255), (153,153,255)] # self.color_pred = ['#660000', '#b30000', '#ff0000', '#ff4d4d', '#ff9999'] # BLUE BGR self.color_gt = [(102,0,0), (179,0,0), (255,0,0), (255,77,77), (255,153,153)] # self.color_gt = ['#000066', '#0000b3', '#0000ff', '#4d4dff', '#9999ff'] def plot(self, img, path, jt_uvd_pred, jt_uvd_gt=None): uvd_pred = jt_uvd_pred.reshape(-1, 3) image = img.copy() image = (image.squeeze() + 1) * 100 image = image[:, :, np.newaxis].repeat(3, axis=-1) self._plot_fingers(image, uvd_pred, self.color_pred) if isinstance(jt_uvd_gt, np.ndarray): uvd_gt = jt_uvd_gt.reshape(-1, 3) self._plot_fingers(image, uvd_gt, self.color_gt) cv2.imwrite(path, image) def _plot_fingers(self, img, jt_uvd, colors): jt_idx, sketch = self._get_setting() for i in range(len(colors)): for idx in jt_idx[i]: cv2.circle(img, (int(jt_uvd[idx][0]), int(jt_uvd[idx][1])), 2, colors[i], -1) for (s, e) in sketch[i]: cv2.line(img, (int(jt_uvd[s][0]), int(jt_uvd[s][1])), (int(jt_uvd[e][0]), int(jt_uvd[e][1])), colors[i], 1) return def _get_setting(self): if self.dataset == 'nyu': jt_idx = [[0,1], [2,3], [4,5], [6,7], [8,9,10,11,12,13]] sketch = [[(0, 1), (1, 13)], [(2, 3), (3, 13)], [(4, 5), (5, 13)], [(6, 7), (7, 13)], [(8, 9), (9, 10),(10, 13), (11, 13), (12, 13)]] return jt_idx, sketch elif 'hands' in self.dataset: jt_idx = [[1,6,7,8], [2,9,10,11], [3,12,13,14], [4,15,16,17], [5,18,19,20,0]] sketch = [[(0, 1), (1, 6), (6, 7), (7, 8)], [(0, 2), (2, 9), (9, 10), (10, 11)], [(0, 3), (3, 12), (12, 13), (13, 14)], [(0, 4), (4, 15), (15, 16), (16, 17)], [(0, 5), (5, 18), (18, 19), (19, 20)]] return jt_idx, sketch elif self.dataset == 'icvl': jt_idx = [[1,2,3], [4,5,6], [7,8,9], [10,11,12], [13,14,15, 0]] sketch = [[(0, 1), (1, 2), (2, 3)], [(0, 4), (4, 5), (5, 6)], [(0, 7), (7, 8), 
(8, 9)], [(0, 10), (10, 11), (11, 12)], [(0, 13), (13, 14), (14, 15)]] return jt_idx, sketch elif self.dataset == 'msra': jt_idx = [[1,2,3,4], [5,6,7,8], [9,10,11,12], [13,14,15,16], [17,18,19,20,0]] sketch = [[(0, 1), (1, 2), (2, 3), (3, 4)], [(0, 5), (5, 6), (6, 7), (7, 8)], [(0, 9), (9, 10), (10, 11), (11, 12)], [(0, 13), (13, 14), (14, 15), (15, 16)], [(0, 17), (17, 18), (18, 19), (19, 20)]] return jt_idx, sketch
39.39759
89
0.410092
487
3,270
2.648871
0.201232
0.046512
0.037209
0.027907
0.314729
0.255039
0.245736
0.193023
0.127907
0.086047
0
0.204229
0.363609
3,270
82
90
39.878049
0.415666
0.046483
0
0.063492
0
0
0.005163
0
0
0
0
0
0
1
0.063492
false
0
0.063492
0
0.222222
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e7808d26b562a5ddeb70cd3327c78d41fcdc891d
1,113
py
Python
lib/exabgp/bgp/message/update/attribute/community/extended/mac_mobility.py
cloudscale-ch/exabgp
55ee496dfbc3fce75c5107fae7a7d38567154d46
[ "BSD-3-Clause" ]
1
2019-06-25T20:49:37.000Z
2019-06-25T20:49:37.000Z
lib/exabgp/bgp/message/update/attribute/community/extended/mac_mobility.py
nembery/exabgp
53cfff843ddde33bf1c437a1c4ce99de20c6bade
[ "BSD-3-Clause" ]
null
null
null
lib/exabgp/bgp/message/update/attribute/community/extended/mac_mobility.py
nembery/exabgp
53cfff843ddde33bf1c437a1c4ce99de20c6bade
[ "BSD-3-Clause" ]
1
2020-07-23T16:52:51.000Z
2020-07-23T16:52:51.000Z
# encoding: utf-8 """ mac_mobility.py Created by Anton Aksola on 2018-11-03 """ from struct import pack from struct import unpack from exabgp.bgp.message.update.attribute.community.extended import ExtendedCommunity # ================================================================== MacMobility # RFC 7432 Section 7.7. @ExtendedCommunity.register class MacMobility (ExtendedCommunity): COMMUNITY_TYPE = 0x06 COMMUNITY_SUBTYPE = 0x00 DESCRIPTION = 'mac-mobility' __slots__ = ['sequence','sticky'] def __init__ (self, sequence, sticky=False, community=None): self.sequence = sequence self.sticky = sticky ExtendedCommunity.__init__( self, community if community else pack( '!2sBxI', self._subtype(transitive=True), 1 if sticky else 0, sequence ) ) def __hash__ (self): return hash((self.sticky, self.sequence)) def __repr__ (self): s = "%s:%d" % (self.DESCRIPTION, self.sequence) if self.sticky: s += ":sticky" return s @staticmethod def unpack (data): flags, seq = unpack('!BxI', data[2:8]) return MacMobility(seq, True if flags == 1 else False)
22.26
84
0.666667
134
1,113
5.358209
0.485075
0.066852
0.044568
0
0
0
0
0
0
0
0
0.029064
0.165319
1,113
49
85
22.714286
0.743811
0.154537
0
0
0
0
0.051557
0
0
0
0.008593
0
0
1
0.125
false
0
0.09375
0.03125
0.46875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e780d9438e176381d9f02f6add14e1524a0e07ab
867
py
Python
aprendizado/udemy/03_desafio_POO/main.py
renatodev95/Python
2adee4a01de41f8bbb68fce563100c135a5ab549
[ "MIT" ]
null
null
null
aprendizado/udemy/03_desafio_POO/main.py
renatodev95/Python
2adee4a01de41f8bbb68fce563100c135a5ab549
[ "MIT" ]
null
null
null
aprendizado/udemy/03_desafio_POO/main.py
renatodev95/Python
2adee4a01de41f8bbb68fce563100c135a5ab549
[ "MIT" ]
null
null
null
from banco import Banco from cliente import Cliente from conta import ContaCorrente, ContaPoupanca banco = Banco() cliente1 = Cliente('Luiz', 30) cliente2 = Cliente('Maria', 18) cliente3 = Cliente('João', 50) conta1 = ContaPoupanca(1111, 254136, 0) conta2 = ContaCorrente(2222, 254137, 0) conta3 = ContaPoupanca(1212, 254138, 0) cliente1.inserir_conta(conta1) cliente2.inserir_conta(conta2) cliente3.inserir_conta(conta3) banco.inserir_cliente(cliente1) banco.inserir_conta(conta1) banco.inserir_cliente(cliente2) banco.inserir_conta(conta2) if banco.autenticar(cliente1): cliente1.conta.depositar(40) cliente1.conta.sacar(20) else: print('Cliente não autenticado') print('#################################') if banco.autenticar(cliente2): cliente2.conta.depositar(40) cliente2.conta.sacar(20) else: print('Cliente não autenticado.')
22.815789
46
0.734717
105
867
6
0.352381
0.095238
0.057143
0.050794
0.133333
0.133333
0.133333
0.133333
0
0
0
0.089961
0.11534
867
38
47
22.815789
0.731421
0
0
0.071429
0
0
0.107143
0.038018
0
0
0
0
0
1
0
false
0
0.107143
0
0.107143
0.107143
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e781cdbe0452060897cb4aa77bea0b37fe424f36
386
py
Python
detection_tf/scripts/stuff/node_finder.py
hywel1994/SARosPerceptionKitti
82c307facb5b39e47c510fbdb132962cebf09d2e
[ "MIT" ]
5
2019-01-17T03:08:41.000Z
2021-10-31T17:02:11.000Z
detection_tf/scripts/stuff/node_finder.py
hywel1994/SARosPerceptionKitti
82c307facb5b39e47c510fbdb132962cebf09d2e
[ "MIT" ]
11
2020-02-05T00:36:38.000Z
2020-05-31T23:20:21.000Z
detection_tf/scripts/stuff/node_finder.py
hywel1994/SARosPerceptionKitti
82c307facb5b39e47c510fbdb132962cebf09d2e
[ "MIT" ]
4
2018-11-02T09:57:59.000Z
2021-04-27T01:20:04.000Z
#!/usr/bin/env python2 # -*- coding: utf-8 -*- """ Created on Thu Jan 11 15:58:42 2018 @author: gustav """ import tensorflow as tf NODE_OPS = ['Placeholder','Identity'] MODEL_FILE = '../models/ssd_mobilenet_v11_coco/frozen_inference_graph.pb' gf = tf.GraphDef() gf.ParseFromString(open(MODEL_FILE,'rb').read()) print([n.name + '=>' + n.op for n in gf.node if n.op in (NODE_OPS)])
21.444444
73
0.681347
63
386
4.031746
0.793651
0.055118
0
0
0
0
0
0
0
0
0
0.047761
0.132124
386
17
74
22.705882
0.710448
0.248705
0
0
0
0
0.288256
0.206406
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0.166667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e78557afcf99f289bebfa26454aa02ac75ba1622
1,483
py
Python
Clustering_Algorithms/create_chart.py
NikhilGupta1997/Data-Mining-Algorithms
56c9acca3d4f62b72e0ec22e150421eaee2dc850
[ "MIT" ]
7
2018-12-25T07:52:51.000Z
2021-05-17T23:53:18.000Z
Clustering_Algorithms/create_chart.py
NikhilGupta1997/Data-Mining-Algorithms
56c9acca3d4f62b72e0ec22e150421eaee2dc850
[ "MIT" ]
null
null
null
Clustering_Algorithms/create_chart.py
NikhilGupta1997/Data-Mining-Algorithms
56c9acca3d4f62b72e0ec22e150421eaee2dc850
[ "MIT" ]
null
null
null
import numpy as np import sys import matplotlib.pyplot as plt file = 'optics.txt' minpts = int(sys.argv[1]) epsilon = float(sys.argv[2]) X = [] Y = [] cluster_inds = [] inds = [] noise = [] buff = [] counter = 0 for i, line in enumerate(open(file).readlines()): counter += 1 val = line.strip().split() idx = int(val[0]) dist = float(val[1]) if dist < 0.0: dist = epsilon*epsilon buff.append(idx) if len(inds) > 100*minpts: cluster_inds.append(inds) noise.extend(buff) buff = [] inds = [] else: inds.append(idx) X.append(i) Y.append(dist) noise.extend(buff) noise.extend(inds) # if len(inds) >= 0: # cluster_inds.append(inds) # cluster_inds.append(noise) plt.figure() plt.plot(X, Y) plt.legend() plt.xlabel('Point ID') plt.ylabel('Reachability Distance') plt.xticks([]) plt.title('Reachability Graph') # plt.show() dataset = sys.argv[3] data = np.array([val.strip().split() for val in open(dataset, 'r').readlines()]) if data.shape[1] == 2: X = data[:, 0] Y = data[:, 1] color = {4: 'red', 1: 'blue', 2: 'green', 3: 'yellow', 0: 'black', 5: 'cyan', 6: 'magenta', } plt.figure() count = 0 for i, inds in enumerate(cluster_inds): count += len(inds) print(count, len(inds)) x_val = X[inds] y_val = Y[inds] plt.scatter(x_val, y_val, c=color[(i%6+1)], s=2, edgecolor=color[(i%6+1)]) # print noise count += len(noise) print(count, len(noise)) x_val = X[noise] y_val = Y[noise] plt.scatter(x_val, y_val, c='black', s=2) plt.show()
20.315068
94
0.631153
247
1,483
3.736842
0.323887
0.059588
0.055255
0.045504
0.04117
0.04117
0.04117
0
0
0
0
0.025122
0.167903
1,483
73
95
20.315068
0.722853
0.064059
0
0.135593
0
0
0.070137
0
0
0
0
0
0
1
0
false
0
0.050847
0
0.050847
0.033898
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e785fb58d580cc912f9ca3247c8aec7176d88b83
1,087
py
Python
setup.py
yudevan/django-indonesia-regions
5c01798c534e101115a1c943d13c6caa78897e01
[ "MIT" ]
5
2016-06-26T15:18:14.000Z
2020-09-25T06:22:35.000Z
setup.py
yudevan/django-indonesia-regions
5c01798c534e101115a1c943d13c6caa78897e01
[ "MIT" ]
5
2017-10-26T04:01:02.000Z
2021-02-03T04:34:11.000Z
setup.py
yudevan/django-indonesia-regions
5c01798c534e101115a1c943d13c6caa78897e01
[ "MIT" ]
7
2016-06-26T07:04:49.000Z
2021-02-02T13:41:02.000Z
import os from setuptools import find_packages, setup # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-indonesia-regions', version='1.0.6', packages=find_packages(), include_package_data=True, license='MIT License', description='Pluggable django providing indonesian regions model including the initial data', url='https://github.com/Keda87/django-indonesia-regions', author='Adiyat Mubarak', author_email='adiyatmubarak@gmail.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], )
33.96875
97
0.651334
122
1,087
5.729508
0.606557
0.16309
0.214592
0.148784
0
0
0
0
0
0
0
0.016413
0.215271
1,087
31
98
35.064516
0.803048
0.034959
0
0
0
0
0.544413
0.04489
0
0
0
0
0
1
0
true
0
0.074074
0
0.074074
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
e78630157ef111fb5facf453b100529fad826536
370
py
Python
objectModel/Python/cdm/enums/cdm_incremental_partition_type.py
Microsoft/CDM
7ea59264d661356ca1b44c31a352753928d08b5f
[ "CC-BY-4.0", "MIT" ]
265
2018-03-04T04:47:50.000Z
2019-05-06T13:31:18.000Z
objectModel/Python/cdm/enums/cdm_incremental_partition_type.py
Microsoft/CDM
7ea59264d661356ca1b44c31a352753928d08b5f
[ "CC-BY-4.0", "MIT" ]
39
2018-03-21T16:57:12.000Z
2019-05-06T17:30:23.000Z
objectModel/Python/cdm/enums/cdm_incremental_partition_type.py
Microsoft/CDM
7ea59264d661356ca1b44c31a352753928d08b5f
[ "CC-BY-4.0", "MIT" ]
75
2018-03-09T20:33:13.000Z
2019-05-05T06:55:43.000Z
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. from enum import Enum class CdmIncrementalPartitionType(Enum): NONE = 'None' INSERT = 'Insert' UPDATE = 'Update' DELETE = 'Delete' UPSERT = 'Upsert' UPSERT_AND_DELETE = 'UpsertAndDelete'
24.666667
94
0.716216
43
370
6.116279
0.72093
0.091255
0
0
0
0
0
0
0
0
0
0
0.202703
370
14
95
26.428571
0.891525
0.405405
0
0
0
0
0.199074
0
0
0
0
0
0
1
0
false
0
0.125
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
e7868823ca8a161623e547b6d7f6127e53f6a1db
534
py
Python
app/back/mongo/data/collect/maps/mongo.py
jgphilpott/polyplot
c46861174ee5881dadffbfb2278d555462523547
[ "MIT" ]
5
2021-05-17T14:17:14.000Z
2021-12-14T12:54:32.000Z
app/back/mongo/data/collect/maps/mongo.py
jgphilpott/iGraph
2a91ba57e4950856a83d3a109753f8f2badee829
[ "MIT" ]
8
2020-02-09T02:48:41.000Z
2021-05-16T04:57:02.000Z
app/back/mongo/data/collect/maps/mongo.py
jgphilpott/iGraph
2a91ba57e4950856a83d3a109753f8f2badee829
[ "MIT" ]
2
2016-09-12T03:48:16.000Z
2019-05-04T14:15:19.000Z
from back.mongo.data.collect.ions import find_collection from back.mongo.data.collect.maps.model import Map def find_map(query={}, filter={"_id": 0}, detail="micro"): collection = find_collection("maps_" + detail) return dict(collection.find_one(query, filter)) def find_maps(query={}, filter={"_id": 0}, sort=[("properties.code", 1)], limit=0, detail="micro"): collection = find_collection("maps_" + detail) collection.create_index(sort) return list(collection.find(query, filter).sort(sort).limit(limit))
31.411765
99
0.713483
73
534
5.068493
0.410959
0.118919
0.07027
0.091892
0.378378
0.248649
0.248649
0.248649
0
0
0
0.008529
0.121723
534
16
100
33.375
0.780384
0
0
0.222222
0
0
0.076779
0
0
0
0
0
0
1
0.222222
false
0
0.222222
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
e786a7b6d7533638ac5bb296afe211e365fb9e28
7,727
py
Python
test/unitTest/arg_test.py
alljoyn/devtools-codegen
388cac15e584dce3040d5090e8f627e5360e5c0f
[ "0BSD" ]
null
null
null
test/unitTest/arg_test.py
alljoyn/devtools-codegen
388cac15e584dce3040d5090e8f627e5360e5c0f
[ "0BSD" ]
null
null
null
test/unitTest/arg_test.py
alljoyn/devtools-codegen
388cac15e584dce3040d5090e8f627e5360e5c0f
[ "0BSD" ]
null
null
null
# Copyright AllSeen Alliance. All rights reserved. # # Permission to use, copy, modify, and/or distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import unittest import fnmatch import os import sys import random import AllJoynCodeGen.argdef as argdef import AllJoynCodeGen.memberdef as memberdef class TestArg(unittest.TestCase): """Tests the ArgDef class.""" def test_init(self): """Tests initializing.""" a = argdef.ArgDef() self.assertEqual(a.name, None) self.assertEqual(a.arg_type, None) self.assertEqual(a.direction, None) self.assertEqual(a.variant_type, None) self.assertEqual(a.interface, None) a = argdef.ArgDef(None, "myArg", "(bid)", "in", "") self.assertEqual(a.name, "myArg") self.assertEqual(a.arg_type, "(bid)") self.assertEqual(a.direction, "in") self.assertEqual(a.variant_type, "") self.assertEqual(a.interface, None) return def test_is_structure(self): """Tests the is_structure() method.""" a = argdef.ArgDef(None, "myArg", "(bid)") self.assertTrue(a.is_structure()) a = argdef.ArgDef(None, "myArg", "a(bid)") self.assertTrue(a.is_structure()) self.assertTrue(memberdef.is_structure("(bid)")) self.assertTrue(memberdef.is_structure("a(bid)")) self.assertTrue(memberdef.is_structure("aa(bid)")) self.assertTrue(memberdef.is_structure("aaa(bid)")) self.assertFalse(memberdef.is_structure("a{is}")) self.assertFalse(memberdef.is_structure("a{i(sid)}")) 
return def test_is_dictionary(self): """Tests the is_dictionary() method.""" a = argdef.ArgDef(None, "myArg", "a{bid}") self.assertTrue(a.is_dictionary()) a = argdef.ArgDef(None, "myArg", "aa{bid}") self.assertTrue(a.is_dictionary()) # This is actually an invalid arg type. Because the xml is None # no validation is done. If this test fails because of validation # just remove the test. a = argdef.ArgDef(None, "myArg", "{bid}") self.assertFalse(a.is_dictionary()) self.assertTrue(memberdef.is_dictionary("a{bid}")) self.assertTrue(memberdef.is_dictionary("aa{bid}")) self.assertTrue(memberdef.is_dictionary("aaa{bid}")) self.assertFalse(memberdef.is_dictionary("a(is)")) self.assertFalse(memberdef.is_dictionary("a(ia{is})")) return def test_get_indirection_level(self): """Tests the get_indirection_level() method.""" a = argdef.ArgDef(None, "myArg", "a(bid)") self.assertEqual(a.get_indirection_level(), 1) a = argdef.ArgDef(None, "myArg", "aad") self.assertEqual(a.get_indirection_level(), 2) self.assertEqual(memberdef.get_indirection_level("i"), 0) self.assertEqual(memberdef.get_indirection_level("ai"), 1) self.assertEqual(memberdef.get_indirection_level("aai"), 2) self.assertEqual(memberdef.get_indirection_level("a{bid}"), 1) self.assertEqual(memberdef.get_indirection_level("aa{bid}"), 2) self.assertEqual(memberdef.get_indirection_level("aaa{bid}"), 3) self.assertEqual(memberdef.get_indirection_level("a(is)"), 1) self.assertEqual(memberdef.get_indirection_level("a(ia{is})"), 1) return def test_get_max_structure_depth(self): """Tests the get_max_structure_depth() method.""" sig = "bud" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_structure_depth(), 0) self.assertEqual(memberdef.get_max_structure_depth(sig), 0) sig = "(bud)" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_structure_depth(), 1) self.assertEqual(memberdef.get_max_structure_depth(sig), 1) sig = "(bud)(did)" a = argdef.ArgDef(None, "myArg", sig) 
self.assertEqual(a.get_max_structure_depth(), 1) self.assertEqual(memberdef.get_max_structure_depth(sig), 1) sig = "(bud(did))" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_structure_depth(), 2) self.assertEqual(memberdef.get_max_structure_depth(sig), 2) sig = "(q(bud)(did))" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_structure_depth(), 2) self.assertEqual(memberdef.get_max_structure_depth(sig), 2) sig = "(i((bud(did))i))" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_structure_depth(), 4) self.assertEqual(memberdef.get_max_structure_depth(sig), 4) sig = "(i((buda{did})i))" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_structure_depth(), 3) self.assertEqual(memberdef.get_max_structure_depth(sig), 3) return def test_get_max_dictionary_depth(self): """Tests the get_max_dictionary_depth() method.""" sig = "bud" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_dictionary_depth(), 0) self.assertEqual(memberdef.get_max_dictionary_depth(sig), 0) sig = "a{bud}" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_dictionary_depth(), 1) self.assertEqual(memberdef.get_max_dictionary_depth(sig), 1) sig = "a{bud}a{did}" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_dictionary_depth(), 1) self.assertEqual(memberdef.get_max_dictionary_depth(sig), 1) sig = "a{buda{did}}" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_dictionary_depth(), 2) self.assertEqual(memberdef.get_max_dictionary_depth(sig), 2) sig = "a{q{bud}a{did}}" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_dictionary_depth(), 2) self.assertEqual(memberdef.get_max_dictionary_depth(sig), 2) sig = "a{ia{a{buda{did}}i}}" a = argdef.ArgDef(None, "myArg", sig) self.assertEqual(a.get_max_dictionary_depth(), 4) self.assertEqual(memberdef.get_max_dictionary_depth(sig), 4) sig = "a{ia{a{buda(did)}i}}" a = argdef.ArgDef(None, "myArg", sig) 
self.assertEqual(a.get_max_dictionary_depth(), 3) self.assertEqual(memberdef.get_max_dictionary_depth(sig), 3) return def test_split_signature(self): """Tests the split_signature() method.""" fragments = ["b", "i", "d", "u", "x", "a{sv}", "(ii)", "(ia{sv})", "a{i(ss)}", "(((yyy)))"] for i in range(5000): nfrags = random.randint(1, len(fragments)-1) frags = [] for j in range(nfrags): frags.append(fragments[random.randint(0,len(fragments)-1)]) sig = "(" + "".join(frags) + ")" fields = memberdef.split_signature(sig) self.assertEqual(len(fields), nfrags) for j in range(nfrags): self.assertEqual(fields[j], frags[j]) return
39.423469
75
0.640481
998
7,727
4.804609
0.159319
0.156413
0.086757
0.077998
0.664025
0.589781
0.456309
0.378311
0.318874
0.310115
0
0.007646
0.221431
7,727
195
76
39.625641
0.789395
0.151547
0
0.362963
0
0
0.074769
0
0.007407
0
0
0
0.488889
1
0.051852
false
0
0.051852
0
0.162963
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
1
e78723495183f0fe8aa489d98c4605dbae6178f0
914
py
Python
nlpatl/sampling/uncertainty/least_confidence.py
dumpmemory/nlpatl
59209242d1ac26714b11b86261070ac50cc90432
[ "MIT" ]
18
2021-11-29T06:43:46.000Z
2022-03-29T09:58:32.000Z
nlpatl/sampling/uncertainty/least_confidence.py
dumpmemory/nlpatl
59209242d1ac26714b11b86261070ac50cc90432
[ "MIT" ]
null
null
null
nlpatl/sampling/uncertainty/least_confidence.py
dumpmemory/nlpatl
59209242d1ac26714b11b86261070ac50cc90432
[ "MIT" ]
1
2021-11-29T06:43:47.000Z
2021-11-29T06:43:47.000Z
from typing import Tuple import numpy as np from nlpatl.sampling import Sampling class LeastConfidenceSampling(Sampling): """ Sampling data points according to the least confidence. Pick the lowest probabilies for the highest class. https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.219.1846&rep=rep1&type=pdf :param name: Name of this sampling :type name: str """ def __init__(self, name: str = "least_confidence_sampling"): super().__init__(name=name) def sample( self, data: np.ndarray, num_sample: int ) -> Tuple[np.ndarray, np.ndarray]: num_node = min(num_sample, len(data)) # Calucalte least confidence least_confidences = 1 - np.max(data, axis=1) indices = np.argpartition(-least_confidences, num_node - 1)[:num_node] return indices, least_confidences[indices]
30.466667
132
0.659737
117
914
5
0.529915
0.076923
0.041026
0
0
0
0
0
0
0
0
0.021645
0.241794
914
29
133
31.517241
0.822511
0.305252
0
0
0
0
0.043253
0.043253
0
0
0
0
0
1
0.153846
false
0
0.230769
0
0.538462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
e787bcef0fe3c055b3e9f9ae08c31b576c247a87
2,376
py
Python
random_video.py
enriqueav/the_random_video
0dbeef2efbbad33351fd106b16095b4bb3ed8821
[ "MIT" ]
1
2020-11-07T17:15:27.000Z
2020-11-07T17:15:27.000Z
random_video.py
enriqueav/the_random_video
0dbeef2efbbad33351fd106b16095b4bb3ed8821
[ "MIT" ]
null
null
null
random_video.py
enriqueav/the_random_video
0dbeef2efbbad33351fd106b16095b4bb3ed8821
[ "MIT" ]
null
null
null
import argparse import time from taor.randomvideo import random_video if __name__ == "__main__": parser = argparse.ArgumentParser( description='Create random videos. The --seed argument can be used to generate' 'consistent results. By default the name of the video will contain the epoch' 'time of generation, otherwise --image_path can be used to overwrite this.' ) parser.add_argument("-s", "--seed", help="Initialize numpy with a given seed. " "Can be used to obtain consistent results.", type=int) parser.add_argument("-i", "--image_path", help="Name of the file to create. " "Epoch time is used as filename if -i is not specified.") parser.add_argument("-d", "--debug", help="Enter DEBUG mode.", action="store_true") parser.add_argument("-q", "--quantity", help="Quantity of videos to generate. Default is 1." "If --seed is set, the seed is used for the first video " "and then 1 is added for each one of the following.", type=int, default=1) parser.add_argument("-f", "--frames", help="Quantity of video frames to generate. " "Default of 24*60*2 == 2880, for a 2 minutes video at 24 FPS.", type=int, default=24*60*2) args = parser.parse_args() seed = args.seed image_path = args.image_path frames = args.frames for i in range(args.quantity): if args.seed: seed = args.seed + i pre = args.image_path or "./results/" + str(int(time.time())) image_path = pre + "_seed%d.avi" % seed elif args.quantity > 1: pre = args.image_path or "./results/" + str(int(time.time())) image_path = pre + "_number%d.avi" % i else: pre = args.image_path or "./results/" + str(int(time.time())) image_path = pre + ".avi" random_video(file_name=image_path, debug=args.debug, seed=seed, total_frames=frames)
44.830189
97
0.513889
278
2,376
4.276978
0.345324
0.083263
0.071489
0.027754
0.12868
0.12868
0.12868
0.12868
0.12868
0.12868
0
0.014354
0.384259
2,376
52
98
45.692308
0.79836
0
0
0.102041
0
0
0.322391
0
0
0
0
0
0
1
0
false
0
0.061224
0
0.061224
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e787dc94ca4111ab33a1d29a9785aad5e480ebdf
2,524
py
Python
src/leetcode_1743_restore_the_array_from_adjacent_pairs.py
sungho-joo/leetcode2github
ce7730ef40f6051df23681dd3c0e1e657abba620
[ "MIT" ]
null
null
null
src/leetcode_1743_restore_the_array_from_adjacent_pairs.py
sungho-joo/leetcode2github
ce7730ef40f6051df23681dd3c0e1e657abba620
[ "MIT" ]
null
null
null
src/leetcode_1743_restore_the_array_from_adjacent_pairs.py
sungho-joo/leetcode2github
ce7730ef40f6051df23681dd3c0e1e657abba620
[ "MIT" ]
null
null
null
# @l2g 1743 python3 # [1743] Restore the Array From Adjacent Pairs # Difficulty: Medium # https://leetcode.com/problems/restore-the-array-from-adjacent-pairs # # There is an integer array nums that consists of n unique elements,but you have forgotten it.However, # you do remember every pair of adjacent elements in nums. # You are given a 2D integer array adjacentPairs of size n - 1 where each adjacentPairs[i] = [ui, # vi] indicates that the elements ui and vi are adjacent in nums. # It is guaranteed that every adjacent pair of elements nums[i] and nums[i+1] will exist in adjacentPairs, # either as [nums[i],nums[i+1]] or [nums[i+1],nums[i]].The pairs can appear in any order. # Return the original array nums. If there are multiple solutions, return any of them. # # Example 1: # # Input: adjacentPairs = [[2,1],[3,4],[3,2]] # Output: [1,2,3,4] # Explanation: This array has all its adjacent pairs in adjacentPairs. # Notice that adjacentPairs[i] may not be in left-to-right order. # # Example 2: # # Input: adjacentPairs = [[4,-2],[1,4],[-3,1]] # Output: [-2,4,1,-3] # Explanation: There can be negative numbers. # Another solution is [-3,1,4,-2], which would also be accepted. # # Example 3: # # Input: adjacentPairs = [[100000,-100000]] # Output: [100000,-100000] # # # Constraints: # # nums.length == n # adjacentPairs.length == n - 1 # adjacentPairs[i].length == 2 # 2 <= n <= 10^5 # -10^5 <= nums[i], ui, vi <= 10^5 # There exists some nums that has adjacentPairs as its pairs. 
# # from typing import List class Solution: def restoreArray(self, adjacentPairs: List[List[int]]) -> List[int]: pair_counter = defaultdict(list) for pair in adjacentPairs: pair_counter[pair[0]].append(pair) pair_counter[pair[1]].append(pair[::-1]) single_node = set([node for node, value in pair_counter.items() if len(value) == 1]) ans = [] while len(single_node) != 0: start_node = single_node.pop() ans.append(start_node) link = pair_counter[start_node][0] pairs = pair_counter[link[1]] while len(pairs) != 1: pair_counter[link[1]].remove(link[::-1]) link = pairs[0] ans.append(link[0]) pairs = pair_counter[link[1]] ans.append(link[1]) single_node.remove(link[1]) return ans if __name__ == "__main__": import os import pytest pytest.main([os.path.join("tests", "test_1743.py")])
30.409639
106
0.638669
372
2,524
4.268817
0.360215
0.055416
0.011335
0.030227
0.06801
0.06801
0
0
0
0
0
0.049642
0.225832
2,524
82
107
30.780488
0.76305
0.557845
0
0.076923
0
0
0.023321
0
0
0
0
0
0
1
0.038462
false
0
0.115385
0
0.230769
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e789b7eb8d4c8742185f24806004dfff92a4a404
1,449
py
Python
timetable/timetable.py
Huy-Ngo/usth-timetable-2
d9f653ee2cb138b075c7c630b6f8be08d959cb08
[ "MIT" ]
null
null
null
timetable/timetable.py
Huy-Ngo/usth-timetable-2
d9f653ee2cb138b075c7c630b6f8be08d959cb08
[ "MIT" ]
null
null
null
timetable/timetable.py
Huy-Ngo/usth-timetable-2
d9f653ee2cb138b075c7c630b6f8be08d959cb08
[ "MIT" ]
null
null
null
import datetime from flask import ( Blueprint, flash, g, redirect, render_template, request, url_for, session ) from werkzeug.exceptions import abort from timetable.student_auth import login_required from . import db, updater bp = Blueprint('timetable', __name__) @bp.route('/', methods=['GET']) def index(): user_id = session.get('user_id') if user_id is None: return render_template('timetable/index.html') else: response = db.get({ 'table_name': 'student', 'id': user_id }) user_calendar_id = response['response']['timetable_id'] return redirect(url_for('r_calendar_view_now', calendar_id=user_calendar_id)) @bp.route('/<calendar_id>', methods=['GET']) def r_calendar_view_now(calendar_id): year = datetime.datetime.now().year month = datetime.datetime.now().month day = datetime.datetime.now().day view = 'day' # later: replace it with personal preferred view. Preferred view is saved in cookie/local storage return redirect(url_for('timetable.r_list_schedule', timetable_id=calendar_id, view=view, year=year, month=month, day=day)) @bp.route('/<timetable_id>/<view>/<int:year>/<int:month>/<int:day>', methods=['GET']) def r_list_schedule(timetable_id, view, year, month, day): response = updater.get_event(timetable_id, view, year, month, day) return render_template( 'timetable/timetable.html', events=response['response'], calendar_id=timetable_id, year=year, month=month, day=day, view=view )
30.1875
124
0.73844
207
1,449
4.961353
0.309179
0.06816
0.037975
0.056475
0.185979
0.149951
0
0
0
0
0
0
0.118012
1,449
47
125
30.829787
0.803599
0.065562
0
0
0
0
0.172337
0.076923
0
0
0
0
0
1
0.085714
false
0
0.142857
0
0.342857
0.057143
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e78c94bb5cf3e2d928816be2ee0ebeb373a52cb8
4,912
py
Python
apps/sepa/tests/integration.py
jfterpstra/onepercentclub-site
43e8e01ac4d3d1ffdd5959ebd048ce95bb2dba0e
[ "BSD-3-Clause" ]
7
2015-01-02T19:31:14.000Z
2021-03-22T17:30:23.000Z
apps/sepa/tests/integration.py
jfterpstra/onepercentclub-site
43e8e01ac4d3d1ffdd5959ebd048ce95bb2dba0e
[ "BSD-3-Clause" ]
1
2015-03-06T08:34:59.000Z
2015-03-06T08:34:59.000Z
apps/sepa/tests/integration.py
jfterpstra/onepercentclub-site
43e8e01ac4d3d1ffdd5959ebd048ce95bb2dba0e
[ "BSD-3-Clause" ]
null
null
null
import os import unittest import decimal from lxml import etree from apps.sepa.sepa import SepaAccount, SepaDocument from .base import SepaXMLTestMixin class ExampleXMLTest(SepaXMLTestMixin, unittest.TestCase): """ Attempt to test recreating an example XML file """ def setUp(self): super(ExampleXMLTest, self).setUp() # Read and validate example XML file example_file = os.path.join( self.directory, 'BvN-pain.001.001.03-example-message.xml' ) self.example = etree.parse(example_file) self.xmlschema.assertValid(self.example) def test_generate_example(self): """ Attempt to recreate example XML file. """ pass class CalculateMoneyDonatedTests(SepaXMLTestMixin, unittest.TestCase): """ Generate and attempt to validate an XML file modelled after actual transactions """ def setUp(self): super(CalculateMoneyDonatedTests, self).setUp() self.some_account = { 'name': '1%CLUB', 'iban': 'NL45RABO0132207044', 'bic': 'RABONL2U', 'id': 'A01' } self.another_account = { 'name': 'Nice Project', 'iban': 'NL13TEST0123456789', 'bic': 'TESTNL2A', 'id': 'P551' } self.third_account = { 'name': 'SHO', 'iban': 'NL28INGB0000000777', 'bic': 'INGBNL2A', 'id': 'P345' } self.payment1 = { 'amount': decimal.Decimal('50.00'), 'id': 'PAYMENT 1253675', 'remittance_info': 'some info' } self.payment2 = { 'amount': decimal.Decimal('25.00'), 'id': 'PAYMENT 234532', 'remittance_info': 'my info' } self.message_id = 'BATCH-1234' payment_id = 'PAYMENTS TODAY' # Create base for SEPA sepa = SepaDocument(type='CT') sepa.set_info(message_identification=self.message_id, payment_info_id=payment_id) sepa.set_initiating_party(name=self.some_account['name'], id=self.some_account['id']) some_account = SepaAccount(name=self.some_account['name'], iban=self.some_account['iban'], bic=self.some_account['bic']) sepa.set_debtor(some_account) # Add a payment another_account = SepaAccount(name=self.another_account['name'], iban=self.another_account['iban'], bic=self.another_account['bic']) 
sepa.add_credit_transfer(creditor=another_account, amount=self.payment1['amount'], creditor_payment_id=self.payment1['id'], remittance_information=self.payment1['remittance_info']) # Add another payment third_account = SepaAccount(name=self.third_account['name'], iban=self.third_account['iban'], bic=self.third_account['bic']) sepa.add_credit_transfer(creditor=third_account, creditor_payment_id=self.payment2['id'], amount=self.payment2['amount'], remittance_information=self.payment2['remittance_info']) # Now lets get the xml for these payments self.xml = sepa.as_xml() def test_parse_xml(self): """ Test parsing the generated XML """ # Still no errors? Lets check the xml. tree = etree.XML(self.xml) main = tree[0] self.assertEqual(main.tag, '{urn:iso:std:iso:20022:tech:xsd:pain.001.001.03}CstmrCdtTrfInitn' ) header = main[0] self.assertEqual(header.tag, '{urn:iso:std:iso:20022:tech:xsd:pain.001.001.03}GrpHdr') self.assertEqual(header[0].text, self.message_id) # We should have two payments self.assertEqual(header[2].text, "2") # Total amount should be the sum of two payments coverted to euros self.assertEqual(header[3].text, '75.00') # Now lets check The second payment IBANs second_payment = main[2] namespaces = { # Default 'pain': 'urn:iso:std:iso:20022:tech:xsd:pain.001.001.03', 'xsi': 'http://www.w3.org/2001/XMLSchema-instance' } self.assertEqual( second_payment.find( 'pain:DbtrAcct/pain:Id/pain:IBAN', namespaces=namespaces ).text, self.some_account['iban'] ) self.assertEqual( second_payment.find( 'pain:CdtTrfTxInf/pain:CdtrAcct/pain:Id/pain:IBAN', namespaces=namespaces ).text, self.third_account['iban'] ) def test_validate_xml(self): """ Assert the XML is valid according to schema """ tree = etree.XML(self.xml) self.xmlschema.assertValid(tree)
31.896104
107
0.578583
531
4,912
5.239171
0.306968
0.035586
0.037743
0.017254
0.155284
0.12509
0.099209
0.071172
0.040978
0.040978
0
0.042179
0.304967
4,912
153
108
32.104575
0.772701
0.112378
0
0.102041
0
0.030612
0.163687
0.065475
0
0
0
0
0.091837
1
0.05102
false
0.010204
0.061224
0
0.132653
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e78d0b3c483bba3574b16e118dcb9461ba02bf95
2,992
py
Python
freeflow/core/tests/dag.py
enorha/freeflow
5b655ce616d408e566b0b900f96b24804dc49578
[ "Apache-2.0" ]
1
2021-11-19T08:48:00.000Z
2021-11-19T08:48:00.000Z
freeflow/core/tests/dag.py
enorha/freeflow
5b655ce616d408e566b0b900f96b24804dc49578
[ "Apache-2.0" ]
1
2022-01-06T23:11:02.000Z
2022-01-06T23:11:02.000Z
freeflow/core/tests/dag.py
enorha/freeflow
5b655ce616d408e566b0b900f96b24804dc49578
[ "Apache-2.0" ]
2
2021-11-19T08:51:35.000Z
2021-12-24T14:39:00.000Z
#!/usr/bin/python # -*- coding: utf-8 -*- import unittest import freeflow.core.tests from airflow import models as af_models class DagTest(unittest.TestCase): @classmethod def setUpClass(cls): cls._dag_files = freeflow.core.tests.dag_files def test_dag_integrity(self): def check_valid_dag(dag): """ Checks whether the python file is really a runnable DAG. :param dag: python module (file) :type dag: module """ self.assertTrue( any(isinstance(var, af_models.DAG) for var in vars(dag).values()), "File does not contains a DAG instance" ) def check_single_dag_file(dag_class): """ Checks for count of the DAG in a single file. It should be only one. :param dag_class: list of DAG class instance :type dag_class: list(DAG) """ self.assertTrue( len(dag_class) <= 1, "File should only contains a single DAG" ) def check_dag_name(dag_class, filename): """ Checks that DAG name should be snake case and same with the filename. If DAG versioning is needed, use <name>_v<number> :param dag_class: list of DAG class instance :type dag_class: list(DAG) :param filename: the filename which DAG class(es) resides :type filename: str """ dag_id = dag_class[0].dag_id self.assertEqual( dag_id.split('_v')[0], filename, "File name and DAG name should be the same" ) self.assertTrue( all(c.islower() or c.isdigit() or c == '_' for c in dag_id), "DAG name should be all lower case" ) def check_task_name_within_dag(task_class): """ Checks uniqueness of task name within a DAG to ensure clarity :param task_class: list of task instance :type task_class: list(BaseOperator) """ tasks = task_class task_ids = [] for task in tasks: task_ids.append(task.task_id) self.assertTrue( all(c.islower() or c.isdigit() or c == '_' or c == '-' for c in task.task_id), "Task name should be all lower case" ) self.assertEqual( len(task_ids), len(set(task_ids)), "Task ID should not be duplicate" ) for file in self._dag_files: check_valid_dag(file['dag']) check_single_dag_file(file['instance']['dags']) 
check_dag_name(file['instance']['dags'], file['filename']) check_task_name_within_dag(file['instance']['tasks']) if __name__ == '__main__': unittest.main()
31.829787
98
0.531417
357
2,992
4.271709
0.296919
0.057705
0.031475
0.029508
0.19082
0.152131
0.120656
0.120656
0.120656
0.120656
0
0.002153
0.379011
2,992
93
99
32.172043
0.818622
0.234291
0
0.12
0
0
0.136071
0
0
0
0
0
0.12
1
0.12
false
0
0.06
0
0.2
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e78d767af998e6e80008e8c991011efc8624eff7
1,429
py
Python
pingdomexport/tests/load/test_checks_output.py
mattboston/pingdomexport
1cd7acbf813abee0b9a7865b9cd4a1e166d55c37
[ "MIT" ]
4
2018-01-25T09:18:38.000Z
2021-02-12T18:36:08.000Z
pingdomexport/tests/load/test_checks_output.py
mattboston/pingdomexport
1cd7acbf813abee0b9a7865b9cd4a1e166d55c37
[ "MIT" ]
1
2018-12-04T18:42:06.000Z
2021-05-25T14:03:32.000Z
pingdomexport/tests/load/test_checks_output.py
mattboston/pingdomexport
1cd7acbf813abee0b9a7865b9cd4a1e166d55c37
[ "MIT" ]
3
2019-04-30T11:52:14.000Z
2021-03-24T20:58:04.000Z
from pingdomexport.load import checks_output class TestOutput: def test_load(self, capsys): checks_output.Output().load( [ { 'hostname': 'www.a.com', 'use_legacy_notifications': True, 'lastresponsetime': 411, 'ipv6': False, 'type': 'http', 'name': 'A', 'resolution': 1, 'created': 1458372620, 'lasttesttime': 1459005934, 'status': 'up', 'id': 2057736 }, { 'lasterrortime': 1458938840, 'type': 'http', 'hostname': 'b.a.com', 'lastresponsetime': 827, 'created': 1458398619, 'lasttesttime': 1459005943, 'status': 'up', 'ipv6': False, 'use_legacy_notifications': True, 'resolution': 1, 'name': 'B', 'id': 2057910 } ] ) out = capsys.readouterr() assert len(out) == 2 assert 'Id,Name,Created at,Status,Hostname,Type\r\n2057736,A,1458372620,up,www.a.com,http\r\n2057910,B,1458398619,up,b.a.com,http\r\n' == out[0] assert '' == out[1]
34.853659
152
0.401679
111
1,429
5.108108
0.495496
0.028219
0.024691
0.091711
0
0
0
0
0
0
0
0.148396
0.476557
1,429
40
153
35.725
0.609626
0
0
0.263158
0
0.026316
0.253324
0.109867
0
0
0
0
0.078947
1
0.026316
false
0
0.026316
0
0.078947
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e78dbfdb3c598a36053c36006335775401473a5a
83
py
Python
Project_3/source/test_perihelion/test_perihelion.py
larsjbro/FYS4150
95ac4e09b5aad133b29c9aabb5be1302abdd8e65
[ "BSD-2-Clause" ]
null
null
null
Project_3/source/test_perihelion/test_perihelion.py
larsjbro/FYS4150
95ac4e09b5aad133b29c9aabb5be1302abdd8e65
[ "BSD-2-Clause" ]
null
null
null
Project_3/source/test_perihelion/test_perihelion.py
larsjbro/FYS4150
95ac4e09b5aad133b29c9aabb5be1302abdd8e65
[ "BSD-2-Clause" ]
null
null
null
''' Created on 30. okt. 2017 @author: LJB ''' if __name__ == '__main__': pass
10.375
26
0.590361
11
83
3.727273
1
0
0
0
0
0
0
0
0
0
0
0.09375
0.228916
83
8
27
10.375
0.546875
0.457831
0
0
0
0
0.210526
0
0
0
0
0
0
1
0
true
0.5
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
e79009e224114b1ec0ae87c6bd3d54452a3b6720
791
py
Python
story_with_exercises/5_ex_rename_like_a_pro.py
flrs/pycharm_tips_and_tricks
21a2768f2cc3b6b2af8e672d32c7bd178900adad
[ "MIT" ]
1
2020-02-17T15:08:57.000Z
2020-02-17T15:08:57.000Z
story_with_exercises/5_ex_rename_like_a_pro.py
flrs/pycharm_tips_and_tricks
21a2768f2cc3b6b2af8e672d32c7bd178900adad
[ "MIT" ]
null
null
null
story_with_exercises/5_ex_rename_like_a_pro.py
flrs/pycharm_tips_and_tricks
21a2768f2cc3b6b2af8e672d32c7bd178900adad
[ "MIT" ]
null
null
null
"""Rename Like a Pro Exercise: The contents of the people_on_meetup variable look more like a zoo. Let's rename the following items: - variable "people_on_meetup" -> "animals_in_zoo" - class "Meetup" -> "Zoo" To rename an item, put the cursor on the item you want to rename and then press Shift+F6 on Windows/Linux or ⇧+F6 on Mac OS. """ people_on_meetup = [ 'A tiny horse', 'Mystic Mouse', 'Steg O Saurus', 'Tardi Grade' ] class Meetup: def __init__(self, members): self.members = members def count_members(self): return len(self.members) if __name__ == '__main__': this_meetup = Meetup(people_on_meetup) print('Hello, Pythonistas!') print('We are a great group of {}.'.format( this_meetup.count_members() ))
22.6
105
0.663717
116
791
4.310345
0.586207
0.064
0.112
0
0
0
0
0
0
0
0
0.003284
0.230089
791
34
106
23.264706
0.816092
0.4311
0
0
0
0
0.230248
0
0
0
0
0
0
1
0.117647
false
0
0
0.058824
0.235294
0.117647
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e79061260d07a54089774bc6a61e074894413160
832
py
Python
bot/plugins/trivia/content/flags.py
best-coloc-ever/globibot
a4c4dd8bb3b71bff09bd2e1c4c5ce58ab1bad176
[ "MIT" ]
14
2016-02-03T16:47:01.000Z
2019-12-09T14:24:25.000Z
bot/plugins/trivia/content/flags.py
best-coloc-ever/globibot
a4c4dd8bb3b71bff09bd2e1c4c5ce58ab1bad176
[ "MIT" ]
11
2016-08-19T22:00:25.000Z
2022-01-13T00:39:48.000Z
bot/plugins/trivia/content/flags.py
best-coloc-ever/globibot
a4c4dd8bb3b71bff09bd2e1c4c5ce58ab1bad176
[ "MIT" ]
6
2016-08-19T21:30:58.000Z
2019-09-27T05:24:08.000Z
from .helpers import * from .behavior import trivia_behavior from io import BytesIO DELAY = 20 async def premise(item): country, image = item flag_image_url = 'https://www.countries-ofthe-world.com/{}'.format(image) flag_image = await Utils.fetch(flag_image_url) return dict( file_path=BytesIO(flag_image), filename='flag.png', content='You have {} seconds to guess the name of that country'.format(DELAY), ) def resolve(item, answers): country, _ = item winner, message = Resolve.fastest(answers, country.lower(), skill='geography') return winner, dict(content=message) FlagsTrivia = trivia_behavior( fetch = Fetch.read_json('flags.json'), pick = Pick.random_collection, premise = premise, query = Query.timed(DELAY), resolve = resolve, )
24.470588
86
0.679087
104
832
5.317308
0.576923
0.065099
0.0434
0
0
0
0
0
0
0
0
0.00303
0.206731
832
33
87
25.212121
0.834848
0
0
0
0
0
0.144231
0
0
0
0
0
0
1
0.041667
false
0
0.125
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e790a3d718923f21aded92fe9214fe4322d6f5f0
2,687
py
Python
pipe_text_wrapper.py
STealthy-and-haSTy/PipeText
518149cc3183ab9b6ba22be47405677259b7db7a
[ "MIT" ]
1
2020-10-16T08:41:23.000Z
2020-10-16T08:41:23.000Z
pipe_text_wrapper.py
STealthy-and-haSTy/PipeText
518149cc3183ab9b6ba22be47405677259b7db7a
[ "MIT" ]
null
null
null
pipe_text_wrapper.py
STealthy-and-haSTy/PipeText
518149cc3183ab9b6ba22be47405677259b7db7a
[ "MIT" ]
null
null
null
import sublime import sublime_plugin ### --------------------------------------------------------------------------- class PipeCommandHistory(): LIST_LIMIT = 50 def __init__(self): self.storage = [] def push(self, text, temp=False): self.del_duplicates(text) self.storage.insert(0, text) if len(self.storage) > self.LIST_LIMIT: del self.storage[self.LIST_LIMIT:] def del_duplicates(self, text): self.storage = [s for s in self.storage if s != text] def get(self): return self.storage def empty(self): return len(self.storage) == 0 _pipe_cmd_history = PipeCommandHistory() ### --------------------------------------------------------------------------- class PipeTextWrapperCommand(sublime_plugin.WindowCommand): def run(self, working_dir=None): last_cmd = '' if _pipe_cmd_history.empty() else _pipe_cmd_history.get()[0] panel = self.window.show_input_panel('shell_cmd', last_cmd, lambda shell_cmd: self.execute(shell_cmd, working_dir), None, None) panel.settings().set('_pipe_cmd_input', True) panel.settings().set('_pipe_cmd_idx', 0) panel.run_command('select_all') def execute(self, shell_cmd, working_dir): _pipe_cmd_history.push(shell_cmd) self.window.run_command('pipe_text', { 'shell_cmd': shell_cmd, 'working_dir': working_dir }) ### --------------------------------------------------------------------------- class PipeTextHistoryCommand(sublime_plugin.TextCommand): def run(self, edit, prev=False): history = _pipe_cmd_history.get() cur_idx = self.view.settings().get("_pipe_cmd_idx", 0) cur_idx = (cur_idx + (-1 if prev else 1)) % len(history) self.view.settings().set("_pipe_cmd_idx", cur_idx) self.view.replace(edit, sublime.Region(0, len(self.view)), history[cur_idx]) self.view.run_command('select_all') def is_enabled(self, prev=False): return len(_pipe_cmd_history.get()) > 1 ### --------------------------------------------------------------------------- class PipeTextEventListener(sublime_plugin.EventListener): def on_query_context(self, view, key, operator, operand, match_all): 
if key == 'pipe_text_input': lhs = view.settings().get('_pipe_cmd_input', False) rhs = bool(operand) return lhs == rhs if operator == sublime.OP_EQUAL else lhs != rhs return None ### ---------------------------------------------------------------------------
31.244186
100
0.534053
291
2,687
4.656357
0.268041
0.056827
0.061993
0.037638
0.15203
0
0
0
0
0
0
0.005278
0.224414
2,687
85
101
31.611765
0.644914
0.14105
0
0
0
0
0.061955
0
0
0
0
0
0
1
0.2
false
0
0.04
0.06
0.44
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e79113b7e230adcf093bfd7230288a6161d84a7d
4,663
py
Python
pytracer/texture/texturemap/__init__.py
zjiayao/pyTracer
c2b4ef299ecbdca1c519059488f7cd2438943ee4
[ "MIT" ]
9
2017-11-20T18:17:27.000Z
2022-01-27T23:00:31.000Z
pytracer/texture/texturemap/__init__.py
zjiayao/pyTracer
c2b4ef299ecbdca1c519059488f7cd2438943ee4
[ "MIT" ]
4
2021-06-08T19:03:51.000Z
2022-03-11T23:18:44.000Z
pytracer/texture/texturemap/__init__.py
zjiayao/pyTracer
c2b4ef299ecbdca1c519059488f7cd2438943ee4
[ "MIT" ]
1
2017-11-20T22:48:01.000Z
2017-11-20T22:48:01.000Z
""" __init__.py pytracer.texture.texturemap package Texture map definitions. Created by Jiayao on Aug 5, 2017 Modified on Aug 14, 2017 """ from __future__ import absolute_import from abc import (ABCMeta, abstractmethod) from pytracer import * import pytracer.geometry as geo import pytracer.transform as trans __all__ = ['TextureMapping2D', 'TextureMapping3D', 'SphericalMapping2D', 'UVMapping2D', 'CylindricalMapping2D', 'PlannarMapping2D', 'IdentityMapping3D'] class TextureMapping2D(object, metaclass=ABCMeta): def __repr__(self): return "{}".format(self.__class__) @abstractmethod def __call__(self, dg: 'geo.DifferentialGeometry') -> [FLOAT]: """ Mapping maps the point given by dg to (s, t) texture coordinates. Returning a list of `FLOAT`s: [s, t, dsdx, dtdx, dsdy, dtdy] """ raise NotImplementedError('src.core.texture.{}.map(): abstract method ' 'called'.format(self.__class__)) class UVMapping2D(TextureMapping2D): def __init__(self, su: FLOAT, sv: FLOAT, du: FLOAT, dv: FLOAT): self.su = su self.sv = sv self.du = du self.dv = dv def __call__(self, dg: 'geo.DifferentialGeometry') -> [FLOAT]: s = self.su * dg.u + self.du t = self.sv * dg.v + self.dv dsdx = self.su * dg.dudx dtdx = self.sv * dg.dvdx dsdy = self.su * dg.dudy dtdy = self.sv * dg.dvdy return [s, t, dsdx, dtdx, dsdy, dtdy] class SphericalMapping2D(TextureMapping2D): def __init__(self, w2t: 'trans.Transform'): self.w2t = w2t def __sphere(self, p: 'geo.Point') -> [FLOAT]: """ Spherical Mapping for single point. Returns list [s, t]. """ v = geo.normalize(self.w2t(p) - geo.Point(0., 0., 0.)) theta = geo.spherical_theta(v) phi = geo.spherical_phi(v) return [theta * INV_PI, phi * INV_2PI] def __call__(self, dg: 'geo.DifferentialGeometry') -> [FLOAT]: s, t = self.__sphere(dg.p) # compute texture coordinate # differentials # using forward differencing delta = .1 sx, tx = self.__sphere(dg.p + delta * dg.dpdx) dsdx = (sx - s) / delta dtdx = (tx - t) / delta if dtdx > .5: dtdx = 1. 
- dtdx elif dtdx < -.5: dtdx = -(dtdx + 1.) sy, ty = self.__sphere(dg.p + delta * dg.dpdy) dsdy = (sy - s) / delta dtdy = (ty - s) / delta if dtdy > .5: dtdy = 1. - dtdy elif dtdy < -.5: dtdy = -(dtdy + 1.) return [s, t, dsdx, dtdx, dsdy, dtdy] class CylindricalMapping2D(TextureMapping2D): def __init__(self, w2t: 'trans.Transform'): self.w2t = w2t def __cylinder(self, p: 'geo.Point') -> [FLOAT]: """ Cylinderical Mapping for single point. Returns list [s, t]. """ v = geo.normalize(self.w2t(p) - geo.Point(0., 0., 0.)) return [(PI + self.arctan2(v.y, v.x)) * INV_2PI, v.z] def __call__(self, dg: 'geo.DifferentialGeometry') -> [FLOAT]: s, t = self.__cylinder(dg.p) # compute texture coordinate # differentials # using forward differencing delta = .1 sx, tx = self.__cylinder(dg.p + delta * dg.dpdx) dsdx = (sx - s) / delta dtdx = (tx - t) / delta if dtdx > .5: dtdx = 1. - dtdx elif dtdx < -.5: dtdx = -(dtdx + 1.) sy, ty = self.__cylinder(dg.p + delta * dg.dpdy) dsdy = (sy - s) / delta dtdy = (ty - s) / delta if dtdy > .5: dtdy = 1. - dtdy elif dtdy < -.5: dtdy = -(dtdy + 1.) return [s, t, dsdx, dtdx, dsdy, dtdy] class PlannarMapping2D(TextureMapping2D): def __init__(self, vs: 'geo.Vector', vt: 'geo.Vector', ds: FLOAT = 0., dt: FLOAT = 0.): self.vs = vs self.vt = vt self.ds = ds self.dt = dt def __call__(self, dg: 'geo.DifferentialGeometry') -> [FLOAT]: v = dg.p - geo.Point(0., 0., 0.) return [self.ds + v.dot(self.vs), self.dt + v.dot(self.vt), dg.dpdx.dot(self.vs), dg.dpdx.dot(self.vt), dg.dpdy.dot(self.vs), dg.dpdy.dot(self.vt)] class TextureMapping3D(object, metaclass=ABCMeta): """ TextureMapping3D Class Base class for 3D texture mappings """ def __repr__(self): return "{}".format(self.__class__) @abstractmethod def __call__(self, dg: 'geo.DifferentialGeometry') -> ['geo.Point', 'geo.Vector', 'geo.Vector']: """ Mapping 3D point to texture Returns a list: [p, dpdx, dpdy] where p is the mapped point, dpdx, dpdy are mapped derivatives. 
""" raise NotImplementedError('src.core.texture.{}.map(): abstract method ' 'called'.format(self.__class__)) class IdentityMapping3D(TextureMapping3D): def __init__(self, w2t: 'trans.Transform'): self.w2t = w2t def __call__(self, dg: 'geo.DifferentialGeometry') -> ['geo.Point', 'geo.Vector', 'geo.Vector']: return [self.w2t(dg.p), self.w2t(dg.dpdx), self.w2t(dg.dpdy)]
25.620879
97
0.633712
645
4,663
4.417054
0.206202
0.027027
0.027027
0.031941
0.538084
0.525448
0.509302
0.484731
0.45981
0.45981
0
0.021415
0.208878
4,663
181
98
25.762431
0.750881
0.149689
0
0.519231
0
0
0.135484
0.056774
0
0
0
0
0
1
0.153846
false
0
0.048077
0.028846
0.355769
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e794ec0a4b84110bf4cfe442ef69d62eb12a32aa
5,780
py
Python
lib/task/worker.py
mrmansano/sublime-ycmd
fece62f0ce4e9cbf96ed8ba07f5cecb24b21427e
[ "MIT" ]
12
2018-01-24T20:58:10.000Z
2021-12-21T15:02:10.000Z
lib/task/worker.py
mrmansano/sublime-ycmd
fece62f0ce4e9cbf96ed8ba07f5cecb24b21427e
[ "MIT" ]
4
2018-01-13T14:39:45.000Z
2020-11-25T00:05:27.000Z
lib/task/worker.py
mrmansano/sublime-ycmd
fece62f0ce4e9cbf96ed8ba07f5cecb24b21427e
[ "MIT" ]
2
2018-10-23T17:13:44.000Z
2019-05-12T04:10:17.000Z
#!/usr/bin/env python3 ''' lib/task/worker.py Task pool worker thread. Meant for internal use only. Runs a thread to process items in a task pool. The class itself does not inherit from `threading.Thread` directly. Instead, a helper function is exposed for use in a thread target. Users should not need to access this. Task pools will generate and manage workers by itself. ''' import queue import logging import threading # for type annotations only: from ..task.task import Task # noqa: F401 logger = logging.getLogger('sublime-ycmd.' + __name__) def spawn_worker(pool, name=None): if name is not None and not isinstance(name, str): raise TypeError('name must be a str: %r' % (name)) worker_instance = Worker(pool) def run_worker(): try: worker_instance.run() except Exception as e: logger.error( 'unhandled exception during worker thread loop: %r', e, ) # explicitly delete references since worker is about to exit: worker_instance.clear() worker_thread = threading.Thread(target=run_worker, name=name) worker_thread.daemon = True worker_instance.handle = worker_thread logger.debug('created worker: %r', worker_instance) worker_thread.start() return worker_instance class Worker(object): ''' Worker thread abstraction class. Defines a worker unit that runs an infinite loop, processing tasks from a task pool. This class is compatible with (i.e. can inherit from) `threading.Thread`. It is deliberately left as a plain object though. This class does not use locking. It is expected that the owners will. ''' def __init__(self, pool, handle=None): self._pool = pool # type: Pool self._handle = None # type: threading.Thread self.handle = handle def run(self): ''' Starts the worker thread, running an infinite loop waiting for jobs. This should be run on an alternate thread, as it will block. 
''' task_queue = self.pool.queue # type: queue.Queue logger.debug('task worker starting: %r', self) while True: # explicitly specify `block`, in case the queue has custom settings task = task_queue.get(block=True) # type: Task if task is not None: # NOTE : Tasks should catch their own exceptions. try: task.run() except Exception as e: logger.error( 'exception during task execution: %r', e, exc_info=True, ) # explicitly clear reference to task del task continue # task is none, so check if a shutdown is requested if not self.pool.running: logger.debug('task pool has stopped running, exit loop') # pass on the signal to any other worker threads try: task_queue.put(None, block=True, timeout=1) except queue.Full: logger.warning( 'task queue is full, ' 'cannot signal other workers to exit' ) break logger.warning('unhandled task on worker thread: %r', task) logger.debug('task worker exiting: %r', self) def join(self, timeout=None): ''' Joins the underlying thread for this worker. If `timeout` is omitted, this will block indefinitely until the thread has exited. If `timeout` is provided, it should be the maximum number of seconds to wait until returning. If the thread is still alive after the timeout expires, a `TimeoutError` will be raised. ''' handle = self._handle # type: threading.Thread if not handle: # worker is already dead return handle.join(timeout=timeout) if handle.is_alive(): timeout_desc = ( ' after %rs' % (timeout) if timeout is not None else '' ) raise TimeoutError('thread did not exit%s' % (timeout_desc)) def clear(self): ''' Clears the locally held reference to the task pool and thread handle. ''' self._pool = None self._handle = None @property def handle(self): ''' Retrieves the currently held thread handle, if any. ''' return self._handle @handle.setter def handle(self, handle): ''' Sets the thread handle for the worker. 
''' if handle is None: # clear state self._handle = None return if handle is not None and not isinstance(handle, threading.Thread): raise TypeError( 'thread handle must be a threading.Thread: %r' % (handle) ) self._handle = handle @property def pool(self): ''' Retrieves the parent task pool. ''' return self._pool @property def name(self): ''' Retrieves the name from the thread handle, if available. ''' if self._handle: return self._handle.name return None @name.setter def name(self, name): ''' Sets the name of the held thread handle. ''' if self._handle: self._handle.name = name # else, meh, whatever def __repr__(self): return '%s(%r)' % ('Worker', { 'handle': self.handle, 'name': self.name, 'pool': self.pool, })
28.613861
79
0.569896
686
5,780
4.736152
0.297376
0.040012
0.024623
0.016005
0.035088
0.035088
0.019698
0
0
0
0
0.001335
0.351903
5,780
201
80
28.756219
0.86599
0.329412
0
0.171717
0
0
0.116039
0
0
0
0
0
0
1
0.121212
false
0
0.040404
0.010101
0.252525
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7960cdb285d6dd5c4c71de1f2286b8e8d673adb
547
py
Python
skynet-agent/plugins/extension/loadavg.py
skynetera/skynet
24a50f2a2eb95b777802934a2b66f162bf4b2d53
[ "Apache-2.0" ]
3
2016-09-12T08:54:46.000Z
2016-09-18T07:54:10.000Z
skynet-agent/plugins/extension/loadavg.py
skynetera/skynet
24a50f2a2eb95b777802934a2b66f162bf4b2d53
[ "Apache-2.0" ]
null
null
null
skynet-agent/plugins/extension/loadavg.py
skynetera/skynet
24a50f2a2eb95b777802934a2b66f162bf4b2d53
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # coding: utf-8 __author__ = 'whoami' """ @version: 1.0 @author: whoami @license: Apache Licence 2.0 @contact: skutil@gmail.com @site: http://www.itweet.cn @software: PyCharm Community Edition @file: loadavg.py @time: 2015-11-28 下午1:51 """ def monitor(frist_invoke=1): f = open('/proc/loadavg') load = f.read().split() f.close() value_dic = { 'load_1min':load[0], 'load_5min':load[1], 'load_15min':load[2], } return value_dic if __name__ == '__main__': print monitor()
17.645161
36
0.6234
78
547
4.141026
0.75641
0.074303
0
0
0
0
0
0
0
0
0
0.055046
0.202925
547
30
37
18.233333
0.68578
0.062157
0
0
0
0
0.177419
0
0
0
0
0
0
0
null
null
0
0
null
null
0.076923
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
e79e7ea88c51825b1ee8f322458db326ca24c050
1,129
py
Python
datasets/augmentations.py
sithu31296/re_identification
28c2cf32c6c8c9d79330e1419a7156fe10d8ac95
[ "MIT" ]
null
null
null
datasets/augmentations.py
sithu31296/re_identification
28c2cf32c6c8c9d79330e1419a7156fe10d8ac95
[ "MIT" ]
null
null
null
datasets/augmentations.py
sithu31296/re_identification
28c2cf32c6c8c9d79330e1419a7156fe10d8ac95
[ "MIT" ]
null
null
null
from torchvision import transforms def get_transforms(cfg): train_transform = transforms.Compose([ transforms.Resize(cfg['TRAIN']['IMG_SIZE']), transforms.ColorJitter(cfg['TRAIN']['AUG']['B_P'], cfg['TRAIN']['AUG']['C_P'], cfg['TRAIN']['AUG']['S_P'], cfg['TRAIN']['AUG']['H_P']), transforms.RandomGrayscale(cfg['TRAIN']['AUG']['G_P']), # Local Grayscale Transformation https://arxiv.org/abs/2101.08533 transforms.Pad(10), transforms.RandomCrop(cfg['TRAIN']['IMG_SIZE']), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), transforms.RandomErasing(cfg['TRAIN']['AUG']['RE_P']), # Random Erasing Data Augmentation https://arxiv.org/pdf/1708.04896 ]) test_transform = transforms.Compose([ transforms.Resize(cfg['EVAL']['IMG_SIZE']), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) ]) return train_transform, test_transform
51.318182
169
0.59876
131
1,129
5.053435
0.427481
0.108761
0.099698
0.054381
0.391239
0.320242
0.18429
0.18429
0.18429
0.18429
0
0.076923
0.217006
1,129
22
170
51.318182
0.671946
0.11426
0
0.210526
0
0
0.10521
0
0
0
0
0
0
1
0.052632
false
0
0.052632
0
0.157895
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e79f9590300eb19ba909c0b5322d32b4049e33b7
2,056
py
Python
dora/tests/test_xp.py
kingjr/dora
f70fab1620c6cad6fc094be15ab22994bd08dd01
[ "MIT" ]
98
2021-09-21T14:27:21.000Z
2022-03-18T17:46:45.000Z
dora/tests/test_xp.py
kingjr/dora
f70fab1620c6cad6fc094be15ab22994bd08dd01
[ "MIT" ]
6
2021-09-22T13:29:48.000Z
2022-03-14T16:45:30.000Z
dora/tests/test_xp.py
kingjr/dora
f70fab1620c6cad6fc094be15ab22994bd08dd01
[ "MIT" ]
5
2021-09-21T12:42:01.000Z
2022-01-27T17:22:17.000Z
# Copyright (c) Facebook, Inc. and its affiliates. # All rights reserved. # # This source code is licensed under the license found in the # LICENSE file in the root directory of this source tree. from pathlib import Path import torch import pytest from ..conf import DoraConfig from ..xp import XP class _Cfg: pass def get_dora(tmpdir: Path): return DoraConfig(dir=Path(tmpdir), exclude=["a"]) def test_dora_dir_abs(): dora = get_dora('outputs') assert dora.dir.is_absolute() dora.dir = 'plop' assert dora.dir.is_absolute() def test_sig(tmpdir): tmpdir = Path(str(tmpdir)) dora = get_dora(tmpdir) xp = XP(dora=dora, cfg=_Cfg(), argv=[], delta=[("a", 5), ("b", 12)]) assert xp.sig is not None xp2 = XP(dora=dora, cfg=_Cfg(), argv=[], delta=[("a", 12), ("b", 12)]) assert xp.sig == xp2.sig xp3 = XP(dora=dora, cfg=_Cfg(), argv=[], delta=[("a", 12), ("b", 24)]) assert xp.sig != xp3.sig def test_properties(tmpdir): tmpdir = Path(str(tmpdir)) dora = get_dora(tmpdir) xp = XP(dora=dora, cfg=_Cfg(), argv=[], delta=[("a", 5), ("b", 12)]) xp.folder.relative_to(tmpdir) xp.submitit.relative_to(tmpdir) xp.rendezvous_file.relative_to(tmpdir) xp.history.relative_to(tmpdir) xp._argv_cache.relative_to(tmpdir) def test_link(tmpdir): tmpdir = Path(str(tmpdir)) dora = get_dora(tmpdir) xp = XP(dora=dora, cfg=_Cfg(), argv=[], delta=[("a", 5), ("b", 12)]) xp.folder.mkdir(parents=True) xp.link.push_metrics({"plop": 42}) xp = XP(dora=dora, cfg=_Cfg(), argv=[], delta=[("a", 5), ("b", 12)]) assert xp.link.history == [] xp.link.load() assert xp.link.history == [{"plop": 42}] val = [{"plok": 43, "out": Path("plop"), "mat": torch.zeros(5)}] xp.link.update_history(val) assert xp.link.history == [{"plok": 43, "out": "plop", "mat": [0.] * 5}] with pytest.raises(ValueError): xp.link.update_history({"plop": 42}) with pytest.raises(ValueError): xp.link.update_history([{"plop": object()}])
26.701299
76
0.618191
305
2,056
4.062295
0.298361
0.038741
0.048426
0.062954
0.40678
0.356739
0.356739
0.356739
0.356739
0.277643
0
0.022236
0.190661
2,056
76
77
27.052632
0.722356
0.089981
0
0.285714
0
0
0.036461
0
0
0
0
0
0.163265
1
0.102041
false
0.020408
0.102041
0.020408
0.244898
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7a08c3c2295908e12651250df01fbce4284f4fe
3,227
py
Python
crawler/scheduler.py
dukov/simplecrawler
eb6c44b9e3c43cab34ee4a2b39c7283136ce9352
[ "Apache-2.0" ]
null
null
null
crawler/scheduler.py
dukov/simplecrawler
eb6c44b9e3c43cab34ee4a2b39c7283136ce9352
[ "Apache-2.0" ]
null
null
null
crawler/scheduler.py
dukov/simplecrawler
eb6c44b9e3c43cab34ee4a2b39c7283136ce9352
[ "Apache-2.0" ]
null
null
null
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json import plyvel import time import sys from crawler import base_service from crawler import util from oslo_config import cfg from crawler.config import COMMON_OPTIONS from crawler.logger import logger class Scheduler(base_service.BaseService): def __init__(self, conf): super(Scheduler, self).__init__(conf.gearman) self.conf = conf logger.info("Creating cache DB") try: self.cache = plyvel.DB('/tmp/cache', create_if_missing=True) except: logger.error("Failed to setup cache DB") raise def _update_cache(self): logger.info("Loading cache to DB") data = self.rpc_client.rpc_call('rpc_get_crawled', '').result with self.cache.write_batch() as wb: try: for k,v in json.loads(data).items(): wb.set(k,v) except: logger.error("Failed to load cache") raise def rpc_schedule(self, gm_w, job): logger.info("Got rquest %s" % job.data) self._update_cache() task = json.loads(job.data) payload = {} vid1 = util.vid2int(task.get('vid1', '7-Sl8uXOb5k')) vid2 = util.vid2int(task.get('vid2', '7-Sl8uXOb5t')) start_vid = min(vid1, vid2) stop_vid = max(vid1,vid2) batch = task.get('batch', 10) for int_vid in util.vid_gen(start_vid, stop_vid): vid_str = util.int2vid(int_vid) if not self.cache.get(vid_str): if len(payload) < batch: # TODO do re-factoring here. 
Move URL to consts url = "https://www.youtube.com/watch?v=%s" % vid_str payload[vid_str] = url else: logger.debug("Sending job %s" % payload) self.rpc_client.rpc_call('rpc_processURLs', json.dumps(payload), wait_until_complete=False, background=True) payload = {} # NOTE Send what's left if len(payload) > 0: logger.debug("Sending job %s" % payload) self.rpc_client.rpc_call('rpc_processURLs', json.dumps(payload), wait_until_complete=False, background=True) return "" def main(): CONF = cfg.CONF CONF.register_cli_opts(COMMON_OPTIONS) CONF(sys.argv[1:]) sched = Scheduler(CONF) sched.run() if __name__ == "__main__": sys.exit(main())
34.329787
75
0.570499
397
3,227
4.493703
0.450882
0.033632
0.021861
0.026906
0.169843
0.141816
0.128924
0.128924
0.128924
0.128924
0
0.011644
0.334676
3,227
93
76
34.698925
0.819283
0.182832
0
0.268657
0
0
0.096454
0
0
0
0
0.010753
0
1
0.059701
false
0
0.134328
0
0.223881
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
e7a093439e75bee53c35c1b7acda09ddedee4b02
1,897
py
Python
pyqtgraph_extensions/examples/demo_axis_alignment.py
draustin/pyqtgraph_extensions
9f53756bdab8c61749c3596d40024971d77c893a
[ "MIT" ]
10
2019-05-22T17:10:07.000Z
2022-02-09T08:14:28.000Z
pyqtgraph_extensions/examples/demo_axis_alignment.py
draustin/pyqtgraph_extensions
9f53756bdab8c61749c3596d40024971d77c893a
[ "MIT" ]
3
2020-06-09T22:36:43.000Z
2021-07-19T21:31:57.000Z
pyqtgraph_extensions/examples/demo_axis_alignment.py
draustin/pyqtgraph_extensions
9f53756bdab8c61749c3596d40024971d77c893a
[ "MIT" ]
null
null
null
"""Show how multiple AlignedPlotItems have aligned AxisItems by using their parent's graphics layout.""" import sys from textwrap import wrap import pyqtgraph as pg from pyqtgraph.Qt import QtCore, QtGui import pyqtgraph_extensions as pgx if QtGui.QApplication.instance() is None: qapp=QtGui.QApplication(sys.argv) else: # Presumably running in a GUI with event QApplication already created qapp=None long_label = 'multiline<br>axis label<br>(e.g. complex units)' # To hold AlignedPlotItems, need to use the extended version of GraphicsLayout/GraphicsLayoutWidget. glwx=pgx.GraphicsLayoutWidget() glwx.addLabel('<br>'.join(wrap("<em>pyqtgraph PlotItem</em> - since the label of the left axis of the first PlotItem is" "two lines, the left axes of the PlotItems aren't aligned.", 40))) glwx.addHorizontalSpacer(100) glwx.addLabel('<br>'.join(wrap("<em>pyqtgraph_extensions AlignedPlotItem</em> - because they use their parent's layout" "grid for their components (axes, title, ViewBox) these components are aligned.", 40))) glwx.nextRow() # Make left column showing pyqtgraph PlotItems. glo=pg.GraphicsLayout() glwx.addItem(glo) plt1=glo.addPlot(labels={'left':long_label, 'bottom': 'x'},title='PlotItem 1') glo.nextRow() plt2=glo.addPlot(labels={'left':'y (units)','bottom':'x'},title='PlotItem 2') glwx.nextColumn() # Make right column showing pyqtgraph_extensions AlignedPlotItems. glx=pgx.GraphicsLayout() glwx.addItem(glx) aplt1=glx.addAlignedPlot(labels={'left':long_label, 'bottom': 'x'},title='AlignedPlotItem 1') # aplt1 takes up 4 rows (title, top axis, view box, and bottom axis). glx.nextRows() # equivalent to 4 calls glx.nextRow() aplt2=glx.addAlignedPlot(labels={'left':'y (units)','bottom':'x'},title='AlignedPlotItem 2') glwx.resize(800,400) glwx.show() if qapp is not None: sys.exit(qapp.exec_())
43.113636
120
0.732736
265
1,897
5.218868
0.483019
0.028923
0.034707
0.02603
0.133044
0.133044
0.133044
0
0
0
0
0.014697
0.139167
1,897
43
121
44.116279
0.832211
0.253558
0
0
0
0.03125
0.340925
0.017082
0
0
0
0
0
1
0
false
0
0.15625
0
0.15625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7a16d2263756e23b3fd10f42e7ec1fd27b2c6d0
377
py
Python
generators/cfg.py
Zarux/Steam-friend-graph-V2
1670a7e49904812b4f86b0d2590a25d77c05c6ee
[ "MIT" ]
null
null
null
generators/cfg.py
Zarux/Steam-friend-graph-V2
1670a7e49904812b4f86b0d2590a25d77c05c6ee
[ "MIT" ]
6
2021-03-09T09:46:03.000Z
2022-02-26T12:28:11.000Z
generators/cfg.py
Zarux/Steam-friend-graph-V2
1670a7e49904812b4f86b0d2590a25d77c05c6ee
[ "MIT" ]
null
null
null
import json class Config: api_key = None db_host = None db_pw = None db_user = None db_table_profile = None db_table_friends = None db = None def __init__(self): with open('../cfg/cfg.json', 'r') as f: config = json.loads(f.read()) for name, value in config.items(): setattr(self, name, value)
20.944444
47
0.559682
52
377
3.826923
0.596154
0.180905
0.110553
0
0
0
0
0
0
0
0
0
0.334218
377
17
48
22.176471
0.792829
0
0
0
0
0
0.04244
0
0
0
0
0
0
1
0.071429
false
0
0.071429
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7a293afddd2a5b54862d63c4e761125a74a8ae5
73
py
Python
Python/Reals/river_in_brazil/matrix_rotation.py
Mr-Perfection/coding_practice
41df85292a151eef3266b01545124aeb4e831286
[ "Unlicense" ]
null
null
null
Python/Reals/river_in_brazil/matrix_rotation.py
Mr-Perfection/coding_practice
41df85292a151eef3266b01545124aeb4e831286
[ "Unlicense" ]
null
null
null
Python/Reals/river_in_brazil/matrix_rotation.py
Mr-Perfection/coding_practice
41df85292a151eef3266b01545124aeb4e831286
[ "Unlicense" ]
null
null
null
""" Rotate a matrix 90 degrees a certain direction based on the flag """
18.25
64
0.726027
12
73
4.416667
0.916667
0
0
0
0
0
0
0
0
0
0
0.033898
0.191781
73
3
65
24.333333
0.864407
0.876712
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
e7a2a74212459c5c2d287344ef757f817b24bd0e
58
py
Python
vector3d/__init__.py
jayman39tx/vector3d
6ea8a759b5102b65aea33173da1ed78cde9a67b0
[ "MIT" ]
2
2020-11-17T05:16:16.000Z
2021-11-07T19:48:30.000Z
vector3d/__init__.py
jayman39tx/vector3d
6ea8a759b5102b65aea33173da1ed78cde9a67b0
[ "MIT" ]
null
null
null
vector3d/__init__.py
jayman39tx/vector3d
6ea8a759b5102b65aea33173da1ed78cde9a67b0
[ "MIT" ]
3
2020-08-10T19:32:01.000Z
2021-04-09T12:58:50.000Z
name = "Vector3D" from . import vector from . import point
19.333333
20
0.741379
8
58
5.375
0.75
0.465116
0
0
0
0
0
0
0
0
0
0.020833
0.172414
58
3
21
19.333333
0.875
0
0
0
0
0
0.135593
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
e7a3a8f3dbc80823b966705e8802def2ed85da11
102
py
Python
Python_OCR_JE/venv/Lib/site-packages/numpy/typing/tests/data/pass/simple_py3.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
1
2022-01-08T12:30:44.000Z
2022-01-08T12:30:44.000Z
Python_OCR_JE/venv/Lib/site-packages/numpy/typing/tests/data/pass/simple_py3.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
Python_OCR_JE/venv/Lib/site-packages/numpy/typing/tests/data/pass/simple_py3.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
1
2021-04-26T22:41:56.000Z
2021-04-26T22:41:56.000Z
import numpy as np array = np.array([1, 2]) # The @ operator is not in python 2 array @ array
14.571429
36
0.627451
18
102
3.555556
0.722222
0.21875
0
0
0
0
0
0
0
0
0
0.040541
0.27451
102
6
37
17
0.824324
0.323529
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
e7a5af266194fe6108245ee31d8f2544211c0ebb
2,518
py
Python
__init__.py
devsysenv/usr
989ad012b790b6d20f8dae90f0724109cd27b761
[ "MIT" ]
null
null
null
__init__.py
devsysenv/usr
989ad012b790b6d20f8dae90f0724109cd27b761
[ "MIT" ]
null
null
null
__init__.py
devsysenv/usr
989ad012b790b6d20f8dae90f0724109cd27b761
[ "MIT" ]
1
2022-03-09T12:24:37.000Z
2022-03-09T12:24:37.000Z
#!/usr/bin/env python """User package initialization module.""" import os import sys import dselib def _context(varfile=None): """returns the DSE context object for this script.""" try: myself = __file__ except NameError: myself = sys.argv[0] return dselib.DSEContext(myself, varfile) _me = _context('user') def init(): """usr.init() method init() is an optional function that, if present, will be invoked at the start of DSE initialization. """ print('DSE_USER: pre-initialization for', dselib.GetDSEUser()) dseDefaults = dselib.GetDSESystemDefaults() # Check user.def[DSE_HOST], sys.platform, os.name and the root for environment variables to init sectionsToLoad = [dselib.GetDSEHost(), sys.platform, os.name, None] # If DSE_PROJECT is defined, put that first in the section list since PROJECT has highest precedence if dselib.GetDSEProject(): sectionsToLoad.insert(0, dselib.GetDSEProject()) for section in sectionsToLoad: # Load all the variables in 'section' to the environment (unless they are already there) dseDefaults.userenv.config.loadSectionToEnv(section) dselib.AddElementToSearchPath(_me.whereami(), 1, 1) if os.name == 'nt': print('DSE_USER: initializing for Windows OS ...') os.system(f"doskey.exe /macrofile={os.path.join(_me.whereami(), 'p', 'cmd', 'doskey.txt')}") if dselib.GetDSEProject(): projmacros = _me.whereami() / 'projects' / f"{dselib.GetDSEProject()}-doskey.txt" if projmacros.is_file(): _me.logger.info(f"Adding project macros from {projmacros}") os.system(f"doskey.exe /macrofile={projmacros}") # on posix systems, add symbolic links to the Python scripts w/o the .py if os.name == 'posix': _me.logger.debug(f"Adding symbolic links to Python scripts in {_me.whereami()}") # dselib.pysymlinkdir(_me.whereami(), None, ['grep.py', 'which.py']) def post(): """usr.post() method post() is an optional function that, if present, will be invoked at the end of DSE initialization. 
This is not normally used, but here in case you need to override something that init() did.""" print(f'{dselib.GetDSEUser()}: User init post routine.') if __name__ == "__main__": print('DSE User Package.') print('This module is not directly callable.') sys.exit(1)
32.282051
105
0.645751
317
2,518
5.044164
0.44795
0.03127
0.022514
0.025016
0.097561
0.097561
0.06379
0.06379
0.06379
0.06379
0
0.002604
0.23749
2,518
77
106
32.701299
0.830208
0.339158
0
0.057143
0
0.028571
0.289714
0.078125
0
0
0
0
0
1
0.085714
false
0
0.085714
0
0.2
0.142857
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7a69e21b01e752968007e111248ff8bffeec7d2
3,833
py
Python
migrations/utils/sql_enum_migrator.py
apaniukov/workbench
2f2653ecfd0143d2d53e33ad84379f13443fdfaa
[ "Apache-2.0" ]
23
2022-03-17T12:24:09.000Z
2022-03-31T09:13:30.000Z
migrations/utils/sql_enum_migrator.py
apaniukov/workbench
2f2653ecfd0143d2d53e33ad84379f13443fdfaa
[ "Apache-2.0" ]
18
2022-03-21T08:17:44.000Z
2022-03-30T12:42:30.000Z
migrations/utils/sql_enum_migrator.py
apaniukov/workbench
2f2653ecfd0143d2d53e33ad84379f13443fdfaa
[ "Apache-2.0" ]
16
2022-03-17T12:24:14.000Z
2022-03-31T12:15:12.000Z
""" OpenVINO DL Workbench Helping Class to migrate enums in database Copyright (c) 2020 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from typing import Tuple, Set import sqlalchemy.engine from alembic import op from sqlalchemy import Enum class SQLEnumMigrator: from_enum: Enum new_enum: Enum to_enum: Enum enable_enum_check = False table_column_pairs: Tuple[Tuple[str, str]] def __init__(self, # ((table_name, column_name)) table_column_pairs: Tuple[Tuple[str, str], ...], enum_name: str, from_types: Tuple[str, ...], to_types: Tuple[str, ...]): self.table_column_pairs = table_column_pairs self.from_enum = Enum(*from_types, name=enum_name) self.to_enum = Enum(*{*to_types, *from_types}, name=f'tmp_{enum_name}') self.new_enum = Enum(*to_types, name=enum_name) def upgrade(self): self._migrate(self.from_enum, self.to_enum, self.new_enum) def downgrade(self): self._migrate(self.new_enum, self.to_enum, self.from_enum) def _migrate(self, from_enum: Enum, tmp_enum: Enum, to_enum: Enum): if self.enable_enum_check: self._check_enum_values(op.get_bind()) # create a temporary "tmp_..." 
type tmp_enum.create(op.get_bind(), checkfirst=False) # assign columns to a tmp type for [table_name, column_name] in self.table_column_pairs: op.execute(f'ALTER TABLE {table_name} ALTER COLUMN {column_name} TYPE {tmp_enum.name}' f' USING {column_name}::text::{tmp_enum.name}') # drop old enum from_enum.drop(op.get_bind(), checkfirst=False) # Create new enum to_enum.create(op.get_bind(), checkfirst=False) # assign columns to a new enum for [table_name, column_name] in self.table_column_pairs: op.execute(f'ALTER TABLE {table_name} ALTER COLUMN {column_name} TYPE {to_enum.name}' f' USING {column_name}::text::{to_enum.name}') # drop tmp enum tmp_enum.drop(op.get_bind(), checkfirst=False) @staticmethod def _get_enum_values(enum_name: str, connection: sqlalchemy.engine.Connection) -> Set[str]: enum_values = next(iter(connection.execute(f'SELECT enum_range(NULL::{enum_name})'))) enum_values = enum_values[0].strip('{}').split(',') return set(enum_values) def _check_enum_values(self, connection: sqlalchemy.engine.Connection) -> None: db_enum_values = self._get_enum_values(self.from_enum.name, connection) migration_enum_values = set(self.from_enum.enums) missing_db_enum_values = db_enum_values - migration_enum_values if missing_db_enum_values: raise ValueError( f'Old enum tuple for {self.from_enum.name} has missing values: {missing_db_enum_values}. ' f'Please add them to the migration.' ) excess_migration_enum_values = migration_enum_values - db_enum_values if excess_migration_enum_values: raise ValueError( f'Old enum tuple for {self.from_enum.name} has excess values: {excess_migration_enum_values}. ' f'Please remove them from the migration.' )
38.717172
111
0.66658
526
3,833
4.614068
0.262357
0.082406
0.039555
0.031314
0.310672
0.247219
0.247219
0.171405
0.171405
0.171405
0
0.003091
0.240282
3,833
98
112
39.112245
0.830357
0.206366
0
0.071429
0
0
0.176393
0.06565
0
0
0
0
0
1
0.107143
false
0
0.071429
0
0.303571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7a75ab051e3341f6c0083af645a52d0c765ee59
4,542
py
Python
post_process/box_sphere_SMC_NSC.py
Milad-Rakhsha/Friction-Contact
59d17b231c5dd764c741c941e5443141d43ec7e8
[ "BSD-3-Clause" ]
null
null
null
post_process/box_sphere_SMC_NSC.py
Milad-Rakhsha/Friction-Contact
59d17b231c5dd764c741c941e5443141d43ec7e8
[ "BSD-3-Clause" ]
null
null
null
post_process/box_sphere_SMC_NSC.py
Milad-Rakhsha/Friction-Contact
59d17b231c5dd764c741c941e5443141d43ec7e8
[ "BSD-3-Clause" ]
null
null
null
import csv,os,sys import subprocess,re import matplotlib #matplotlib.use('TkAgg') matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab! import matplotlib.pyplot as plt import numpy as np import pandas as pd from decimal import Decimal from collections import OrderedDict from matplotlib.ticker import FormatStrFormatter matplotlib.rcParams['mathtext.fontset'] = 'stix' matplotlib.rcParams['font.family'] = 'STIXGeneral' matplotlib.rcParams.update({'font.size': 18}) plt.rc('xtick',labelsize=24) plt.rc('ytick',labelsize=24) MARKERSIZE=5 path_DEM = str(sys.argv[1]) path_DVI_chrono = str(sys.argv[2]) path_DVI_python = str(sys.argv[3]) def prepare(path, prefix, suffix, prefix2, suffix2, pad): cmd=r'ls %s/%s* | wc -l '%(path,prefix) print(cmd) process = subprocess.check_output(cmd, shell=True) frame=int(process) dt=0.5/frame OUT=np.zeros((frame,17)) for i in range(1,frame): if (pad): i_frame="%03d"%i else: i_frame=i FILE=path+"/"+ prefix +str(i_frame)+ suffix table = pd.read_csv(FILE) N_SMC=table["bi"].shape[0] OUT[i,0]=i*dt for contact in range(0,N_SMC): c_i=table["bi"][contact] c_j=table["bj"][contact] #make sure i=0 and j!=0 if(c_j==0): c_j=c_i c_i=0 OUT[i,c_j*2-1]=table['Fn'][contact] OUT[i,c_j*2]=table['Ft'][contact] FILE2=path+"/"+ prefix2 +str(i_frame)+ suffix2 table2 = pd.read_csv(FILE2) OUT[i,N_SMC*2+1+0]=table2['x'][0] OUT[i,N_SMC*2+1+1]=table2['y'][0] OUT[i,N_SMC*2+1+2]=table2['z'][0] OUT[i,N_SMC*2+1+3]=table2['vx'][0] OUT[i,N_SMC*2+1+4]=table2['vy'][0] OUT[i,N_SMC*2+1+5]=table2['vz'][0] return OUT def make_highlights(ax): textstr = r'$F_t>0$' props = dict(boxstyle='round', facecolor='wheat', alpha=0.8) ax.text(0.8, 0.5, textstr, transform=ax.transAxes, fontsize=18, verticalalignment='top', bbox=props) ax.axvspan(0.25, 0.5, facecolor='blue', alpha=0.1) def plot(label,DVI_F): fig = plt.figure(num=None,figsize=(10, 10), facecolor='w', edgecolor='k') ax1 = fig.add_subplot(211) ax2 = fig.add_subplot(212) # ax3 = fig.add_subplot(313) 
fig.subplots_adjust(hspace=2.0) color=['ro','bo','b','r-o','k', 'ko'] for i in range(1,6): ax1.plot(DVI_F[:,0],DVI_F[:,i*2-1], color[i], linewidth=1, markersize=MARKERSIZE,label='contact %d'%i ) ax2.plot(DVI_F[:,0],DVI_F[:,i*2], color[i], linewidth=1, markersize=MARKERSIZE,label='contact %d'%i ) # ax3.plot(DVI_F[:,0],DVI_F[:,-6], # 'r', # linewidth=1, markersize=MARKERSIZE,label='x' # ) # ax3.plot(DVI_F[:,0],DVI_F[:,-6], # 'b', # linewidth=1, markersize=MARKERSIZE,label='u_x' # ) ax2.legend(fancybox=True, shadow=True, ncol=1) ax1.legend(fancybox=True, shadow=True, ncol=1) ax1.set_xlim(0, 0.5) ax1.set_ylim(0, 3) ax2.set_xlim(0, 0.5) # ax3.set_xlim(0, 0.5) ax2.set_ylim(0, 1.5) ax1.legend(loc='center left') ax2.legend(loc='center left') make_highlights(ax1) make_highlights(ax2) # make_highlights(ax3) ax2.set_xlabel(r'$t(s)$',fontsize=22,) ax1.set_ylabel(r'$F_n(N)$', fontsize=22,) ax2.set_ylabel(r'$F_t(N)$', fontsize=22,) # ax3.set_ylabel(r'$x(m)$',fontsize=22,) plt.tight_layout(pad=1.50) # ax3.yaxis.set_major_formatter(FormatStrFormatter('%.0e')) # ax2.set_ylabel(r'$F$') plt.savefig('DVI_DEM'+label+'.png') #plt.show() # DEM_F=prepare(path_DEM,'F_SCM_', '.txt', False) #DVI_F_chrono=prepare(path_DVI_chrono,'F_NSC_', '.txt', 'data_', '.csv', False) DVI_F_python=prepare(path_DVI_python,'stepforce','.csv', 'stepdata_sphere_', '.csv', True) #plot("_chrono",DVI_F_chrono) plot("_python",DVI_F_python)
34.671756
90
0.528181
636
4,542
3.636792
0.31761
0.022482
0.015132
0.020752
0.201902
0.130134
0.125811
0.102032
0.043234
0.043234
0
0.051911
0.308675
4,542
130
91
34.938462
0.684713
0.147732
0
0.044444
0
0
0.067273
0
0
0
0
0
0
1
0.033333
false
0
0.1
0
0.144444
0.011111
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7a7a001dc96d78bf26c54dfa2c539b4c6caf7e2
19,323
py
Python
codes/sensitivity_analysis_withRealParameters.py
atsoukevin93/tumorgrowth
96bda28a6ae6455c53c3b573c05746b6d4f2e802
[ "CC0-1.0" ]
null
null
null
codes/sensitivity_analysis_withRealParameters.py
atsoukevin93/tumorgrowth
96bda28a6ae6455c53c3b573c05746b6d4f2e802
[ "CC0-1.0" ]
null
null
null
codes/sensitivity_analysis_withRealParameters.py
atsoukevin93/tumorgrowth
96bda28a6ae6455c53c3b573c05746b6d4f2e802
[ "CC0-1.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- from fipy import * from numpy import * import scipy.sparse as sp import scipy.sparse.linalg as la import parameterFunctions.immuneResponse as delt import parameterFunctions.sigmaF as sigmaF import inspect from collections import OrderedDict import pygpc from pygpc.sobol_saltelli import get_sobol_indices_saltelli from pygpc.sobol_saltelli import saltelli_sampling # This function is a modified version of the original pygpc function def modified_get_sobol_indices(gpc_object, coeffs, n_samples=1e4): """ Calculate the available sobol indices from the gPC coefficients (standard) or by sampling. In case of sampling, the Sobol indices are calculated up to second order. sobol, sobol_idx, sobol_idx_bool = SGPC.get_sobol_indices(coeffs, algorithm="standard", n_samples=1e4) Parameters ---------- coeffs: ndarray of float [n_basis x n_out] GPC coefficients algorithm : str, optional, default: "standard" Algorithm to determine the Sobol indices - "standard": Sobol indices are determined from the gPC coefficients - "sampling": Sobol indices are determined from sampling using Saltelli's Sobol sampling sequence [1, 2, 3] n_samples : int, optional, default: 1e4 Number of samples to determine Sobol indices by sampling. The efficient number of samples increases to n_samples * (2*dim + 2) in Saltelli's Sobol sampling sequence. Returns ------- sobol: ndarray of float [n_sobol x n_out] Normalized Sobol indices w.r.t. total variance sobol_idx: list of ndarray of int [n_sobol x (n_sobol_included)] Parameter combinations in rows of sobol. sobol_idx_bool: ndarray of bool [n_sobol x dim] Boolean mask which contains unique multi indices. Notes ----- .. [1] Sobol, I. M. (2001). "Global sensitivity indices for nonlinear mathematical models and their Monte Carlo estimates." Mathematics and Computers in Simulation, 55(1-3):271-280, doi:10.1016/S0378-4754(00)00270-6. .. [2] Saltelli, A. (2002). 
"Making best use of model evaluations to compute sensitivity indices." Computer Physics Communications, 145(2):280-297, doi:10.1016/S0010-4655(02)00280-1. .. [3] Saltelli, A., P. Annoni, I. Azzini, F. Campolongo, M. Ratto, and S. Tarantola (2010). "Variance based sensitivity analysis of model output. Design and estimator for the total sensitivity index." Computer Physics Communications, 181(2):259-270, doi:10.1016/j.cpc.2009.09.018. """ if gpc_object.p_matrix is None: dim = gpc_object.problem.dim else: dim = gpc_object.problem_original.dim if gpc_object.problem_original is None: problem_original = gpc_object.problem else: problem_original = gpc_object.problem_original # generate uniform distributed sobol sequence (parameter space [0, 1]) coords_norm_01 = saltelli_sampling(n_samples=n_samples, dim=dim, calc_second_order=True) coords_norm = zeros(coords_norm_01.shape) # transform to respective input pdfs using inverse cdfs for i_key, key in enumerate(problem_original.parameters_random.keys()): coords_norm[:, i_key] = problem_original.parameters_random[key].icdf(coords_norm_01[:, i_key]) # run model evaluations res = gpc_object.get_approximation(coeffs=coeffs, x=coords_norm) # determine sobol indices sobol, sobol_idx, sobol_idx_bool = get_sobol_indices_saltelli(y=exp(res), dim=dim, calc_second_order=True, num_resamples=100, conf_level=0.95) # sort idx = flip(argsort(sobol[:, 0], axis=0)) sobol = sobol[idx, :] sobol_idx = [sobol_idx[i] for i in idx] sobol_idx_bool = sobol_idx_bool[idx, :] return sobol, sobol_idx, sobol_idx_bool # @wrap_non_picklable_objects class MyModel(pygpc.AbstractModel): def __init__(self): self.fname = inspect.getfile(inspect.currentframe()) # pass # def __reduce__(self): # return (MyModel, (self.fname,)) def validate(self): pass def dichotomy(self, mu_a, mu_b, eps, mesK, Q, aDiffE, aConVE, Id, gF, t_s, E, delta_tild): while mu_b - mu_a > eps: mu = (mu_b + mu_a) / 2. 
bE = (mesK * Q * mu).T E_new = la.spsolve((- aDiffE.tocsc() - mu * aConVE + Id.multiply(mesK * gF * t_s)), bE) E.setValue(E_new) E.updateOld() F_mu_m = numerix.sum(mesK * delta_tild.value * E.value) - 1. bE = (mesK * Q * mu_a).T E_new = la.spsolve((- aDiffE.tocsc() - mu_a * aConVE + Id.multiply(mesK * gF * t_s)), bE) E.setValue(E_new) E.updateOld() F_mu_a = numerix.sum(mesK * delta_tild.value * E.value) - 1. # print ('{0} x {1}'.format(F_mu_m, F_mu_a)) if F_mu_m * F_mu_a <= 0: mu_b = mu else: mu_a = mu return mu def not_converge(self, x, y): if (abs(x - y) / y) <= 1e-6: return True else: return False def simulate(self, process_id=None, matlab_engine=None): step = 0 res = asarray([]) print(self.p['a'].flatten()) print("PARAM/LA TAILLE: {0}/{1}".format(self.p["a"], self.p["a"].shape)) print('HIHIHIHIHIHI ', float64(self.p["a"])) for idx in range(self.p["a"].shape[0]): # print(self.p["a"]*self.p['D']*self.p['sF']) t_s = 1. / self.p["a"][idx] x_s = sqrt(self.p["D"][idx] * t_s) c_s = 1. / (t_s * (x_s ** 2) * self.p["delta"][idx]) # nu = (D/a)*sqrt(D/a)*(S*d*delta)/(chi*sF) # mu1_s = mu1_tild mu1_s = c_s / (self.p["R"][idx] * t_s) # mu0_s = a * mu1_s / V phi_s = (x_s ** 2) / (mu1_s * t_s * self.p["chi"][idx]) # Q = S*mu1_s*t_s/c_s Q = 1. # print(self.p["sF"] ) U = (self.p["sF"][idx] * x_s ** 2) / (self.p["K"][idx] * phi_s) # print('VOICI LA VALEUR DU PARAMETRE: {0}'.format(self.p["sF"][0])) radius = 1. / x_s cellSize = radius/10. 
mesh = Gmsh2D(''' cellSize = %(cellSize)g; radius = %(radius)g; Point(1) = {0, 0, 0, cellSize}; Point(2) = {-radius, 0, 0, cellSize}; Point(3) = {0, radius, 0, cellSize}; Point(4) = {radius, 0, 0, cellSize}; Point(5) = {0, -radius, 0, cellSize}; Circle(6) = {2, 1, 3}; Circle(7) = {3, 1, 4}; Circle(8) = {4, 1, 5}; Circle(9) = {5, 1, 2}; Line Loop(10) = {6, 7, 8, 9}; Plane Surface(11) = {10}; ''' % locals()) # print('je suis ici') x = mesh.cellCenters xt, yt = mesh.cellCenters nVol = mesh.numberOfCells nFaces = mesh.numberOfFaces intF = mesh.interiorFaceIDs extF = arange(0, nFaces, 1)[array(mesh.exteriorFaces)] intFacesCells = mesh.faceCellIDs[:, intF] extFacesCells = mesh.faceCellIDs[:, extF] TKL = mesh._calcFaceAreas() / mesh._calcFaceToCellDistAndVec()[0].sum(axis=0) mes_edge = mesh._calcFaceAreas() mesK = mesh.cellVolumes # ------------------------------------------ The Chemical Potential ------------------------------ aDiffP = zeros((nVol, nVol)) aDiffP = sp.csc_matrix(aDiffP) aDiffP = aDiffP + sp.coo_matrix((-TKL[intF], (intFacesCells[0], intFacesCells[0])), shape=(nVol, nVol)) aDiffP = aDiffP + sp.coo_matrix((TKL[intF], (intFacesCells[0], intFacesCells[1])), shape=(nVol, nVol)) aDiffP = aDiffP + sp.coo_matrix((TKL[intF], (intFacesCells[1], intFacesCells[0])), shape=(nVol, nVol)) aDiffP = aDiffP + sp.coo_matrix((-TKL[intF], (intFacesCells[1], intFacesCells[1])), shape=(nVol, nVol)) # -----------------------------------Neumann Boundary condition------------------------------------------ aDiffP = aDiffP + sp.coo_matrix((0. * TKL[extF], (extFacesCells[0], extFacesCells[0])), shape=(nVol, nVol)) e = ones((1, nVol)) EaDiffP = sp.csc_matrix(concatenate((concatenate((aDiffP.T.todense(), (mesK * e).T), axis=1), array([append((mesK * e).T, 0.)])), axis=0)) # -----------------------------------Dirichlet Boundary condition------------------------------------------ test = CellVariable(mesh=mesh, value=0.) 
phi = CellVariable(name="$\phi(t,x,y)$", mesh=mesh, value=0.0, hasOld=1) # sF = sigmaF.SigmaF2D(params.sF, xt, yt, Rs=0.05) sF = sigmaF.SigmaF2D(1. / x_s, xt, yt, Rs=0.05 / (x_s ** 2)) F = sF extendedF = append(mesK * U * F, 0.) phi_new = la.spsolve(EaDiffP, extendedF) phi.setValue(phi_new[0:nVol]) phi.updateOld() # ------------------------------------------ The Chemoattractant ------------------------------ aDiffE = zeros((nVol, nVol)) aDiffE = sp.csc_matrix(aDiffE) aDiffE = aDiffE + sp.coo_matrix((-TKL[intF], (intFacesCells[0], intFacesCells[0])), shape=(nVol, nVol)) aDiffE = aDiffE + sp.coo_matrix((TKL[intF], (intFacesCells[0], intFacesCells[1])), shape=(nVol, nVol)) aDiffE = aDiffE + sp.coo_matrix((TKL[intF], (intFacesCells[1], intFacesCells[0])), shape=(nVol, nVol)) aDiffE = aDiffE + sp.coo_matrix((-TKL[intF], (intFacesCells[1], intFacesCells[1])), shape=(nVol, nVol)) # -----------------------------------Dirichlet Boundary condition------------------------------------------ aDiffE = aDiffE + sp.coo_matrix((-TKL[extF], (extFacesCells[0], extFacesCells[0])), shape=(nVol, nVol)) aConVE = zeros((nVol, nVol)) aConVE = sp.csc_matrix(aConVE) dPhi_int = numerix.dot(phi.faceGrad.value, mesh.faceNormals)[intF] aConVE = aConVE + sp.coo_matrix((mes_edge[intF] * plus(dPhi_int), (intFacesCells[0], intFacesCells[0])), shape=(nVol, nVol)) aConVE = aConVE + sp.coo_matrix((-mes_edge[intF] * minus(dPhi_int), (intFacesCells[0], intFacesCells[1])), shape=(nVol, nVol)) aConVE = aConVE + sp.coo_matrix((-mes_edge[intF] * plus(dPhi_int), (intFacesCells[1], intFacesCells[0])), shape=(nVol, nVol)) aConVE = aConVE + sp.coo_matrix((mes_edge[intF] * minus(dPhi_int), (intFacesCells[1], intFacesCells[1])), shape=(nVol, nVol)) dPhi_ext = numerix.dot(phi.faceGrad.value, mesh.faceNormals)[extF] aConVE = aConVE + sp.coo_matrix((mes_edge[extF] * plus(dPhi_ext), (extFacesCells[0], extFacesCells[0])), shape=(nVol, nVol)) Id = sp.spdiags(numerix.ones(nVol), [0], nVol, nVol) # 
---------------Variables and parameters for the Immune Cells Displacement equation--------- # E = CellVariable(name="$E(t,x,y)$", mesh=mesh, value=0.53235e6/c_s, hasOld=1) E = CellVariable(name="$E(t,x,y)$", mesh=mesh, value=0., hasOld=1) delta_tild = CellVariable(name="$\delta_t(x,y)$", mesh=mesh, value=0.) delta_tild.setValue(delt.GaussianImmuneResponse2D(1. / x_s, xt, yt, Ra=0.02 / x_s ** 2)) gF = self.p["gF"][idx] Id = sp.spdiags(numerix.ones(nVol), [0], nVol, nVol) # ---------------------------------------------- Dichotomie Method -------------------------------------------- mu_a = 0. mu_b = 1. F_mu_m = 0. F_mu_a = 0. eps = 1e-10 mu = self.dichotomy(mu_a, mu_b, eps, mesK, Q, aDiffE, aConVE, Id, gF, t_s, E, delta_tild) while self.not_converge(mu, mu_b): # print(mu, mu_b) mu_b = (mu_a + mu_b) / 2. mu = self.dichotomy(mu_a, mu_b, eps, mesK, Q, aDiffE, aConVE, Id, gF, t_s, E, delta_tild) print('Step -- > {0}'.format(step)) step = step + 1 res = append(res, [mu*mu1_s*1e-9]) print('mu:{0}'.format(mu)) # res = np.asarray([mu*mu1_s*1e-9]) # res[:, newaxis] res = log(res[:, newaxis]) return res def norm_n(V, dx, n): if n == 0: c_max = max(abs(V)) yield c_max else: norme = sum(abs(dx*V)**n) yield norme**(1./n) def mo(x): return numerix.L2norm(x) def plus(z): return 0.5*(z+abs(z)) def minus(z): return 0.5*(-z+abs(z)) def alphan(n): if n == 0: return 1 return 2 * alphan(n - 1.) / ((2. ** n) - 1.) 
def toss(deb, fin): return random.uniform(deb, fin) # --------------- Sensitivity Analysis--------------------------- # Create the coffee cup model # model = un.Model(run=evaluate_mu_un, labels=["tumor volume($mm^3$)"]) model = MyModel() parameters = OrderedDict() parameters["a"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[0.1, 0.5]) # parameters["a"] = pygpc.Norm(pdf_shape=[0.2, 0.09]) parameters["D"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[8.64e-5, 1e-3]) # parameters["D"] = [8.64e-5] parameters["delta"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[1., 60.]) # parameters["R"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[7.573e-8, 1.231e-6]) # parameters["R"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[6.456e-8, 1.520e-6])#IC1 # parameters["R"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[5.5e-7, 1.036e-6])#IC3_99 parameters["R"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[6.11e-7, 9.74e-7])#IC4_95 # parameters["R"] = pygpc.Norm(pdf_shape=[7.923174114490609e-07, 7.945822739100839e-15]) parameters["chi"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[86.4, 86.4e5]) # parameters["chi"] = [86.4] parameters["sF"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[5e-17, 0.625e-16]) # parameters["sF"] = [5e-17] parameters["K"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[1e-2, 1.]) # parameters["K"] = [1e-2] # parameters["gF"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[2e-2, 1.]) parameters["gF"] = pygpc.Beta(pdf_shape=[1, 1], pdf_limits=[2e-2, 1.]) interval = 'IC4_hetero' # parameters["chi"] = 86.4 # parameters["sF"] = 5e-17 # parameters["K"] = 1e-2 # parameters["gF"] = 2e-2 problem = pygpc.Problem(model=model, parameters=parameters) # basis = pygpc.Basis() # basis.init_basis_sgpc(problem=problem, # order=[5, 5, 5], # order_max=15, # order_max_norm=1, # interaction_order=3) # basis.plot_basis(dims=[0, 1, 2]) # fn_results = 'Sensitivity_data/PCE_data'.format(interval) save_session_format = ".hdf5" # ---------------------------------- Personnalized Options ------------------------------ 
options = dict() options["method"] = "reg" # options["method"] = "quad" options["solver"] = "Moore-Penrose" # options["solver"] = "OMP" options["settings"] = None options["order"] = [5] * problem.dim # The univariate polynomials expansion orders options["order_max"] = 5 options["order_max_norm"] = 0.7 # options["order_max_norm"] = 1. options["interaction_order"] = 2 # options["interaction_order"] = 2 options["matrix_ratio"] = 2 # options["error_type"] = "nrmsd" options["error_type"] = "loocv" options["n_samples_validation"] = 1e3 options["n_cpu"] = 2 options["fn_results"] = fn_results options["save_session_format"] = save_session_format options["gradient_enhanced"] = False options["gradient_calculation"] = "FD_1st2nd" options["gradient_calculation_options"] = {"dx": 0.001, "distance_weight": -2} options["backend"] = "omp" # options["grid"] = pygpc.Random # options["grid"] = pygpc.LHS(parameters_random=problem.parameters_random, seed=1) options["grid_options"] = None n_coeffs = pygpc.get_num_coeffs_sparse(order_dim_max=options["order"], order_glob_max=options["order_max"], order_inter_max=options["interaction_order"], dim=problem.dim) # problem.dim grid = pygpc.LHS(parameters_random=problem.parameters_random, n_grid=options["matrix_ratio"] * n_coeffs, seed=1) # grid = pygpc.Random(parameters_random=problem.parameters_random, # n_grid=options["matrix_ratio"] * n_coeffs, # seed=1) # print('taille grille', grid.n_grid) # options["fn_results"] = 'Sensitivity_data/PCE_data_{0}'.format(grid.n_grid) algorithm = pygpc.Static(problem=problem, options=options, grid=grid) # # gpc, coeffs, results = algorithm.run() session = pygpc.Session(algorithm=algorithm) # # session.grid = algorithm.grid # # # # # # # # # run gPC session session, coeffs, results = session.run() dataPath = 'Sensitivity_data/Pygpc_Sobol_idx.txt'.format(interval) outF = open(dataPath, "w") mean = session.gpc[0].get_mean(coeffs) # outF.write('Mean: '+mean) print("Mean: {}".format(mean)) std = 
session.gpc[0].get_std(coeffs) # outF.write('Std: '+std) print("Std: {}".format(std)) sobol, sobol_idx, sobol_idx_bool = modified_get_sobol_indices(session.gpc[0], coeffs, n_samples=10) n_idx = len(sobol_idx) for i in range(n_idx): print("Parameter x{}: {}".format(sobol_idx[i]+1, sobol[i][0])) str_tmp = '' for k in range(problem.dim): if len(sobol_idx[i])==k+1: if k+1==1: str_tmp = str(sobol_idx[i][k] + 1) elif k+1>1: for m in range(k): str_tmp = str_tmp + str(sobol_idx[i][m] + 1)+' ' str_tmp = str_tmp + str(sobol_idx[i][k] + 1) outF.write(str_tmp +','+str(sobol[i][0])) outF.write('\n') print(sobol_idx_bool) outF.close() pygpc.validate_gpc_plot(session=session, coeffs=coeffs, random_vars=["a", "delta"], n_grid=[25, 25], output_idx=0, fn_out=session.fn_results+'plot', folder="gpc_vs_original_plot", n_cpu=options["n_cpu"]) # Validate gPC approximation vs original model function using Monte Carlo simulation nrmsd = pygpc.validate_gpc_mc(session=session, coeffs=coeffs, n_samples=1e3, fn_out=session.fn_results+'mc', n_cpu=options["n_cpu"])
40.006211
123
0.549915
2,462
19,323
4.165719
0.198619
0.017941
0.016088
0.019891
0.331416
0.276716
0.23986
0.217239
0.17931
0.156299
0
0.038517
0.278476
19,323
482
124
40.089212
0.697102
0.28391
0
0.091255
0
0
0.100088
0.00655
0
0
0
0
0
1
0.045627
false
0.003802
0.041825
0.015209
0.13308
0.034221
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7a8eb29f59a7169a8670d3bd2d5361afff30ad8
1,509
py
Python
setup.py
StreetHawkInc/behave-http
236f62faaa7448c55d13767c21bfd2711b11456c
[ "BSD-2-Clause" ]
null
null
null
setup.py
StreetHawkInc/behave-http
236f62faaa7448c55d13767c21bfd2711b11456c
[ "BSD-2-Clause" ]
null
null
null
setup.py
StreetHawkInc/behave-http
236f62faaa7448c55d13767c21bfd2711b11456c
[ "BSD-2-Clause" ]
null
null
null
import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme_file: long_description = readme_file.read() setup_requires = ['wheel'] install_requires = [ 'behave>=1.2.4', 'Jinja2>=2.5', 'jpath>=1.1', 'ensure>=0.1.6', 'requests>=2.0.0', 'six', ] setup( name='behave-http', version='0.1.1', packages=['behave_http', 'behave_http.steps'], setup_requires=setup_requires, install_requires=install_requires, description="Behave HTTP steps", long_description=long_description, url='https://github.com/mikek/behave-http', author='Mykhailo Kolesnyk', author_email='mike@openbunker.org', license='BSD 2-Clause', classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Development Status :: 4 - Beta', 'Natural Language :: English', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Testing', ], )
30.18
71
0.609013
163
1,509
5.527607
0.490798
0.147614
0.194229
0.115427
0.059933
0
0
0
0
0
0
0.025261
0.239231
1,509
49
72
30.795918
0.759582
0
0
0
0
0
0.51226
0
0
0
0
0
0
1
0
false
0
0.045455
0
0.045455
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
e7a94a2ff34c1076fb500ca11f0f92ab4291d4b0
2,314
py
Python
src/day11.py
birdman74/advent-of-code-2021
190cd4110ef3553258a26c8521bdf372c006a77c
[ "Apache-2.0" ]
null
null
null
src/day11.py
birdman74/advent-of-code-2021
190cd4110ef3553258a26c8521bdf372c006a77c
[ "Apache-2.0" ]
null
null
null
src/day11.py
birdman74/advent-of-code-2021
190cd4110ef3553258a26c8521bdf372c006a77c
[ "Apache-2.0" ]
null
null
null
import os from typing import List MODULE_DIR = os.path.dirname(os.path.realpath(__file__)) PROJECT_DIR = os.path.join(MODULE_DIR, "..") INPUT_SOURCE_DIR = os.path.join(PROJECT_DIR, "input") def get_data_lines(input_file_name): input_file = os.path.join(INPUT_SOURCE_DIR, input_file_name) print(f"Input file: {input_file}") data_file = open(input_file) return data_file.read().split("\n") def gain_energy(octopi: List[List[int]], x: int, y: int): energy = octopi[x][y] if energy == 10: return octopi[x][y] = new_energy = energy + 1 if new_energy == 10: [[gain_energy(octopi, new_x, new_y) for new_y in range(max(0, y - 1), min(y + 2, 10))] for new_x in range(max(0, x - 1), min(x + 2, 10))] def perform_steps(octopi: List[List[int]], steps: int): flasher_count = 0 for _ in range(steps): for x in range(len(octopi)): for y in range(len(octopi[0])): gain_energy(octopi, x, y) for x in range(len(octopi)): for y in range(len(octopi[0])): octopi[x][y] = octopi[x][y] % 10 iteration_flasher_count = sum(x.count(0) for x in octopi) flasher_count += iteration_flasher_count return flasher_count def do_the_thing(input_file_name): data_lines = get_data_lines(input_file_name) print(f"Number of data lines: {len(data_lines)}") octopi = [] for data_line in data_lines: octopi.append(list(map(int, data_line))) flasher_count = perform_steps(octopi, 100) print(f"Total flashers after 100 steps: {flasher_count}\n#################################\n") def do_the_thing_2(input_file_name): data_lines = get_data_lines(input_file_name) print(f"Number of data lines: {len(data_lines)}") octopi = [] for data_line in data_lines: octopi.append(list(map(int, data_line))) iteration = flasher_count = 0 while flasher_count < 100: iteration += 1 flasher_count = perform_steps(octopi, 1) print(f"First all flash event on iteration: {iteration}\n#################################\n") def day_11_do(input_file_name): do_the_thing(input_file_name) def day_11_do_2(input_file_name): do_the_thing_2(input_file_name) 
day_11_do("day11.txt") day_11_do_2("day11.txt")
27.223529
98
0.639585
362
2,314
3.81768
0.201657
0.091172
0.094067
0.04631
0.40521
0.348046
0.287265
0.261939
0.261939
0.261939
0
0.02682
0.210458
2,314
84
99
27.547619
0.729611
0
0
0.254545
0
0
0.128349
0.043215
0
0
0
0
0
1
0.127273
false
0
0.036364
0
0.218182
0.090909
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7a9abe833ad06b98a4c2551f4c70ddffb9249c0
30
py
Python
a.py
Ankitgarg453/resume_builder
8ccffb4191d270685da446f0b2808409f282cecd
[ "MIT" ]
null
null
null
a.py
Ankitgarg453/resume_builder
8ccffb4191d270685da446f0b2808409f282cecd
[ "MIT" ]
null
null
null
a.py
Ankitgarg453/resume_builder
8ccffb4191d270685da446f0b2808409f282cecd
[ "MIT" ]
null
null
null
print "ankit" print "Saimon"
7.5
14
0.7
4
30
5.25
0.75
0
0
0
0
0
0
0
0
0
0
0
0.166667
30
3
15
10
0.84
0
0
0
0
0
0.366667
0
0
0
0
0
0
0
null
null
0
0
null
null
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
5
e7a9c1267f6e65a3675addd7271b9a0a997345e4
3,430
py
Python
src/models/adversarial_validation.py
solery-git/Yandex_MIPT_user_identification
6861c14ebeeaef963b1d180080b87637a9578dd5
[ "FTL" ]
null
null
null
src/models/adversarial_validation.py
solery-git/Yandex_MIPT_user_identification
6861c14ebeeaef963b1d180080b87637a9578dd5
[ "FTL" ]
null
null
null
src/models/adversarial_validation.py
solery-git/Yandex_MIPT_user_identification
6861c14ebeeaef963b1d180080b87637a9578dd5
[ "FTL" ]
null
null
null
# -*- coding: utf-8 -*- import warnings warnings.filterwarnings('ignore') import pickle import yaml from pathlib import Path import numpy as np import pandas as pd from scipy.sparse import csr_matrix, hstack as sparse_hstack, vstack as sparse_vstack from sklearn.linear_model import LogisticRegression from sklearn.metrics import roc_auc_score import eli5 PROJECT_DIR = Path(__file__).resolve().parents[2] PATH_PROCESSED = 'data/processed' PATH_MODELS = 'models' PARAMS_ALL = yaml.safe_load(open(PROJECT_DIR.joinpath('params.yaml'))) SEED = PARAMS_ALL['meta']['seed'] def csr_hstack(arglist): return csr_matrix(sparse_hstack(arglist)) def csr_vstack(arglist): return csr_matrix(sparse_vstack(arglist)) def get_mask_top_n(arr, n): indices = np.argpartition(arr, -n)[-n:] result = np.zeros(len(arr), dtype=np.bool) result[indices] = True return result def show_feature_weights(estimator, data_feature_names, fe_feature_names): feature_names = data_feature_names + fe_feature_names # top 30 data features data_feature_names_set = set(data_feature_names) data_explanation = eli5.explain_weights(estimator, feature_names=feature_names, top=30, feature_filter=lambda name: name in data_feature_names_set) print(eli5.format_as_text(data_explanation, highlight_spaces=True)) # features from feature engineering fe_feature_names_set = set(fe_feature_names) fe_explanation = eli5.explain_weights(estimator, feature_names=feature_names, feature_filter=lambda name: name in fe_feature_names_set) print(eli5.format_as_text(fe_explanation, show=['targets'])) def main(): with open(PROJECT_DIR.joinpath(PATH_PROCESSED, 'X_train.pkl'), 'rb') as fin: X_train_sparse = pickle.load(fin) with open(PROJECT_DIR.joinpath(PATH_PROCESSED, 'X_test.pkl'), 'rb') as fin: X_test_sparse = pickle.load(fin) with open(PROJECT_DIR.joinpath(PATH_PROCESSED, 'y.pkl'), 'rb') as fin: target = pickle.load(fin) with open(PROJECT_DIR.joinpath(PATH_PROCESSED, 'data_feature_names.pkl'), 'rb') as fin: data_feature_names = pickle.load(fin) 
with open(PROJECT_DIR.joinpath(PATH_PROCESSED, 'fe_feature_names.pkl'), 'rb') as fin: fe_feature_names = pickle.load(fin) train_len = X_train_sparse.shape[0] test_len = X_test_sparse.shape[0] y = np.array([0] * train_len + [1] * test_len) X = csr_vstack([X_train_sparse, X_test_sparse]) logit = LogisticRegression(C=1, random_state=SEED, solver='liblinear') logit.fit(X, y) predictions_proba = logit.predict_proba(X)[:, 1] logit_score = roc_auc_score(y, predictions_proba) print('Score:', logit_score) print('Number of train examples:', X_train_sparse.shape[0]) adv_valid_mask = get_mask_top_n(predictions_proba[:train_len], 50000) validation_examples = X_train_sparse[adv_valid_mask] print('Number of adversarial validation examples:', validation_examples.shape[0]) validation_targets = target[adv_valid_mask] class_0, class_1 = list(np.bincount(validation_targets)) print(f'Class 0: {class_0}, class 1: {class_1}') show_feature_weights(logit, data_feature_names, fe_feature_names) with open(PROJECT_DIR.joinpath(PATH_PROCESSED, 'adv_valid_mask.pkl'), 'wb') as fout: pickle.dump(adv_valid_mask, fout, protocol=2) if __name__ == '__main__': main()
37.692308
151
0.732653
494
3,430
4.767206
0.259109
0.107006
0.054352
0.065393
0.356263
0.292144
0.210616
0.194055
0.146072
0.093418
0
0.010742
0.158601
3,430
91
152
37.692308
0.805267
0.022157
0
0
0
0
0.08296
0.006565
0
0
0
0
0
1
0.076923
false
0
0.153846
0.030769
0.276923
0.092308
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7aa8cac8e96224781686bd1388c518adf6e852b
3,803
py
Python
U3/src/e6.py
craciunescu/algo
81e91fa72d8896b459900510ee270d25de15f6fd
[ "MIT" ]
null
null
null
U3/src/e6.py
craciunescu/algo
81e91fa72d8896b459900510ee270d25de15f6fd
[ "MIT" ]
null
null
null
U3/src/e6.py
craciunescu/algo
81e91fa72d8896b459900510ee270d25de15f6fd
[ "MIT" ]
null
null
null
""" @author: David E. Craciunescu @date: 2020/04/30 (yyyy/mm/dd) 6. After passing through the Tile Room and stealing the Craddle of Life, Indiana Croft faces a new challenge before leaving the Cursed Temple! The Temple itself is located on a bridge under which there is a deep darkness. Fortunately, this place also appears in the diary. The bridge crosses the so-called Valley of Shadows, which begins with a descent slope (not necessarily constant), so that after reaching the lowest point he must start to climb to the other end of the bridge. Just at the bottom of the valley, one can find a river, but the diary does not give any specific information about its whereabouts, so Indiana Croft only knows the river can be found "at the bottom of the valley" and nothing else. On the slopes, there are sharp rocks. If Indiana Croft had time, he could easily find the point where to get off the bridge to get exactly to the river, given that he has a laser pointer that he can measure heights with and tells him how many meters there are from the bridge to the ground at a certain point. Unfortunately, the priests of the Temple have already found him and they are chasing him down. If he doesn't jump off the bridge they'll catch him before he gets off the bridge. Our adventurer must quickly find the position of the river to get off and flee safely. In order to save our hero, design the algorithm that Indiana Croft should use to find the minimum point of the valley under the conditions mentioned above. The algorithm must be efficient, for he cannot afford to waste a single second: at least in the best case it must have a logarithmic order. You can consider the time that it takes for Indiana Croft to travel along the bridge as negligible and that the estimate of the point of the river where to drop off can have an approximation error of ε meters (ε is a given constant). Explain the reasoning behind the provided solution and analyze its efficiency and complexity. 
--- The problem basically forces us to use Gradient Descent. Since we have to optimize at each move and cannot afford to waste time on the absolute optimal of answers, we look at what happens to the slope of the function created by the heights of the bridge. Even though recursive, the complexity of this algorithm is clearly O(logn), since at each iteration, no matter what happens, the dataset is divided in half. I also took extra efford to make the implementation space efficient as well. This means that no extra storage elements or auxiliary temporal variables are used when calculating the gradient descent, only a dataset, a start point and an endpoint. Last thing. I ignored the "the estimate of the point of the river where to drop off can have an approximation error of ε meters" and chose to go directly with the lowest possible error there could be. """ from typing import List from numbers import Number def grad_descent(data: List[Number]) -> Number: """ Simple algorithm for gradient descent """ start = 0 end = len(data) - 1 def grad_descent_aux(data, start, end): """ grad_descent auxiliary function """ # Basic cases. is_tuple = (end - start) <= 2 is_increasing = data[start] < data[end] if is_tuple: return start if is_increasing else end # Not-so-basic cases. mid_idx = (start + end) // 2 is_descending = data[mid_idx - 1] >= data[mid_idx] if is_descending: return grad_descent_aux(data, mid_idx, end) return grad_descent_aux(data, start, mid_idx) return grad_descent_aux(data, start, end)
43.215909
80
0.720221
615
3,803
4.419512
0.429268
0.022075
0.020603
0.02649
0.11663
0.1078
0.059603
0.059603
0.059603
0.059603
0
0.004843
0.239811
3,803
87
81
43.712644
0.935317
0.777018
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.142857
0
0.428571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7ab93c674e1c7b0c7b235ef1ec15c5f79897159
184
py
Python
ptb/ledger/views.py
vkpdeveloper/ShaktiDeep-Traders-Bill-Management-Project
566a64268fabf256e80bee680d1fbde2c6c0787d
[ "MIT" ]
2
2019-11-26T11:57:56.000Z
2020-06-17T05:16:47.000Z
ptb/ledger/views.py
vkpdeveloper/ShaktiDeep-Traders-Bill-Management-Project
566a64268fabf256e80bee680d1fbde2c6c0787d
[ "MIT" ]
null
null
null
ptb/ledger/views.py
vkpdeveloper/ShaktiDeep-Traders-Bill-Management-Project
566a64268fabf256e80bee680d1fbde2c6c0787d
[ "MIT" ]
null
null
null
from django.shortcuts import render  # duplicate import removed
from django.http import HttpResponse


def index(request):
    """Render the ledger landing page."""
    return render(request, 'ledger/index.html')
26.285714
47
0.777174
24
184
5.958333
0.541667
0.20979
0.265734
0.34965
0.503497
0.503497
0.503497
0
0
0
0
0
0.152174
184
7
47
26.285714
0.916667
0
0
0.4
0
0
0.094972
0
0
0
0
0
0
1
0.2
false
0
0.6
0.2
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
8
e7ad41b8d693a843781c9b1f0c9248a6d00b3029
492
py
Python
olamundo.py/exercicios_refeitos/ex024.py
gabrielviticov/exercicios-python
4068cb0029513f8ab8bd12fa3a9055f37b4040d4
[ "MIT" ]
null
null
null
olamundo.py/exercicios_refeitos/ex024.py
gabrielviticov/exercicios-python
4068cb0029513f8ab8bd12fa3a9055f37b4040d4
[ "MIT" ]
null
null
null
olamundo.py/exercicios_refeitos/ex024.py
gabrielviticov/exercicios-python
4068cb0029513f8ab8bd12fa3a9055f37b4040d4
[ "MIT" ]
null
null
null
"""ex024: Read the name of a city and report whether or not it starts
with the word 'SANTO'."""
from colorise import set_color, reset_color

# ANSI escape sequences used to highlight the answer.
cores = {
    'limpa': '\033[m',
    'white': '\033[1;97m',
}

set_color(fg='cyan')
nome_cidade = str(input('Informe o nome de uma cidade: ')).strip().title()
separador = nome_cidade.split()
print('O nome da cidade começa com Santo? ', end='')
reset_color()
comeca_com_santo = separador[0] == 'Santo'
print(f"{cores['white']}{comeca_com_santo}{cores['limpa']}")
28.941176
100
0.658537
77
492
4.12987
0.61039
0.062893
0.044025
0.062893
0.100629
0
0
0
0
0
0
0.031175
0.152439
492
16
101
30.75
0.731415
0.203252
0
0
0
0
0.302083
0
0
0
0
0
0
1
0
false
0
0.090909
0
0.090909
0.181818
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7ae68bcd377bc20a3f17e6c42c1085c7778f122
3,154
py
Python
preprocessing/preprocessTrainingfiles_generateFasttextinput.py
gerbentimmerman/community-based-abuse-detection
8ac03ccf1e594c2588b243e45ac535a0977bbcb0
[ "MIT" ]
null
null
null
preprocessing/preprocessTrainingfiles_generateFasttextinput.py
gerbentimmerman/community-based-abuse-detection
8ac03ccf1e594c2588b243e45ac535a0977bbcb0
[ "MIT" ]
null
null
null
preprocessing/preprocessTrainingfiles_generateFasttextinput.py
gerbentimmerman/community-based-abuse-detection
8ac03ccf1e594c2588b243e45ac535a0977bbcb0
[ "MIT" ]
null
null
null
#!/usr/bin/python3
"""Preprocess Reddit comment dumps and build fastText training input."""
import pandas as pd
import csv
import re
import emoji
import redditcleaner
from nltk.tokenize import TweetTokenizer


def filterText(text, tokenizer):
    """Normalise a raw comment and return the whitespace-joined tokens.

    Masks URLs, numbers and usernames, spells out emojis, and strips
    Reddit-specific markup.
    """
    # Filter URLs
    text = re.sub(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', "<URL>", text)
    # Filter numbers
    text = re.sub(r'\b\d+\b', "<NUMBER>", text)
    # Filter usernames.  Bug fix: the original pattern r'\b@\w\b' required a
    # word character *before* the '@' and matched only single-character
    # handles, so it effectively never masked a username.
    text = re.sub(r'@\w+', "@USER", text)
    # Convert emojis to text
    text = emoji.demojize(text)
    text = redditcleaner.clean(text)
    # Tokenize text
    tokens = tokenizer.tokenize(text)
    return " ".join(tokens)


def createFasttextEmbeddingInput(dataset):
    """ Create file for training the fasttext embeddings """
    readfile = "../data/reddit/preprocessed_reddit_{}_large.csv".format(dataset)
    df = pd.read_csv(readfile, header=0, engine='python')
    outputfile = "{}_train_fasttext_large.en".format(dataset)
    # Append mode so repeated runs accumulate into one training file.
    with open(outputfile, "a+", encoding='utf-8') as f:
        comments = df.iloc[:, 1].values
        for comment in comments:
            f.write(str(comment) + "\n")


def preprocessComments(dataset):
    """Clean the raw CSVs for `dataset` ('non_abusive' or 'abusive') and
    append (subreddit, text, labels) rows to the preprocessed CSV."""
    tokenizer = TweetTokenizer(strip_handles=True, reduce_len=True)

    # Non-abusive locations
    years = ['2012', '2013', '2014', '2015', '2016', '2017']
    months = ['01', '04', '07', '10']
    # Choose whole text, 1 sentence or 2 sentences
    files = ['reddish_', 'reddish1sent_', 'reddish2sent_']

    if dataset == "non_abusive":
        csvfile = "../data/reddit/preprocessed_reddit_non_abusive.csv"
        fieldnames = ['subreddit', 'text', 'labels']
        with open(csvfile, "a+", encoding='utf-8') as f:
            writer = csv.DictWriter(f, fieldnames=fieldnames)
            writer.writeheader()
            for year in years:
                for month in months:
                    print(year, month)
                    file = "../data/reddit/non-abusive/{}/{}{}-{}.csv".format(year, files[0], year, month)
                    df = pd.read_csv(file, header=None)
                    # Drop empty rows
                    df.dropna(subset=[9], inplace=True)
                    # Assign label to non-abusive data
                    df['labels'] = "NOT"
                    # Clean message and add (subreddit, text) to csvfile
                    # Columns: 4 = subreddit, 9 = message text, 11 = label.
                    rows = df.iloc[:, [4, 9, 11]].values
                    for row in rows:
                        clean_comment = filterText(row[1], tokenizer)
                        row_dict = {'subreddit': row[0], 'text': clean_comment, 'labels': row[2]}
                        writer.writerow(row_dict)
    elif dataset == "abusive":
        input_file = "../data/reddit/abusive/reddish.csv"
        csvfile = "../data/reddit/preprocessed_reddit_abusive_large.csv"
        fieldnames = ['subreddit', 'text', 'labels']
        with open(csvfile, "a+", encoding='utf-8') as f:
            writer = csv.DictWriter(f, fieldnames=fieldnames)
            writer.writeheader()
            # read inputfile
            df = pd.read_csv(input_file, header=None)
            # Drop empty rows
            df.dropna(subset=[9], inplace=True)
            # Clean messages and add (message, labels) to csvfile
            rows = df.iloc[:, [4, 9, 10]].values
            # rows[1:] skips the first row (header row in the raw dump).
            for row in rows[1:]:
                clean_comment = filterText(row[1], tokenizer)
                row_dict = {'subreddit': row[0], 'text': clean_comment, 'labels': row[2]}
                writer.writerow(row_dict)


def main():
    dataset = "abusive"
    preprocessComments(dataset)
    #createFasttextEmbeddingInput(dataset)


if __name__ == '__main__':
    main()
27.666667
112
0.657895
418
3,154
4.868421
0.363636
0.02457
0.013268
0.014742
0.361671
0.298772
0.290909
0.27027
0.27027
0.27027
0
0.025229
0.170577
3,154
114
113
27.666667
0.752676
0.137603
0
0.25
0
0.015625
0.212829
0.121617
0
0
0
0
0
1
0.0625
false
0
0.09375
0
0.171875
0.015625
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7b099e6d68f75cea2e020cf38d8d5668c1b2d20
644
py
Python
chunked/models.py
grschafer/django-meetup-fileupload
e89e158e3d26a5ce41571f8d164e3f7aa0f1f079
[ "MIT" ]
null
null
null
chunked/models.py
grschafer/django-meetup-fileupload
e89e158e3d26a5ce41571f8d164e3f7aa0f1f079
[ "MIT" ]
null
null
null
chunked/models.py
grschafer/django-meetup-fileupload
e89e158e3d26a5ce41571f8d164e3f7aa0f1f079
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from django.db import models # Create your models here. class Upload(models.Model): file = models.FileField(null=False, blank=False) num_chunks = models.PositiveIntegerField(null=False, blank=False) filesize = models.PositiveIntegerField(null=False, blank=False) chunk_size = models.PositiveIntegerField(null=False, blank=False) # TODO: status, checksum class Chunk(models.Model): upload = models.ForeignKey(Upload, null=False, blank=False, related_name='chunks') index = models.PositiveIntegerField(null=False, blank=False) size = models.PositiveIntegerField(null=False, blank=False)
37.882353
86
0.745342
78
644
6.115385
0.410256
0.132075
0.205451
0.278826
0.48847
0.48847
0.205451
0
0
0
0
0.001795
0.135093
644
16
87
40.25
0.854578
0.107143
0
0
0
0
0.010508
0
0
0
0
0.0625
0
1
0
false
0
0.1
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
4
e7b0d46435cdec670279bd998320fd1810c2643c
918
py
Python
fp_demo/functional3.py
AegirAexx/python-sandbox
fa1f584f615c6ed04f80b9dd92d2b241248c9ebe
[ "Unlicense" ]
null
null
null
fp_demo/functional3.py
AegirAexx/python-sandbox
fa1f584f615c6ed04f80b9dd92d2b241248c9ebe
[ "Unlicense" ]
null
null
null
fp_demo/functional3.py
AegirAexx/python-sandbox
fa1f584f615c6ed04f80b9dd92d2b241248c9ebe
[ "Unlicense" ]
null
null
null
"""" Playing around with MAP higher order function and lambdas. """ from datetime import datetime from pprint import pprint from scientist import scientists def age(yob): """ Accepts year of birth and returns the persons age. """ return datetime.now().year - yob NAMES_AND_AGES = tuple( map(lambda x: {'name': x.name, 'age': age(x.born)}, scientists)) pprint(NAMES_AND_AGES) print('---------------------------') def ip_str_1(sci): """String interpolation using format_map() & vars() | Also see format().""" message = f'{sci.name} is {datetime.now().year - sci.born} years old.' return message def ip_str_2(sci): """String interpolation using format_map() & vars() | Also see format().""" data = '%s is %d years old' % (sci.name, (datetime.now().year - sci.born)) return data NAMES_AND_AGES2 = tuple( map(lambda x: ip_str_2(x), scientists)) pprint(NAMES_AND_AGES2)
24.810811
79
0.652505
131
918
4.450382
0.427481
0.054889
0.077187
0.051458
0.25729
0.181818
0.181818
0.181818
0.181818
0.181818
0
0.006623
0.17756
918
36
80
25.5
0.765563
0.27451
0
0
0
0
0.169518
0.041991
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.5
0.222222
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7b19bb8fade9b6e53704f91808df520b32c215d
1,217
py
Python
src/mqtt_bridge/app.py
dftossem/mqtt_ros_aws_iot
0e0ae8d30d25753c5a12d936d07ae94730f3eccd
[ "MIT" ]
5
2021-07-23T09:52:40.000Z
2021-09-22T21:11:53.000Z
src/mqtt_bridge/app.py
dftossem/mqtt_ros_aws_iot
0e0ae8d30d25753c5a12d936d07ae94730f3eccd
[ "MIT" ]
null
null
null
src/mqtt_bridge/app.py
dftossem/mqtt_ros_aws_iot
0e0ae8d30d25753c5a12d936d07ae94730f3eccd
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import absolute_import import inject import rospy import time import json from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient from .bridge import create_bridge from .util import lookup_object def mqtt_bridge_node(): # init node rospy.init_node('mqtt_bridge_node') # load parameters params = rospy.get_param('~', {}) bridge_params = params.get('bridge', []) # create mqtt client mqtt_client_factory_name = rospy.get_param( '~mqtt_client_factory', '.mqtt_client:createMqttClient') mqtt_client_factory = lookup_object(mqtt_client_factory_name) mqtt_client = mqtt_client_factory(params) # dependency injection config = create_config(mqtt_client) inject.configure(config) # configure bridges, one per factory bridges = [] for bridge_args in bridge_params: bridges.append(create_bridge(**bridge_args)) rospy.on_shutdown(mqtt_client.disconnect) # Connect and subscribe to AWS IoT mqtt_client.connect() rospy.spin() def create_config(mqtt_client): def config(binder): binder.bind(AWSIoTMQTTClient, mqtt_client) return config __all__ = ['mqtt_bridge_node']
24.836735
65
0.72309
149
1,217
5.583893
0.38255
0.15625
0.102163
0.048077
0.064904
0
0
0
0
0
0
0.001014
0.189811
1,217
49
66
24.836735
0.842799
0.127362
0
0
0
0
0.083412
0.027488
0
0
0
0
0
1
0.103448
false
0
0.275862
0
0.413793
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7b35b10fd121d0b29cd1d39fc4c75e4f568a1f4
25
py
Python
chapter2_ws/devel/lib/python3/dist-packages/mastering_ros_demo_pkg/srv/__init__.py
Rajat-Arora/ros_packt_book
a715485ea6e36d298bc6f6f306af0595d89e1174
[ "MIT" ]
null
null
null
chapter2_ws/devel/lib/python3/dist-packages/mastering_ros_demo_pkg/srv/__init__.py
Rajat-Arora/ros_packt_book
a715485ea6e36d298bc6f6f306af0595d89e1174
[ "MIT" ]
null
null
null
chapter2_ws/devel/lib/python3/dist-packages/mastering_ros_demo_pkg/srv/__init__.py
Rajat-Arora/ros_packt_book
a715485ea6e36d298bc6f6f306af0595d89e1174
[ "MIT" ]
null
null
null
from ._demo_srv import *
12.5
24
0.76
4
25
4.25
1
0
0
0
0
0
0
0
0
0
0
0
0.16
25
1
25
25
0.809524
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
e7b5ecbc66cbd464a70b67852e8b57e73a22b34a
422
py
Python
system-test/tweet/test_tweet.py
jaimebuelta/django-docker-template
f850626a3bc6ac7ccf791ca56b859a7b1d3d87a1
[ "MIT" ]
94
2017-07-30T21:33:46.000Z
2022-01-10T13:41:03.000Z
system-test/tweet/test_tweet.py
jaimebuelta/django-docker-template
f850626a3bc6ac7ccf791ca56b859a7b1d3d87a1
[ "MIT" ]
1
2019-02-01T13:45:42.000Z
2019-02-01T13:45:42.000Z
system-test/tweet/test_tweet.py
jaimebuelta/django-docker-template
f850626a3bc6ac7ccf791ca56b859a7b1d3d87a1
[ "MIT" ]
19
2017-07-31T12:03:12.000Z
2021-11-27T05:43:04.000Z
import os

import requests

# Fail fast with a clear message when the target host is not configured;
# otherwise `None + 'tweet/'` raises an opaque TypeError at import time.
HOSTPORT = os.environ.get('SYSTEM_TEST_HOSTPORT')
if HOSTPORT is None:
    raise RuntimeError('SYSTEM_TEST_HOSTPORT environment variable must be set')
TWEET_URL = HOSTPORT + 'tweet/'


def test_tweets():
    """Fetch the tweet list and verify every linked URL is reachable."""
    result = requests.get(TWEET_URL)
    assert result.status_code == 200
    tweets = result.json()
    assert len(tweets) == 2
    for tweet in tweets:
        # Get all the linked urls
        url = tweet['href']
        result = requests.get(url)
        assert result.status_code == 200
23.444444
49
0.651659
56
422
4.785714
0.482143
0.097015
0.126866
0.156716
0.208955
0.208955
0
0
0
0
0
0.021944
0.244076
422
17
50
24.823529
0.818182
0.054502
0
0.153846
0
0
0.075567
0
0
0
0
0
0.230769
1
0.076923
false
0
0.153846
0
0.230769
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7b5f75431381733711c32b614a4943fecd06b68
348
py
Python
src/PrintSubscriber.py
mansandersson/modbus-monitor
092c7f05c54a912a60d69f1461aeb54105d7b660
[ "MIT" ]
null
null
null
src/PrintSubscriber.py
mansandersson/modbus-monitor
092c7f05c54a912a60d69f1461aeb54105d7b660
[ "MIT" ]
null
null
null
src/PrintSubscriber.py
mansandersson/modbus-monitor
092c7f05c54a912a60d69f1461aeb54105d7b660
[ "MIT" ]
null
null
null
import Constants
from pubsub import pub


class PrintSubscriber:
    """Listens for value-change events and prints each changed entity."""

    def __init__(self, verbose):
        # Register for value-change notifications as soon as we exist.
        pub.subscribe(self.valueChanged, Constants.VALUECHANGED_TOPIC)
        if verbose:
            print('Print subscriber inited ...')

    def valueChanged(self, entity):
        """Print the entity's identifier and its current value."""
        value_text = str(entity.get_value(to_string=True))
        print(entity.dis + ' = ' + value_text)
29
73
0.678161
39
348
5.871795
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.215517
348
12
73
29
0.838828
0
0
0
0
0
0.08596
0
0
0
0
0
0
1
0.222222
false
0
0.222222
0
0.555556
0.222222
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
e7bc9e4e1a533046092b0e42330e482c0a72d04c
3,990
py
Python
PressCurve.py
RichBu/PressCurve
4c89f04489abae5d3c87be2663133b8c974457bf
[ "MIT" ]
null
null
null
PressCurve.py
RichBu/PressCurve
4c89f04489abae5d3c87be2663133b8c974457bf
[ "MIT" ]
null
null
null
PressCurve.py
RichBu/PressCurve
4c89f04489abae5d3c87be2663133b8c974457bf
[ "MIT" ]
null
null
null
""" This program uses Digital Oscilliscope data read in from a PicScope. The PicoScope outputs the data as a CSV and this Python app reads it in. Then, we plot on an X-Y chart By Rich Budek 02/12/2021 in Python 3.8 """ import pandas as pd import numpy as np import jinja2 import math import re from pandas import DataFrame import matplotlib.pyplot as plt # program to read in CSV file from PicoScope and create a graph # excel was dragged to a halt because the data set is so large class Config_Data: #set up by user once filepath = "Z:\Shared Folders\Data\WCO\Customer\BHPB\BHPB_Pressure\Graph-Python\PressCurve" filename_readings = "Test_01_02b_csv.csv" class Project_Data: #data that gets transferred between functions full_filename_readings = "" file_orders_is_csv = False def ReadAllReadings(_project_data): #read all of the current orders orders = pd.read_excel(_project_data.full_filename_readings) return orders #main function or run def main(): #this is the "main" program #print welcome print(" ") print("Sample Program") print("by Rich Budek") print(" ") #setup needed variables config_data = Config_Data() project_data = Project_Data() #create all the full file path names here, so only have to do it once project_data.full_filename_readings = config_data.filepath + "\\" + config_data.filename_readings if project_data.full_filename_readings[-3:].lower() == 'csv': project_data.file_readings_is_csv = True else: project_data.file_readings_is_csv = False #these are all the data tables readings = [] #read in the readings #this can be a database, but for this example write to xls file so can see the output #if write to cloud database, anyone can read it if project_data.file_readings_is_csv: #FUTURE read in csv file readings = pd.read_csv(project_data.full_filename_readings,index_col=0, skiprows=3) pass else: readings = pd.read_excel(project_data.full_filename_readings) readings_len = len(readings.index) print ("number of readings = {:d}".format( readings_len ) ) #plot #01 all 
the hole diameters df_readings = readings df_readings_len = len(df_readings.index) print ("number of df readings = {:d}".format( df_readings_len ) ) #start plt #01 fig_01 = plt.figure(figsize=(11,8), dpi=100.0) #fig_01 = plt.figure(figsize=(11,8)) ax01=df_readings.plot(title='Mini Bone Air Pressure', kind='line',figsize=(11,8),color=['blue','red']) ax01.set_ylim(-0.5, 3.0) ax01.set(xlabel='Time (in secs) ', ylabel='Measured Air Pressure (in Volts)') xticks_num = np.arange(-1.1, 4.1, step=0.1) #xticks_label = map(str, xticks_num) xticks_label = ['{:1.3f}'.format(x) for x in xticks_num] ax01.set_xticks(xticks_num) ax01.set_xticklabels(xticks_label, rotation=90) #put notes on the plot ax01.text(-1.000, 2.9, 'Test conducted 01/23/2019 on-site by Rich Budek using portable PLC with valves', fontsize=12) ax01.text(-1.000, 2.8, 'to control the moldset. PLC was adjusted to provide overlap between close and', fontsize=12) ax01.text(-1.000, 2.7, 'eject operation. Holes were drilled oversize by the customer.', fontsize=12) ax01.text(-1.000, 2.6, 'Results: Steady state eject never hits supply air pressure.', fontsize=12) #set up secondary axis ax02 = ax01.twinx() #instantiate a second axis with same x-axis data ax02.set_ylim(-23.8, 143) ax02.set(ylabel='Non-Calibrated Calculated Air Pressure (in PSI)') df_sec_axis = pd.DataFrame(range(0,readings_len)) df_sec_axis = pd.DataFrame({'shop air': range(120, 120)}) ax02 = df_sec_axis.plot( legend='False', figsize=(11,8), secondary_y=True ) fig_03 = ax01.get_figure() fig_03.savefig('plot_01.svg') print (" ") print (".Program start.") if __name__ == "__main__": main() print (".Program end.")
31.171875
121
0.69599
621
3,990
4.318841
0.388084
0.049217
0.044743
0.042878
0.180089
0.106264
0.074944
0.03132
0
0
0
0.045355
0.198747
3,990
127
122
31.417323
0.793556
0.245865
0
0.078125
0
0
0.21961
0.022834
0
0
0
0
0
1
0.03125
false
0.015625
0.109375
0
0.25
0.140625
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7bca805299e41e9d298d69596d25e7ce1959ef5
580
py
Python
src/model/bow.py
slein89/BOW_transferlearning
785fe5e48da0dc0e9170e526f221daee154bebec
[ "MIT" ]
1
2019-03-05T11:23:26.000Z
2019-03-05T11:23:26.000Z
src/model/bow.py
slein89/BOW_transferlearning
785fe5e48da0dc0e9170e526f221daee154bebec
[ "MIT" ]
null
null
null
src/model/bow.py
slein89/BOW_transferlearning
785fe5e48da0dc0e9170e526f221daee154bebec
[ "MIT" ]
null
null
null
from sklearn.pipeline import Pipeline
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from sklearn.ensemble import GradientBoostingClassifier


def bow_pipeline(X_train, y_train):
    """Fit a bag-of-words + gradient-boosting pipeline on the training data
    and return the fitted model."""
    vectorizer = CountVectorizer(analyzer='word', min_df=0.0, max_df=0.7,
                                 ngram_range=(1, 2))
    classifier = GradientBoostingClassifier(n_estimators=200)
    steps = [
        ('countvect', vectorizer),
        ('GradientBoosting', classifier),
    ]
    model = Pipeline(steps).fit(X_train, y_train)
    return model
38.666667
77
0.624138
57
580
6.175439
0.614035
0.09375
0.039773
0.068182
0
0
0
0
0
0
0
0.021898
0.291379
580
14
78
41.428571
0.83455
0
0
0
0
0
0.05
0
0
0
0
0
0
1
0.076923
false
0
0.230769
0
0.384615
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7bd68b12a5e8867bbc35ac7cd372730ec172944
3,967
py
Python
train_offline.py
denisyarats/exorl
a3fb07a420939280aa0918150923dcca7e82bf2a
[ "MIT" ]
23
2022-02-08T20:28:47.000Z
2022-03-31T11:00:25.000Z
train_offline.py
denisyarats/exorl
a3fb07a420939280aa0918150923dcca7e82bf2a
[ "MIT" ]
1
2022-03-10T04:45:19.000Z
2022-03-10T04:45:19.000Z
train_offline.py
denisyarats/exorl
a3fb07a420939280aa0918150923dcca7e82bf2a
[ "MIT" ]
null
null
null
"""Offline RL training entry point: loads a pre-collected replay buffer and
trains an agent from it, with periodic evaluation and video recording."""
import warnings
warnings.filterwarnings('ignore', category=DeprecationWarning)

import os
# Must be set before MuJoCo / MKL are imported.
os.environ['MKL_SERVICE_FORCE_INTEL'] = '1'
os.environ['MUJOCO_GL'] = 'egl'

from pathlib import Path

import hydra
import numpy as np
import torch
from dm_env import specs

import dmc
import utils
from logger import Logger
from replay_buffer import make_replay_loader
from video import VideoRecorder

torch.backends.cudnn.benchmark = True


def get_domain(task):
    """Return the environment domain prefix of a task name.

    'point_mass_maze' contains an underscore, so it needs a special case
    before splitting on the first '_'.
    """
    if task.startswith('point_mass_maze'):
        return 'point_mass_maze'
    return task.split('_', 1)[0]


def get_data_seed(seed, num_data_seeds):
    """Map a run seed onto one of the available data seeds (1-based)."""
    return (seed - 1) % num_data_seeds + 1


def eval(global_step, agent, env, logger, num_eval_episodes, video_recorder):
    """Run `num_eval_episodes` greedy episodes and log mean reward/length.

    NOTE: shadows the builtin `eval` (kept for interface compatibility).
    Only the first episode is recorded to video.
    """
    step, episode, total_reward = 0, 0, 0
    eval_until_episode = utils.Until(num_eval_episodes)
    while eval_until_episode(episode):
        time_step = env.reset()
        video_recorder.init(env, enabled=(episode == 0))
        while not time_step.last():
            # eval_mode disables exploration; no_grad avoids autograd cost.
            with torch.no_grad(), utils.eval_mode(agent):
                action = agent.act(time_step.observation, global_step, eval_mode=True)
            time_step = env.step(action)
            video_recorder.record(env)
            total_reward += time_step.reward
            step += 1
        episode += 1
    video_recorder.save(f'{global_step}.mp4')
    with logger.log_and_dump_ctx(global_step, ty='eval') as log:
        log('episode_reward', total_reward / episode)
        log('episode_length', step / episode)
        log('step', global_step)


@hydra.main(config_path='.', config_name='config')
def main(cfg):
    """Hydra entry point: build env/agent/replay loader and train offline."""
    work_dir = Path.cwd()
    print(f'workspace: {work_dir}')

    utils.set_seed_everywhere(cfg.seed)
    device = torch.device(cfg.device)

    # create logger
    logger = Logger(work_dir, use_tb=cfg.use_tb)

    # create envs
    env = dmc.make(cfg.task, seed=cfg.seed)

    # create agent
    agent = hydra.utils.instantiate(cfg.agent,
                                    obs_shape=env.observation_spec().shape,
                                    action_shape=env.action_spec().shape)

    # create replay buffer
    data_specs = (env.observation_spec(), env.action_spec(),
                  env.reward_spec(), env.discount_spec())

    # create data storage
    # The buffer was collected by an exploration agent; its directory is
    # keyed by domain and exploration-agent name.
    domain = get_domain(cfg.task)
    datasets_dir = work_dir / cfg.replay_buffer_dir
    replay_dir = datasets_dir.resolve() / domain / cfg.expl_agent / 'buffer'
    print(f'replay dir: {replay_dir}')

    replay_loader = make_replay_loader(env, replay_dir, cfg.replay_buffer_size,
                                       cfg.batch_size,
                                       cfg.replay_buffer_num_workers,
                                       cfg.discount)
    replay_iter = iter(replay_loader)

    # create video recorders
    video_recorder = VideoRecorder(work_dir if cfg.save_video else None)

    timer = utils.Timer()

    global_step = 0

    train_until_step = utils.Until(cfg.num_grad_steps)
    eval_every_step = utils.Every(cfg.eval_every_steps)
    log_every_step = utils.Every(cfg.log_every_steps)

    while train_until_step(global_step):
        # try to evaluate
        if eval_every_step(global_step):
            logger.log('eval_total_time', timer.total_time(), global_step)
            eval(global_step, agent, env, logger, cfg.num_eval_episodes,
                 video_recorder)

        metrics = agent.update(replay_iter, global_step)
        logger.log_metrics(metrics, global_step, ty='train')
        if log_every_step(global_step):
            elapsed_time, total_time = timer.reset()
            with logger.log_and_dump_ctx(global_step, ty='train') as log:
                log('fps', cfg.log_every_steps / elapsed_time)
                log('total_time', total_time)
                log('step', global_step)

        global_step += 1


if __name__ == '__main__':
    main()
30.992188
79
0.637509
509
3,967
4.67387
0.261297
0.067255
0.035309
0.015973
0.094998
0.052963
0.029424
0.029424
0.029424
0
0
0.004798
0.264432
3,967
127
80
31.23622
0.810487
0.029745
0
0.022989
0
0
0.060922
0.005988
0
0
0
0
0
1
0.045977
false
0
0.137931
0.011494
0.218391
0.022989
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7bf132349c97d22d52cdb3650636fe8ed882551
1,606
py
Python
December-15/python3_ASHIK11ab.py
ASHIK11ab/A-December-of-Algorithms-2021
2eeb1192c69b67b1c64033c1df155a705d1219c4
[ "MIT" ]
null
null
null
December-15/python3_ASHIK11ab.py
ASHIK11ab/A-December-of-Algorithms-2021
2eeb1192c69b67b1c64033c1df155a705d1219c4
[ "MIT" ]
null
null
null
December-15/python3_ASHIK11ab.py
ASHIK11ab/A-December-of-Algorithms-2021
2eeb1192c69b67b1c64033c1df155a705d1219c4
[ "MIT" ]
null
null
null
import ast


class Solution:
    """Finds the minimum daily bracelet output `y` needed to finish all
    orders within `deadline` days."""

    def __init__(self, orders, deadline):
        self.orders = orders
        self.deadline = deadline

    def solve(self):
        """Print the smallest `y` for which the orders fit in the deadline."""
        # `y` -> no of bracelets to be made in a day to deliver all orders
        # on time. The starting value of `y` will be >= maximum element in
        # the orders list since, no order can be half complete on the end
        # of a day.
        y = max(self.orders)
        while True:
            cont_sub_arrays = find_cont_sub_arrays(self.orders[:], y)
            # Each contiguous group is one day's work; fit within deadline?
            if len(cont_sub_arrays) <= self.deadline:
                print(y)
                break
            else:
                y += 1


def find_cont_sub_arrays(array, y):
    """Greedily split `array` into the fewest contiguous sub arrays whose
    sums are each <= y, and return the list of sub arrays.

    Bug fix: the original implementation silently dropped the final element
    whenever it did not fit into the last group (e.g. [1, 2, 3, 4] with y=4
    produced [[1, 2], [3]]), which under-counted the number of days and let
    `solve` print a `y` that is too small.
    """
    cont_sub_arrays = []
    current = []
    current_sum = 0
    for value in array:
        # Start a new group when adding this order would exceed `y`.
        if current and current_sum + value > y:
            cont_sub_arrays.append(current)
            current = []
            current_sum = 0
        current.append(value)
        current_sum += value
    if current:
        cont_sub_arrays.append(current)
    return cont_sub_arrays


def main():
    orders = ast.literal_eval(input("number of bracelets = "))
    no_of_days = int(input("n = "))
    s = Solution(orders=orders, deadline=no_of_days)
    s.solve()


if __name__ == "__main__":
    main()
27.220339
77
0.620174
241
1,606
3.958506
0.373444
0.080713
0.109015
0.035639
0.071279
0.071279
0.071279
0.071279
0
0
0
0.004325
0.280199
1,606
59
78
27.220339
0.820934
0.313823
0
0.157895
0
0
0.03125
0
0
0
0
0
0
1
0.105263
false
0
0.026316
0
0.184211
0.026316
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7bf5114270c4c1ba3fa4db47d240b4427db5d56
31
py
Python
is_ipfs/__init__.py
Barabazs/py-is_ipfs
c8ac622879b2223e298d9cc4a59ae7da89eb0479
[ "MIT" ]
1
2022-03-09T14:23:44.000Z
2022-03-09T14:23:44.000Z
is_ipfs/__init__.py
Barabazs/py-is_ipfs
c8ac622879b2223e298d9cc4a59ae7da89eb0479
[ "MIT" ]
null
null
null
is_ipfs/__init__.py
Barabazs/py-is_ipfs
c8ac622879b2223e298d9cc4a59ae7da89eb0479
[ "MIT" ]
null
null
null
from .is_ipfs import Validator
15.5
30
0.83871
5
31
5
1
0
0
0
0
0
0
0
0
0
0
0
0.129032
31
1
31
31
0.925926
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
e7c296cbcbe2a40314c5ebd466c9648b4f998c96
3,993
py
Python
Assignment_3/marbles.py
cVoltic/MIDS
417b7d2e6e7eff20292c5429cbe5245813e9a1d9
[ "MIT" ]
null
null
null
Assignment_3/marbles.py
cVoltic/MIDS
417b7d2e6e7eff20292c5429cbe5245813e9a1d9
[ "MIT" ]
null
null
null
Assignment_3/marbles.py
cVoltic/MIDS
417b7d2e6e7eff20292c5429cbe5245813e9a1d9
[ "MIT" ]
null
null
null
# A Marbles Game
# A Marbles Game - Problem
# Cuong Trinh
import sys


class MarblesBoard():
    """A row of marbles supporting exactly two moves: switching the first
    two marbles, or rotating the first marble to the back of the row."""

    def __init__(self, board):
        self.board = list(board)

    def __str__(self):
        return " ".join(str(m) for m in self.board)

    def __repr__(self):
        return "MarblesBoard((" + ", ".join(str(m) for m in self.board) + "))"

    def switch(self):
        """Swap the first two marbles and print the resulting board."""
        self.board[0], self.board[1] = self.board[1], self.board[0]
        print(self)

    def rotate(self):
        """Move the first marble to the end and print the resulting board."""
        self.board.append(self.board.pop(0))
        print(self)

    def solved(self):
        """Return True when the marbles are in non-decreasing order."""
        return all(a <= b for a, b in zip(self.board, self.board[1:]))


class Solver():
    """Repeatedly applies switch/rotate moves to a MarblesBoard until it
    is sorted, counting the total number of moves taken."""

    def __init__(self, board):
        self.board = board
        self.total_step = 0

    def __str__(self):
        return f"total steps: {self.total_step}"

    def __repr__(self):
        return f"Solver({self.board})"

    def solve(self):
        print(self.board)
        while not self.board.solved():
            first = self.board.board[0]
            second = self.board.board[1]
            if first == 0 or second == 0:
                # Edge case: zero can never be switched into place, so the
                # only legal move is a rotation.
                self.board.rotate()
            elif first > second:
                # Leading pair is out of order: switch it.
                self.board.switch()
            else:
                # Leading pair is already ordered: rotate onward so the
                # next pair reaches the front (board is sorted otherwise).
                self.board.rotate()
            self.total_step += 1
        print(self)


def main(sequence):
    board = MarblesBoard(sequence)
    player = Solver(board)
    player.solve()


if __name__ == "__main__":
    # Big O Complexity for this Algorithm (Logic/Deduction):
    # 1) The algorithm repeatedly calls switch and/or rotate until the
    #    list is sorted.
    # 2) After each move it checks whether the list is sorted by walking
    #    the list once and testing item(i-1) <= item(i), which is O(N)
    #    per check (cheaper than comparing against a sorted copy, whose
    #    best sort is O(N log N)).
    # 3) Each unsorted state triggers exactly one comparison of the first
    #    two elements (O(1)); a switch is O(1); a rotate is O(N) since
    #    every element behind position 0 shifts.
    # 4) Summed over the run, the work is between a*N (switch-dominated)
    #    and a*N^2 (rotate-dominated) for some constant a.
    # Deduction: lower bound O(N), upper bound O(N^2) in time.
    # Ex O(N): given [2,1], one switch plus one check => O(1*N) = O(N).
    sequence = tuple(sys.argv[1].split(","))
    sequence = [int(i) for i in sequence]
    main(sequence)
35.651786
169
0.569747
560
3,993
3.976786
0.269643
0.096991
0.029187
0.020207
0.160754
0.094297
0.052088
0.026044
0
0
0
0.013559
0.335086
3,993
111
170
35.972973
0.825235
0.121963
0
0.301887
0
0
0.074227
0
0
0
0
0
0
1
0.207547
false
0
0.018868
0.075472
0.377358
0.075472
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
e7c5214a6861e2cb908a3d5d48328445859dbfbd
4,237
py
Python
windows.py
dcxSt/pfb-mod
1615790d0782f3becbba72a3e40c5b79ca4dc28b
[ "MIT" ]
1
2021-11-09T13:02:29.000Z
2021-11-09T13:02:29.000Z
windows.py
dcxSt/pfb-mod
1615790d0782f3becbba72a3e40c5b79ca4dc28b
[ "MIT" ]
null
null
null
windows.py
dcxSt/pfb-mod
1615790d0782f3becbba72a3e40c5b79ca4dc28b
[ "MIT" ]
null
null
null
#!/usr/bin/python3.8
"""
Created on 2021-06-07
Author : Stephen Fay
"""

import numpy as np
from constants import *
import helper as h


# %% spectrum transformers (spectrum ~ ft_block)
# Each transformer rescales a spectrum toward unit magnitude; the constant
# added in the denominator controls how strongly small values are boosted.
def f3(x):
    return x * (1 / (np.abs(x) + 0.1) + 0.3)


def f4(x):
    return x * (1 / (np.abs(x) + 0.000001))


def f5(x):
    return x * (1 / (np.abs(x) + 0.01))


def f6(x):
    return x * (1 / (np.abs(x) + 0.000000000001))


def f7(x):
    return x * (1 / (np.abs(x) + 10.0 ** (-50)))


def repete_func(f, ft_block, n, ntap=NTAP, lblock=LBLOCK):
    """Apply the transformer `f` to `ft_block` n times, rebuilding the
    eigen-matrix from the real part of the complex reconstruction after
    every pass.  Returns the final (ft_block, complex_rec) pair.

    NOTE(review): n == 0 would leave complex_rec unbound; callers always
    pass n >= 1 — confirm before generalizing.
    """
    for _ in range(n):
        ft_block = f(ft_block)
        complex_rec = h.matrix_eig_to_window_complex(ft_block, ntap)
        ft_block = h.window_to_matrix_eig(np.real(complex_rec), ntap, lblock)
    return ft_block, complex_rec


# %% candidate replacement windows
def william_wallace(ntap=NTAP, lblock=LBLOCK):
    """
    input : a sinc or sinc hamming window, produces similar results
    output : a candidate window that doesn't have as much leaking
    """
    sinc = h.sinc_window(ntap, lblock)
    ft_block = h.window_to_matrix_eig(sinc, ntap, lblock)
    # result is almost identical if we use f7 instead of f6
    ft_block, complex_rec = repete_func(f6, ft_block, 10, ntap, lblock)
    return np.real(complex_rec)


# %% run this file
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    from datetime import datetime as dt

    ntap, lblock = NTAP, 32  # LBLOCK
    sinc = h.sinc_window(ntap, lblock)
    # alternatively use SINC_HAMMING
    ft_block_original = h.window_to_matrix_eig(sinc, ntap, lblock)
    ft_block = ft_block_original.copy()
    ft_block, complex_rec = repete_func(f6, ft_block, 10, ntap, lblock)
    abs_rec = np.abs(complex_rec)
    imag_rec = np.imag(complex_rec)
    reconstructed_window = np.real(complex_rec)

    def show_panel(position, matrix, heading):
        # One imshow panel: image, title, colorbar.
        plt.subplot(position)
        plt.imshow(matrix, aspect="auto")
        plt.title(heading)
        plt.colorbar()

    ### modified spectrum
    plt.subplots(figsize=(16, 14))
    show_panel(431, np.real(ft_block_original), "real original")
    show_panel(432, np.abs(ft_block_original), "absolute original")
    show_panel(433, np.imag(ft_block_original), "imaginary original")

    ### corresponding reconstruction from window
    show_panel(434, np.real(ft_block),
               "real (constructed from window)\nTHE ACTUAL THING")
    show_panel(435, np.abs(ft_block),
               "absolute (constructed from window)\nTHE ACTUAL THING")
    show_panel(436, np.imag(ft_block),
               "imaginary (constructed from window)\nTHE ACTUAL THING")

    ### the window
    plt.subplot(425)
    plt.plot(abs_rec, "k-.", alpha=0.3, label="abs")
    plt.plot(imag_rec, alpha=0.4, color="orange", label="imaginary")
    plt.plot(sinc, color="grey", alpha=0.4, label="sinc")
    plt.plot(reconstructed_window, "b-", label="real")
    plt.title("window")
    plt.legend()

    ### the boxcar
    box = h.window_to_box(reconstructed_window)
    plt.subplot(426)
    short_box = box[int(ntap * lblock / 2 - 15):int(ntap * lblock / 2 + 15)]
    plt.plot(np.real(short_box), "b-", alpha=0.3, label="real")
    plt.plot(np.abs(short_box), "k-", label="abs")
    plt.grid()
    plt.title("box zoom")
    plt.legend()

    plt.subplot(427)
    short_box = box[int(ntap * lblock / 2 - 150):int(ntap * lblock / 2 + 150)]
    plt.plot(np.real(short_box), "b-", alpha=0.3, label="real")
    plt.plot(np.abs(short_box), "k-", label="abs")
    plt.title("box zoom")
    plt.grid()
    plt.legend()

    plt.subplot(428)
    plt.plot(np.real(box), "b-", alpha=0.3, label="real")
    plt.plot(np.abs(box), "k-", label="abs")
    plt.grid()
    plt.title("box")
    plt.legend()

    plt.tight_layout()
    # strdatetime = dt.today().strftime("%Y-%m-%d_%H.%M.%S")
    # np.save("figures/experiments/series3_{}.npy".format(strdatetime),reconstructed_window)
    # print("saved window")
    # plt.savefig("figures/experiments/series3_{}.png".format(strdatetime))
    # print("saved figure")
    plt.show()
30.482014
123
0.671702
658
4,237
4.194529
0.275076
0.055797
0.023913
0.03913
0.424275
0.366304
0.28913
0.218116
0.15471
0.138406
0
0.036189
0.165211
4,237
139
124
30.482014
0.744133
0.201794
0
0.267442
0
0
0.097898
0
0
0
0
0
0
1
0.023256
false
0
0.05814
0
0.104651
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7c57019e7800b7bf091d492331fc64fbacfb1a2
291
py
Python
package/PartSegCore/channel_class.py
neuromusic/PartSeg
a4edff1b9fbe55eb7f5e1fc8b5b3f8e730b35caf
[ "BSD-3-Clause" ]
15
2020-03-21T03:27:56.000Z
2022-03-21T07:46:39.000Z
package/PartSegCore/channel_class.py
neuromusic/PartSeg
a4edff1b9fbe55eb7f5e1fc8b5b3f8e730b35caf
[ "BSD-3-Clause" ]
479
2019-10-27T22:57:22.000Z
2022-03-30T12:48:14.000Z
package/PartSegCore/channel_class.py
neuromusic/PartSeg
a4edff1b9fbe55eb7f5e1fc8b5b3f8e730b35caf
[ "BSD-3-Clause" ]
5
2020-02-05T14:25:02.000Z
2021-12-21T03:44:52.000Z
class Channel(int):
    """Marker subclass of int used to distinguish a channel-selection
    parameter from a plain numerical algorithm parameter.

    In the autogenerated interface, a field of this type limits input
    values to the number of channels of the current image.
    """

    def __str__(self):
        # Channels are stored zero-based internally but displayed
        # one-based to the user.
        return str(int(self) + 1)
32.333333
107
0.714777
38
291
5.368421
0.868421
0.068627
0
0
0
0
0
0
0
0
0
0.004464
0.230241
291
8
108
36.375
0.90625
0.66323
0
0
1
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0.333333
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
e7c7b17b656904abd64163333b17badb2111d9ac
986
py
Python
Train/test.py
fanchy888/digit_ANN
5faf0e574321ff5e3c6b8ec82992d20177be15a0
[ "MIT" ]
null
null
null
Train/test.py
fanchy888/digit_ANN
5faf0e574321ff5e3c6b8ec82992d20177be15a0
[ "MIT" ]
null
null
null
Train/test.py
fanchy888/digit_ANN
5faf0e574321ff5e3c6b8ec82992d20177be15a0
[ "MIT" ]
null
null
null
#-*- coding:utf-8 -*-
"""Run a trained 3-layer ANN on a single digit image and on a test set.

Loads network weights from 'theta.mat', classifies 'test.png', then
reports classification accuracy over the samples stored in 'test.mat'.
"""
from PIL import Image
import numpy as np
from scipy.io import loadmat
from scipy.io import savemat


def sigmoid(z):
    """Element-wise logistic function 1 / (1 + e^-z)."""
    g = 1 / (1 + np.exp(-z))
    return g


def forward(X, theta1, theta2, theta3):
    """Forward-propagate X (one sample per row) through the three weight
    matrices, prepending a bias column of ones before each layer.

    Replaces the original copy-pasted forward pass, whose first copy
    hard-coded a (1, 1) bias that only worked for a single sample.
    Returns the output-layer activation matrix h (one row per sample).
    """
    m = X.shape[0]
    bias = np.mat(np.ones((m, 1)))
    a1 = np.hstack((bias, X))
    a2 = sigmoid(a1 * theta1.T)
    a2 = np.hstack((bias, a2))
    a3 = sigmoid(a2 * theta2.T)
    a3 = np.hstack((bias, a3))
    return sigmoid(a3 * theta3.T)


# --- classify a single hand-drawn image --------------------------------
img = Image.open('test.png')
img = img.convert('L')  # grayscale
grey = img.getdata()
X = np.asarray(grey)
X = np.mat(X.ravel())   # 1 x n_pixels row vector

theta = loadmat('theta')
theta1 = theta['theta1']
theta2 = theta['theta2']
theta3 = theta['theta3']

h = forward(X, theta1, theta2, theta3)
y1 = np.argmax(h, axis=1)
print(h)

# --- evaluate accuracy on the saved test set ----------------------------
# (the original also re-loaded 'theta' into an unused `weight` variable;
# that redundant read has been removed)
train_set = loadmat('test')
X = np.mat(train_set['X'])
y = np.mat(train_set['y'])

h = forward(X, theta1, theta2, theta3)
y1 = np.argmax(h, axis=1)
accuracy = np.mean(np.double(y1 == y)) * 100
print(accuracy)
21.434783
41
0.679513
202
986
3.30198
0.292079
0.067466
0.089955
0.116942
0.428786
0.428786
0.428786
0.422789
0.356822
0.356822
0
0.057421
0.063895
986
45
42
21.911111
0.665222
0.020284
0
0.210526
0
0
0.044652
0
0
0
0
0
0
1
0.026316
false
0
0.105263
0
0.157895
0.052632
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7c8356eb7291024a35ca2cbb8863750d4cda4f4
3,465
py
Python
dateien_lesen.py
hansalemaos/Everything2TXT
33dc7f15d9003441d7e38a8d872c3b7f6b3fa00b
[ "MIT" ]
1
2022-02-27T19:07:08.000Z
2022-02-27T19:07:08.000Z
dateien_lesen.py
hansalemaos/Everything2TXT
33dc7f15d9003441d7e38a8d872c3b7f6b3fa00b
[ "MIT" ]
null
null
null
dateien_lesen.py
hansalemaos/Everything2TXT
33dc7f15d9003441d7e38a8d872c3b7f6b3fa00b
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
"""Extract plain text from assorted document formats (pptx, docx, html,
pdf, epub, xlsx, plain text) and save the result next to the source file."""
import docx2txt
from pdfminer.high_level import extract_text
from pptx import Presentation
from bs4 import BeautifulSoup
from epubextract import epub2txt
from xlsx2html import xlsx2html
import tempfile
from tkinter import Tk
from tkinter.filedialog import askopenfilename
import re


def create_temp_file(ending):
    """Create a named temporary file with suffix `ending` and return its path.

    Bug fix: the original called tempfile.TemporaryFile(..., delete=False),
    but TemporaryFile accepts no `delete` argument (that is
    NamedTemporaryFile's API), so this raised TypeError at runtime.  The
    handle is closed immediately so other code (xlsx2html) can write to
    the path — required on Windows, harmless elsewhere.
    """
    fp = tempfile.NamedTemporaryFile(suffix=ending, delete=False)
    fp.close()
    return fp.name


def powerpointlesen(pfad):
    """Return the concatenated text of all shapes on all slides of a pptx."""
    prs = Presentation(pfad)
    ganzertext = ""
    for slide in prs.slides:
        for shape in slide.shapes:
            try:
                if hasattr(shape, "text"):
                    ganzertext = ganzertext + "\n" + shape.text
            except Exception as Fehler:
                # Best-effort: skip shapes whose text cannot be read.
                print(Fehler)
    return ganzertext


def docxlesen(pfad):
    """Return the plain text of a docx document."""
    return docx2txt.process(pfad)


def txtdateien_lesen(pfad):
    """Read a text-like file, stripping any HTML markup it may contain.

    The raw bytes are wrapped in a minimal HTML document so BeautifulSoup
    can both strip tags and sniff the encoding; on failure, fall back to
    reading the file as UTF-8 text.
    """
    try:
        with open(pfad, mode="rb") as f:
            dateiohnehtml = f.read()
        dateiohnehtml = (
            b"""<!DOCTYPE html><html><body><p>"""
            + dateiohnehtml
            + b"""</p></body></html>"""
        )
        soup = BeautifulSoup(dateiohnehtml, "lxml")
        soup = soup.text
        return soup.strip()
    except Exception as Fehler:
        print(Fehler)
        with open(pfad, mode="r", encoding="utf-8") as f:
            dateiohnehtml = f.read()
        return dateiohnehtml


def html_htm_dateien_lesen(pfad):
    """Return the tag-stripped text content of an HTML file.

    NOTE(review): returns None when parsing fails (only the error is
    printed) — confirm callers tolerate that.
    """
    try:
        with open(pfad, mode="rb") as f:
            dateiohnehtml = f.read()
        soup = BeautifulSoup(dateiohnehtml, "lxml")
        soup = soup.text
        soup = soup.strip()
        return soup
    except Exception as Fehler:
        print(Fehler)


def pdf_datei_lesen(pfad):
    """Return the extracted text of a PDF document."""
    return extract_text(pfad)


def xlsx_datei_einlesen(pfad):
    """Convert an xlsx workbook to HTML in a temp file, then strip the tags."""
    tmpdatei = create_temp_file(ending="html")
    xlsx2html(pfad, tmpdatei)
    text = html_htm_dateien_lesen(tmpdatei)
    return text


def dateienauslesen(pfad):
    """Dispatch on the file extension and return the extracted text.

    Unknown extensions fall back to the tolerant text reader.
    """
    if str(pfad).endswith("pptx"):
        text = powerpointlesen(pfad)
        return text
    elif str(pfad).endswith("docx"):
        text = docxlesen(pfad)
        return text
    elif str(pfad).endswith(("html", "htm")):
        # Original behavior kept: html files go through the tolerant
        # text reader, not html_htm_dateien_lesen.
        text = txtdateien_lesen(pfad)
        return text
    elif str(pfad).endswith("pdf"):
        text = pdf_datei_lesen(pfad)
        return text
    elif str(pfad).endswith("epub"):
        text = epub2txt(pfad)
        text = text.convert()
        return text
    elif str(pfad).endswith("xlsx"):
        text = xlsx_datei_einlesen(pfad)
        return text
    else:
        text = txtdateien_lesen(pfad)
        return text


def datei_auswaehlen_mit_tkinter():
    """Open a file-picker dialog; return (chosen path, output .txt path).

    The output path is the input path with its extension replaced by .txt.
    """
    Tk().withdraw()
    dateiname = askopenfilename()
    ausgabeordner = re.sub(r"/[^/]+\.\w+$", "", dateiname)
    # Raw string for the replacement: "\g<1>" is a regex group reference,
    # not a string escape.
    ausgabedatei = re.sub(r"^.*(/[^/]+)\.\w{,8}", r"\g<1>.txt", dateiname)
    ausgabedatei = ausgabeordner + ausgabedatei
    return dateiname, ausgabedatei


if __name__ == "__main__":
    dateiname, ausgabedatei = datei_auswaehlen_mit_tkinter()
    textzumspeichern = dateienauslesen(dateiname)
    if not str(dateiname).endswith(".txt"):
        with open(ausgabedatei, mode="w", encoding="utf-8") as f:
            if isinstance(textzumspeichern, str):
                f.write(textzumspeichern)
            if isinstance(textzumspeichern, list):
                textzumspeichern = "\n".join(textzumspeichern)
                f.write(textzumspeichern)
    print(textzumspeichern)
27.283465
73
0.621356
384
3,465
5.507813
0.286458
0.037825
0.049645
0.040189
0.280378
0.243026
0.158865
0.084161
0.048227
0.048227
0
0.005104
0.264935
3,465
126
74
27.5
0.825285
0.006061
0
0.29703
0
0
0.038755
0
0
0
0
0
0
1
0.089109
false
0
0.09901
0.019802
0.346535
0.039604
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7c92ca14bcce898f8943bb76fd1a82f7d605a8a
307
py
Python
tests/test_source.py
Kadas36/NEWS-App
ee0504ed04f5e3d8c7e06ea478d163e8209ac425
[ "MIT" ]
null
null
null
tests/test_source.py
Kadas36/NEWS-App
ee0504ed04f5e3d8c7e06ea478d163e8209ac425
[ "MIT" ]
null
null
null
tests/test_source.py
Kadas36/NEWS-App
ee0504ed04f5e3d8c7e06ea478d163e8209ac425
[ "MIT" ]
null
null
null
import unittest
from app.models import Source


class MovieTest(unittest.TestCase):
    """Unit tests for the Source news model."""

    def setUp(self):
        """Create a fresh Source fixture before each test."""
        self.new_source = Source(
            'cnn',
            'cnn',
            'Elections set for 2020',
            'https://edition.cnn.com/',
            'general',
            'us',
        )

    def test_instance(self):
        """The fixture should be an instance of Source."""
        self.assertTrue(isinstance(self.new_source, Source))
30.7
112
0.70684
41
307
5.219512
0.658537
0.074766
0.121495
0.17757
0
0
0
0
0
0
0
0.015267
0.14658
307
10
113
30.7
0.801527
0
0
0
0
0
0.198052
0
0
0
0
0
0.142857
1
0.285714
false
0
0.285714
0
0.714286
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
e7c9f67c367034f596fa0db99d380d2232e79275
195
py
Python
steppygraph/test/utils_test.py
mfrawley/steppy-graph
2094c95d2cfe3145ecc42311d799dad8ba2f007b
[ "MIT" ]
4
2019-02-12T21:56:03.000Z
2020-10-19T07:14:44.000Z
steppygraph/test/utils_test.py
mfrawley/steppy-graph
2094c95d2cfe3145ecc42311d799dad8ba2f007b
[ "MIT" ]
null
null
null
steppygraph/test/utils_test.py
mfrawley/steppy-graph
2094c95d2cfe3145ecc42311d799dad8ba2f007b
[ "MIT" ]
null
null
null
from steppygraph.utils import filter_props

# Sample props: one "private" underscore-prefixed key, one public key.
d = {
    "_foo": True,
    "foo": 1
}


def test_filter_keys():
    """filter_props should drop underscore-prefixed keys and keep the rest."""
    filtered = filter_props(d)
    assert filtered['foo'] == True  # mirrors the original == comparison
    assert '_foo' not in filtered.keys()
15
42
0.605128
29
195
3.862069
0.586207
0.196429
0.214286
0
0
0
0
0
0
0
0
0.006849
0.251282
195
12
43
16.25
0.760274
0
0
0
0
0
0.071795
0
0
0
0
0
0.222222
1
0.111111
false
0
0.111111
0
0.222222
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e7cc0e525cff12bc2b3486753006977533ce881a
9,346
py
Python
train.py
WKUAILAB/Risk_Level_Prediction
7074953cf2c19cf2f2ad5c1cad5df0ad30637418
[ "MIT" ]
null
null
null
train.py
WKUAILAB/Risk_Level_Prediction
7074953cf2c19cf2f2ad5c1cad5df0ad30637418
[ "MIT" ]
null
null
null
train.py
WKUAILAB/Risk_Level_Prediction
7074953cf2c19cf2f2ad5c1cad5df0ad30637418
[ "MIT" ]
1
2022-02-28T06:29:08.000Z
2022-02-28T06:29:08.000Z
import numpy as np
import pandas as pd
import os
from joblib import dump
from sklearn.model_selection import train_test_split, RandomizedSearchCV, GridSearchCV
from sklearn.metrics import classification_report, recall_score, precision_recall_fscore_support
from sklearn.ensemble import GradientBoostingClassifier
from scipy.stats import randint as sp_randint
from scipy.stats import uniform as sp_uniform
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.layers import Dense, Input, Dropout, Activation, LSTM, concatenate, Reshape, Permute, Lambda, RepeatVector, Multiply
from tensorflow.keras.layers import Embedding, Bidirectional
from tensorflow.keras.initializers import Constant
import tensorflow.keras.backend as K
import lightgbm as lgb
from lightgbm import LGBMClassifier

from utils import fetch_df, actions_to_indices, pretrained_embedding_layer, attention_3d_block, RiskLevelPredict, make_model, read_action_vecs, time_scalar, convert_to_one_hot, learning_rate_010_decay_power_0995, evaluate_recall

cur_dir = '.'

print('current working directory:')
print(os.getcwd())
print(os.listdir())

# Paths for the pre-trained action embeddings and the saved LSTM model.
emb_fn = 'action_page_fasttext.dict'
emb_dir = os.path.join(cur_dir, 'data', emb_fn)
model_fn = 'attention_lstm_3'
model_dir = os.path.join('/data/luyining', 'models', model_fn)

cols = ['has_risk', 'ds', 'user_id', 'order_id', 'reg_days', 'platform',
        'usertype', 'mobil_prefix3', 'mobile_prefix5', 'len_sequence',
        'cnt_pay', 'max_time_diff', 'min_time_diff', 'avg_time_diff',
        'std_time_diff', 'cnt_src', 'device_ios', 'device_android',
        'device_wap', 'device_web', 'device_app', 'device_mini',
        'cnt_login', 'is_bk_log', 'is_wzp_log', 'is_dc_log', 'cnt_item',
        'cnt_cheap_item', 'cnt_lyl_item', 'roi', 'avg_roi',
        'is_gift_inclued', 'is_virtual_inclued', 'actions', 'times']

data = fetch_df('temp', 'rc_risklevel_labels4train_fin4', cols=cols)

# Parse the comma-separated action/time sequence strings into lists.
action_sequences = pd.DataFrame.to_numpy(data['actions'])
X = [seq.strip().split(",") for seq in action_sequences]

time_sequences = pd.DataFrame.to_numpy(data['times'])
T = [list(map(np.int64, seq.strip().split(","))) for seq in time_sequences]

X = np.asarray(X)  # array of action_sequences
T = np.asarray(T)  # array of time_sequences
Y = pd.DataFrame.to_numpy(data['has_risk'], dtype='int64')  # has_risk (categorical)

X_train, X_test, T_train, T_test, y_train, y_test = train_test_split(
    X, T, Y, test_size=0.3, random_state=0)

## training set
t_scalar = [list(map(time_scalar, i)) for i in T_train]  # time scaling
maxLen = len(max(X_train, key=len))
Y_indices = y_train

# Load the per-action embeddings and their numeric index mappings.
action_to_index, index_to_action, action_to_vec_map = read_action_vecs(emb_dir)
# Convert action sequences to numeric indices.
X_indices = actions_to_indices(X_train, action_to_index, maxLen)
# Reverse so the most recent action sits at the end of the padded sequence.
X_indices = np.array([i[::-1] for i in X_indices])
T_indices = np.array([[-1] * (maxLen - len(i)) + i[::-1] for i in t_scalar])
T_indices = T_indices.reshape(T_indices.shape[0], T_indices.shape[1], 1)

## test set
t_scalar_test = [list(map(time_scalar, i)) for i in T_test]
maxLen = len(max(X_train, key=len))
Y_indices_test = y_test
action_to_index, index_to_action, action_to_vec_map = read_action_vecs(emb_dir)
X_indices_test = actions_to_indices(X_test, action_to_index, maxLen)
X_indices_test = np.array([i[::-1] for i in X_indices_test])
T_indices_test = np.array([[-1] * (maxLen - len(i)) + i[::-1] for i in t_scalar_test])
T_indices_test = T_indices_test.reshape(T_indices_test.shape[0], T_indices_test.shape[1], 1)

METRICS = [
    keras.metrics.TruePositives(name='tp'),
    keras.metrics.FalsePositives(name='fp'),
    keras.metrics.TrueNegatives(name='tn'),
    keras.metrics.FalseNegatives(name='fn'),
    keras.metrics.BinaryAccuracy(name='accuracy'),
    keras.metrics.Precision(name='precision'),
    keras.metrics.Recall(name='recall'),
    keras.metrics.AUC(name='auc'),
    keras.metrics.AUC(name='prc', curve='PR'),  # precision-recall curve
]

# Initialize the output bias from the class imbalance (log odds).
initial_bias = np.log(sum(Y == 1) / (Y.shape[0] - sum(Y == 1)))

early_stopping = tf.keras.callbacks.EarlyStopping(
    monitor='val_recall',
    verbose=1,
    patience=5,
    mode='max',
    min_delta=0.003,
    restore_best_weights=True)

model = make_model(metrics=METRICS, output_bias=initial_bias,
                   attention_share=False, bidirectional=True)
model.summary()

history = model.fit(
    [X_indices, T_indices],
    Y_indices,
    epochs=50,
    batch_size=64,
    shuffle=True,
    validation_data=([X_indices_test, T_indices_test], Y_indices_test),
    validation_split=0.2,  # hold out 20% of the data for validation
    validation_freq=1,     # validate after every epoch
    callbacks=[early_stopping]
)

model.save(model_dir)

feature_columns = ['len_sequence', 'cnt_pay', 'max_time_diff', 'min_time_diff',
                   'avg_time_diff', 'std_time_diff', 'cnt_src', 'device_ios',
                   'device_android', 'device_wap', 'device_web', 'device_app',
                   'device_mini', 'cnt_login', 'is_bk_log', 'is_wzp_log',
                   'is_dc_log', 'cnt_item', 'cnt_cheap_item', 'cnt_lyl_item',
                   'roi', 'avg_roi', 'is_gift_inclued', 'is_virtual_inclued']
feature_columns.append('lstm')
target_column = ['has_risk']

## full data set: feed the LSTM score in as an extra LightGBM feature
t_scalar_total = [list(map(time_scalar, i)) for i in T]  # time scaling
Y_indices_total = Y
# Convert action sequences to numeric indices.
X_indices_total = actions_to_indices(X, action_to_index, maxLen)
# Reverse so the most recent action sits at the end of the padded sequence.
X_indices_total = np.array([i[::-1] for i in X_indices_total])
T_indices_total = np.array([[-1] * (maxLen - len(i)) + i[::-1] for i in t_scalar_total])
T_indices_total = T_indices_total.reshape(T_indices_total.shape[0], T_indices_total.shape[1], 1)

data['lstm'] = model.predict([X_indices_total, T_indices_total], batch_size=64)
data[feature_columns] = data[feature_columns].astype(float)
data[target_column] = data[target_column].astype(int)

train_x, test_x, train_y, test_y = train_test_split(
    data[feature_columns], data[target_column], test_size=0.2, random_state=0)
train_x, validation_x, train_y, validation_y = train_test_split(
    train_x, train_y, test_size=0.2, random_state=0)

fit_params = {"early_stopping_rounds": 30,
              "eval_metric": evaluate_recall,
              "eval_set": [(validation_x, validation_y)],
              'eval_names': ['valid'],
              'callbacks': [lgb.reset_parameter(learning_rate=learning_rate_010_decay_power_0995)],
              'verbose': 100}

param_test = {'num_leaves': sp_randint(6, 50),
              'min_child_samples': sp_randint(100, 500),
              'min_child_weight': [1e-5, 1e-3, 1e-2, 1e-1, 1, 1e1, 1e2, 1e3, 1e4],
              'subsample': sp_uniform(loc=0.2, scale=0.8),
              'colsample_bytree': sp_uniform(loc=0.4, scale=0.6),
              'reg_alpha': [0, 1e-1, 1, 2, 5, 7, 10, 50, 100],
              'reg_lambda': [0, 1e-1, 1, 5, 10, 20, 50, 100]}

n_HP_points_to_test = 500

clf = lgb.LGBMClassifier(objective='binary', boosting='gbdt', seed=0,
                         max_depth=-1, learning_rate=0.05, random_state=314,
                         silent=True, metric=None, n_jobs=4, n_estimators=5000)

gs = RandomizedSearchCV(
    estimator=clf,
    param_distributions=param_test,
    n_iter=n_HP_points_to_test,
    scoring='recall',
    cv=5,
    refit=True,
    random_state=314,
    verbose=True)

gs.fit(train_x, train_y, **fit_params)
opt_parameters = gs.best_params_

clf_sw = lgb.LGBMClassifier(**clf.get_params())
# set optimal parameters
clf_sw.set_params(**opt_parameters)
gs_sample_weight = GridSearchCV(estimator=clf_sw,
                                param_grid={'scale_pos_weight': [1, 2, 6, 7, 8, 12]},
                                scoring='recall',
                                cv=5,
                                refit=True,
                                verbose=True)
gs_sample_weight.fit(train_x, train_y, **fit_params)
opt_parameters["scale_pos_weight"] = gs_sample_weight.best_params_['scale_pos_weight']

# Configure locally from hardcoded values
clf_final = lgb.LGBMClassifier(**clf.get_params())
# set optimal parameters
clf_final.set_params(**opt_parameters)

# Train the final model with learning rate decay
clf_final.fit(train_x, train_y, **fit_params)

train_prob_cv = clf_final.predict_proba(train_x)[:, 1]
validation_prob_cv = clf_final.predict_proba(validation_x)[:, 1]
test_prob_cv = clf_final.predict_proba(test_x)[:, 1]

print(classification_report(train_y, train_prob_cv > 0.5))
print('--------------------------------------------------')
print(classification_report(validation_y, validation_prob_cv > 0.5))
print('--------------------------------------------------')
print(classification_report(test_y, test_prob_cv > 0.5))

dump(clf_final, '/data/luyining/models/lgb_3.pkl')
43.469767
492
0.675904
1,338
9,346
4.404335
0.246637
0.025793
0.010182
0.008315
0.313423
0.280672
0.2318
0.212116
0.185644
0.120143
0
0.023763
0.184999
9,346
214
493
43.672897
0.749902
0.046437
0
0.107143
0
0
0.133048
0.0233
0
0
0
0
0
1
0
false
0
0.113095
0
0.113095
0.047619
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7cd2f7b970a9548bf93ebb125e5939b4572b405
2,543
py
Python
mptb/models/commons.py
to-aoki/my-pytorch-bert
8e412ae6331f5f19fee55b430be389de2f5c49a6
[ "Apache-2.0" ]
21
2019-03-04T03:43:19.000Z
2022-02-14T15:50:41.000Z
mptb/models/commons.py
to-aoki/my-pytorch-bert
8e412ae6331f5f19fee55b430be389de2f5c49a6
[ "Apache-2.0" ]
1
2019-10-07T17:49:21.000Z
2019-12-14T11:50:10.000Z
mptb/models/commons.py
to-aoki/my-pytorch-bert
8e412ae6331f5f19fee55b430be389de2f5c49a6
[ "Apache-2.0" ]
5
2019-07-19T07:04:55.000Z
2020-07-01T13:24:14.000Z
# This file is based on
# https://github.com/huggingface/pytorch-pretrained-BERT/blob/master/pytorch_pretrained_bert/modeling.py.
# changing class names and variables names for my understanding of BERT.
# and Modified a bit to visualize with bertviz.
#
# Copyright 2018 The Google AI Language Team Authors and The HugginFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.o
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common Network model."""

import math

import torch
import torch.nn as nn


def gelu(x):
    """Gaussian Error Linear Unit activation (exact erf formulation)."""
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))


try:
    # Prefer NVIDIA apex's fused implementation when it is installed.
    from apex.normalization.fused_layer_norm import FusedLayerNorm as LayerNorm
except ImportError:
    class LayerNorm(nn.Module):
        """A layernorm module in the TF style (epsilon inside the square root)."""

        def __init__(self, hidden_size, eps=1e-12):
            super().__init__()
            self.weight = nn.Parameter(torch.ones(hidden_size))   # gamma
            self.bias = nn.Parameter(torch.zeros(hidden_size))    # beta
            self.variance_epsilon = eps

        def forward(self, x):
            # Normalize over the last dimension only.
            mean = x.mean(dim=-1, keepdim=True)
            centered = x - mean
            var = (centered ** 2).mean(dim=-1, keepdim=True)
            std = (var + self.variance_epsilon).sqrt()
            return self.weight * centered / std + self.bias


class PositionwiseFeedForward(nn.Module):
    """ FeedForward Neural Networks for each position """

    def __init__(self, config, eps=1e-12):
        super().__init__()
        # Expand to the intermediate size, then project back down.
        self.intermediate = nn.Linear(config.hidden_size, config.intermediate_size)
        self.output = nn.Linear(config.intermediate_size, config.hidden_size)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.layer_norm = LayerNorm(config.hidden_size, eps=eps)

    def forward(self, attention_output):
        expanded = gelu(self.intermediate(attention_output))
        projected = self.dropout(self.output(expanded))
        # Residual connection followed by layer normalization.
        return self.layer_norm(projected + attention_output)
40.365079
105
0.701534
353
2,543
4.937677
0.467422
0.034423
0.027539
0.018359
0.04475
0.022949
0
0
0
0
0
0.013242
0.198191
2,543
62
106
41.016129
0.841589
0.416044
0
0.066667
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0.033333
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7cd8e5b68df4a3597e07017cc8ad1f64fc2dc4a
1,388
py
Python
simple_server.py
mlacayoemery/owslib-pywps-echo
9b19936989d8261986f6184547206386afb8c1fe
[ "Unlicense" ]
null
null
null
simple_server.py
mlacayoemery/owslib-pywps-echo
9b19936989d8261986f6184547206386afb8c1fe
[ "Unlicense" ]
null
null
null
simple_server.py
mlacayoemery/owslib-pywps-echo
9b19936989d8261986f6184547206386afb8c1fe
[ "Unlicense" ]
null
null
null
import flask
import pywps


class EchoVector(pywps.Process):
    """WPS process that echoes a GML/XML vector payload back to the caller."""

    def __init__(self):
        message_input = pywps.ComplexInput(
            'message', 'Input message',
            supported_formats=[pywps.Format('application/gml+xml'),
                               pywps.Format('text/xml')],
            mode=pywps.validator.mode.MODE.NONE)
        response_output = pywps.ComplexOutput(
            'response', 'Output response',
            supported_formats=[pywps.Format('application/gml+xml')])
        super(EchoVector, self).__init__(
            self._handler,
            identifier='echo_vector',
            title='Echo Vector Test',
            abstract='Returns the given vector',
            version='1.0.0.0',
            inputs=[message_input],
            outputs=[response_output],
            store_supported=True,
            status_supported=True)

    def _handler(self, request, response):
        # Copy the first 'message' input straight into the 'response' output.
        response.outputs['response'].data = request.inputs['message'][0].data
        return response


app = flask.Flask(__name__)
service = pywps.Service([EchoVector()])


@app.route('/wps', methods=['GET', 'POST'])
def wps():
    # pywps.Service is a WSGI application; Flask dispatches the request to it.
    return service


bind_host = '127.0.0.1'
app.run(threaded=True, host=bind_host)
30.844444
96
0.53098
131
1,388
5.450382
0.465649
0.046218
0.058824
0.07563
0.123249
0.123249
0.123249
0
0
0
0
0.012236
0.352305
1,388
44
97
31.545455
0.78198
0
0
0
0
0
0.131124
0
0
0
0
0
0
1
0.088235
false
0
0.058824
0.029412
0.235294
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
e7cdb089436833732bfa37700ee373f489662c99
889
py
Python
tests/test_jaccard_score.py
jo-mueller/biapol-utilities
773b60a64fa12641ba869addd7b4be9a4ab87ecd
[ "BSD-3-Clause" ]
4
2021-12-17T19:37:11.000Z
2022-03-29T16:39:31.000Z
tests/test_jaccard_score.py
jo-mueller/biapol-utilities
773b60a64fa12641ba869addd7b4be9a4ab87ecd
[ "BSD-3-Clause" ]
34
2021-11-04T14:10:24.000Z
2022-01-31T13:23:44.000Z
tests/test_jaccard_score.py
jo-mueller/biapol-utilities
773b60a64fa12641ba869addd7b4be9a4ab87ecd
[ "BSD-3-Clause" ]
2
2021-12-14T13:53:16.000Z
2021-12-15T12:30:51.000Z
# -*- coding: utf-8 -*-
"""Tests for label.compare_labels (Jaccard/Dice label-matching scores)."""
from biapol_utilities import label
import numpy as np


def test_compare_labels():
    """The result table exposes both jaccard_score and dice_score columns."""
    reference = np.asarray([5, 0, 0, 1, 1, 1, 2, 2])
    candidate = np.asarray([5, 0, 0, 1, 1, 1, 2, 3])
    scores = label.compare_labels(reference, candidate)
    assert 'jaccard_score' in scores.columns
    assert 'dice_score' in scores.columns


def test_compare_labels2():
    """The highest label from either input appears in the result table."""
    reference = np.asarray([5, 0, 0, 1, 1, 1, 2, 2])
    candidate = np.asarray([6, 0, 0, 1, 1, 1, 2, 3])
    scores = label.compare_labels(reference, candidate)
    assert np.max(scores.label) == np.max([reference, candidate])


def test_compare_labels3():
    """Label 0 overlaps perfectly, so its Jaccard score is exactly 1."""
    reference = np.asarray([5, 0, 0, 1, 1, 1, 2, 2])
    candidate = np.asarray([6, 0, 0, 1, 1, 1, 2, 3])
    scores = label.compare_labels(reference, candidate)
    assert scores[scores.label == 0].jaccard_score.to_numpy()[0] == 1.0


if __name__ == "__main__":
    test_compare_labels()
    test_compare_labels2()
    test_compare_labels3()
21.166667
72
0.598425
153
889
3.294118
0.248366
0.047619
0.035714
0.047619
0.404762
0.404762
0.404762
0.404762
0.404762
0.402778
0
0.082133
0.219348
889
41
73
21.682927
0.644092
0.023622
0
0.363636
0
0
0.035797
0
0
0
0
0
0.181818
1
0.136364
false
0
0.090909
0
0.227273
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0