hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
6fec6faaa91848a4b52ba40cfe4231343b1391f7
102
py
Python
app/link_shortener/apps.py
bruno5barros/API_Link_Shortener
06f03ec59187d638575cd57cee186cd2176b1841
[ "MIT" ]
null
null
null
app/link_shortener/apps.py
bruno5barros/API_Link_Shortener
06f03ec59187d638575cd57cee186cd2176b1841
[ "MIT" ]
null
null
null
app/link_shortener/apps.py
bruno5barros/API_Link_Shortener
06f03ec59187d638575cd57cee186cd2176b1841
[ "MIT" ]
null
null
null
from django.apps import AppConfig class LinkShortenerConfig(AppConfig): name = 'link_shortener'
17
37
0.784314
11
102
7.181818
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.147059
102
5
38
20.4
0.908046
0
0
0
0
0
0.137255
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
b50ee1d9b3db73792d06de1dcfe28e96fe59e01f
1,393
py
Python
utils/settings.py
sghick/tools-AutoArchiveIPA
ed9de807949d71fd952c32c1b0d6d75a6fcb7d12
[ "MIT" ]
2
2019-01-10T02:02:21.000Z
2019-05-28T01:59:54.000Z
utils/settings.py
sghick/tools-AutoArchiveIPA
ed9de807949d71fd952c32c1b0d6d75a6fcb7d12
[ "MIT" ]
null
null
null
utils/settings.py
sghick/tools-AutoArchiveIPA
ed9de807949d71fd952c32c1b0d6d75a6fcb7d12
[ "MIT" ]
null
null
null
# coding: utf-8 import os #################################################################################################### # 基本路径的配置-推荐不要修改 #################################################################################################### # 脚本的存放目录,不需要修改 kScriptRootPath = os.getcwd() + '/' # 脚本配置的根目录,不需要修改 kAutoArchiveConifgRootPath = kScriptRootPath + 'conf/' # 源代码存放的根目录,不需要修改 kAutoArchiveRepositoryRootPath = kScriptRootPath + '__repository/' # 输出文件的根目录,不需要修改 kAutoArchiveExportRootPath = kScriptRootPath + '__export/' # '.xcodeproj/.xcworkspace/Podfile'文件所在目录,必须将这些文件放在同一个目录下,用于执行build命令和git命令 def cmd_cd(repositoryName): return 'cd %s' % kAutoArchiveRepositoryRootPath + repositoryName def export_option_dis_path(targetName): return kAutoArchiveConifgRootPath + targetName + '-Dis-ExportOptions.plist' def export_option_dev_path(targetName): return kAutoArchiveConifgRootPath + targetName + '-Dev-ExportOptions.plist' def export_path_app_store(repositoryName): return kAutoArchiveExportRootPath + repositoryName + 'AppStore/' def export_path_dev_inner(repositoryName): return kAutoArchiveExportRootPath + repositoryName + 'DevInner/' def export_path_dev_outer(repositoryName): return kAutoArchiveExportRootPath + repositoryName + 'DevOuter/' def export_path_dev_rc(repositoryName): return kAutoArchiveExportRootPath + repositoryName + 'DevRC/'
37.648649
100
0.686289
106
1,393
8.801887
0.45283
0.057878
0.055734
0.257235
0.120043
0
0
0
0
0
0
0.000793
0.09476
1,393
37
101
37.648649
0.739096
0.116296
0
0
0
0
0.111328
0.046875
0
0
0
0
0
1
0.368421
false
0
0.052632
0.368421
0.789474
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
82ffd5c6c41aeba4b08c68b798e0b84cd4985531
46,773
py
Python
Framework/LanguageSupport/thrift/gen-py/MMIStandard/constraints/ttypes.py
Daimler/MOSIM_Core
b0457767415ecf14c51197cc0cb77e9f31ca01d8
[ "MIT" ]
19
2020-11-30T09:29:11.000Z
2021-12-10T06:10:11.000Z
Framework/LanguageSupport/thrift/gen-py/MMIStandard/constraints/ttypes.py
Daimler/MOSIM_Core
b0457767415ecf14c51197cc0cb77e9f31ca01d8
[ "MIT" ]
null
null
null
Framework/LanguageSupport/thrift/gen-py/MMIStandard/constraints/ttypes.py
Daimler/MOSIM_Core
b0457767415ecf14c51197cc0cb77e9f31ca01d8
[ "MIT" ]
6
2021-01-20T01:46:37.000Z
2021-09-28T10:22:14.000Z
# # Autogenerated by Thrift Compiler (0.13.0) # # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING # # options string: py # from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException from thrift.protocol.TProtocol import TProtocolException from thrift.TRecursive import fix_spec import sys import MMIStandard.math.ttypes import MMIStandard.avatar.ttypes from thrift.transport import TTransport all_structs = [] class MTranslationConstraintType(object): BOX = 0 ELLIPSOID = 1 _VALUES_TO_NAMES = { 0: "BOX", 1: "ELLIPSOID", } _NAMES_TO_VALUES = { "BOX": 0, "ELLIPSOID": 1, } class MInterval(object): """ Attributes: - Min - Max """ def __init__(self, Min=None, Max=None,): self.Min = Min self.Max = Max def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.DOUBLE: self.Min = iprot.readDouble() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.DOUBLE: self.Max = iprot.readDouble() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MInterval') if self.Min is not None: oprot.writeFieldBegin('Min', TType.DOUBLE, 1) oprot.writeDouble(self.Min) oprot.writeFieldEnd() if self.Max is not None: oprot.writeFieldBegin('Max', TType.DOUBLE, 2) oprot.writeDouble(self.Max) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.Min is None: raise TProtocolException(message='Required field Min is unset!') if self.Max is None: raise 
TProtocolException(message='Required field Max is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MInterval3(object): """ Attributes: - X - Y - Z """ def __init__(self, X=None, Y=None, Z=None,): self.X = X self.Y = Y self.Z = Z def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.X = MInterval() self.X.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.Y = MInterval() self.Y.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.Z = MInterval() self.Z.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MInterval3') if self.X is not None: oprot.writeFieldBegin('X', TType.STRUCT, 1) self.X.write(oprot) oprot.writeFieldEnd() if self.Y is not None: oprot.writeFieldBegin('Y', TType.STRUCT, 2) self.Y.write(oprot) oprot.writeFieldEnd() if self.Z is not None: oprot.writeFieldBegin('Z', TType.STRUCT, 3) self.Z.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.X is None: raise TProtocolException(message='Required field X is unset!') if self.Y is None: raise TProtocolException(message='Required field Y is unset!') if 
self.Z is None: raise TProtocolException(message='Required field Z is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MTranslationConstraint(object): """ Attributes: - Type - Limits """ def __init__(self, Type=None, Limits=None,): self.Type = Type self.Limits = Limits def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.I32: self.Type = iprot.readI32() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.Limits = MInterval3() self.Limits.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MTranslationConstraint') if self.Type is not None: oprot.writeFieldBegin('Type', TType.I32, 1) oprot.writeI32(self.Type) oprot.writeFieldEnd() if self.Limits is not None: oprot.writeFieldBegin('Limits', TType.STRUCT, 2) self.Limits.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.Type is None: raise TProtocolException(message='Required field Type is unset!') if self.Limits is None: raise TProtocolException(message='Required field Limits is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % 
(self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MRotationConstraint(object): """ Attributes: - Limits """ def __init__(self, Limits=None,): self.Limits = Limits def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 2: if ftype == TType.STRUCT: self.Limits = MInterval3() self.Limits.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MRotationConstraint') if self.Limits is not None: oprot.writeFieldBegin('Limits', TType.STRUCT, 2) self.Limits.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.Limits is None: raise TProtocolException(message='Required field Limits is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MGeometryConstraint(object): """ Attributes: - ParentObjectID - ParentToConstraint - TranslationConstraint - RotationConstraint - WeightingFactor """ def __init__(self, ParentObjectID=None, ParentToConstraint=None, TranslationConstraint=None, RotationConstraint=None, WeightingFactor=None,): self.ParentObjectID = ParentObjectID 
self.ParentToConstraint = ParentToConstraint self.TranslationConstraint = TranslationConstraint self.RotationConstraint = RotationConstraint self.WeightingFactor = WeightingFactor def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.ParentObjectID = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ParentToConstraint = MMIStandard.math.ttypes.MTransform() self.ParentToConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.TranslationConstraint = MTranslationConstraint() self.TranslationConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.RotationConstraint = MRotationConstraint() self.RotationConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 5: if ftype == TType.DOUBLE: self.WeightingFactor = iprot.readDouble() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MGeometryConstraint') if self.ParentObjectID is not None: oprot.writeFieldBegin('ParentObjectID', TType.STRING, 1) oprot.writeString(self.ParentObjectID.encode('utf-8') if sys.version_info[0] == 2 else self.ParentObjectID) oprot.writeFieldEnd() if self.ParentToConstraint is not None: oprot.writeFieldBegin('ParentToConstraint', TType.STRUCT, 2) self.ParentToConstraint.write(oprot) oprot.writeFieldEnd() if self.TranslationConstraint is not None: 
oprot.writeFieldBegin('TranslationConstraint', TType.STRUCT, 3) self.TranslationConstraint.write(oprot) oprot.writeFieldEnd() if self.RotationConstraint is not None: oprot.writeFieldBegin('RotationConstraint', TType.STRUCT, 4) self.RotationConstraint.write(oprot) oprot.writeFieldEnd() if self.WeightingFactor is not None: oprot.writeFieldBegin('WeightingFactor', TType.DOUBLE, 5) oprot.writeDouble(self.WeightingFactor) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.ParentObjectID is None: raise TProtocolException(message='Required field ParentObjectID is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MVelocityConstraint(object): """ Attributes: - ParentObjectID - ParentToConstraint - TranslationalVelocity - RotationalVelocity - WeightingFactor """ def __init__(self, ParentObjectID=None, ParentToConstraint=None, TranslationalVelocity=None, RotationalVelocity=None, WeightingFactor=None,): self.ParentObjectID = ParentObjectID self.ParentToConstraint = ParentToConstraint self.TranslationalVelocity = TranslationalVelocity self.RotationalVelocity = RotationalVelocity self.WeightingFactor = WeightingFactor def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.ParentObjectID = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: 
self.ParentToConstraint = MMIStandard.math.ttypes.MTransform() self.ParentToConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.TranslationalVelocity = MMIStandard.math.ttypes.MVector3() self.TranslationalVelocity.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.RotationalVelocity = MMIStandard.math.ttypes.MVector3() self.RotationalVelocity.read(iprot) else: iprot.skip(ftype) elif fid == 5: if ftype == TType.DOUBLE: self.WeightingFactor = iprot.readDouble() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MVelocityConstraint') if self.ParentObjectID is not None: oprot.writeFieldBegin('ParentObjectID', TType.STRING, 1) oprot.writeString(self.ParentObjectID.encode('utf-8') if sys.version_info[0] == 2 else self.ParentObjectID) oprot.writeFieldEnd() if self.ParentToConstraint is not None: oprot.writeFieldBegin('ParentToConstraint', TType.STRUCT, 2) self.ParentToConstraint.write(oprot) oprot.writeFieldEnd() if self.TranslationalVelocity is not None: oprot.writeFieldBegin('TranslationalVelocity', TType.STRUCT, 3) self.TranslationalVelocity.write(oprot) oprot.writeFieldEnd() if self.RotationalVelocity is not None: oprot.writeFieldBegin('RotationalVelocity', TType.STRUCT, 4) self.RotationalVelocity.write(oprot) oprot.writeFieldEnd() if self.WeightingFactor is not None: oprot.writeFieldBegin('WeightingFactor', TType.DOUBLE, 5) oprot.writeDouble(self.WeightingFactor) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.ParentObjectID is None: raise TProtocolException(message='Required field ParentObjectID is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return 
'%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MAccelerationConstraint(object): """ Attributes: - ParentObjectID - ParentToConstraint - TranslationalAcceleration - RotationalAcceleration - WeightingFactor """ def __init__(self, ParentObjectID=None, ParentToConstraint=None, TranslationalAcceleration=None, RotationalAcceleration=None, WeightingFactor=None,): self.ParentObjectID = ParentObjectID self.ParentToConstraint = ParentToConstraint self.TranslationalAcceleration = TranslationalAcceleration self.RotationalAcceleration = RotationalAcceleration self.WeightingFactor = WeightingFactor def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.ParentObjectID = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.ParentToConstraint = MMIStandard.math.ttypes.MTransform() self.ParentToConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.TranslationalAcceleration = MMIStandard.math.ttypes.MVector3() self.TranslationalAcceleration.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.RotationalAcceleration = MMIStandard.math.ttypes.MVector3() self.RotationalAcceleration.read(iprot) else: iprot.skip(ftype) elif fid == 5: if ftype == TType.DOUBLE: self.WeightingFactor = iprot.readDouble() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is 
not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MAccelerationConstraint') if self.ParentObjectID is not None: oprot.writeFieldBegin('ParentObjectID', TType.STRING, 1) oprot.writeString(self.ParentObjectID.encode('utf-8') if sys.version_info[0] == 2 else self.ParentObjectID) oprot.writeFieldEnd() if self.ParentToConstraint is not None: oprot.writeFieldBegin('ParentToConstraint', TType.STRUCT, 2) self.ParentToConstraint.write(oprot) oprot.writeFieldEnd() if self.TranslationalAcceleration is not None: oprot.writeFieldBegin('TranslationalAcceleration', TType.STRUCT, 3) self.TranslationalAcceleration.write(oprot) oprot.writeFieldEnd() if self.RotationalAcceleration is not None: oprot.writeFieldBegin('RotationalAcceleration', TType.STRUCT, 4) self.RotationalAcceleration.write(oprot) oprot.writeFieldEnd() if self.WeightingFactor is not None: oprot.writeFieldBegin('WeightingFactor', TType.DOUBLE, 5) oprot.writeDouble(self.WeightingFactor) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.ParentObjectID is None: raise TProtocolException(message='Required field ParentObjectID is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MPathConstraint(object): """ Attributes: - PolygonPoints - WeightingFactor """ def __init__(self, PolygonPoints=None, WeightingFactor=None,): self.PolygonPoints = PolygonPoints self.WeightingFactor = WeightingFactor def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) 
return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.LIST: self.PolygonPoints = [] (_etype3, _size0) = iprot.readListBegin() for _i4 in range(_size0): _elem5 = MGeometryConstraint() _elem5.read(iprot) self.PolygonPoints.append(_elem5) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.DOUBLE: self.WeightingFactor = iprot.readDouble() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MPathConstraint') if self.PolygonPoints is not None: oprot.writeFieldBegin('PolygonPoints', TType.LIST, 1) oprot.writeListBegin(TType.STRUCT, len(self.PolygonPoints)) for iter6 in self.PolygonPoints: iter6.write(oprot) oprot.writeListEnd() oprot.writeFieldEnd() if self.WeightingFactor is not None: oprot.writeFieldBegin('WeightingFactor', TType.DOUBLE, 2) oprot.writeDouble(self.WeightingFactor) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.PolygonPoints is None: raise TProtocolException(message='Required field PolygonPoints is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MJointConstraint(object): """ Attributes: - JointType - GeometryConstraint - VelocityConstraint - AccelerationConstraint """ def __init__(self, JointType=None, GeometryConstraint=None, VelocityConstraint=None, AccelerationConstraint=None,): self.JointType = JointType self.GeometryConstraint = GeometryConstraint self.VelocityConstraint = 
VelocityConstraint self.AccelerationConstraint = AccelerationConstraint def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.I32: self.JointType = iprot.readI32() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.GeometryConstraint = MGeometryConstraint() self.GeometryConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.VelocityConstraint = MVelocityConstraint() self.VelocityConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRUCT: self.AccelerationConstraint = MAccelerationConstraint() self.AccelerationConstraint.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MJointConstraint') if self.JointType is not None: oprot.writeFieldBegin('JointType', TType.I32, 1) oprot.writeI32(self.JointType) oprot.writeFieldEnd() if self.GeometryConstraint is not None: oprot.writeFieldBegin('GeometryConstraint', TType.STRUCT, 2) self.GeometryConstraint.write(oprot) oprot.writeFieldEnd() if self.VelocityConstraint is not None: oprot.writeFieldBegin('VelocityConstraint', TType.STRUCT, 3) self.VelocityConstraint.write(oprot) oprot.writeFieldEnd() if self.AccelerationConstraint is not None: oprot.writeFieldBegin('AccelerationConstraint', TType.STRUCT, 4) self.AccelerationConstraint.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.JointType is None: raise 
TProtocolException(message='Required field JointType is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MJointPathConstraint(object): """ Attributes: - JointType - PathConstraint """ def __init__(self, JointType=None, PathConstraint=None,): self.JointType = JointType self.PathConstraint = PathConstraint def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.I32: self.JointType = iprot.readI32() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.PathConstraint = MPathConstraint() self.PathConstraint.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MJointPathConstraint') if self.JointType is not None: oprot.writeFieldBegin('JointType', TType.I32, 1) oprot.writeI32(self.JointType) oprot.writeFieldEnd() if self.PathConstraint is not None: oprot.writeFieldBegin('PathConstraint', TType.STRUCT, 2) self.PathConstraint.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.JointType is None: raise TProtocolException(message='Required field JointType is unset!') if self.PathConstraint is None: raise TProtocolException(message='Required field PathConstraint is unset!') return 
def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MPostureConstraint(object): """ Attributes: - posture - JointConstraints """ def __init__(self, posture=None, JointConstraints=None,): self.posture = posture self.JointConstraints = JointConstraints def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.posture = MMIStandard.avatar.ttypes.MAvatarPostureValues() self.posture.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.LIST: self.JointConstraints = [] (_etype10, _size7) = iprot.readListBegin() for _i11 in range(_size7): _elem12 = MJointConstraint() _elem12.read(iprot) self.JointConstraints.append(_elem12) iprot.readListEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MPostureConstraint') if self.posture is not None: oprot.writeFieldBegin('posture', TType.STRUCT, 1) self.posture.write(oprot) oprot.writeFieldEnd() if self.JointConstraints is not None: oprot.writeFieldBegin('JointConstraints', TType.LIST, 2) oprot.writeListBegin(TType.STRUCT, len(self.JointConstraints)) for iter13 in self.JointConstraints: iter13.write(oprot) oprot.writeListEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def 
validate(self): if self.posture is None: raise TProtocolException(message='Required field posture is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class MConstraint(object): """ Attributes: - ID - GeometryConstraint - VelocityConstraint - AccelerationConstraint - PathConstraint - JointPathConstraint - PostureConstraint - JointConstraint - Properties """ def __init__(self, ID=None, GeometryConstraint=None, VelocityConstraint=None, AccelerationConstraint=None, PathConstraint=None, JointPathConstraint=None, PostureConstraint=None, JointConstraint=None, Properties=None,): self.ID = ID self.GeometryConstraint = GeometryConstraint self.VelocityConstraint = VelocityConstraint self.AccelerationConstraint = AccelerationConstraint self.PathConstraint = PathConstraint self.JointPathConstraint = JointPathConstraint self.PostureConstraint = PostureConstraint self.JointConstraint = JointConstraint self.Properties = Properties def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.ID = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.GeometryConstraint = MGeometryConstraint() self.GeometryConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRUCT: self.VelocityConstraint = MVelocityConstraint() self.VelocityConstraint.read(iprot) else: iprot.skip(ftype) elif fid 
== 4: if ftype == TType.STRUCT: self.AccelerationConstraint = MAccelerationConstraint() self.AccelerationConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 5: if ftype == TType.STRUCT: self.PathConstraint = MPathConstraint() self.PathConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 6: if ftype == TType.STRUCT: self.JointPathConstraint = MJointPathConstraint() self.JointPathConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 7: if ftype == TType.STRUCT: self.PostureConstraint = MPostureConstraint() self.PostureConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 8: if ftype == TType.STRUCT: self.JointConstraint = MJointConstraint() self.JointConstraint.read(iprot) else: iprot.skip(ftype) elif fid == 9: if ftype == TType.MAP: self.Properties = {} (_ktype15, _vtype16, _size14) = iprot.readMapBegin() for _i18 in range(_size14): _key19 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val20 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.Properties[_key19] = _val20 iprot.readMapEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('MConstraint') if self.ID is not None: oprot.writeFieldBegin('ID', TType.STRING, 1) oprot.writeString(self.ID.encode('utf-8') if sys.version_info[0] == 2 else self.ID) oprot.writeFieldEnd() if self.GeometryConstraint is not None: oprot.writeFieldBegin('GeometryConstraint', TType.STRUCT, 2) self.GeometryConstraint.write(oprot) oprot.writeFieldEnd() if self.VelocityConstraint is not None: oprot.writeFieldBegin('VelocityConstraint', TType.STRUCT, 3) self.VelocityConstraint.write(oprot) oprot.writeFieldEnd() if self.AccelerationConstraint is not None: 
oprot.writeFieldBegin('AccelerationConstraint', TType.STRUCT, 4) self.AccelerationConstraint.write(oprot) oprot.writeFieldEnd() if self.PathConstraint is not None: oprot.writeFieldBegin('PathConstraint', TType.STRUCT, 5) self.PathConstraint.write(oprot) oprot.writeFieldEnd() if self.JointPathConstraint is not None: oprot.writeFieldBegin('JointPathConstraint', TType.STRUCT, 6) self.JointPathConstraint.write(oprot) oprot.writeFieldEnd() if self.PostureConstraint is not None: oprot.writeFieldBegin('PostureConstraint', TType.STRUCT, 7) self.PostureConstraint.write(oprot) oprot.writeFieldEnd() if self.JointConstraint is not None: oprot.writeFieldBegin('JointConstraint', TType.STRUCT, 8) self.JointConstraint.write(oprot) oprot.writeFieldEnd() if self.Properties is not None: oprot.writeFieldBegin('Properties', TType.MAP, 9) oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.Properties)) for kiter21, viter22 in self.Properties.items(): oprot.writeString(kiter21.encode('utf-8') if sys.version_info[0] == 2 else kiter21) oprot.writeString(viter22.encode('utf-8') if sys.version_info[0] == 2 else viter22) oprot.writeMapEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): if self.ID is None: raise TProtocolException(message='Required field ID is unset!') return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) all_structs.append(MInterval) MInterval.thrift_spec = ( None, # 0 (1, TType.DOUBLE, 'Min', None, None, ), # 1 (2, TType.DOUBLE, 'Max', None, None, ), # 2 ) all_structs.append(MInterval3) MInterval3.thrift_spec = ( None, # 0 (1, TType.STRUCT, 'X', [MInterval, None], None, ), # 1 (2, TType.STRUCT, 'Y', [MInterval, None], None, ), # 2 (3, TType.STRUCT, 'Z', [MInterval, None], None, 
), # 3 ) all_structs.append(MTranslationConstraint) MTranslationConstraint.thrift_spec = ( None, # 0 (1, TType.I32, 'Type', None, None, ), # 1 (2, TType.STRUCT, 'Limits', [MInterval3, None], None, ), # 2 ) all_structs.append(MRotationConstraint) MRotationConstraint.thrift_spec = ( None, # 0 None, # 1 (2, TType.STRUCT, 'Limits', [MInterval3, None], None, ), # 2 ) all_structs.append(MGeometryConstraint) MGeometryConstraint.thrift_spec = ( None, # 0 (1, TType.STRING, 'ParentObjectID', 'UTF8', None, ), # 1 (2, TType.STRUCT, 'ParentToConstraint', [MMIStandard.math.ttypes.MTransform, None], None, ), # 2 (3, TType.STRUCT, 'TranslationConstraint', [MTranslationConstraint, None], None, ), # 3 (4, TType.STRUCT, 'RotationConstraint', [MRotationConstraint, None], None, ), # 4 (5, TType.DOUBLE, 'WeightingFactor', None, None, ), # 5 ) all_structs.append(MVelocityConstraint) MVelocityConstraint.thrift_spec = ( None, # 0 (1, TType.STRING, 'ParentObjectID', 'UTF8', None, ), # 1 (2, TType.STRUCT, 'ParentToConstraint', [MMIStandard.math.ttypes.MTransform, None], None, ), # 2 (3, TType.STRUCT, 'TranslationalVelocity', [MMIStandard.math.ttypes.MVector3, None], None, ), # 3 (4, TType.STRUCT, 'RotationalVelocity', [MMIStandard.math.ttypes.MVector3, None], None, ), # 4 (5, TType.DOUBLE, 'WeightingFactor', None, None, ), # 5 ) all_structs.append(MAccelerationConstraint) MAccelerationConstraint.thrift_spec = ( None, # 0 (1, TType.STRING, 'ParentObjectID', 'UTF8', None, ), # 1 (2, TType.STRUCT, 'ParentToConstraint', [MMIStandard.math.ttypes.MTransform, None], None, ), # 2 (3, TType.STRUCT, 'TranslationalAcceleration', [MMIStandard.math.ttypes.MVector3, None], None, ), # 3 (4, TType.STRUCT, 'RotationalAcceleration', [MMIStandard.math.ttypes.MVector3, None], None, ), # 4 (5, TType.DOUBLE, 'WeightingFactor', None, None, ), # 5 ) all_structs.append(MPathConstraint) MPathConstraint.thrift_spec = ( None, # 0 (1, TType.LIST, 'PolygonPoints', (TType.STRUCT, [MGeometryConstraint, None], False), None, 
), # 1 (2, TType.DOUBLE, 'WeightingFactor', None, None, ), # 2 ) all_structs.append(MJointConstraint) MJointConstraint.thrift_spec = ( None, # 0 (1, TType.I32, 'JointType', None, None, ), # 1 (2, TType.STRUCT, 'GeometryConstraint', [MGeometryConstraint, None], None, ), # 2 (3, TType.STRUCT, 'VelocityConstraint', [MVelocityConstraint, None], None, ), # 3 (4, TType.STRUCT, 'AccelerationConstraint', [MAccelerationConstraint, None], None, ), # 4 ) all_structs.append(MJointPathConstraint) MJointPathConstraint.thrift_spec = ( None, # 0 (1, TType.I32, 'JointType', None, None, ), # 1 (2, TType.STRUCT, 'PathConstraint', [MPathConstraint, None], None, ), # 2 ) all_structs.append(MPostureConstraint) MPostureConstraint.thrift_spec = ( None, # 0 (1, TType.STRUCT, 'posture', [MMIStandard.avatar.ttypes.MAvatarPostureValues, None], None, ), # 1 (2, TType.LIST, 'JointConstraints', (TType.STRUCT, [MJointConstraint, None], False), None, ), # 2 ) all_structs.append(MConstraint) MConstraint.thrift_spec = ( None, # 0 (1, TType.STRING, 'ID', 'UTF8', None, ), # 1 (2, TType.STRUCT, 'GeometryConstraint', [MGeometryConstraint, None], None, ), # 2 (3, TType.STRUCT, 'VelocityConstraint', [MVelocityConstraint, None], None, ), # 3 (4, TType.STRUCT, 'AccelerationConstraint', [MAccelerationConstraint, None], None, ), # 4 (5, TType.STRUCT, 'PathConstraint', [MPathConstraint, None], None, ), # 5 (6, TType.STRUCT, 'JointPathConstraint', [MJointPathConstraint, None], None, ), # 6 (7, TType.STRUCT, 'PostureConstraint', [MPostureConstraint, None], None, ), # 7 (8, TType.STRUCT, 'JointConstraint', [MJointConstraint, None], None, ), # 8 (9, TType.MAP, 'Properties', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 9 ) fix_spec(all_structs) del all_structs
38.057771
222
0.586578
4,575
46,773
5.824262
0.045464
0.016888
0.030399
0.036478
0.778203
0.730841
0.696803
0.668918
0.657397
0.64779
0
0.010374
0.311676
46,773
1,228
223
38.088762
0.81727
0.02386
0
0.719124
1
0
0.047419
0.006875
0
0
0
0
0
1
0.083665
false
0
0.006972
0.023904
0.179283
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
d203639bd992204ada2cfc3bb675d04a169c0ae0
27
py
Python
tests/__init__.py
ms32035/Pyhiveapi
c84389aa8118acd006a4b228e58b6a966e49e7dc
[ "MIT" ]
10
2020-10-11T20:50:36.000Z
2021-05-01T16:11:19.000Z
tests/__init__.py
ms32035/Pyhiveapi
c84389aa8118acd006a4b228e58b6a966e49e7dc
[ "MIT" ]
11
2020-10-27T19:34:12.000Z
2021-03-11T22:30:13.000Z
tests/__init__.py
ms32035/Pyhiveapi
c84389aa8118acd006a4b228e58b6a966e49e7dc
[ "MIT" ]
8
2020-10-05T18:55:41.000Z
2021-03-04T23:45:05.000Z
"""Tests for pyhiveapi."""
13.5
26
0.62963
3
27
5.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.111111
27
1
27
27
0.708333
0.740741
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
d21c90df8fd631bb59dcc22daf5caef9103a8e5f
89
py
Python
tests/perf/test_long_cycles_nbrows_cycle_length_31000_200.py
shaido987/pyaf
b9afd089557bed6b90b246d3712c481ae26a1957
[ "BSD-3-Clause" ]
377
2016-10-13T20:52:44.000Z
2022-03-29T18:04:14.000Z
tests/perf/test_long_cycles_nbrows_cycle_length_31000_200.py
ysdede/pyaf
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
[ "BSD-3-Clause" ]
160
2016-10-13T16:11:53.000Z
2022-03-28T04:21:34.000Z
tests/perf/test_long_cycles_nbrows_cycle_length_31000_200.py
ysdede/pyaf
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
[ "BSD-3-Clause" ]
63
2017-03-09T14:51:18.000Z
2022-03-27T20:52:57.000Z
import tests.perf.test_cycles_full_long_long as gen gen.test_nbrows_cycle(31000 , 200)
17.8
51
0.831461
16
89
4.25
0.8125
0
0
0
0
0
0
0
0
0
0
0.1
0.101124
89
4
52
22.25
0.75
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
d25232a1e2443e82cfd1c8448e61c4f764464ef4
83
py
Python
evernotebot/wsgi.py
fakegit/evernote-telegram-bot
a8eb03d1bed9670ef927db952100907520ac3a90
[ "MIT" ]
51
2016-08-23T15:33:09.000Z
2022-02-04T23:12:01.000Z
evernotebot/wsgi.py
fakegit/evernote-telegram-bot
a8eb03d1bed9670ef927db952100907520ac3a90
[ "MIT" ]
34
2016-09-08T07:17:27.000Z
2021-09-06T21:54:41.000Z
evernotebot/wsgi.py
fakegit/evernote-telegram-bot
a8eb03d1bed9670ef927db952100907520ac3a90
[ "MIT" ]
17
2016-11-28T14:12:04.000Z
2022-01-26T11:13:24.000Z
from evernotebot.app import EvernoteBotApplication app = EvernoteBotApplication()
20.75
50
0.855422
7
83
10.142857
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.096386
83
3
51
27.666667
0.946667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
d256c3676a1fbc08c87fe5c91eb9f7d668021b0d
5,004
py
Python
tests/test_properties.py
swansonk14/entry-cli
2b426ebf706354ceab807a700e717a880fda699a
[ "CC0-1.0" ]
null
null
null
tests/test_properties.py
swansonk14/entry-cli
2b426ebf706354ceab807a700e717a880fda699a
[ "CC0-1.0" ]
null
null
null
tests/test_properties.py
swansonk14/entry-cli
2b426ebf706354ceab807a700e717a880fda699a
[ "CC0-1.0" ]
null
null
null
from nose.tools import * import openbabel import pybel import os from .context import calc_props THIS_DIR = os.path.dirname(os.path.abspath(__file__)) def test_smiles_benzene(): mol = calc_props.smiles_to_ob("c1ccccc1") assert(isinstance(mol, openbabel.OBMol)) assert_equals(mol.NumAtoms(), 12) def test_rb_basic(): # DNM mol = calc_props.smiles_to_ob("CC(C1=CC(C(C)=CC(N2C)=O)=C2C3=C1N4CO3)=CC4=O") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 0) # Ribocil C mol = calc_props.smiles_to_ob("C1CC(CN(C1)CC2=CN(C=N2)C3=NC=CC=N3)C4=NC(=O)C=C(N4)C5=CC=CS5") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 5) # Triphenylphosphine mol = calc_props.smiles_to_ob("C1(P(C2=CC=CC=C2)C3=CC=CC=C3)=CC=CC=C1") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 3) def test_rb_alcohol(): # n-butanol mol = calc_props.smiles_to_ob("CCCCO") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 2) def test_rb_amine(): # n-butylamine mol = calc_props.smiles_to_ob("CCCCN") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 2) def test_rb_amide(): # Ala-Ala mol = calc_props.smiles_to_ob("[H]N[C@H](C(N[C@H](C(O)=O)C)=O)C") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 3) def test_rb_ketene(): # pent-1-en-1-one mol = calc_props.smiles_to_ob("[H]C(CCC)=C=O") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 2) def test_rb_allene(): # 3-methylocta-3,4-diene mol = calc_props.smiles_to_ob("[H]C(CCC)=C=C(C)CC") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 3) def test_rb_alkyne(): # but-1-yn-1-ylbenzene mol = calc_props.smiles_to_ob("CCC#CC1=CC=CC=C1") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 1) def test_rb_symmetric_alkyne(): # hex-3-yne mol = calc_props.smiles_to_ob("CCC#CCC") pymol = pybel.Molecule(mol) 
assert_equals(calc_props.rotatable_bonds(pymol), 1) def test_rb_cyclohexane_alkyne(): # but-1-yn-1-ylcyclohexane mol = calc_props.smiles_to_ob("CCC#CC1CCCCC1") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 1) def test_rb_cyclohexene_alkyne(): # 1-(but-1-yn-1-yl)cyclohex-1-ene mol = calc_props.smiles_to_ob("CCC#CC1=CCCCC1") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 1) def test_rb_alkene(): # (E)-but-1-en-1-ylbenzene mol = calc_props.smiles_to_ob("CC/C=C/C1=CC=CC=C1") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 2) def test_rb_nitrile(): # (E)-5-phenylpent-4-enenitrile mol = calc_props.smiles_to_ob("N#CCC/C=C/C1=CC=CC=C1") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 3) def test_rb_azide(): # (E)-(4-azidobut-1-en-1-yl)benzene mol = calc_props.smiles_to_ob("[N-]=[N+]=NCC/C=C/C1=CC=CC=C1") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 4) def test_rb_ester(): # phenyl butyrate mol = calc_props.smiles_to_ob("CCCC(OC1=CC=CC=C1)=O") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 4) def test_rb_ketone(): # 1-phenylpentan-2-one mol = calc_props.smiles_to_ob("CCCC(CC1=CC=CC=C1)=O") pymol = pybel.Molecule(mol) assert_equals(calc_props.rotatable_bonds(pymol), 4) def test_pbf(): obmol = openbabel.OBMol() obConv = openbabel.OBConversion() obConv.SetInFormat("mol") obConv.ReadFile(obmol, os.path.join(THIS_DIR, "data/triphenylphosphine.mol")) pymol = pybel.Molecule(obmol) assert_almost_equal(calc_props.calc_pbf(pymol), 1.0072297, 6, 1) def test_glob(): obmol = openbabel.OBMol() obConv = openbabel.OBConversion() obConv.SetInFormat("mol") obConv.ReadFile(obmol, os.path.join(THIS_DIR, "data/triphenylphosphine.mol")) pymol = pybel.Molecule(obmol) assert_almost_equal(calc_props.calc_glob(pymol), 0.245503, 6, 1) def test_glob_benzene(): mol = calc_props.smiles_to_ob("c1ccccc1") properties = 
calc_props.average_properties(mol) assert_almost_equal(properties['glob'], 0, 2, 1) def test_adamantane(): mol = calc_props.smiles_to_ob("C1C3CC2CC(CC1C2)C3") properties = calc_props.average_properties(mol) assert_almost_equal(properties['glob'], 1, 2, 1) def test_cipro(): mol = calc_props.smiles_to_ob("O=C1C(C(O)=O)=CN(C2CC2)C3=CC(N4CCNCC4)=C(F)C=C31") properties = calc_props.average_properties(mol) assert_almost_equal(properties['glob'], 0.04, 2, 1) def test_dnm(): mol = calc_props.smiles_to_ob("CC(C1=CC(C(C)=CC(N2C)=O)=C2C3=C1N4CO3)=CC4=O") properties = calc_props.average_properties(mol) assert_almost_equal(properties['glob'], 0.024, 2, 1)
33.810811
97
0.705236
805
5,004
4.145342
0.175155
0.124064
0.079113
0.118669
0.776146
0.760863
0.721307
0.665568
0.614025
0.614025
0
0.033598
0.143485
5,004
148
98
33.810811
0.744984
0.063149
0
0.471698
0
0.056604
0.1231
0.079212
0
0
0
0
0.235849
1
0.207547
false
0
0.04717
0
0.254717
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
96711455fe46051c6debbac9a1e3154662ce4bfd
260
py
Python
pandas_gbq/exceptions.py
cbandy/pandas-gbq
5d0346aa02e4a4473c050cf773ef9ed1cbba1b1c
[ "BSD-3-Clause" ]
1
2022-01-09T19:33:34.000Z
2022-01-09T19:33:34.000Z
pandas_gbq/exceptions.py
cbandy/pandas-gbq
5d0346aa02e4a4473c050cf773ef9ed1cbba1b1c
[ "BSD-3-Clause" ]
null
null
null
pandas_gbq/exceptions.py
cbandy/pandas-gbq
5d0346aa02e4a4473c050cf773ef9ed1cbba1b1c
[ "BSD-3-Clause" ]
null
null
null
class AccessDenied(ValueError): """ Raised when invalid credentials are provided, or tokens have expired. """ pass class InvalidPrivateKeyFormat(ValueError): """ Raised when provided private key has invalid format. """ pass
17.333333
73
0.673077
26
260
6.730769
0.730769
0.182857
0.228571
0
0
0
0
0
0
0
0
0
0.246154
260
14
74
18.571429
0.892857
0.469231
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
9671599b749c27d930d642daddaf1ff625e93216
95
py
Python
lclpy/problem/__init__.py
nobody1570/lspy
1cf6efbafbbf8ddb54ba7a875e82c562f010edd1
[ "MIT" ]
3
2021-11-27T22:11:38.000Z
2022-02-10T11:42:06.000Z
lclpy/problem/__init__.py
nobody1570/lspy
1cf6efbafbbf8ddb54ba7a875e82c562f010edd1
[ "MIT" ]
null
null
null
lclpy/problem/__init__.py
nobody1570/lspy
1cf6efbafbbf8ddb54ba7a875e82c562f010edd1
[ "MIT" ]
null
null
null
"""This package contains a template class for Problems and implementations of said class. """
19
77
0.768421
13
95
5.615385
0.923077
0
0
0
0
0
0
0
0
0
0
0
0.157895
95
4
78
23.75
0.9125
0.905263
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
968db3c44113026a31cd3284d0ca03240b07b5c8
9,866
py
Python
Algorithm/Recognition/tr_sq_recognition_test.py
aliaydin96/EngineeringDesign
185630cbba509ca1f872c3a3f847e9b155c5172b
[ "MIT" ]
1
2019-11-16T09:13:37.000Z
2019-11-16T09:13:37.000Z
Algorithm/Recognition/tr_sq_recognition_test.py
aliaydin96/EngineeringDesign
185630cbba509ca1f872c3a3f847e9b155c5172b
[ "MIT" ]
null
null
null
Algorithm/Recognition/tr_sq_recognition_test.py
aliaydin96/EngineeringDesign
185630cbba509ca1f872c3a3f847e9b155c5172b
[ "MIT" ]
null
null
null
import numpy as np import math #def recognit(forCounter,x_in,y_in,heading): #data = np.genfromtxt('data'+str(forCounter)+'.csv',delimiter=',') x_in = 1000 y_in = -800 heading = math.pi*0.1355 #data = np.genfromtxt('T1/data36.csv',delimiter=',') data = np.concatenate ((data[211:400], data[0:211]),axis=0) pos_flag = 0 heading-=math.pi/2 global min_ind,max_ind theta = [] for i in range(400): theta.append(i*math.pi/200) for i in range(400): if (data[i]==0): data[i] = 500 rmin=min(data) rmin_ind=np.argmin(data) if(rmin_ind>370)|(rmin_ind<30): pos_flag = 1 data = np.concatenate ((data[100:400], data[0:100]),axis=0) rmin=min(data) rmin_ind=np.argmin(data) for i in range(30): if(data[(rmin_ind+i)] < 240): max_ind = rmin_ind+i+1 if(data[(rmin_ind-i)] < 240): min_ind = rmin_ind-i sel_r = data[min_ind:(max_ind+1)] sel_th = theta[min_ind:(max_ind+1)] rm_ind=np.argmin(sel_r) sel_x = np.multiply(sel_r,np.cos(sel_th)) sel_y = np.multiply(sel_r,np.sin(sel_th)) der = sel_r[1:len(sel_r)]-sel_r[0:(len(sel_r)-1)] filt_der = np.convolve(der,[1/3, 1/3, 1/3]) filt_der = filt_der[1:(len(filt_der)-1)] xmin = sel_x[rm_ind] ymin = sel_y[rm_ind] p1x = sel_x[0] p1y = sel_y[0] p4x = sel_x[(len(sel_x)-1)] p4y = sel_y[(len(sel_y)-1)] rms = math.sqrt(sum(np.multiply(der,der))/len(der)) rms = math.sqrt(rms) for i in range(1,len(der)): if(filt_der[i]>=-rms): p2x = sel_x[i] p2y = sel_y[i] break for i in range(1,len(der)): if(filt_der[(len(filt_der)-i)] <= rms): p3x = sel_x[(len(sel_r)-i)] p3y = sel_y[(len(sel_th)-i)] break de1 = np.power((p1x-p2x),2)+np.power((p1y-p2y),2); de2 = np.power((p3x-p2x),2)+np.power((p3y-p2y),2); de3 = np.power((p4x-p3x),2)+np.power((p3y-p4y),2); dq1 = np.power((p1x-p3x),2)+np.power((p1y-p3y),2); dq2 = np.power((p2x-p4x),2)+np.power((p2y-p4y),2); a1 = (de1+de2-dq1)/(2*math.sqrt(de1*de2)); a2 = (de3+de2-dq2)/(2*math.sqrt(de3*de2)); a1 = math.acos(a1)*180/math.pi a2 = math.acos(a2)*180/math.pi orian = (p3x-p2x)*(p2x+p3x)+(p3y-p2y)*(p2y+p3y) orian = 
orian/(math.sqrt(de2*(np.power((p2x+p3x),2)+np.power((p2y+p3y),2)))) orian = math.acos(orian)*180/math.pi d1 = np.power((ymin-p1y),2)+np.power((xmin-p1x),2) d2 = np.power((ymin-p4y),2)+np.power((xmin-p4x),2) corner_angle = np.power((p1x-p4x),2)+np.power((p1y-p4y),2); corner_angle = math.acos((d1+d2-corner_angle)/(2*math.sqrt(d1*d2))) corner_angle = corner_angle*180/math.pi; #classification if(de2 < 1300): if(corner_angle < 75): print("T cor.an= ",corner_angle) r_center = rmin+ 49.0748 x_center = x_in+r_center*np.cos(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) y_center = y_in+r_center*np.sin(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) x_corner = x_in+rmin*np.cos(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) y_corner = y_in+rmin*np.sin(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) print(x_center,y_center,x_corner,y_corner,0) print( math.sqrt(np.power((x_center-x_corner),2)+np.power((y_center-y_corner),2))*math.sqrt(3)) #return x_center,y_center,x_corner,y_corner,0 if(corner_angle > 75): print("S cor.an= ",corner_angle) r_center = rmin+49.5 x_center = x_in+r_center*np.cos(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) y_center = y_in+r_center*np.sin(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) x_corner = x_in+rmin*np.cos(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) y_corner = y_in+rmin*np.sin(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) print(x_center,y_center,x_corner,y_corner,1) print( math.sqrt(np.power((x_center-x_corner),2)+np.power((y_center-y_corner),2))*math.sqrt(2)) #return x_center,y_center,x_corner,y_corner,1 if((a1 >= 130)&(de1 > 400)): if(a2 < 80): print("T ed.an2= ",a2) xc1 = p2x yc1 = p2y xc2 = p3x yc2 = p3y leng = math.sqrt(np.power((xc1-xc2),2)+np.power((yc1-yc2),2)) unit_vectorx = 85*(xc1-xc2)/leng unit_vectory = 85*(yc1-yc2)/leng x_corner1 = xc2 + unit_vectorx y_corner1 = yc2 + unit_vectory x_corner2 = xc2 y_corner2 = yc2 r1 = math.sqrt(np.power(x_corner1,2)+np.power(y_corner1,2)) t1 = np.arctan2(y_corner1,x_corner1) r2 
= math.sqrt(np.power(x_corner2,2)+np.power(y_corner2,2)) t2 = np.arctan2(y_corner2,x_corner2) x_corner1 = x_in+r1*np.cos(t1+heading+pos_flag*math.pi/2) y_corner1 = y_in+r1*np.sin(t1+heading+pos_flag*math.pi/2) x_corner2 = x_in+r2*np.cos(t2+heading+pos_flag*math.pi/2) y_corner2 = y_in+r2*np.sin(t2+heading+pos_flag*math.pi/2) print(x_corner1,y_corner1,x_corner2,y_corner2,3) print(math.sqrt(np.power((x_corner1-x_corner2),2)+np.power((y_corner1-y_corner2),2))) #return x_corner1,y_corner1,x_corner2,y_corner2,3 if((a2 >= 130)&(de3 > 400)): if(a1 < 80): print("T ed.an1= ",a1) xc1 = p2x yc1 = p2y xc2 = p3x yc2 = p3y leng = math.sqrt(np.power((xc1-xc2),2)+np.power((yc1-yc2),2)) unit_vectorx = 85*(xc2-xc1)/leng unit_vectory = 85*(yc2-yc1)/leng x_corner2 = xc1 + unit_vectorx y_corner2 = yc1 + unit_vectory x_corner1 = xc1 y_corner1 = yc1 r1 = math.sqrt(np.power(x_corner1,2)+np.power(y_corner1,2)) t1 = np.arctan2(y_corner1,x_corner1) r2 = math.sqrt(np.power(x_corner2,2)+np.power(y_corner2,2)) t2 = np.arctan2(y_corner2,x_corner2) x_corner1 = x_in+r1*np.cos(t1+heading+pos_flag*math.pi/2) y_corner1 = y_in+r1*np.sin(t1+heading+pos_flag*math.pi/2) x_corner2 = x_in+r2*np.cos(t2+heading+pos_flag*math.pi/2) y_corner2 = y_in+r2*np.sin(t2+heading+pos_flag*math.pi/2) print(x_corner1,y_corner1,x_corner2,y_corner2,3) print(math.sqrt(np.power((x_corner1-x_corner2),2)+np.power((y_corner1-y_corner2),2))) #return x_corner1,y_corner1,x_corner2,y_corner2,3 if(orian<90): rotation = 90-orian a = a1 if((rotation >= 30)&(a <= 85)): print("T1< ") xc1 = p2x yc1 = p2y xc2 = p3x yc2 = p3y leng = math.sqrt(np.power((xc1-xc2),2)+np.power((yc1-yc2),2)) unit_vectorx = 85*(xc2-xc1)/leng unit_vectory = 85*(yc2-yc1)/leng x_corner2 = xc1 + unit_vectorx y_corner2 = yc1 + unit_vectory x_corner1 = xc1 y_corner1 = yc1 r1 = math.sqrt(np.power(x_corner1,2)+np.power(y_corner1,2)) t1 = np.arctan2(y_corner1,x_corner1) r2 = math.sqrt(np.power(x_corner2,2)+np.power(y_corner2,2)) t2 = np.arctan2(y_corner2,x_corner2) 
x_corner1 = x_in+r1*np.cos(t1+heading+pos_flag*math.pi/2) y_corner1 = y_in+r1*np.sin(t1+heading+pos_flag*math.pi/2) x_corner2 = x_in+r2*np.cos(t2+heading+pos_flag*math.pi/2) y_corner2 = y_in+r2*np.sin(t2+heading+pos_flag*math.pi/2) print( x_corner1,y_corner1,x_corner2,y_corner2,3) print(math.sqrt(np.power((x_corner1-x_corner2),2)+np.power((y_corner1-y_corner2),2))) #return x_corner1,y_corner1,x_corner2,y_corner2,3 if(orian>90): rotation = orian-90 a = a2 if((rotation >= 30)&(a <= 85)): print("T1> ") xc1 = p2x yc1 = p2y xc2 = p3x yc2 = p3y leng = math.sqrt(np.power((xc1-xc2),2)+np.power((yc1-yc2),2)) unit_vectorx = 85*(xc1-xc2)/leng unit_vectory = 85*(yc1-yc2)/leng x_corner1 = xc2 + unit_vectorx y_corner1 = yc2 + unit_vectory x_corner2 = xc2 y_corner2 = yc2 r1 = math.sqrt(np.power(x_corner1,2)+np.power(y_corner1,2)) t1 = np.arctan2(y_corner1,x_corner1) r2 = math.sqrt(np.power(x_corner2,2)+np.power(y_corner2,2)) t2 = np.arctan2(y_corner2,x_corner2) x_corner1 = x_in+r1*np.cos(t1+heading+pos_flag*math.pi/2) y_corner1 = y_in+r1*np.sin(t1+heading+pos_flag*math.pi/2) x_corner2 = x_in+r2*np.cos(t2+heading+pos_flag*math.pi/2) y_corner2 = y_in+r2*np.sin(t2+heading+pos_flag*math.pi/2) print(x_corner1,y_corner1,x_corner2,y_corner2,3) print(math.sqrt(np.power((x_corner1-x_corner2),2)+np.power((y_corner1-y_corner2),2))) #return x_corner1,y_corner1,x_corner2,y_corner2,3 ax1 = np.power((p2x-xmin),2)+np.power((p2y-ymin),2) ay1 = np.power((p1x-xmin),2)+np.power((p1y-ymin),2) ay1 = (ax1+de1-ay1)/(2*math.sqrt(ax1*de1)) ay1 = math.acos(ay1)*180/math.pi ax2 = np.power((p3x-xmin),2)+np.power((p3y-ymin),2) ay2 = np.power((p4x-xmin),2)+np.power((p4y-ymin),2) ay2 = (ax2+de3-ay2)/(2*math.sqrt(ax2*de3)) ay2 = math.acos(ay2)*180/math.pi if((ay1>155)&(ay2>155)): if(corner_angle <= 75): print("SX") r_center = rmin+49.5 x_center = x_in+r_center*np.cos(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) y_center = y_in+r_center*np.sin(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) x_corner = 
x_in+rmin*np.cos(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) y_corner = y_in+rmin*np.sin(math.pi*rmin_ind/200+heading+pos_flag*math.pi/2) print(x_center,y_center,x_corner,y_corner,1) print( math.sqrt(np.power((x_center-x_corner),2)+np.power((y_center-y_corner),2))*math.sqrt(2)) #return x_center,y_center,x_corner,y_corner,1 if(math.sqrt(de2) >= 69): print("T2 ") xc1 = p2x yc1 = p2y xc2 = p3x yc2 = p3y leng = math.sqrt(np.power((xc1-xc2),2)+np.power((yc1-yc2),2)) unit_vectorx = 85*(xc1-xc2)/leng unit_vectory = 85*(yc1-yc2)/leng x_corner1 = xc2 + unit_vectorx y_corner1 = yc2 + unit_vectory x_corner2 = xc2 y_corner2 = yc2 r1 = math.sqrt(np.power(x_corner1,2)+np.power(y_corner1,2)) t1 = np.arctan2(y_corner1,x_corner1) r2 = math.sqrt(np.power(x_corner2,2)+np.power(y_corner2,2)) t2 = np.arctan2(y_corner2,x_corner2) x_corner1 = x_in+r1*np.cos(t1+heading+pos_flag*math.pi/2) y_corner1 = y_in+r1*np.sin(t1+heading+pos_flag*math.pi/2) x_corner2 = x_in+r2*np.cos(t2+heading+pos_flag*math.pi/2) y_corner2 = y_in+r2*np.sin(t2+heading+pos_flag*math.pi/2) print(x_corner1,y_corner1,x_corner2,y_corner2,3) print(math.sqrt(np.power((x_corner1-x_corner2),2)+np.power((y_corner1-y_corner2),2))) #return x_corner1,y_corner1,x_corner2,y_corner2,3
38.84252
101
0.673018
1,938
9,866
3.239422
0.069143
0.08028
0.045874
0.091749
0.7367
0.713444
0.708028
0.708028
0.691303
0.685569
0
0.092042
0.137746
9,866
254
102
38.84252
0.645939
0.05524
0
0.586957
0
0
0.005693
0
0
0
0
0
0
1
0
false
0
0.008696
0
0.008696
0.104348
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
96bb595e2d504e6318bdeae8ed9c15ef7ad59e5e
6,949
py
Python
yesterday/extensions/rq_scheduler/tests/test_views.py
imkevinxu/yesterday
105ffa95dbba576c5ed8f36ded4d75e61fd7dc60
[ "MIT" ]
3
2015-01-27T10:39:51.000Z
2021-01-27T05:03:55.000Z
yesterday/extensions/rq_scheduler/tests/test_views.py
imkevinxu/yesterday
105ffa95dbba576c5ed8f36ded4d75e61fd7dc60
[ "MIT" ]
1
2015-01-24T14:32:15.000Z
2015-01-24T17:42:53.000Z
yesterday/extensions/rq_scheduler/tests/test_views.py
imkevinxu/yesterday
105ffa95dbba576c5ed8f36ded4d75e61fd7dc60
[ "MIT" ]
null
null
null
from django.test import TestCase
from django.conf import settings
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django_rq import get_scheduler
from accounts.factories import AdminUserFactory
from datetime import datetime
from pytz import timezone


class RQSchedulerViewExtensionsTestCase(TestCase):
    """View tests for the rq_scheduler extension.

    Each test starts with exactly one e-mail job scheduled for a fixed
    future date (see setUp), so job counts asserted below are relative
    to that single pre-existing job.
    """

    def setUp(self):
        """ Create a superuser and log in """
        self.user = AdminUserFactory(email='test@example.com')
        self.client.login(email='test@example.com', password='password')
        # Payload for the scheduled send_mail job.
        self.subject = "[Test] RQSchedulerViewExtensionsTestCase"
        self.message = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean lobortis ornare vestibulum. Sed euismod euismod mattis. Suspendisse potenti. Vestibulum eget faucibus lacus. Quisque in eros augue. Sed diam lorem, finibus congue auctor vel, volutpat a lacus. Proin ut pellentesque nisi, ut dignissim erat. Donec fringilla venenatis est, a tempor turpis tempus a. Praesent eu magna lectus."
        self.from_email = "Testbot <test@%s>" % settings.PROJECT_DOMAIN
        self.recipient_list = ["test@example.com"]
        self.scheduler = get_scheduler()
        # Fixed, timezone-aware moment; the views render it as
        # "Jan. 1, 2020, midnight", which several tests assert on.
        self.western = timezone('America/Los_Angeles')
        self.scheduled_time = self.western.localize(datetime(2020, 1, 1))
        self.scheduler.enqueue_at(self.scheduled_time, send_mail, self.subject, self.message, self.from_email, self.recipient_list)

    def tearDown(self):
        # Remove any jobs a test left behind so tests stay independent.
        for job in self.scheduler.get_jobs():
            job.cancel()

    def test_jobs_view_extension(self):
        # The jobs list renders the scheduled time of the setUp job.
        response = self.client.get(reverse('rq_scheduler:jobs'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'rq_scheduler/templates/jobs.html')
        self.assertContains(response, "Jan. 1, 2020, midnight")

    def test_job_detail_view_extension(self):
        # Detail page for a real job shows its schedule and queue state.
        job_id = self.scheduler.get_jobs()[0].id
        response = self.client.get(reverse('rq_scheduler:job_detail', kwargs={'job_id': job_id}))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'rq_scheduler/templates/job_detail.html')
        self.assertContains(response, "Jan. 1, 2020, midnight")
        self.assertContains(response, "Not queued yet")
        # An unknown job id must 404 rather than render an empty page.
        response = self.client.get(reverse('rq_scheduler:job_detail', kwargs={'job_id': "fake"}))
        self.assertEqual(response.status_code, 404)

    def test_delete_job_view_extension(self):
        job_id = self.scheduler.get_jobs()[0].id
        # GET shows a confirmation page and does NOT delete anything.
        response = self.client.get(reverse('rq_scheduler:delete_job', kwargs={'job_id': job_id}))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'rq_scheduler/templates/delete_job.html')
        self.assertContains(response, job_id)
        self.assertEqual(len(self.scheduler.get_jobs()), 1)
        # POST performs the deletion and redirects back to the list.
        response = self.client.post(reverse('rq_scheduler:delete_job', kwargs={'job_id': job_id}))
        self.assertRedirects(response, reverse('rq_scheduler:jobs'))
        self.assertEqual(len(self.scheduler.get_jobs()), 0)

    def test_enqueue_job_view_extension(self):
        job_id = self.scheduler.get_jobs()[0].id
        # GET shows a confirmation page; the job is still only scheduled.
        response = self.client.get(reverse('rq_scheduler:enqueue_job', kwargs={'job_id': job_id}))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'rq_scheduler/templates/enqueue_job.html')
        self.assertContains(response, job_id)
        self.assertEqual(len(self.scheduler.get_jobs()), 1)
        # POST moves the job from the scheduler onto a queue.
        response = self.client.post(reverse('rq_scheduler:enqueue_job', kwargs={'job_id': job_id}))
        self.assertRedirects(response, reverse('rq_scheduler:jobs'))
        self.assertEqual(len(self.scheduler.get_jobs()), 0)
        # The detail page still resolves and shows the job's callable.
        response = self.client.get(reverse('rq_scheduler:job_detail', kwargs={'job_id': job_id}))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'rq_scheduler/templates/job_detail.html')
        self.assertContains(response, "django.core.mail.send_mail")

    def test_clear_jobs_view_extension(self):
        # Two extra jobs + the setUp job = 3 scheduled jobs total.
        for i in range(2):
            self.scheduler.enqueue_at(self.scheduled_time, send_mail, self.subject, self.message, self.from_email, self.recipient_list)
        # GET is confirmation-only.
        response = self.client.get(reverse('rq_scheduler:clear_jobs'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'rq_scheduler/templates/clear_jobs.html')
        self.assertEqual(len(self.scheduler.get_jobs()), 3)
        # POST clears every scheduled job.
        response = self.client.post(reverse('rq_scheduler:clear_jobs'))
        self.assertRedirects(response, reverse('rq_scheduler:jobs'))
        self.assertEqual(len(self.scheduler.get_jobs()), 0)

    def test_action_view_extension(self):
        # A bare GET on the bulk-action endpoint just bounces to the list.
        response = self.client.get(reverse('rq_scheduler:actions'))
        self.assertRedirects(response, reverse('rq_scheduler:jobs'))

    def test_action_delete_view_extension(self):
        for i in range(2):
            self.scheduler.enqueue_at(self.scheduled_time, send_mail, self.subject, self.message, self.from_email, self.recipient_list)
        job_ids = [job.id for job in self.scheduler.get_jobs()]
        # First POST ('_selected_action') shows the confirmation page only.
        delete_action_payload = {'action': 'delete', '_selected_action': job_ids}
        response = self.client.post(reverse('rq_scheduler:actions'), delete_action_payload)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'rq_scheduler/templates/confirm_action.html')
        self.assertContains(response, job_ids[0])
        self.assertEqual(len(self.scheduler.get_jobs()), 3)
        # Second POST ('job_ids') executes the bulk delete.
        delete_action_payload = {'action': 'delete', 'job_ids': job_ids}
        response = self.client.post(reverse('rq_scheduler:actions'), delete_action_payload)
        self.assertRedirects(response, reverse('rq_scheduler:jobs'))
        self.assertEqual(len(self.scheduler.get_jobs()), 0)

    def test_action_enqueue_view_extension(self):
        for i in range(2):
            self.scheduler.enqueue_at(self.scheduled_time, send_mail, self.subject, self.message, self.from_email, self.recipient_list)
        job_ids = [job.id for job in self.scheduler.get_jobs()]
        # First POST ('_selected_action') shows the confirmation page only.
        enqueue_action_payload = {'action': 'enqueue', '_selected_action': job_ids}
        response = self.client.post(reverse('rq_scheduler:actions'), enqueue_action_payload)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'rq_scheduler/templates/confirm_action.html')
        self.assertContains(response, job_ids[0])
        self.assertEqual(len(self.scheduler.get_jobs()), 3)
        # Second POST ('job_ids') enqueues all selected jobs immediately.
        enqueue_action_payload = {'action': 'enqueue', 'job_ids': job_ids}
        response = self.client.post(reverse('rq_scheduler:actions'), enqueue_action_payload)
        self.assertRedirects(response, reverse('rq_scheduler:jobs'))
        self.assertEqual(len(self.scheduler.get_jobs()), 0)
53.453846
411
0.717082
878
6,949
5.472665
0.166287
0.066389
0.078668
0.066597
0.749636
0.716129
0.713215
0.680749
0.659729
0.659729
0
0.010508
0.164628
6,949
129
412
53.868217
0.817227
0.004173
0
0.509804
0
0.009804
0.213976
0.083189
0
0
0
0
0.401961
1
0.098039
false
0.009804
0.078431
0
0.186275
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
7382ddf1dd8aa0fbb345676ed265b9ac0913e2b4
498
py
Python
Edabit/LastDigit-Medium.py
JLJTECH/TutorialTesting
f2dbbd49a86b3b086d0fc156ac3369fb74727f86
[ "MIT" ]
null
null
null
Edabit/LastDigit-Medium.py
JLJTECH/TutorialTesting
f2dbbd49a86b3b086d0fc156ac3369fb74727f86
[ "MIT" ]
null
null
null
Edabit/LastDigit-Medium.py
JLJTECH/TutorialTesting
f2dbbd49a86b3b086d0fc156ac3369fb74727f86
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
'''
Create a function, that takes 3 numbers: a, b, c and returns True if the last
digit of (the last digit of a * the last digit of b) = the last digit of c.
'''


def last_dig(a, b, c):
    """Return True if the last digit of (last digit of a * last digit of b)
    equals the last digit of c.

    The original file defined `last_dig` three times; only the final
    definition survived import, so the two shadowed variants were dead
    code and have been removed. This keeps the effective (modular
    arithmetic) implementation: n % 10 extracts a last digit without any
    int<->str round-trips.
    """
    # Product of the two last digits, reduced to its own last digit,
    # compared against c's last digit.
    return ((a % 10) * (b % 10) % 10) == (c % 10)
26.210526
84
0.576305
101
498
2.811881
0.316832
0.042254
0.042254
0.197183
0.179577
0.179577
0.133803
0
0
0
0
0.040609
0.208835
498
18
85
27.666667
0.680203
0.395582
0
0.3
0
0
0
0
0
0
0
0
0
1
0.3
false
0
0
0.2
0.6
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
7388c12906f98678eb2551d63aabe5338f9992cd
1,450
py
Python
tests/test_response_local.py
c-pher/PyWinOS
a16a16a24abaa53a06b9365b2535c8ab31a7fdfb
[ "MIT" ]
4
2020-04-17T15:54:43.000Z
2020-11-08T06:39:05.000Z
tests/test_response_local.py
c-pher/PyWinOS
a16a16a24abaa53a06b9365b2535c8ab31a7fdfb
[ "MIT" ]
65
2020-01-05T21:45:17.000Z
2022-03-31T16:50:20.000Z
tests/test_response_local.py
c-pher/PyWinOS
a16a16a24abaa53a06b9365b2535c8ab31a7fdfb
[ "MIT" ]
null
null
null
from pywinos import ResponseParser


class TestResponseLocal:
    """Field-level checks of ResponseParser for local command results.

    Tests receive pytest fixtures carrying raw command output:
    `response_cmd_local` for a successful run and
    `response_cmd_local_err` for a failed one.
    """

    def test_ok(self, response_cmd_local):
        parsed = ResponseParser(response_cmd_local)
        assert parsed.ok, 'Response is not OK'

    def test_ok_err(self, response_cmd_local_err):
        parsed = ResponseParser(response_cmd_local_err)
        assert not parsed.ok, 'Response is OK. Must be False'

    def test_stdout(self, response_cmd_local):
        parsed = ResponseParser(response_cmd_local)
        assert parsed.stdout, 'STDOUT is null or empty'

    def test_stdout_err(self, response_cmd_local_err):
        parsed = ResponseParser(response_cmd_local_err)
        assert not parsed.stdout, 'STDOUT is not null or empty'

    def test_stderr(self, response_cmd_local):
        parsed = ResponseParser(response_cmd_local)
        assert not parsed.stderr, 'STDERR is not null'

    def test_stderr_err(self, response_cmd_local_err):
        parsed = ResponseParser(response_cmd_local_err)
        assert parsed.stderr, 'STDERR is null. It must contain entries about error'

    def test_exited(self, response_cmd_local):
        parsed = ResponseParser(response_cmd_local)
        assert not parsed.exited, 'Exit code is not 0'

    def test_exited_err(self, response_cmd_local_err):
        parsed = ResponseParser(response_cmd_local_err)
        assert parsed.exited == 1, 'Exit code is not 1'
39.189189
71
0.710345
187
1,450
5.229947
0.187166
0.179959
0.261759
0.163599
0.674847
0.638037
0.638037
0.638037
0.638037
0.638037
0
0.002662
0.222759
1,450
36
72
40.277778
0.865129
0
0
0.296296
0
0
0.13931
0
0
0
0
0
0.296296
1
0.296296
false
0
0.037037
0
0.37037
0
0
0
0
null
0
1
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
7393d33fded4abb3dd324ce27bf8dd2985006ccf
111
py
Python
codewof/programming/content/en/how-many-dozens/solution.py
taskmaker1/codewof
92d52cd3ee91f0f311ff01a92cf6ec07e5593b8d
[ "MIT" ]
3
2019-08-29T04:11:22.000Z
2021-06-22T16:05:51.000Z
codewof/programming/content/en/how-many-dozens/solution.py
taskmaker1/codewof
92d52cd3ee91f0f311ff01a92cf6ec07e5593b8d
[ "MIT" ]
265
2019-05-30T03:51:46.000Z
2022-03-31T01:05:12.000Z
codewof/programming/content/en/how-many-dozens/solution.py
samuelsandri/codewof
c9b8b378c06b15a0c42ae863b8f46581de04fdfc
[ "MIT" ]
7
2019-06-29T12:13:37.000Z
2021-09-06T06:49:14.000Z
def dozens_of_eggs(num_eggs):
    """Return a sentence stating how many whole dozens fit in num_eggs."""
    # Floor division: partial dozens are not counted.
    return "There are {} dozen eggs!".format(num_eggs // 12)
27.75
52
0.684685
17
111
4.235294
0.647059
0.194444
0
0
0
0
0
0
0
0
0
0.022222
0.189189
111
3
53
37
0.777778
0
0
0
0
0
0.216216
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
739509696d1f778ecf448e73c0119b5889e49a4a
343
py
Python
airavata_django_portal_sdk/util.py
apache/airavata-django-portal-sdk
bad5b05352250f0247363f2e312d19f64cb666de
[ "Apache-2.0" ]
1
2021-11-07T21:18:51.000Z
2021-11-07T21:18:51.000Z
airavata_django_portal_sdk/util.py
apache/airavata-django-portal-sdk
bad5b05352250f0247363f2e312d19f64cb666de
[ "Apache-2.0" ]
null
null
null
airavata_django_portal_sdk/util.py
apache/airavata-django-portal-sdk
bad5b05352250f0247363f2e312d19f64cb666de
[ "Apache-2.0" ]
3
2021-03-15T17:28:45.000Z
2021-11-07T21:18:44.000Z
import datetime


def convert_iso8601_to_datetime(iso8601string, microseconds=True):
    """Convert ISO8601 datetime string to a datetime instance.

    With microseconds=True (the default) the string must carry a
    fractional-seconds field; otherwise it must end directly in the 'Z'.
    The result is a naive datetime either way.
    """
    # The two accepted layouts differ only in the trailing '.%f' field,
    # so choose the format once and parse with a single strptime call.
    if microseconds:
        layout = "%Y-%m-%dT%H:%M:%S.%fZ"
    else:
        layout = "%Y-%m-%dT%H:%M:%SZ"
    return datetime.datetime.strptime(iso8601string, layout)
34.3
81
0.702624
44
343
5.409091
0.522727
0.117647
0.184874
0.252101
0.411765
0.411765
0.411765
0.411765
0.411765
0.411765
0
0.068729
0.151604
343
9
82
38.111111
0.749141
0.16035
0
0
0
0
0.138298
0.074468
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
73a87aa61d2e20d256d94cfe57989059d8d751e7
304
py
Python
testprojects/tests/python/pants/dummies/test_with_thirdparty_dep.py
billybecker/pants
ee101f3e360b712aceb9dacf7723aaf9b5567f04
[ "Apache-2.0" ]
94
2015-01-15T21:24:20.000Z
2022-02-16T16:55:43.000Z
testprojects/tests/python/pants/dummies/test_with_thirdparty_dep.py
billybecker/pants
ee101f3e360b712aceb9dacf7723aaf9b5567f04
[ "Apache-2.0" ]
5
2020-07-18T01:04:43.000Z
2021-05-10T08:40:56.000Z
testprojects/tests/python/pants/dummies/test_with_thirdparty_dep.py
billybecker/pants
ee101f3e360b712aceb9dacf7723aaf9b5567f04
[ "Apache-2.0" ]
47
2015-02-25T02:20:07.000Z
2022-03-21T00:59:16.000Z
# coding=utf-8
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import absolute_import, division, print_function, unicode_literals

from builtins import str


def test_f():
    """Sanity check: under the builtins backport, a literal is a (unicode) str."""
    value = "foo"
    assert isinstance(value, str)
25.333333
82
0.779605
42
304
5.452381
0.833333
0
0
0
0
0
0
0
0
0
0
0.026718
0.138158
304
11
83
27.636364
0.847328
0.457237
0
0
0
0
0.018634
0
0
0
0
0
0.25
1
0.25
true
0
0.5
0
0.75
0.25
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
4
73ab49f7b80f273d254c1c672a5713bc408cf820
151
py
Python
user.py
manav310/atm-interface
8af4dcba8bad4bf853933893a79e03dc4d21ba05
[ "Apache-2.0" ]
null
null
null
user.py
manav310/atm-interface
8af4dcba8bad4bf853933893a79e03dc4d21ba05
[ "Apache-2.0" ]
null
null
null
user.py
manav310/atm-interface
8af4dcba8bad4bf853933893a79e03dc4d21ba05
[ "Apache-2.0" ]
null
null
null
class User:
    """A bank customer as seen by the ATM: account name, card, and PIN."""

    def __init__(self, account_name, card, pin):
        # Store the credentials verbatim; no validation happens here.
        self.account_name = account_name
        self.card = card
        self.pin = pin
25.166667
45
0.615894
20
151
4.3
0.45
0.383721
0.348837
0
0
0
0
0
0
0
0
0
0.298013
151
6
46
25.166667
0.811321
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0
0
0.4
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
73ce642674699c9a8df80140bb265353a35e5ba1
679
py
Python
tests/reconciler/data/pods.py
datapio/klander
d862bb1640a6cf4c0010246e1d53316103321a4d
[ "Apache-2.0" ]
2
2021-05-14T22:00:55.000Z
2021-09-17T20:09:17.000Z
tests/reconciler/data/pods.py
datapio/klander
d862bb1640a6cf4c0010246e1d53316103321a4d
[ "Apache-2.0" ]
null
null
null
tests/reconciler/data/pods.py
datapio/klander
d862bb1640a6cf4c0010246e1d53316103321a4d
[ "Apache-2.0" ]
1
2021-07-16T08:35:43.000Z
2021-07-16T08:35:43.000Z
def _pod(name, service_account):
    """Build one minimal v1 Pod manifest for the reconciler tests."""
    return dict(
        apiVersion='v1',
        kind='Pod',
        metadata=dict(name=name, namespace='default'),
        spec=dict(serviceAccountName=service_account),
    )


# Three fixtures: a compliant pod, a non-compliant one, and one whose
# name marks the exception path exercised by the reconciler tests.
pod_examples = [
    _pod('good', 'default'),
    _pod('bad', 'bad'),
    _pod('raise', 'bad'),
]
18.861111
40
0.412371
47
679
5.93617
0.319149
0.150538
0.172043
0.215054
0.892473
0.741935
0.419355
0.419355
0
0
0
0.008197
0.460972
679
35
41
19.4
0.754098
0
0
0.714286
0
0
0.089838
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
73e59a85e08bee1d80f948fe6207791961e06edc
3,799
py
Python
tools/blender/q_math.py
raynorpat/xreal
2fcbf9179fa22dc6e808bb65b879ac2ee7616ebd
[ "BSD-3-Clause" ]
11
2016-06-03T07:46:15.000Z
2021-09-09T19:35:32.000Z
tools/blender/q_math.py
raynorpat/xreal
2fcbf9179fa22dc6e808bb65b879ac2ee7616ebd
[ "BSD-3-Clause" ]
1
2016-10-14T23:06:19.000Z
2016-10-14T23:06:19.000Z
tools/blender/q_math.py
raynorpat/xreal
2fcbf9179fa22dc6e808bb65b879ac2ee7616ebd
[ "BSD-3-Clause" ]
5
2016-10-13T04:43:58.000Z
2019-08-24T14:03:35.000Z
import sys, struct, string, math


def ANGLE2SHORT(x):
    """Quantize an angle in degrees to the 16-bit network representation."""
    # Truncate to int BEFORE masking: under Python 3 true division yields a
    # float, and `float & int` raises TypeError. The original masked first
    # (`int((x * 65536 / 360) & 65535)`), which only worked with Python 2
    # integer division.
    return int(x * 65536 / 360) & 65535


def SHORT2ANGLE(x):
    """Inverse of ANGLE2SHORT: 16-bit angle value back to degrees."""
    return x * (360.0 / 65536.0)


def DEG2RAD(a):
    """Degrees to radians."""
    return (a * math.pi) / 180.0


def RAD2DEG(a):
    """Radians to degrees."""
    return (a * 180.0) / math.pi


def DotProduct(x, y):
    """3D dot product of x and y."""
    return x[0] * y[0] + x[1] * y[1] + x[2] * y[2]


def CrossProduct(a, b):
    """3D cross product a x b, returned as a new list."""
    return [a[1] * b[2] - a[2] * b[1],
            a[2] * b[0] - a[0] * b[2],
            a[0] * b[1] - a[1] * b[0]]


def VectorLength(v):
    """Euclidean length of a 3-vector."""
    return math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2])


def VectorSubtract(a, b):
    """Component-wise a - b."""
    return [a[0] - b[0], a[1] - b[1], a[2] - b[2]]


def VectorAdd(a, b):
    """Component-wise a + b."""
    return [a[0] + b[0], a[1] + b[1], a[2] + b[2]]


def VectorCopy(v):
    """Shallow copy of a 3-vector."""
    return [v[0], v[1], v[2]]


def VectorInverse(v):
    """Component-wise negation of a 3-vector."""
    return [-v[0], -v[1], -v[2]]


# C macro equivalents kept for reference:
#define VectorCopy(a,b) ((b)[0]=(a)[0],(b)[1]=(a)[1],(b)[2]=(a)[2])
#define VectorScale(v, s, o) ((o)[0]=(v)[0]*(s),(o)[1]=(v)[1]*(s),(o)[2]=(v)[2]*(s))
#define VectorMA(v, s, b, o) ((o)[0]=(v)[0]+(b)[0]*(s),(o)[1]=(v)[1]+(b)[1]*(s),(o)[2]=(v)[2]+(b)[2]*(s))


def RadiusFromBounds(mins, maxs):
    """Radius of the origin-centred sphere enclosing the AABB [mins, maxs].

    For each axis the farthest absolute extent is taken, then the length
    of that corner vector is the radius.
    """
    corner = [0, 0, 0]
    for i in range(0, 3):
        a = abs(mins[i])
        b = abs(maxs[i])
        # Farthest extent from the origin on this axis.
        corner[i] = a if a > b else b
    return VectorLength(corner)


# NOTE: Tr3B - matrix is in column-major order
def MatrixIdentity():
    """4x4 identity matrix as nested lists of floats."""
    return [[1.0, 0.0, 0.0, 0.0],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.0],
            [0.0, 0.0, 0.0, 1.0]]


def MatrixFromAngles(pitch, yaw, roll):
    """Rotation matrix from Euler angles given in degrees."""
    sp = math.sin(DEG2RAD(pitch))
    cp = math.cos(DEG2RAD(pitch))
    sy = math.sin(DEG2RAD(yaw))
    cy = math.cos(DEG2RAD(yaw))
    sr = math.sin(DEG2RAD(roll))
    cr = math.cos(DEG2RAD(roll))
    return [[cp * cy, cp * sy, -sp, 0.0],
            [(sr * sp * cy + cr * -sy), (sr * sp * sy + cr * cy), sr * cp, 0.0],
            [(cr * sp * cy + -sr * -sy), (cr * sp * sy + -sr * cy), cr * cp, 0.0],
            [0.0, 0.0, 0.0, 1.0]]


def MatrixTransformPoint(m, p):
    """Transform point p by matrix m (rotation plus m[3] translation)."""
    return [m[0][0] * p[0] + m[1][0] * p[1] + m[2][0] * p[2] + m[3][0],
            m[0][1] * p[0] + m[1][1] * p[1] + m[2][1] * p[2] + m[3][1],
            m[0][2] * p[0] + m[1][2] * p[1] + m[2][2] * p[2] + m[3][2]]


def MatrixTransformNormal(m, p):
    """Transform direction p by matrix m (rotation only, no translation)."""
    return [m[0][0] * p[0] + m[1][0] * p[1] + m[2][0] * p[2],
            m[0][1] * p[0] + m[1][1] * p[1] + m[2][1] * p[2],
            m[0][2] * p[0] + m[1][2] * p[1] + m[2][2] * p[2]]


def MatrixMultiply(b, a):
    """Concatenate two transforms; note the (b, a) argument order.

    The last row of `a` is treated as the translation and combined with
    b's translation; the result's last column is fixed at (0, 0, 0, 1).
    """
    return [[
        a[0][0] * b[0][0] + a[0][1] * b[1][0] + a[0][2] * b[2][0],
        a[0][0] * b[0][1] + a[0][1] * b[1][1] + a[0][2] * b[2][1],
        a[0][0] * b[0][2] + a[0][1] * b[1][2] + a[0][2] * b[2][2],
        0.0,
    ], [
        a[1][0] * b[0][0] + a[1][1] * b[1][0] + a[1][2] * b[2][0],
        a[1][0] * b[0][1] + a[1][1] * b[1][1] + a[1][2] * b[2][1],
        a[1][0] * b[0][2] + a[1][1] * b[1][2] + a[1][2] * b[2][2],
        0.0,
    ], [
        a[2][0] * b[0][0] + a[2][1] * b[1][0] + a[2][2] * b[2][0],
        a[2][0] * b[0][1] + a[2][1] * b[1][1] + a[2][2] * b[2][1],
        a[2][0] * b[0][2] + a[2][1] * b[1][2] + a[2][2] * b[2][2],
        0.0,
    ], [
        a[3][0] * b[0][0] + a[3][1] * b[1][0] + a[3][2] * b[2][0] + b[3][0],
        a[3][0] * b[0][1] + a[3][1] * b[1][1] + a[3][2] * b[2][1] + b[3][1],
        a[3][0] * b[0][2] + a[3][1] * b[1][2] + a[3][2] * b[2][2] + b[3][2],
        1.0,
    ]]


def MatrixSetupTransform(forward, left, up, origin):
    """Build a transform from three basis vectors and an origin."""
    return [[forward[0], forward[1], forward[2], origin[0]],
            [left[0], left[1], left[2], origin[1]],
            [up[0], up[1], up[2], origin[2]],
            [0.0, 0.0, 0.0, 1.0]]
31.139344
105
0.410898
813
3,799
1.920049
0.098401
0.078155
0.07303
0.079436
0.401666
0.240231
0.185138
0.162716
0.140935
0.140935
0
0.147006
0.265859
3,799
121
106
31.396694
0.412693
0.15741
0
0.104651
0
0
0
0
0
0
0
0
0
1
0.209302
false
0
0.011628
0.186047
0.430233
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
73ef174cfdfd4c5eb6eff27ca0ea8c950ec1065f
76
py
Python
__init__.py
EdgewiseNetworks/five-sigma
d11a772261ee1a40425f9d035def94f38bcdbd8d
[ "MIT" ]
7
2018-11-01T02:40:55.000Z
2019-12-01T20:53:59.000Z
__init__.py
EdgewiseNetworks/five-sigma
d11a772261ee1a40425f9d035def94f38bcdbd8d
[ "MIT" ]
null
null
null
__init__.py
EdgewiseNetworks/five-sigma
d11a772261ee1a40425f9d035def94f38bcdbd8d
[ "MIT" ]
1
2022-01-11T07:18:31.000Z
2022-01-11T07:18:31.000Z
# # Copyright (c) 2016-2017, Edgewise Networks Inc. All rights reserved. #
15.2
70
0.710526
10
76
5.4
1
0
0
0
0
0
0
0
0
0
0
0.126984
0.171053
76
4
71
19
0.730159
0.894737
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
73f2c5acdabb840bd71321aedd5e98ea3611a9dc
1,100
py
Python
mpesaApp/migrations/0001_initial.py
oronibrian/django-mpesa
fb5de34829fedf0d898d4daa5ad8a36efefd3aee
[ "MIT" ]
1
2020-04-06T08:28:46.000Z
2020-04-06T08:28:46.000Z
mpesaApp/migrations/0001_initial.py
oronibrian/django-mpesa
fb5de34829fedf0d898d4daa5ad8a36efefd3aee
[ "MIT" ]
4
2020-02-11T23:54:32.000Z
2021-06-10T21:16:48.000Z
mpesaApp/migrations/0001_initial.py
oronibrian/django-mpesa
fb5de34829fedf0d898d4daa5ad8a36efefd3aee
[ "MIT" ]
1
2022-02-19T21:00:56.000Z
2022-02-19T21:00:56.000Z
# Generated by Django 2.1.7 on 2019-03-14 08:42 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='mpesaDetail', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('BusinessShortCode', models.CharField(max_length=255)), ('Password', models.CharField(max_length=255)), ('Timestamp', models.CharField(max_length=255)), ('TransactionType', models.CharField(max_length=255)), ('Amount', models.CharField(max_length=255)), ('PartyA', models.CharField(max_length=255)), ('PartyB', models.CharField(max_length=255)), ('CallBackURL', models.CharField(max_length=255)), ('AccountReference', models.CharField(max_length=255)), ('TransactionDesc', models.CharField(max_length=255)), ], ), ]
35.483871
114
0.582727
105
1,100
5.980952
0.47619
0.238854
0.286624
0.382166
0.429936
0
0
0
0
0
0
0.056962
0.281818
1,100
30
115
36.666667
0.737975
0.040909
0
0
1
0
0.117759
0
0
0
0
0
0
1
0
false
0.043478
0.043478
0
0.217391
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
fb4a63b7fb13bc829e7788495fc126912c943f9b
214
py
Python
events_protocol/core/logging/mixins/loggable.py
gb-jairo/events-protocol-python
9c71abf1c896edb1050bf2e37d0947c7c3ca0080
[ "Apache-2.0" ]
1
2021-07-20T04:12:06.000Z
2021-07-20T04:12:06.000Z
events_protocol/core/logging/mixins/loggable.py
gb-jairo/events-protocol-python
9c71abf1c896edb1050bf2e37d0947c7c3ca0080
[ "Apache-2.0" ]
11
2020-02-13T13:19:54.000Z
2021-06-10T20:23:10.000Z
events_protocol/core/logging/mixins/loggable.py
gb-jairo/events-protocol-python
9c71abf1c896edb1050bf2e37d0947c7c3ca0080
[ "Apache-2.0" ]
4
2020-01-31T13:31:34.000Z
2020-07-24T13:25:26.000Z
from events_protocol.core.logging import JsonLogger


class LoggableMixin:
    """Mixin that equips a class with a JsonLogger bound to it."""

    # Class-level default logger, built with no owning class.
    logger = JsonLogger()

    def __new__(cls, *args, **kwargs):
        # Rebind the CLASS attribute to a logger built for the concrete
        # subclass on every instantiation. NOTE(review): this mutates
        # shared class state — every existing instance of `cls` sees the
        # new logger; confirm that is intended.
        cls.logger = JsonLogger(cls)
        return super().__new__(cls)
21.4
51
0.682243
24
214
5.708333
0.708333
0.233577
0
0
0
0
0
0
0
0
0
0
0.21028
214
9
52
23.777778
0.810651
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.833333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
fb582c0611b02e1b3a6a7bb2490a44ff56414902
231
py
Python
backend/depot/serializers/commodity_transaction_serializer.py
mrader1248/depocalypse
0662e7a096fdb68b6e5cc55be4e17c7fb1ed8241
[ "MIT" ]
null
null
null
backend/depot/serializers/commodity_transaction_serializer.py
mrader1248/depocalypse
0662e7a096fdb68b6e5cc55be4e17c7fb1ed8241
[ "MIT" ]
9
2021-11-30T17:31:57.000Z
2022-01-03T18:47:09.000Z
backend/depot/serializers/commodity_transaction_serializer.py
mrader1248/depocalypse
0662e7a096fdb68b6e5cc55be4e17c7fb1ed8241
[ "MIT" ]
null
null
null
from rest_framework import serializers

from ..models import CommodityTransaction


class CommodityTransactionSerializer(serializers.ModelSerializer):
    """DRF serializer exposing every field of CommodityTransaction."""

    class Meta:
        model = CommodityTransaction
        # '__all__' includes every model field, also ones added later.
        fields = '__all__'
23.1
66
0.774892
19
231
9.157895
0.736842
0
0
0
0
0
0
0
0
0
0
0
0.177489
231
9
67
25.666667
0.915789
0
0
0
0
0
0.030303
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
fb7f3de39186c700b8a1f8c4612e42bc1cd092a6
251
py
Python
tests/data/expected/main/simple_json_snake_case_field/output.py
adaamz/datamodel-code-generator
3b34573f35f8d420e4668a85047c757fd1da7754
[ "MIT" ]
891
2019-07-23T04:23:32.000Z
2022-03-31T13:36:33.000Z
tests/data/expected/main/simple_json_snake_case_field/output.py
adaamz/datamodel-code-generator
3b34573f35f8d420e4668a85047c757fd1da7754
[ "MIT" ]
663
2019-07-23T09:50:26.000Z
2022-03-29T01:56:55.000Z
tests/data/expected/main/simple_json_snake_case_field/output.py
adaamz/datamodel-code-generator
3b34573f35f8d420e4668a85047c757fd1da7754
[ "MIT" ]
108
2019-07-23T08:50:37.000Z
2022-03-09T10:50:22.000Z
# generated by datamodel-codegen: # filename: simple.json # timestamp: 2019-07-26T00:00:00+00:00 from __future__ import annotations from pydantic import BaseModel, Field class Model(BaseModel): pet_name: str = Field(..., alias='petName')
20.916667
47
0.729084
33
251
5.393939
0.787879
0.067416
0.067416
0
0
0
0
0
0
0
0
0.084906
0.155378
251
11
48
22.818182
0.754717
0.378486
0
0
1
0
0.046053
0
0
0
0
0
0
1
0
true
0
0.5
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
fb8a8ff017746b1f30d21e7fdb3648f8886fa330
106
py
Python
src/tox_poetry_dev_dependencies/__init__.py
jayvdb/tox-poetry-dev-dependencies
389fa7724c9cf3846292f2d1d9f24823d53704c6
[ "Apache-2.0" ]
null
null
null
src/tox_poetry_dev_dependencies/__init__.py
jayvdb/tox-poetry-dev-dependencies
389fa7724c9cf3846292f2d1d9f24823d53704c6
[ "Apache-2.0" ]
null
null
null
src/tox_poetry_dev_dependencies/__init__.py
jayvdb/tox-poetry-dev-dependencies
389fa7724c9cf3846292f2d1d9f24823d53704c6
[ "Apache-2.0" ]
null
null
null
# """tox-poetry-dev-dependencies.""" from . import _meta __version__ = _meta.VERSION # PEP 396 # EOF
10.6
38
0.669811
13
106
5
0.846154
0.338462
0
0
0
0
0
0
0
0
0
0.034483
0.179245
106
9
39
11.777778
0.712644
0.386792
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
fb8f6e9de8998d2275fe0840d31a90f690f14f5d
120
py
Python
documents/api/__init__.py
City-of-Helsinki/atv
dca73dab09ab0f3a051a9f691aec5674c6369bde
[ "MIT" ]
null
null
null
documents/api/__init__.py
City-of-Helsinki/atv
dca73dab09ab0f3a051a9f691aec5674c6369bde
[ "MIT" ]
34
2021-05-28T06:23:38.000Z
2022-03-08T12:42:01.000Z
documents/api/__init__.py
City-of-Helsinki/atv
dca73dab09ab0f3a051a9f691aec5674c6369bde
[ "MIT" ]
1
2021-05-27T10:37:42.000Z
2021-05-27T10:37:42.000Z
from .viewsets import AttachmentViewSet, DocumentViewSet

# Explicit public API of the documents.api package: only the two
# viewsets are re-exported.
__all__ = [
    "AttachmentViewSet",
    "DocumentViewSet",
]
17.142857
56
0.741667
8
120
10.625
0.75
0.752941
0
0
0
0
0
0
0
0
0
0
0.166667
120
6
57
20
0.85
0
0
0
0
0
0.266667
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
1
0
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
fb92f621cc68e159d1ee4d5c778189f549e04148
85
py
Python
decks/apps.py
cedricnoel/django-hearthstone
1c7f84b1101725365f08677e6800c789111ce58b
[ "PSF-2.0", "BSD-3-Clause" ]
null
null
null
decks/apps.py
cedricnoel/django-hearthstone
1c7f84b1101725365f08677e6800c789111ce58b
[ "PSF-2.0", "BSD-3-Clause" ]
1
2021-03-30T14:15:00.000Z
2021-03-30T14:15:00.000Z
decks/apps.py
cedricnoel/django-hearthstone
1c7f84b1101725365f08677e6800c789111ce58b
[ "PSF-2.0", "BSD-3-Clause" ]
null
null
null
from django.apps import AppConfig class DecksConfig(AppConfig): name = 'decks'
14.166667
33
0.741176
10
85
6.3
0.9
0
0
0
0
0
0
0
0
0
0
0
0.176471
85
5
34
17
0.9
0
0
0
0
0
0.058824
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
fba7cae4e68d648132654d4adde0eccd8df0db8f
14,035
py
Python
models/python/basal_ganglia/layout.py
ABRG-Models/MammalBot
0b153232b94197c7a65156c1c3451ab2b9f725ae
[ "MIT" ]
null
null
null
models/python/basal_ganglia/layout.py
ABRG-Models/MammalBot
0b153232b94197c7a65156c1c3451ab2b9f725ae
[ "MIT" ]
null
null
null
models/python/basal_ganglia/layout.py
ABRG-Models/MammalBot
0b153232b94197c7a65156c1c3451ab2b9f725ae
[ "MIT" ]
null
null
null
# Layout for Dash visualisation of BG data # Dash components import dash_bootstrap_components as dbc import dash_core_components as dcc import dash_html_components as html import plotly.graph_objs as go # Fixed BG values BG_CHANNELS = 4 # Plot attributes PLOT_LENGTH = 100 PLOT_COLOURS = ( 'crimson', # CH1 'steelblue', # CH2 'seagreen', # CH3 'purple', # CH4 'darkorange', # CH5 'sienna' # CH6 ) PLOT_GAP = 0.02 PLOT_SHOWGRIDX = True PLOT_SHOWGRIDY = False # Initialise input slider controls # TODO: Make shorter and spaced further apart dash_controls = { 'Input': [ dcc.Slider( id='input-' + str(ch), min=0, max=1, step=0.01, updatemode='drag', value=0, vertical=True, className='float-left', verticalHeight=120 ) for ch in range(BG_CHANNELS) ], 'LH': [ dcc.Slider( id='lh-' + str(ch), min=0, max=1, step=0.01, updatemode='drag', value=0, vertical=True, className='float-left', verticalHeight=120 ) for ch in range(BG_CHANNELS) ], } # Graph objects dash_graphs = { 'Input': dcc.Graph( id='input-graph', config={'displayModeBar': False}, ), 'Ventral': dcc.Graph( id='ventral-graph', config={'displayModeBar': False}, # style={ # 'height': '400px', # 'width' : '100%', # } ), 'Dorsal': dcc.Graph( id='dorsal-graph', config={'displayModeBar': False}, # style={ # 'height': '400px', # 'width' : '100%', # } ), } # Update intervals dash_intervals = html.Div([ dcc.Interval( id='interval-fast', # Too short an interval causes issues as not all plots can be updated before the next callback interval=0.1 * 1000, n_intervals=0 ), ]) # Graph layouts dash_layouts = { 'Input': go.Layout( legend={ 'orientation': 'v', 'x' : 1, 'xanchor' : 'right', 'y' : 1, 'yanchor' : 'top', }, showlegend=True, margin={ 'b': 20, 'l': 20, 'r': 0, 't': 0 }, xaxis={ 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, 'title' : 'Time', 'zeroline' : True }, yaxis={ 'fixedrange' : True, 'range' : [0, 1], 'showgrid' : PLOT_SHOWGRIDY, 'showticklabels': False, 'title' : 
'Activation', 'zeroline' : True } ), 'Ventral': go.Layout( annotations=[ { 'showarrow': False, 'text' : 'Striatal dMSNs', 'x' : (0.5 - PLOT_GAP) / 2, 'y' : 1.0, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Striatal iMSNs', 'x' : 1 - ((0.5 - PLOT_GAP) / 2), 'y' : 1.0, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Ventral tegmental area', 'x' : (0.5 - PLOT_GAP) / 2, 'y' : 0.82, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Ventral pallidum', 'x' : 1 - ((0.5 - PLOT_GAP) / 2), 'y' : 0.82, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Pedunculopontine nucleus', 'x' : 0.5, 'y' : 0.64, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, ], showlegend=False, margin={ 'b': 5, 'l': 0, 'r': 0, 't': 20 }, # dMSN xaxis1={ 'anchor' : 'y1', 'domain' : [0, 0.5 - PLOT_GAP], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # iMSN xaxis2={ 'anchor' : 'y1', 'domain' : [0.5 + PLOT_GAP, 1], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # VTA xaxis3={ 'anchor' : 'y2', 'domain' : [0, 0.5 - PLOT_GAP], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # Pal xaxis4={ 'anchor' : 'y2', 'domain' : [0.5 + PLOT_GAP, 1], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # PPn xaxis5={ 'anchor' : 'y3', 'domain' : [0, 1], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, 
# 'title' : 'Time', 'zeroline' : True }, # All MSNs yaxis1={ 'anchor' : 'x1', 'domain' : [0.9, 1], 'fixedrange' : True, 'range' : [0, 1.05], 'showgrid' : PLOT_SHOWGRIDY, 'showticklabels': False, # 'title' : 'Activation', 'zeroline' : True }, # VTA & Pal yaxis2={ 'anchor' : 'x3', 'domain' : [0.72, 0.82], 'fixedrange' : True, 'range' : [0, 1.05], 'showgrid' : PLOT_SHOWGRIDY, 'showticklabels': False, # 'title' : 'Activation', 'zeroline' : True }, #PPn yaxis3={ 'anchor' : 'x5', 'domain' : [0.54, 0.64], 'fixedrange' : True, 'range' : [0, 1.05], 'showgrid' : PLOT_SHOWGRIDY, 'showticklabels': False, # 'title' : 'Activation', 'zeroline' : True }, ), 'Dorsal': go.Layout( annotations=[ { 'showarrow': False, 'text' : 'Striatal dMSNs', 'x' : (0.5 - PLOT_GAP) / 2, 'y' : 1.0, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Striatal iMSNs', 'x' : 1 - ((0.5 - PLOT_GAP) / 2), 'y' : 1.0, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Subthalamic nucleus', 'x' : 0.5, 'y' : 0.82, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Substantia nigra pars reticulata', 'x' : (0.5 - PLOT_GAP) / 2, 'y' : 0.64, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Globus pallidus (external)', 'x' : 1 - ((0.5 - PLOT_GAP) / 2), 'y' : 0.64, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Thalamus', 'x' : (0.5 - PLOT_GAP) / 2, 'y' : 0.46, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Thalamic retiuclar nucleus', 'x' : 1 - ((0.5 - PLOT_GAP) / 2), 'y' : 0.46, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Motor cortex', 'x' : 0.5, 'y' : 0.28, 
'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, { 'showarrow': False, 'text' : 'Dopamine', 'x' : 0.5, 'y' : 0.1, 'xanchor' : 'center', 'yanchor' : 'bottom', 'xref' : 'paper', 'yref' : 'paper', }, ], showlegend=False, margin={ 'b': 5, 'l': 0, 'r': 0, 't': 20 }, # dMSN xaxis1={ 'anchor' : 'y1', 'domain' : [0, 0.5 - PLOT_GAP], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # iMSN xaxis2={ 'anchor' : 'y1', 'domain' : [0.5 + PLOT_GAP, 1], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # STN xaxis3={ 'anchor' : 'y2', 'domain' : [0, 1], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # SNr xaxis4={ 'anchor' : 'y3', 'domain' : [0, 0.5 - PLOT_GAP], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # GPe xaxis5={ 'anchor' : 'y3', 'domain' : [0.5 + PLOT_GAP, 1], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # Thal xaxis6={ 'anchor' : 'y4', 'domain' : [0, 0.5 - PLOT_GAP], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # TRN xaxis7={ 'anchor' : 'y4', 'domain' : [0.5 + PLOT_GAP, 1], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # Ctx xaxis8={ 'anchor' : 'y5', 'domain' : [0, 1], 'fixedrange' : True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # DA xaxis9={ 'anchor' : 'y6', 'domain' : [0, 1], 'fixedrange' 
: True, 'range' : [0, PLOT_LENGTH], 'showgrid' : PLOT_SHOWGRIDX, 'showticklabels': False, # 'title' : 'Time', 'zeroline' : True }, # All MSNs yaxis1={ 'anchor' : 'x1', 'domain' : [0.9, 1], 'fixedrange' : True, 'range' : [0, 1.05], 'showgrid' : PLOT_SHOWGRIDY, 'showticklabels': False, # 'title' : 'Activation', 'zeroline' : True }, # STN yaxis2={ 'anchor' : 'x3', 'domain' : [0.72, 0.82], 'fixedrange' : True, 'range' : [0, 1.05], 'showgrid' : PLOT_SHOWGRIDY, 'showticklabels': False, # 'title' : 'Activation', 'zeroline' : True }, # SNr & GPe yaxis3={ 'anchor' : 'x4', 'domain' : [0.54, 0.64], 'fixedrange' : True, 'range' : [0, 1.05], 'showgrid' : PLOT_SHOWGRIDY, 'showticklabels': False, # 'title' : 'Activation', 'zeroline' : True }, # Thal & TRN yaxis4={ 'anchor' : 'x6', 'domain' : [0.36, 0.46], 'fixedrange' : True, 'range' : [0, 1.05], 'showgrid' : PLOT_SHOWGRIDY, 'showticklabels': False, # 'title' : 'Activation', 'zeroline' : True }, # Ctx yaxis5={ 'anchor' : 'x8', 'domain' : [0.18, 0.28], 'fixedrange' : True, 'range' : [0, 1.05], 'showgrid' : PLOT_SHOWGRIDY, 'showticklabels': False, # 'title' : 'Activation', 'zeroline' : True }, # DA yaxis6={ 'anchor' : 'x9', 'domain' : [0, 0.1], 'fixedrange' : True, 'range' : [0, 1.05], 'showgrid' : PLOT_SHOWGRIDY, 'showticklabels': False, # 'title' : 'Activation', 'zeroline' : True }, ), } # Link BG graphs to specific layout axes bg_ax = { 'Ventral': { 'dMSN': { 'x': 'x1', 'y': 'y1', }, 'iMSN': { 'x': 'x2', 'y': 'y1', }, 'VTA': { 'x': 'x3', 'y': 'y2', }, 'Pal': { 'x': 'x4', 'y': 'y2', }, 'PPn': { 'x': 'x5', 'y': 'y3', }, }, 'Dorsal': { 'dMSN': { 'x': 'x1', 'y': 'y1', }, 'iMSN': { 'x': 'x2', 'y': 'y1', }, 'STN' : { 'x': 'x3', 'y': 'y2', }, 'SNr': { 'x': 'x4', 'y': 'y3', }, 'GPe' : { 'x': 'x5', 'y': 'y3', }, 'Thal': { 'x': 'x6', 'y': 'y4', }, 'TRN': { 'x': 'x7', 'y': 'y4', }, 'Ctx': { 'x': 'x8', 'y': 'y5', }, 'DA': { 'x': 'x9', 'y': 'y6', }, # TEMP 'LH_APPROACH': { 'x': 'x7', 'y': 'y4', }, 'LH_AVOID': { 'x': 'x7', 'y': 'y4', } } } # 
Page layout dash_rows = { 'Input': dbc.Row( dbc.Col( dbc.Card( [ dbc.CardHeader( ['Input'], className='bg-primary font-weight-bold lead' ), dbc.CardBody([ dash_graphs['Input'], html.Div(dash_controls['Input']), html.Div(dash_controls['LH']), ]), ], color='primary', inverse=True, outline=True, ), ), ), 'Output': dbc.Row([ dbc.Col( dbc.Card( [ dbc.CardHeader( 'Ventral BG', className='bg-info font-weight-bold lead' ), dbc.CardBody(dash_graphs['Ventral']), ], color='info', inverse=True, outline=True, ), ), dbc.Col( dbc.Card( [ dbc.CardHeader( 'Dorsal BG', className='bg-success font-weight-bold lead' ), dbc.CardBody(dash_graphs['Dorsal']), ], color='success', inverse=True, outline=True, ), ), ]), }
20.916542
96
0.448878
1,360
14,035
4.566912
0.183088
0.056352
0.076477
0.080502
0.734664
0.712929
0.705361
0.705361
0.675737
0.646595
0
0.040035
0.355753
14,035
670
97
20.947761
0.64687
0.089491
0
0.652614
0
0
0.248406
0
0
0
0
0.001493
0
1
0
false
0
0.006745
0
0.006745
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
fbbda67f502e7a61c768882813606add37f53202
183
py
Python
gamepicker/games/lottery.py
rouleau/gamepicker
581334bf6d340010af5cefb227a854d5275a2f39
[ "MIT" ]
null
null
null
gamepicker/games/lottery.py
rouleau/gamepicker
581334bf6d340010af5cefb227a854d5275a2f39
[ "MIT" ]
null
null
null
gamepicker/games/lottery.py
rouleau/gamepicker
581334bf6d340010af5cefb227a854d5275a2f39
[ "MIT" ]
null
null
null
""" Lottery module """ class Astro: """ Create an Astro ticket """ def __init__(self): """ Initialize Astro ticket """ self.name = "Astro"
14.076923
34
0.497268
17
183
5.117647
0.705882
0.252874
0
0
0
0
0
0
0
0
0
0
0.360656
183
12
35
15.25
0.74359
0.338798
0
0
0
0
0.060241
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
fbd1fa23d18f264961ca8841aa3e72c3677be3ec
280
py
Python
samples/data-serialization/ds-python/SimpleData.py
obecto/perper
ce25abde413bdb4c054a06d810939e98fac04d62
[ "MIT" ]
24
2019-11-11T13:26:12.000Z
2022-03-18T23:38:07.000Z
samples/data-serialization/ds-python/SimpleData.py
obecto/perper
ce25abde413bdb4c054a06d810939e98fac04d62
[ "MIT" ]
76
2020-01-25T16:48:37.000Z
2022-01-03T09:26:11.000Z
samples/data-serialization/ds-python/SimpleData.py
obecto/perper
ce25abde413bdb4c054a06d810939e98fac04d62
[ "MIT" ]
4
2020-06-25T13:21:37.000Z
2021-11-03T09:05:11.000Z
from collections import OrderedDict from pyignite import GenericObjectMeta from pyignite.datatypes import String, IntObject class SimpleData(metaclass=GenericObjectMeta, schema=OrderedDict([ ('name', String), ('priority', IntObject), ('json', String), ])): pass
23.333333
66
0.742857
27
280
7.703704
0.62963
0.115385
0
0
0
0
0
0
0
0
0
0
0.15
280
11
67
25.454545
0.87395
0
0
0
0
0
0.057143
0
0
0
0
0
0
1
0
true
0.111111
0.333333
0
0.444444
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
4
fbd31fc9d179165a1af4d54659b96771713abbf3
115
py
Python
thenewboston_node/project/settings/celery.py
olegtropinin/thenewboston-node
2de4e14ef6855646121840224a82fcfc505b213c
[ "MIT" ]
30
2021-03-05T22:08:17.000Z
2021-09-23T02:45:45.000Z
thenewboston_node/project/settings/celery.py
olegtropinin/thenewboston-node
2de4e14ef6855646121840224a82fcfc505b213c
[ "MIT" ]
148
2021-03-05T23:37:50.000Z
2021-11-02T02:18:58.000Z
thenewboston_node/project/settings/celery.py
olegtropinin/thenewboston-node
2de4e14ef6855646121840224a82fcfc505b213c
[ "MIT" ]
14
2021-03-05T21:58:46.000Z
2021-10-15T17:27:52.000Z
CELERY_BROKER_URL = 'amqp://guest:guest@127.0.0.1:5672//' # keep it for demo purpose although it is exact default
57.5
114
0.73913
21
115
3.952381
0.857143
0
0
0
0
0
0
0
0
0
0
0.1
0.130435
115
1
115
115
0.73
0.46087
0
0
0
0
0.583333
0.583333
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
8375c10af540f7ec7d166584837ce6285304ae4f
173
py
Python
arizona/utils/print_utils.py
phanxuanphucnd/wav2asr
6e4d6f6ce0165bd1f2baf3c219b7755dc2202c36
[ "MIT" ]
1
2021-06-23T01:41:46.000Z
2021-06-23T01:41:46.000Z
arizona/utils/print_utils.py
phanxuanphucnd/wav2asr
6e4d6f6ce0165bd1f2baf3c219b7755dc2202c36
[ "MIT" ]
null
null
null
arizona/utils/print_utils.py
phanxuanphucnd/wav2asr
6e4d6f6ce0165bd1f2baf3c219b7755dc2202c36
[ "MIT" ]
2
2021-07-28T14:51:47.000Z
2021-10-30T19:53:34.000Z
from arizona.version import __version__ def print_name(): print("") print('\n'.join([ ' 🅰 🆁 🅸 🆉 🅾 🅽 🅰 🅰 🆂 🆁 ({})'.format(__version__), '' ]))
21.625
59
0.462428
24
173
3.375
0.708333
0
0
0
0
0
0
0
0
0
0
0
0.306358
173
8
60
21.625
0.591667
0
0
0
0
0
0.166667
0
0
0
0
0
0
1
0.142857
true
0
0.142857
0
0.285714
0.428571
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
8376a910a92b0361c41e412378afb02719a702c3
212
py
Python
train/gen/freeze/models/particles/v4_kl_trunc7_limit100/setup.py
sammysiegel/SubtLeNet
94d1507a8a7c60548b59400109b6c4086ad83141
[ "MIT" ]
null
null
null
train/gen/freeze/models/particles/v4_kl_trunc7_limit100/setup.py
sammysiegel/SubtLeNet
94d1507a8a7c60548b59400109b6c4086ad83141
[ "MIT" ]
null
null
null
train/gen/freeze/models/particles/v4_kl_trunc7_limit100/setup.py
sammysiegel/SubtLeNet
94d1507a8a7c60548b59400109b6c4086ad83141
[ "MIT" ]
2
2019-07-08T20:18:22.000Z
2020-06-01T20:04:08.000Z
from subtlenet import config from subtlenet.generators import gen as generator from subtlenet.utils import set_processor config.limit = 100 generator.truncate = 7 set_processor("gpu") config.smear_params = None
23.555556
49
0.825472
30
212
5.733333
0.633333
0.226744
0
0
0
0
0
0
0
0
0
0.02139
0.117925
212
8
50
26.5
0.898396
0
0
0
0
0
0.014218
0
0
0
0
0
0
1
0
true
0
0.428571
0
0.428571
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
837c2f825d2607f1606644704d21f15868bbc38b
320
py
Python
legacy/admin.py
naderm/farnsworth
f7a635a82eae20ca395a939966bfa1e296d4e3a2
[ "BSD-2-Clause" ]
null
null
null
legacy/admin.py
naderm/farnsworth
f7a635a82eae20ca395a939966bfa1e296d4e3a2
[ "BSD-2-Clause" ]
null
null
null
legacy/admin.py
naderm/farnsworth
f7a635a82eae20ca395a939966bfa1e296d4e3a2
[ "BSD-2-Clause" ]
null
null
null
""" Project: Farnsworth Author: Karandeep Singh Nagra Legacy Kingman site admin pages. """ from django.contrib import admin from legacy.models import TeacherRequest, TeacherResponse, TeacherNote, \ TeacherEvent for p in [TeacherRequest, TeacherResponse, TeacherNote, TeacherEvent]: admin.site.register(p)
17.777778
73
0.771875
35
320
7.057143
0.685714
0.234818
0.323887
0.421053
0
0
0
0
0
0
0
0
0.15
320
17
74
18.823529
0.908088
0.2625
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
8387c3ffd24030ac4bc5859b95f05fdb3c744000
22
py
Python
dingtalk/python/alibabacloud_dingtalk/__init__.py
aliyun/dingtalk-sdk
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
[ "Apache-2.0" ]
15
2020-08-27T04:10:26.000Z
2022-03-07T06:25:42.000Z
dingtalk/python/alibabacloud_dingtalk/__init__.py
aliyun/dingtalk-sdk
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
[ "Apache-2.0" ]
1
2020-09-27T01:30:46.000Z
2021-12-29T09:15:34.000Z
dingtalk/python/alibabacloud_dingtalk/__init__.py
aliyun/dingtalk-sdk
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
[ "Apache-2.0" ]
5
2020-08-27T04:07:44.000Z
2021-12-03T02:55:20.000Z
__version__ = '1.2.24'
22
22
0.681818
4
22
2.75
1
0
0
0
0
0
0
0
0
0
0
0.2
0.090909
22
1
22
22
0.35
0
0
0
0
0
0.26087
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
83de52a388e3ae88f22555ed38121a5b799a06a5
757
py
Python
adlib/learners/iterative_retraining_learner.py
xyvivian/adlib
79a93baa8aa542080bbf55734168eb89317df83c
[ "MIT" ]
null
null
null
adlib/learners/iterative_retraining_learner.py
xyvivian/adlib
79a93baa8aa542080bbf55734168eb89317df83c
[ "MIT" ]
null
null
null
adlib/learners/iterative_retraining_learner.py
xyvivian/adlib
79a93baa8aa542080bbf55734168eb89317df83c
[ "MIT" ]
null
null
null
# iterative_retraining_learner.py # A learner that iteratively retrains and removes outliers based on loss. # Matthew Sedam from adlib.learners.learner import learner from typing import Dict class IterativeRetrainingLearner(learner): """ A learner that iteratively retrains and removes outliers based on loss. """ def __init__(self): learner.__init__(self) raise NotImplementedError def train(self): raise NotImplementedError def predict(self, instances): raise NotImplementedError def set_params(self, params: Dict): raise NotImplementedError def predict_proba(self, X): raise NotImplementedError def decision_function(self, X): raise NotImplementedError
23.65625
75
0.717305
82
757
6.463415
0.463415
0.271698
0.254717
0.086792
0.226415
0.226415
0.226415
0.226415
0.226415
0.226415
0
0
0.225892
757
31
76
24.419355
0.904437
0.250991
0
0.375
0
0
0
0
0
0
0
0
0
1
0.375
false
0
0.125
0
0.5625
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
83f28e442865534a1a794c7ae1f3ad770dad1f39
9,456
py
Python
models.py
JFlommersfeld/Actomyosin-contractions-in-soft-pillar-rings
0f8e9375f53da432a9cc54a208e5655bc80f9f45
[ "MIT" ]
null
null
null
models.py
JFlommersfeld/Actomyosin-contractions-in-soft-pillar-rings
0f8e9375f53da432a9cc54a208e5655bc80f9f45
[ "MIT" ]
null
null
null
models.py
JFlommersfeld/Actomyosin-contractions-in-soft-pillar-rings
0f8e9375f53da432a9cc54a208e5655bc80f9f45
[ "MIT" ]
null
null
null
import numpy as np from parameter_loader import load_parameters from math import pi class FullModel: """ A class that defines the full model for actomyosin contraction in soft pillar rings that accounts for both myosin filament binding and density changes Attributes: parameter_file (str): path to a file that contains all necessary parameters for the model (see provided examples). Methods: k_off_fil(total_force): calculates the load dependent steady state off-rate of a myosin filament. rhs(t, y): calculates the right hand side of the set of differential equations that describe the model. velocity(t, force, N): calculates the deflection velocity of the tip of the pillar. """ def __init__(self, parameter_file, pillar_stiffness): """ Sets all the necessary parameters for the FullModel object. Parameters: parameter_file (str): path to a file that contains all necessary parameters for the model (see provided examples). pillar_stiffness (float): stiffness of the pillars in the pillar ring in pN/um. """ self.x_catch, self.x_slip, self.k_off0_catch, self.k_off0_slip, self.k_on, self.k_on_fil, self.a_per_kBT, \ self.Nh, self.Nmax, self.h_eta, self.xi_rho_a2, self.rho_max_per_rho, \ self.R0 = load_parameters('full model', parameter_file) self.k_p = pillar_stiffness self.parameter_dict = {"x_catch": self.x_catch, "x_slip": self.x_slip, "k_off0_catch": self.k_off0_catch, "k_off0_slip": self.k_off0_slip, "k_on": self.k_on, "k_on_fil": self.k_on_fil, "a_per_kBT": self.a_per_kBT, "Nh": self.Nh, "Nmax": self.Nmax, "h_eta": self.h_eta, "xi_rho_a2": self.xi_rho_a2, "rho_max_per_rho": self.rho_max_per_rho, "R0": self.R0, "k_p": self.k_p} self.A0 = pi * self.R0**2 self.tau = 6. / 5. * pi * self.h_eta / self.k_p def __k_off(self, force): """Calculates the load dependent off-rate of an individual myosin head. Parameters: force (float): the average load that is applied to an individual myosin head. Returns: float: the average off-rate of the head. 
""" return self.k_off0_catch * np.exp(-self.a_per_kBT * force * self.x_catch) + \ self.k_off0_slip * np.exp(self.a_per_kBT * force * self.x_slip) def __calc_prob_dist(self, total_force): """Calculates the load dependent steady state probability distribution of the number of bound heads per myosin filament Parameters: total_force (float): the total load that is applied to the myosin filament. Returns: list(float): list of probabilities that n heads are bound per filament, where n is given by the list index. """ pns = [] for n in range(0, self.Nh + 1): nom = 1 for i in range(0, n): nom = nom * ((self.Nh - i) * self.k_on) / ((i + 1) * self.__k_off(total_force / (i + 1))) denom = 1 for k in range(1, self.Nh + 1): prod = 1 for j in range(0, k): prod = prod * ((self.Nh - j) * self.k_on) / ((j + 1) * self.__k_off(total_force / (j + 1))) denom = denom + prod pns.append(nom / denom) return pns def k_off_fil(self, total_force): """Calculates the load dependent steady state off-rate of a myosin filament. Parameters: total_force (float): the total load that is applied to the myosin filament. Returns: float: the off-rate of the filament. """ T_off_av = 0 pns = self.__calc_prob_dist(total_force) for NB_init in range(1, self.Nh + 1): T_off = 0 for NB in range(1, NB_init + 1): s = 0 for j in range(NB, self.Nh + 1): s = s + pns[j] T_off = T_off + 1 / (NB * self.__k_off(total_force / NB) * pns[NB]) * s T_off_av = T_off_av + pns[NB_init] * T_off return 1 / T_off_av def rhs(self, t, y): """Calculates the right hand side of the set of differential equations that describe the model. Parameters: t (float): the time point. 
y (list(float)): a list with elements y[0] = force on the pillar at time t and y[1] = number of bound filaments at time t Returns: list(float): the temporal derivative of the input y """ force = y[0] N = y[1] area = pi * (self.R0 - force / self.k_p) ** 2 density_factor = -self.A0 / area * (self.A0 / area - self.rho_max_per_rho) force_prime = -force / self.tau + self.xi_rho_a2 * N * density_factor / self.tau N_prime = self.k_on_fil * (self.Nmax - N) - self.k_off_fil(force) * N return [force_prime, N_prime] def velocity(self, t, force, N): """Calculates the deflection velocity of the tip of the pillar. Parameters: t (float): the time point. force (float): force on the pillar at time t N: number of bound filaments at time t Returns: float: the deflection velocity of the pillar tip at time t """ area = pi * (self.R0 - force / self.k_p) ** 2 density_factor = -self.A0 / area * (self.A0 / area - self.rho_max_per_rho) return (-force / self.tau + self.xi_rho_a2 * N * density_factor / self.tau) / self.k_p def get_parameter(self, parameter_name): """Get all model parameters Parameters: parameter_name (str): parameter name. Returns: float/int: the value of the specified parameter. """ return self.parameter_dict[parameter_name] class DensityModel: """ A class that defines the purley density dependent model for actomyosin contraction in soft pillar rings. ... Attributes: parameter_file (str): path to a file that contains all necessary parameters for the model (see provided examples). Methods: k_off_fil(total_force): calculates the load dependent steady state off-rate of a myosin filament. rhs(t, y): calculates the right hand side of the set of differential equations that describe the model. velocity(t, force, N): calculates the deflection velocity of the tip of the pillar. """ def __init__(self, parameter_file, pillar_stiffness): """ Sets all the necessary parameters for the DensityModel object. 
Parameters: parameter_file (str): path to a file that contains all necessary parameters for the model (see provided examples). pillar_stiffness (float): stiffness of the pillars in the pillar ring in pN/um. """ self.h_eta, self.xi_N_rho_a2, self.rho_max_per_rho, self.R0 = load_parameters('density model', parameter_file) self.k_p = pillar_stiffness self.parameter_dict = {"h_eta": self.h_eta, "xi_N_rho_a2": self.xi_N_rho_a2, "rho_max_per_rho": self.rho_max_per_rho, "R0": self.R0, "k_p": self.k_p} self.A0 = pi * self.R0 ** 2 self.tau = 6. / 5. * pi * self.h_eta / self.k_p def rhs(self, t, y): """Calculates the right hand side of the set of differential equations that describe the model. Parameters: t (float): the time point. y (list(float)): a list with a single element y[0] = force on the pillar at time t Returns: list(float): the temporal derivative of the input y """ force = y[0] area = pi * (self.R0 - force / self.k_p) ** 2 density_factor = -self.A0 / area * (self.A0 / area - self.rho_max_per_rho) force_prime = -force/self.tau + self.xi_N_rho_a2 * density_factor / self.tau return [force_prime] def velocity(self, t, force): """Calculates the deflection velocity of the tip of the pillar. Parameters: t (float): the time point. force (float): force on the pillar at time t Returns: float: the deflection velocity of the pillar tip at time t """ area = pi * (self.R0 - force / self.k_p) ** 2 density_factor = -self.A0 / area * (self.A0 / area - self.rho_max_per_rho) return (-force/self.tau + self.xi_N_rho_a2 * density_factor / self.tau)/self.k_p def get_parameter(self, parameter_name): """Get all model parameters Parameters: parameter_name (str): parameter name. Returns: float/int: the value of the specified parameter. """ return self.parameter_dict[parameter_name]
36.369231
120
0.570431
1,296
9,456
3.98534
0.125772
0.028074
0.01394
0.023233
0.787028
0.743078
0.708228
0.708228
0.681897
0.652856
0
0.012726
0.343486
9,456
259
121
36.509653
0.819265
0.430626
0
0.303797
0
0
0.034628
0
0
0
0
0
0
1
0.139241
false
0
0.037975
0
0.316456
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
83f55e63269f45a46cbfc719262a77d65d91d419
108
py
Python
python/testData/inspections/PyCompatibilityInspection/noWarningAboutStarredExpressionsInFunctionTypeComments.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/inspections/PyCompatibilityInspection/noWarningAboutStarredExpressionsInFunctionTypeComments.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/inspections/PyCompatibilityInspection/noWarningAboutStarredExpressionsInFunctionTypeComments.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
def create_instance(self, task_config, **kwargs): # type: (TaskConfig, **Text) -> TaskInstance pass
27
49
0.675926
12
108
5.916667
1
0
0
0
0
0
0
0
0
0
0
0
0.175926
108
3
50
36
0.797753
0.388889
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
83f5696f9af16d5bf9ebf7d654d668dd426d38b1
151
py
Python
bugtests/test209.py
doom38/jython_v2.2.1
0803a0c953c294e6d14f9fc7d08edf6a3e630a15
[ "CNRI-Jython" ]
null
null
null
bugtests/test209.py
doom38/jython_v2.2.1
0803a0c953c294e6d14f9fc7d08edf6a3e630a15
[ "CNRI-Jython" ]
null
null
null
bugtests/test209.py
doom38/jython_v2.2.1
0803a0c953c294e6d14f9fc7d08edf6a3e630a15
[ "CNRI-Jython" ]
null
null
null
""" """ import support import cPickle import test209p.foo.bar o = test209p.foo.bar.baz() s = cPickle.dumps(o) #print s o2 = cPickle.loads(s)
7.55
26
0.655629
23
151
4.304348
0.565217
0.222222
0.282828
0
0
0
0
0
0
0
0
0.057377
0.192053
151
19
27
7.947368
0.754098
0.046358
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
f7ad44b9fecc3a74b58eecabc86a0d674dd912a4
187
py
Python
src/core/__init__.py
abodacs/mistral
90a0ba9680a35bce214c82460f81a90577151230
[ "Apache-2.0" ]
null
null
null
src/core/__init__.py
abodacs/mistral
90a0ba9680a35bce214c82460f81a90577151230
[ "Apache-2.0" ]
null
null
null
src/core/__init__.py
abodacs/mistral
90a0ba9680a35bce214c82460f81a90577151230
[ "Apache-2.0" ]
null
null
null
""" Modules for core training, evaluation, and W&B logging processes """ from .callbacks import CustomCheckpointCallback, CustomWandbCallback from .trainer import OnlineBenchmarkTrainer
26.714286
68
0.823529
19
187
8.105263
0.894737
0
0
0
0
0
0
0
0
0
0
0
0.112299
187
6
69
31.166667
0.927711
0.342246
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
f7b7ab76b0e52354de75714e8e5c68edfc2111b8
167
py
Python
victimsdb_lib/errors.py
tisnik/victimsdb-lib
50c24b3791e2a42b0dea1c0d59009a67c2ddead3
[ "Apache-2.0" ]
2
2019-11-04T13:19:49.000Z
2022-03-09T21:53:51.000Z
victimsdb_lib/errors.py
tisnik/victimsdb-lib
50c24b3791e2a42b0dea1c0d59009a67c2ddead3
[ "Apache-2.0" ]
46
2018-09-08T06:51:39.000Z
2019-09-06T14:48:45.000Z
victimsdb_lib/errors.py
tisnik/victimsdb-lib
50c24b3791e2a42b0dea1c0d59009a67c2ddead3
[ "Apache-2.0" ]
4
2018-09-06T17:31:16.000Z
2020-04-16T14:03:23.000Z
"""Error definitions.""" class VictimsDBError(Exception): """Generic VictimsDB error.""" class ParseError(VictimsDBError): """Error parsing YAML files."""
16.7
35
0.688623
15
167
7.666667
0.733333
0
0
0
0
0
0
0
0
0
0
0
0.149701
167
9
36
18.555556
0.809859
0.413174
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
4
f7d35cf1338f2a854ee2c87095c6607c30026f00
716
py
Python
src/tensorneko/layer/log.py
ControlNet/tensorneko
70dfb2f6395e1703dbdf5d5adcfed7b1334efb8f
[ "MIT" ]
9
2021-05-23T17:38:09.000Z
2021-12-30T19:12:12.000Z
src/tensorneko/layer/log.py
ControlNet/tensorneko
70dfb2f6395e1703dbdf5d5adcfed7b1334efb8f
[ "MIT" ]
null
null
null
src/tensorneko/layer/log.py
ControlNet/tensorneko
70dfb2f6395e1703dbdf5d5adcfed7b1334efb8f
[ "MIT" ]
null
null
null
import torch from torch import Tensor, log from ..neko_module import NekoModule class Log(NekoModule): """ The module version of :func:`torch.log` operation. Args: eps (``float``, optional): A bias applied to the input to avoid ``-inf``. Default ``0``. Examples:: >>> log = Log() >>> a = torch.randn(5) >>> a tensor([ 2.3020, -0.8679, -0.2174, 2.4228, -1.2341]) >>> log(a) tensor([0.8338, nan, nan, 0.8849, nan]) """ def __init__(self, eps: float = 0.): super().__init__() self.eps: float = eps def forward(self, x: Tensor) -> Tensor: return log(x) if self.eps == 0 else log(x + self.eps)
23.096774
96
0.536313
97
716
3.865979
0.505155
0.074667
0.058667
0.085333
0
0
0
0
0
0
0
0.077689
0.298883
716
30
97
23.866667
0.669323
0.48324
0
0
0
0
0
0
0
0
0
0
0
1
0.222222
false
0
0.333333
0.111111
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
f7e39fe394d3295f9ba155a22c5dbfd4a85085cd
413
py
Python
qss/utils/__init__.py
ATLAS-Titan/allocation-modeling
b315aa7ac0cf613ed02c59188ff19e9738f36aca
[ "Apache-2.0" ]
null
null
null
qss/utils/__init__.py
ATLAS-Titan/allocation-modeling
b315aa7ac0cf613ed02c59188ff19e9738f36aca
[ "Apache-2.0" ]
null
null
null
qss/utils/__init__.py
ATLAS-Titan/allocation-modeling
b315aa7ac0cf613ed02c59188ff19e9738f36aca
[ "Apache-2.0" ]
null
null
null
# # Copyright European Organization for Nuclear Research (CERN) # National Research Centre "Kurchatov Institute" # Rutgers University # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # from .enum import EnumTypes
29.5
66
0.714286
57
413
5.175439
0.77193
0.101695
0.088136
0.108475
0
0
0
0
0
0
0
0.012195
0.205811
413
13
67
31.769231
0.887195
0.874092
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
f7fd4b12c53e66fb958f8caf63bbc2ade57a36d4
263
py
Python
Stack_Using_List.py
thegautamkumarjaiswal/Data_Structures_in-_Python
5ca83b278aaa13b3eee9e8109aad97909545b523
[ "Apache-2.0" ]
null
null
null
Stack_Using_List.py
thegautamkumarjaiswal/Data_Structures_in-_Python
5ca83b278aaa13b3eee9e8109aad97909545b523
[ "Apache-2.0" ]
null
null
null
Stack_Using_List.py
thegautamkumarjaiswal/Data_Structures_in-_Python
5ca83b278aaa13b3eee9e8109aad97909545b523
[ "Apache-2.0" ]
null
null
null
# python stack using list # my_Stack = [10, 12, 13, 11, 33, 24, 56, 78, 13, 56, 31, 32, 33, 10, 15] # array # print(my_Stack) print(my_Stack.pop()) # think python simple just pop and push # print(my_Stack.pop()) print(my_Stack.pop()) print(my_Stack.pop())
21.916667
85
0.65019
48
263
3.4375
0.5
0.254545
0.363636
0.363636
0.272727
0.272727
0.272727
0.272727
0
0
0
0.138889
0.178707
263
11
86
23.909091
0.625
0.262357
0
0.666667
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.833333
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
f73eed7e2c3b541fcdd6322f243ee0ce268c9008
89
py
Python
coaches/apps.py
keeks-mtl/go-tennis
af3f325a9cfb2faba4d935824492f4aea6d10309
[ "W3C", "PostgreSQL" ]
null
null
null
coaches/apps.py
keeks-mtl/go-tennis
af3f325a9cfb2faba4d935824492f4aea6d10309
[ "W3C", "PostgreSQL" ]
null
null
null
coaches/apps.py
keeks-mtl/go-tennis
af3f325a9cfb2faba4d935824492f4aea6d10309
[ "W3C", "PostgreSQL" ]
null
null
null
from django.apps import AppConfig class CoachesConfig(AppConfig): name = 'coaches'
14.833333
33
0.752809
10
89
6.7
0.9
0
0
0
0
0
0
0
0
0
0
0
0.168539
89
5
34
17.8
0.905405
0
0
0
0
0
0.078652
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
f77c0e6ac1bd6b9c34c0d2d474c653895e99e94b
60
py
Python
ex50.py
cohadar/learn-python-the-hard-way
10d88fe59a8abc5303661cfe91c6db9fa71bdd56
[ "MIT" ]
null
null
null
ex50.py
cohadar/learn-python-the-hard-way
10d88fe59a8abc5303661cfe91c6db9fa71bdd56
[ "MIT" ]
null
null
null
ex50.py
cohadar/learn-python-the-hard-way
10d88fe59a8abc5303661cfe91c6db9fa71bdd56
[ "MIT" ]
null
null
null
# ah but I am not interested in web development with python!
60
60
0.783333
11
60
4.272727
1
0
0
0
0
0
0
0
0
0
0
0
0.183333
60
1
60
60
0.959184
0.966667
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
f79bc2bb8679ab0e3f70c8b1bd2ac21ab1d11d84
6,724
py
Python
dkmri/tests/test_dkmri.py
kerkelae/dkmri
af07880faa09b007d7ea56018ab9dbd9ae1ca223
[ "MIT" ]
2
2022-03-23T12:53:55.000Z
2022-03-31T08:54:05.000Z
dkmri/tests/test_dkmri.py
kerkelae/dkmri
af07880faa09b007d7ea56018ab9dbd9ae1ca223
[ "MIT" ]
3
2022-02-02T09:07:18.000Z
2022-02-03T16:59:28.000Z
dkmri/tests/test_dkmri.py
kerkelae/dkmri
af07880faa09b007d7ea56018ab9dbd9ae1ca223
[ "MIT" ]
null
null
null
import numpy as np import numpy.testing as npt import dkmri SEED = 123 params = np.array( [ 7.90764792, 0.88660664, 0.82186469, 0.81741033, 0.25016042, 0.12341918, 0.28344717, 0.97744794, 0.64809536, 0.54047796, 0.09333558, -0.06614247, 0.07547532, 0.16822022, 0.12438352, 0.14840455, 0.16173709, 0.17534938, 0.42078548, -0.05851049, 0.07203667, 0.12034342, ] ) def test_design_matrix(): bvals = np.arange(5) bvecs = np.array( [ [1.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0], ] ) desired_X = np.array( [ [1.0, 1.0, 1.0, 1.0, 1.0], [0.0, -1.0, -2.0, -0.0, -0.0], [0.0, -0.0, -0.0, -3.0, -0.0], [0.0, -0.0, -0.0, -0.0, -4.0], [0.0, -0.0, -0.0, -0.0, -0.0], [0.0, -0.0, -0.0, -0.0, -0.0], [0.0, -0.0, -0.0, -0.0, -0.0], [0.0, 1 / 6, 2 / 3, 0.0, 0.0], [0.0, 0.0, 0.0, 1.5, 0.0], [0.0, 0.0, 0.0, 0.0, 8 / 3], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], ] ).T X = dkmri.design_matrix(bvals, bvecs) npt.assert_almost_equal(X, desired_X) def test_params_to_D(): desired_D = np.array( [ [0.88660664, 0.25016042, 0.12341918], [0.25016042, 0.82186469, 0.28344717], [0.12341918, 0.28344717, 0.81741033], ] ) D = dkmri.params_to_D(params) npt.assert_almost_equal(D, desired_D) def test_params_to_W(): desired_W = np.array( [ [ [ [1.37882815, 0.131663, -0.09330328], [0.131663, 0.22815298, -0.0825373], [-0.09330328, -0.0825373, 0.24735503], ], [ [0.131663, 0.22815298, -0.0825373], [0.22815298, 0.10646858, 0.10161789], [-0.0825373, 0.10161789, 0.16976136], ], [ [-0.09330328, -0.0825373, 0.24735503], [-0.0825373, 0.10161789, 0.16976136], [0.24735503, 0.16976136, 0.17546049], ], ], [ [ [0.131663, 0.22815298, -0.0825373], [0.22815298, 0.10646858, 0.10161789], [-0.0825373, 0.10161789, 
0.16976136], ], [ [0.22815298, 0.10646858, 0.10161789], [0.10646858, 0.9142299, 0.23729835], [0.10161789, 0.23729835, 0.59357726], ], [ [-0.0825373, 0.10161789, 0.16976136], [0.10161789, 0.23729835, 0.59357726], [0.16976136, 0.59357726, 0.20934554], ], ], [ [ [-0.09330328, -0.0825373, 0.24735503], [-0.0825373, 0.10161789, 0.16976136], [0.24735503, 0.16976136, 0.17546049], ], [ [-0.0825373, 0.10161789, 0.16976136], [0.10161789, 0.23729835, 0.59357726], [0.16976136, 0.59357726, 0.20934554], ], [ [0.24735503, 0.16976136, 0.17546049], [0.16976136, 0.59357726, 0.20934554], [0.17546049, 0.20934554, 0.76242038], ], ], ] ) W = dkmri.params_to_W(params) npt.assert_almost_equal(W, desired_W) def test_tensors_to_params(): S0 = np.exp(params[..., 0]) D = dkmri.params_to_D(params) W = dkmri.params_to_W(params) npt.assert_almost_equal(dkmri.tensors_to_params(S0, D, W), params) return def test__adc(): np.random.seed(SEED) D = dkmri.params_to_D(params) for _ in range(100): v = np.random.random((1, 3)) - 0.5 v /= np.linalg.norm(v) desired_adc = (v @ D @ v.T)[0] adc = np.asarray(dkmri._adc(params, v)) npt.assert_almost_equal(adc, desired_adc) vs = np.vstack((v, v)) adcs = np.asarray(dkmri._adc(params, vs)) npt.assert_almost_equal(adcs[0], adc) npt.assert_almost_equal(adcs[1], adc) def test_params_to_md(): desired_md = 0.8419605533333335 md = dkmri.params_to_md(params) npt.assert_almost_equal(md, desired_md) def test_params_to_ad(): desired_ad = 1.2839527280964818 ad = dkmri.params_to_ad(params) npt.assert_almost_equal(ad, desired_ad) def test_params_to_rd(): desired_rd = 0.6209644659517595 rd = dkmri.params_to_rd(params) npt.assert_almost_equal(rd, desired_rd) def test_params_to_fa(): desired_fa = 0.4425100287524919 fa = dkmri.params_to_fa(params) npt.assert_almost_equal(fa, desired_fa) def test__akc(): np.random.seed(SEED) D = dkmri.params_to_D(params) W = dkmri.params_to_W(params) for _ in range(100): v = np.random.random((1, 3)) - 0.5 v /= np.linalg.norm(v) md = 
dkmri.params_to_md(params) adc = dkmri._adc(params, v) desired_akc = (md / adc) ** 2 * v[0] @ (v[0] @ W @ v[0]) @ v[0] akc = np.asarray(dkmri._akc(params, v)) npt.assert_almost_equal(akc, desired_akc) vs = np.vstack((v, v)) akcs = np.asarray(dkmri._akc(params, vs)) npt.assert_almost_equal(akcs[0], akc) npt.assert_almost_equal(akcs[1], akc) def test_params_to_mk(): desired_mk = 1.1124342668323295 mk = dkmri.params_to_mk(params) npt.assert_almost_equal(mk, desired_mk) def test_params_to_ak(): desired_ak = 0.7109767625600302 ak = dkmri.params_to_ak(params) npt.assert_almost_equal(ak, desired_ak) def test_params_to_rk(): desired_rk = 1.5180490434619633 rk = dkmri.params_to_rk(params) npt.assert_almost_equal(rk, desired_rk) def test__mtk(): desired_mtk = 1.0387297963232285 mtk = dkmri._mtk(params) npt.assert_almost_equal(mtk, desired_mtk)
28.371308
71
0.48022
948
6,724
3.257384
0.113924
0.136658
0.194301
0.244819
0.576101
0.433614
0.376295
0.32513
0.321891
0.321891
0
0.328037
0.363474
6,724
236
72
28.491525
0.393458
0
0
0.357843
0
0
0
0
0
0
0
0
0.088235
1
0.068627
false
0
0.014706
0
0.088235
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f7a143d01a5666c968bc655ce365cacc376d51e8
91
py
Python
revscoring/features/wikibase/features/__init__.py
kevinbazira/revscoring
625f8b8048eb3c0c1c872ed9c15687c56f125747
[ "MIT" ]
49
2015-07-15T14:53:06.000Z
2018-08-20T15:00:31.000Z
revscoring/features/wikibase/features/__init__.py
kevinbazira/revscoring
625f8b8048eb3c0c1c872ed9c15687c56f125747
[ "MIT" ]
224
2015-06-14T23:22:43.000Z
2018-08-08T22:52:46.000Z
revscoring/features/wikibase/features/__init__.py
kevinbazira/revscoring
625f8b8048eb3c0c1c872ed9c15687c56f125747
[ "MIT" ]
36
2015-07-03T03:25:01.000Z
2018-05-25T10:21:08.000Z
from .diff import Diff from .revision_oriented import Revision __all__ = [Revision, Diff]
18.2
39
0.791209
12
91
5.583333
0.5
0
0
0
0
0
0
0
0
0
0
0
0.142857
91
4
40
22.75
0.858974
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
e3986976c80b850d6ea7e0f24fc0c0430594110b
25
py
Python
data/studio21_generated/introductory/4848/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/4848/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/4848/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
def char_freq(message):
12.5
23
0.76
4
25
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.12
25
2
24
12.5
0.818182
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
e3b0386edd0c1dadd823a4398651a6ed34f90b72
1,009
py
Python
python/Chapter1/Chapter1/bitwise/bitmask-fixed.py
wboswall/academia
1571e8f9aceb21564f601cb79120ae56068fe3dd
[ "MIT" ]
null
null
null
python/Chapter1/Chapter1/bitwise/bitmask-fixed.py
wboswall/academia
1571e8f9aceb21564f601cb79120ae56068fe3dd
[ "MIT" ]
null
null
null
python/Chapter1/Chapter1/bitwise/bitmask-fixed.py
wboswall/academia
1571e8f9aceb21564f601cb79120ae56068fe3dd
[ "MIT" ]
null
null
null
#! /bin/env python3 ''' Class that represents a bit mask. It has methods representing all the bitwise operations plus some additional features. The methods return a new BitMask object or a boolean result. See the bits module for more on the operations provided. ''' class BitMask(int): def AND(self,bm): return BitMask(self & bm) def OR(self,bm): return BitMask(self | bm) def XOR(self,bm): return BitMask(self ^ bm) def NOT(self): return BitMask(~self) def shiftleft(self, num): return BitMask(self << num) def shiftright(self, num): return BitMask(self >> num) def bit(self, num): mask = 1 << num return bool(self & mask) def setbit(self, num): mask = 1 << num return BitMask(self | mask) def zerobit(self, num): mask = ~(1 << num) return BitMask(self & mask) def listbits(self, start=0,end=None): if end: end = end if end < 0 else end+2 return [int(c) for c in bin(self)[start+2:end]]
26.552632
53
0.630327
152
1,009
4.184211
0.388158
0.163522
0.213836
0.125786
0.382075
0.382075
0.349057
0.122642
0.122642
0.122642
0
0.010667
0.25669
1,009
37
54
27.27027
0.837333
0.258672
0
0.08
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0
0.24
0.84
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
e3b22700087fd0b25bc8f078b592b47e07c653cc
165
py
Python
codedigger/codeforces/scraper.py
jyothiprakashpanaik/Backend
9ab1b57436a0a1a6197777c0b36c842e71121d3a
[ "Apache-2.0" ]
17
2020-10-07T22:40:37.000Z
2022-01-20T07:19:09.000Z
codedigger/codeforces/scraper.py
jyothiprakashpanaik/Backend
9ab1b57436a0a1a6197777c0b36c842e71121d3a
[ "Apache-2.0" ]
42
2021-06-03T01:58:04.000Z
2022-01-31T14:49:22.000Z
codedigger/codeforces/scraper.py
jyothiprakashpanaik/Backend
9ab1b57436a0a1a6197777c0b36c842e71121d3a
[ "Apache-2.0" ]
25
2020-10-06T17:55:19.000Z
2021-12-09T07:56:50.000Z
import requests from bs4 import BeautifulSoup def problem_page(url): res = requests.get(url) soup = BeautifulSoup(res.content, 'html5lib') return soup
18.333333
49
0.727273
21
165
5.666667
0.714286
0
0
0
0
0
0
0
0
0
0
0.014925
0.187879
165
8
50
20.625
0.873134
0
0
0
0
0
0.048485
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
e3c4fcbf4f18dc9854adc2ac683132d96f8f8372
192
py
Python
setup.py
andrewp-as-is/django-objects-count.py
d34fea15dbe6a80100f2ad3004b3f32d0e5cbfa9
[ "Unlicense" ]
null
null
null
setup.py
andrewp-as-is/django-objects-count.py
d34fea15dbe6a80100f2ad3004b3f32d0e5cbfa9
[ "Unlicense" ]
null
null
null
setup.py
andrewp-as-is/django-objects-count.py
d34fea15dbe6a80100f2ad3004b3f32d0e5cbfa9
[ "Unlicense" ]
null
null
null
from setuptools import setup setup( name='django-objects-count', version='2021.6.24', packages=[ 'django_objects_count', 'django_objects_count.migrations' ] )
17.454545
41
0.645833
21
192
5.714286
0.666667
0.325
0.45
0
0
0
0
0
0
0
0
0.047619
0.234375
192
10
42
19.2
0.768707
0
0
0
0
0
0.416667
0.161458
0
0
0
0
0
1
0
true
0
0.111111
0
0.111111
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
e3d765a6da8f5ccb470cfb18e3b703a57ad9e394
95
py
Python
tests/testdata/word_count/map_invalid.py
eecs485staff/michigan-hadoop
e1e2abcafe807ee620bf0bd809af43d6974ea7fd
[ "MIT" ]
1
2022-03-29T00:05:08.000Z
2022-03-29T00:05:08.000Z
tests/testdata/word_count/map_invalid.py
eecs485staff/madoop
e1e2abcafe807ee620bf0bd809af43d6974ea7fd
[ "MIT" ]
33
2021-10-24T01:58:29.000Z
2022-03-31T08:08:20.000Z
tests/testdata/word_count/map_invalid.py
eecs485staff/madoop
e1e2abcafe807ee620bf0bd809af43d6974ea7fd
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 """Invalid map executable returns non-zero.""" import sys sys.exit(1)
13.571429
46
0.705263
15
95
4.466667
0.933333
0
0
0
0
0
0
0
0
0
0
0.024096
0.126316
95
6
47
15.833333
0.783133
0.652632
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
e3da1d0e75cf32b6d5100db02234098dfa0fd253
163
py
Python
examples/docs_snippets/docs_snippets/guides/dagster/dagster_type_factories/schema_execution.py
rpatil524/dagster
6f918d94cbd543ab752ab484a65e3a40fd441716
[ "Apache-2.0" ]
1
2021-01-31T19:16:29.000Z
2021-01-31T19:16:29.000Z
examples/docs_snippets/docs_snippets/guides/dagster/dagster_type_factories/schema_execution.py
rpatil524/dagster
6f918d94cbd543ab752ab484a65e3a40fd441716
[ "Apache-2.0" ]
null
null
null
examples/docs_snippets/docs_snippets/guides/dagster/dagster_type_factories/schema_execution.py
rpatil524/dagster
6f918d94cbd543ab752ab484a65e3a40fd441716
[ "Apache-2.0" ]
1
2019-09-11T03:02:27.000Z
2019-09-11T03:02:27.000Z
from .schema import df, trips_schema trips_schema.validate(df) # => SchemaError: non-nullable series 'end_time' contains null values: # => 22 NaT # => 43 NaT
23.285714
70
0.711656
23
163
4.913043
0.782609
0.19469
0
0
0
0
0
0
0
0
0
0.02963
0.171779
163
6
71
27.166667
0.807407
0.564417
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
e3dfdc0b12d509469e74ffe8413188bf5e40f70f
2,079
py
Python
usr/lib64/python2.6/site-packages/svn/delta.py
devop-mmcgrath/openshift-svn-cartridge
6cfe801adcdb68186a8c420b420ff6c0ccaadbb5
[ "Apache-2.0" ]
2
2017-09-28T15:02:43.000Z
2018-02-09T05:52:33.000Z
usr/lib64/python2.6/site-packages/svn/delta.py
devop-mmcgrath/openshift-svn-cartridge
6cfe801adcdb68186a8c420b420ff6c0ccaadbb5
[ "Apache-2.0" ]
null
null
null
usr/lib64/python2.6/site-packages/svn/delta.py
devop-mmcgrath/openshift-svn-cartridge
6cfe801adcdb68186a8c420b420ff6c0ccaadbb5
[ "Apache-2.0" ]
null
null
null
# # delta.py: public Python interface for delta components # # Subversion is a tool for revision control. # See http://subversion.tigris.org for more information. # ###################################################################### # # Copyright (c) 2000-2004 CollabNet. All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms # are also available at http://subversion.tigris.org/license-1.html. # If newer versions of this license are posted there, you may use a # newer version instead, at your option. # ###################################################################### from libsvn.delta import * from svn.core import _unprefix_names _unprefix_names(locals(), 'svn_delta_') _unprefix_names(locals(), 'svn_txdelta_', 'tx_') del _unprefix_names class Editor: def set_target_revision(self, target_revision, pool=None): pass def open_root(self, base_revision, dir_pool=None): return None def delete_entry(self, path, revision, parent_baton, pool=None): pass def add_directory(self, path, parent_baton, copyfrom_path, copyfrom_revision, dir_pool=None): return None def open_directory(self, path, parent_baton, base_revision, dir_pool=None): return None def change_dir_prop(self, dir_baton, name, value, pool=None): pass def close_directory(self, dir_baton, pool=None): pass def add_file(self, path, parent_baton, copyfrom_path, copyfrom_revision, file_pool=None): return None def open_file(self, path, parent_baton, base_revision, file_pool=None): return None def apply_textdelta(self, file_baton, base_checksum, pool=None): return None def change_file_prop(self, file_baton, name, value, pool=None): pass def close_file(self, file_baton, text_checksum, pool=None): pass def close_edit(self, pool=None): pass def abort_edit(self, pool=None): pass def make_editor(editor, pool=None): return svn_swig_py_make_editor(editor, pool)
27.72
77
0.683502
285
2,079
4.775439
0.382456
0.08817
0.070536
0.08817
0.394563
0.360764
0.232182
0.171932
0
0
0
0.005193
0.166426
2,079
74
78
28.094595
0.78015
0.246753
0
0.368421
0
0
0.017731
0
0
0
0
0
0
1
0.394737
false
0.210526
0.052632
0.184211
0.657895
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
5406ad3920b6488f5e9abdb94556a4c8561e67ae
72
py
Python
easymindoc/easymindoc/files/video.py
zsb514/easy_mindoc
326d926af8025ebcd69097028c2684c47d99f900
[ "WTFPL" ]
null
null
null
easymindoc/easymindoc/files/video.py
zsb514/easy_mindoc
326d926af8025ebcd69097028c2684c47d99f900
[ "WTFPL" ]
null
null
null
easymindoc/easymindoc/files/video.py
zsb514/easy_mindoc
326d926af8025ebcd69097028c2684c47d99f900
[ "WTFPL" ]
null
null
null
vid_parttern = r'' class Video: def __init__(self): pass
9
23
0.583333
9
72
4.111111
1
0
0
0
0
0
0
0
0
0
0
0
0.319444
72
7
24
10.285714
0.755102
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0.25
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
5428d4691b6a7ee529ad9dbc6df1527d624fb365
318
py
Python
src/tsgettoolbox/ulmo/nasa/daymet/__init__.py
timcera/tsgettoolbox
828306aefaa097a74abd8e71605bd19eeda29058
[ "BSD-3-Clause" ]
4
2017-11-21T20:22:47.000Z
2021-09-27T13:27:05.000Z
src/tsgettoolbox/ulmo/nasa/daymet/__init__.py
timcera/tsgettoolbox
828306aefaa097a74abd8e71605bd19eeda29058
[ "BSD-3-Clause" ]
21
2016-04-28T16:52:18.000Z
2021-12-16T17:00:27.000Z
src/tsgettoolbox/ulmo/nasa/daymet/__init__.py
timcera/tsgettoolbox
828306aefaa097a74abd8e71605bd19eeda29058
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """ `NASA EARTHDATA ORNL DAAC Daymet`_ web services .. _NASA EARTHDATA ORNL DAAC Daymet: https://daymet.ornl.gov/dataaccess.html """ from __future__ import absolute_import from tsgettoolbox.ulmo import util from . import core from .core import get_daymet_singlepixel, get_variables
22.714286
80
0.745283
42
318
5.404762
0.595238
0.114537
0.14978
0.185022
0.237885
0
0
0
0
0
0
0.003731
0.157233
318
13
81
24.461538
0.843284
0.468553
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
581721ea729a3f0f284d0f91ce507d44e3b294d9
264
py
Python
tests/services.py
tolomea/django-lazy-services
035220e4945673d6c08930c610149085b4918d82
[ "BSD-3-Clause" ]
5
2020-03-15T11:38:01.000Z
2020-03-26T10:29:15.000Z
tests/services.py
tolomea/django-lazy-services
035220e4945673d6c08930c610149085b4918d82
[ "BSD-3-Clause" ]
null
null
null
tests/services.py
tolomea/django-lazy-services
035220e4945673d6c08930c610149085b4918d82
[ "BSD-3-Clause" ]
null
null
null
class Service: def __init__(self): self.base = 7 def set_val(self, val): self.func_val = val def get_val(self): return self.func_val class Service2(Service): def __init__(self): # pragma: no cover self.base = 8
17.6
43
0.594697
37
264
3.918919
0.459459
0.144828
0.193103
0.248276
0
0
0
0
0
0
0
0.016393
0.306818
264
14
44
18.857143
0.775956
0.060606
0
0.2
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0
0.1
0.7
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
5822dd45d5c396484a5e2f07890bd99e44f0e1ff
63
py
Python
test/__init__.py
staadecker/formula-prompt
dfe0b2025a327d1da81146257c3def6693fdf1e4
[ "MIT" ]
1
2021-03-10T22:27:42.000Z
2021-03-10T22:27:42.000Z
test/__init__.py
staadecker/formula-prompt
dfe0b2025a327d1da81146257c3def6693fdf1e4
[ "MIT" ]
null
null
null
test/__init__.py
staadecker/formula-prompt
dfe0b2025a327d1da81146257c3def6693fdf1e4
[ "MIT" ]
null
null
null
# Copyright (c) 2021 Martin Staadecker under the MIT License
21
61
0.761905
9
63
5.333333
1
0
0
0
0
0
0
0
0
0
0
0.078431
0.190476
63
2
62
31.5
0.862745
0.920635
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
586e2168d3107927ae2f9f9bdef9900e45f7fec4
399
py
Python
saturns_rings/ringer/forms.py
AjaySRathore/saturnsrings
76e17e2ee9252841cf1e406f4ad271b8ffedef38
[ "BSD-3-Clause" ]
1
2020-11-08T06:58:20.000Z
2020-11-08T06:58:20.000Z
saturns_rings/ringer/forms.py
AjaySRathore/saturnsrings
76e17e2ee9252841cf1e406f4ad271b8ffedef38
[ "BSD-3-Clause" ]
null
null
null
saturns_rings/ringer/forms.py
AjaySRathore/saturnsrings
76e17e2ee9252841cf1e406f4ad271b8ffedef38
[ "BSD-3-Clause" ]
null
null
null
from django import forms from ringer.models import Ringer class RingerLoginForm(forms.Form): """Generates a login form with two fields. Attributes: username -- form field for username. password -- form field for password with forms.PasswordInput() widget. """ username = forms.CharField(max_length=150) password = forms.CharField(widget=forms.PasswordInput())
30.692308
77
0.714286
47
399
6.042553
0.553191
0.06338
0.084507
0
0
0
0
0
0
0
0
0.009375
0.197995
399
12
78
33.25
0.878125
0.401003
0
0
1
0
0
0
0
0
0
0
0
1
0
false
0.2
0.4
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
4
54ab1b020fb5834f3477b73f0fad1f0629944b83
493
py
Python
django-server/fras/attendance/admin.py
ArleneAndrews/Facial-Recognition-Attendance-System
104d17e56af87358974331ef491949b557ab2f01
[ "MIT" ]
52
2019-01-29T14:46:17.000Z
2022-01-14T16:11:37.000Z
django-server/fras/attendance/admin.py
etrigaen47/Facial-Recognition-Attendance-System
ad0bd18cf9582cc12002baf8c92f6638f632c46e
[ "MIT" ]
13
2018-11-04T12:29:48.000Z
2020-02-11T23:47:35.000Z
django-server/fras/attendance/admin.py
etrigaen47/Facial-Recognition-Attendance-System
ad0bd18cf9582cc12002baf8c92f6638f632c46e
[ "MIT" ]
16
2019-03-07T11:07:16.000Z
2021-08-13T07:19:28.000Z
# Register your models here. from django.contrib import admin from attendance.models.CapturedFrame import CapturedFrame from attendance.models.FaceId import FaceId from attendance.models.LectureAttendance import LectureAttendance from attendance.models.Student import Student from attendance.models.WorkingDay import WorkingDay admin.site.register(WorkingDay) admin.site.register(LectureAttendance) admin.site.register(CapturedFrame) admin.site.register(Student) admin.site.register(FaceId)
32.866667
65
0.860041
59
493
7.186441
0.271186
0.165094
0.235849
0.127358
0
0
0
0
0
0
0
0
0.073022
493
14
66
35.214286
0.92779
0.052738
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.545455
0
0.545455
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
54c0e74b48bbf3e9c7c4bb959f4f5af9fcd53390
125
py
Python
example/api/service/__init__.py
WandyYing/mussel
61711ec07078ee089ba8011a8ef688beaee10de7
[ "MIT" ]
null
null
null
example/api/service/__init__.py
WandyYing/mussel
61711ec07078ee089ba8011a8ef688beaee10de7
[ "MIT" ]
1
2021-12-15T16:28:37.000Z
2021-12-15T16:28:37.000Z
example/api/service/__init__.py
WandyYing/mussel
61711ec07078ee089ba8011a8ef688beaee10de7
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ @author: ying jun @email: wandy1208@live.com @time: 2021/12/12 22:55 """
13.888889
26
0.608
20
125
3.8
0.95
0
0
0
0
0
0
0
0
0
0
0.168224
0.144
125
8
27
15.625
0.542056
0.896
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
b71727878b24a92463b6f7d794089814acdc3c9a
16
py
Python
main/texinfo/update.py
RoastVeg/cports
803c7f07af341eb32f791b6ec1f237edb2764bd5
[ "BSD-2-Clause" ]
46
2021-06-10T02:27:32.000Z
2022-03-27T11:33:24.000Z
main/texinfo/update.py
RoastVeg/cports
803c7f07af341eb32f791b6ec1f237edb2764bd5
[ "BSD-2-Clause" ]
58
2021-07-03T13:58:20.000Z
2022-03-13T16:45:35.000Z
main/texinfo/update.py
RoastVeg/cports
803c7f07af341eb32f791b6ec1f237edb2764bd5
[ "BSD-2-Clause" ]
6
2021-07-04T10:46:40.000Z
2022-01-09T00:03:59.000Z
ignore = ["37"]
8
15
0.5
2
16
4
1
0
0
0
0
0
0
0
0
0
0
0.153846
0.1875
16
1
16
16
0.461538
0
0
0
0
0
0.125
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
3f987b289d3eca79e3e805989fde246879cf69eb
122
py
Python
apk_parse/__init__.py
ph4r05/apk_parse
41918c2ef425f949d42853ee7c7bc4d67f9abcb4
[ "Apache-2.0" ]
9
2017-04-18T06:39:00.000Z
2021-03-02T13:49:37.000Z
apk_parse/__init__.py
ph4r05/apk_parse
41918c2ef425f949d42853ee7c7bc4d67f9abcb4
[ "Apache-2.0" ]
null
null
null
apk_parse/__init__.py
ph4r05/apk_parse
41918c2ef425f949d42853ee7c7bc4d67f9abcb4
[ "Apache-2.0" ]
3
2017-03-29T03:28:18.000Z
2018-12-04T17:40:05.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- """ File: __init__.py.py Author: limingdong Date: 12/31/14 Description: """
13.555556
23
0.639344
18
122
4.111111
0.944444
0
0
0
0
0
0
0
0
0
0
0.066038
0.131148
122
9
24
13.555556
0.632075
0.901639
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
3fa23f1910cfe805853465d766aea342510a2b29
1,065
py
Python
api/client/test/test_inference_service_api.py
Zachary-Fernandes/mlx
d5117c5585b969ca0de5f321d14b5a27cd468280
[ "Apache-2.0" ]
null
null
null
api/client/test/test_inference_service_api.py
Zachary-Fernandes/mlx
d5117c5585b969ca0de5f321d14b5a27cd468280
[ "Apache-2.0" ]
null
null
null
api/client/test/test_inference_service_api.py
Zachary-Fernandes/mlx
d5117c5585b969ca0de5f321d14b5a27cd468280
[ "Apache-2.0" ]
null
null
null
# Copyright 2021 The MLX Contributors # # SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ MLX API MLX API Extension for Kubeflow Pipelines # noqa: E501 OpenAPI spec version: 0.1.29-filter-categories Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import unittest import swagger_client from swagger_client.api.inference_service_api import InferenceServiceApi # noqa: E501 from swagger_client.rest import ApiException class TestInferenceServiceApi(unittest.TestCase): """InferenceServiceApi unit test stubs""" def setUp(self): self.api = swagger_client.api.inference_service_api.InferenceServiceApi() # noqa: E501 def tearDown(self): pass def test_get_service(self): """Test case for get_service """ pass def test_list_services(self): """Test case for list_services Gets all KFServing services # noqa: E501 """ pass if __name__ == '__main__': unittest.main()
20.882353
95
0.69108
128
1,065
5.539063
0.53125
0.045134
0.047955
0.070522
0.098731
0.098731
0
0
0
0
0
0.027778
0.222535
1,065
50
96
21.3
0.828502
0.404695
0
0.1875
0
0
0.01406
0
0
0
0
0
0
1
0.25
false
0.1875
0.3125
0
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
1
0
1
0
0
4
3fb2183b3961aab5a7edb30b19c594e9f6e13086
888
py
Python
manga_py/providers/sleepypandascans_co.py
paulolimac/manga-py
3d180846750a4e770b5024eb8cd15629362875b1
[ "MIT" ]
null
null
null
manga_py/providers/sleepypandascans_co.py
paulolimac/manga-py
3d180846750a4e770b5024eb8cd15629362875b1
[ "MIT" ]
null
null
null
manga_py/providers/sleepypandascans_co.py
paulolimac/manga-py
3d180846750a4e770b5024eb8cd15629362875b1
[ "MIT" ]
null
null
null
from manga_py.provider import Provider from .helpers.std import Std class ManhwaCo(Provider, Std): def get_chapter_index(self) -> str: chapter = self.chapter return self.re.search(r'\.co/Reader/[^/]+/([^/]+)', chapter).group(1) def get_main_content(self): return self._get_content('{}/Series/{}') def get_manga_name(self) -> str: return self._get_name(r'\.co/(?:Series|Reader)/([^/]+)') def get_chapters(self): return self._elements('.list-group .list-group-item') def get_files(self): content = self.http_get(self.chapter) parser = self.document_fromstring(content) return self._images_helper(parser, 'img.img-fluid') def get_cover(self) -> str: return self._cover_from_content('img.card-img-top') def book_meta(self) -> dict: # todo meta pass main = ManhwaCo
26.117647
77
0.634009
117
888
4.615385
0.410256
0.066667
0.051852
0.062963
0
0
0
0
0
0
0
0.001435
0.21509
888
33
78
26.909091
0.773314
0.010135
0
0
0
0
0.141391
0.062714
0
0
0
0.030303
0
1
0.333333
false
0.047619
0.095238
0.190476
0.761905
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
1
1
0
0
4
3fb2853493c14253246295049ae19264a578349c
110
py
Python
executework.py
AlekseiShkurin/EDWARD_IBM_hackathon
02db278d40f2d757ae1b1a014d16d32ad98efd71
[ "MIT" ]
null
null
null
executework.py
AlekseiShkurin/EDWARD_IBM_hackathon
02db278d40f2d757ae1b1a014d16d32ad98efd71
[ "MIT" ]
null
null
null
executework.py
AlekseiShkurin/EDWARD_IBM_hackathon
02db278d40f2d757ae1b1a014d16d32ad98efd71
[ "MIT" ]
1
2019-06-09T17:17:42.000Z
2019-06-09T17:17:42.000Z
from testdesign import simpleapp_tk app = simpleapp_tk() app.title('EDWARD, The Calculator') app.mainloop()
18.333333
36
0.772727
15
110
5.533333
0.733333
0.26506
0.337349
0
0
0
0
0
0
0
0
0
0.118182
110
5
37
22
0.85567
0
0
0
0
0
0.2
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
3fd51de8f59c78dc8d373de14e0751e6da396ffa
235
py
Python
simplecorrector/lm/DLM.py
chenmingxiang110/SimpleChinese2
91f90672f25daadbfccd2ab22f026a65889705af
[ "MIT" ]
78
2021-06-21T02:28:14.000Z
2022-03-18T13:35:16.000Z
simplecorrector/lm/DLM.py
chenmingxiang110/SimpleChinese2
91f90672f25daadbfccd2ab22f026a65889705af
[ "MIT" ]
3
2021-06-30T11:03:58.000Z
2021-09-09T10:39:27.000Z
simplecorrector/lm/DLM.py
chenmingxiang110/SimpleChinese2
91f90672f25daadbfccd2ab22f026a65889705af
[ "MIT" ]
24
2021-06-21T02:30:49.000Z
2021-08-23T09:49:03.000Z
#!usr/bin/env python #-*- coding:utf-8 -*- class Model(object): """ DNN LM """ def __init__(self, model_path): pass def score(self, sentence): pass def PPL(self, sentence): pass
13.055556
35
0.514894
28
235
4.142857
0.714286
0.12069
0.275862
0
0
0
0
0
0
0
0
0.006452
0.340426
235
17
36
13.823529
0.741935
0.195745
0
0.428571
0
0
0
0
0
0
0
0
0
1
0.428571
false
0.428571
0
0
0.571429
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
3fd83b1ba3e9b4f65ec39f494b5caadd27e40f3c
93
py
Python
src/ToolChainSCDG/procedures/windows/custom_package/FlsSetValue.py
AnonymousSEMA/SEMA-ToolChain
05d6a7e43e10d4b1f6c5dfb70fbabeab3d4daf82
[ "BSD-2-Clause" ]
null
null
null
src/ToolChainSCDG/procedures/windows/custom_package/FlsSetValue.py
AnonymousSEMA/SEMA-ToolChain
05d6a7e43e10d4b1f6c5dfb70fbabeab3d4daf82
[ "BSD-2-Clause" ]
null
null
null
src/ToolChainSCDG/procedures/windows/custom_package/FlsSetValue.py
AnonymousSEMA/SEMA-ToolChain
05d6a7e43e10d4b1f6c5dfb70fbabeab3d4daf82
[ "BSD-2-Clause" ]
null
null
null
from .TlsSetValue import TlsSetValue class FlsSetValue(TlsSetValue): KEY = "win32_fls"
15.5
36
0.763441
10
93
7
0.8
0
0
0
0
0
0
0
0
0
0
0.025641
0.16129
93
5
37
18.6
0.871795
0
0
0
0
0
0.096774
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
b74914d81e3549db831f5c6b54cdc8e35c3406de
80
py
Python
server/errors/__init__.py
Arun89-crypto/codechefsrm
bd793a40bf034f88deee3c98f342b86b3010d554
[ "MIT" ]
null
null
null
server/errors/__init__.py
Arun89-crypto/codechefsrm
bd793a40bf034f88deee3c98f342b86b3010d554
[ "MIT" ]
1
2021-11-20T20:56:47.000Z
2021-11-20T21:00:10.000Z
server/errors/__init__.py
Arun89-crypto/codechefsrm
bd793a40bf034f88deee3c98f342b86b3010d554
[ "MIT" ]
3
2021-11-20T16:48:40.000Z
2021-12-05T13:44:17.000Z
from .auth_errors import AuthenticationError from .data_error import DataErrors
26.666667
44
0.875
10
80
6.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.1
80
2
45
40
0.944444
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
b78eb8078a238aa7bf347ce4f4b07a24eae4d460
28
py
Python
homeassistant/components/trackr/__init__.py
domwillcode/home-assistant
f170c80bea70c939c098b5c88320a1c789858958
[ "Apache-2.0" ]
23
2017-11-15T21:03:53.000Z
2021-03-29T21:33:48.000Z
homeassistant/components/trackr/__init__.py
jagadeeshvenkatesh/core
1bd982668449815fee2105478569f8e4b5670add
[ "Apache-2.0" ]
79
2020-07-23T07:13:37.000Z
2022-03-22T06:02:37.000Z
homeassistant/components/trackr/__init__.py
jagadeeshvenkatesh/core
1bd982668449815fee2105478569f8e4b5670add
[ "Apache-2.0" ]
14
2018-08-19T16:28:26.000Z
2021-09-02T18:26:53.000Z
"""The trackr component."""
14
27
0.642857
3
28
6
1
0
0
0
0
0
0
0
0
0
0
0
0.107143
28
1
28
28
0.72
0.75
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
4d09034e4f367befcaec1e1e77f1263719cddcc4
37,332
py
Python
arl/graphs/graphs.py
Song655/sdp-algorithm-reference
fc7c0da9461d5a1606ebb30ed913a44cdcd9b112
[ "Apache-2.0" ]
null
null
null
arl/graphs/graphs.py
Song655/sdp-algorithm-reference
fc7c0da9461d5a1606ebb30ed913a44cdcd9b112
[ "Apache-2.0" ]
null
null
null
arl/graphs/graphs.py
Song655/sdp-algorithm-reference
fc7c0da9461d5a1606ebb30ed913a44cdcd9b112
[ "Apache-2.0" ]
null
null
null
""" Common functions converted to Dask.delayed graphs. `Dask <http://dask.pydata.org/>`_ is a python-based flexible parallel computing library for analytic computing. Dask.delayed can be used to wrap functions for deferred execution thus allowing construction of graphs. For example, to build a graph for a major/minor cycle algorithm:: model_graph = delayed(create_image_from_visibility)(vt, npixel=512, cellsize=0.001, npol=1) solution_graph = create_solve_image_graph(vt, model_graph=model_graph, psf_graph=psf_graph, invert_residual=invert_timeslice, predict_residual=predict_timeslice, iterator=vis_timeslice_iter, algorithm='hogbom', niter=1000, fractional_threshold=0.1, threshold=1.0, nmajor=3, gain=0.1) solution_graph.visualize() The visualize step produces the following graph: .. image:: ./deconvolution_dask.png :align: center :width: 1024px The graph is executed as follows:: solution_graph.compute() As well as the specific graphs constructed by functions in this module, there are generic versions in the module :mod:`arl.pipelines.generic_dask_graphs`. Note that all parameters here should be passed using the kwargs mechanism. The exceptions are those needed to define the size of a graph. Since delayed graphs are not Iterable by default, it is necessary to use the nout= parameter to delayed to specify the graph size. Construction of the graphs requires that the number of nodes (e.g. w slices or time-slices) be known at construction, rather than execution. To counteract this, at run time, a given node should be able to act as a no-op. This is a workaround only. 
""" import numpy from dask import delayed from dask.distributed import wait from arl.calibration.operations import apply_gaintable from arl.calibration.solvers import solve_gaintable from arl.data.data_models import Image from arl.image.deconvolution import deconvolve_cube from arl.image.gather_scatter import image_scatter_facets, image_gather_facets, image_scatter_channels, \ image_gather_channels from arl.image.operations import copy_image, create_empty_image_like from arl.imaging import predict_2d, invert_2d, invert_wstack_single, predict_wstack_single, \ predict_timeslice_single, invert_timeslice_single, normalize_sumwt from arl.imaging.weighting import weight_visibility from arl.visibility.base import copy_visibility from arl.visibility.gather_scatter import visibility_scatter_w, visibility_gather_w, \ visibility_gather_channel, visibility_gather_time, visibility_scatter_time from arl.visibility.operations import divide_visibility, integrate_visibility_by_channel def compute_list(client, graph_list, nodes=None, **kwargs): """ Compute all elements in list :param graph_list: :param nodes: List of nodes. :return: list """ if nodes is not None: print("Computing graph_list on the following nodes: %s" % nodes) futures = client.compute(graph_list, sync=True, workers=['127.0.0.1'], **kwargs) wait(futures) return futures else: return client.compute(graph_list, sync=True, **kwargs) def create_zero_vis_graph_list(vis_graph_list, **kwargs): """ Initialise vis to zero: creates new data holders :param vis_graph_list: :param kwargs: Parameters for functions in graphs :return: List of vis_graphs """ def zerovis(vis): if vis is not None: zerovis = copy_visibility(vis) zerovis.data['vis'][...] 
= 0.0 return zerovis else: return None return [delayed(zerovis, pure=True, nout=1)(v) for v in vis_graph_list] def create_subtract_vis_graph_list(vis_graph_list, model_vis_graph_list, **kwargs): """ Initialise vis to zero :param vis_graph_list: :param model_vis_graph_list: Model to be subtracted :param kwargs: Parameters for functions in graphs :return: List of vis_graphs """ def subtract_vis(vis, model_vis): if vis is not None and model_vis is not None: assert vis.vis.shape == model_vis.vis.shape subvis = copy_visibility(vis) subvis.data['vis'][...] -= model_vis.data['vis'][...] return subvis else: return None return [delayed(subtract_vis, pure=True, nout=1)(vis=vis_graph_list[i], model_vis=model_vis_graph_list[i]) for i in range(len(vis_graph_list))] def create_weight_vis_graph_list(vis_graph_list, model_graph, weighting='uniform', **kwargs): """ Weight the visibility data :param vis_graph_list: :param model_graph: Model required to determine weighting parameters :param weighting: Type of weighting :param kwargs: Parameters for functions in graphs :return: List of vis_graphs """ def weight_vis(vis, model, weighting): if vis is not None and model is not None: vis, _, _ = weight_visibility(vis, model, weighting=weighting, **kwargs) return vis else: return None return [delayed(weight_vis, pure=True, nout=1)(vis_graph_list[i], model_graph, weighting) for i in range(len(vis_graph_list))] def create_invert_graph(vis_graph_list, template_model_graph: delayed, dopsf=False, invert=invert_2d, normalize=True, **kwargs) -> delayed: """ Sum results from invert iterating over the vis_graph_list :param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param invert: Invert for a single Visibility set :param kwargs: Parameters for functions in graphs :return: delayed for invert """ def sum_invert_results(image_list): first = True for i, arg in enumerate(image_list): if arg is not None: if first: im = copy_image(arg[0]) im.data *= arg[1] sumwt 
= arg[1] first = False else: im.data += arg[1] * arg[0].data sumwt += arg[1] im = normalize_sumwt(im, sumwt) return im, sumwt def invert_ignore_None(vis, *args, **kwargs): if vis is not None: return invert(vis, *args, **kwargs) else: return None image_graph_list = list() for vis_graph in vis_graph_list: image_graph_list.append(delayed(invert_ignore_None, pure=True, nout=2)(vis_graph, template_model_graph, dopsf=dopsf, normalize=normalize, **kwargs)) return delayed(sum_invert_results)(image_graph_list) def create_invert_vis_scatter_graph(vis_graph_list, template_model_graph: delayed, vis_slices, scatter, invert, dopsf=False, normalize=True, **kwargs) -> delayed: """ Sum invert results for a scattered vis_graph_list Base for create_invert_wstack_graph and create_invert_timeslice_graph :param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param vis_slices: Number of visibility slices in w stacking :param invert: Function used for invert :param dopsf: Make psf (False) :param kwargs: Parameters for functions in graphs :return: delayed for invert """ def sum_invert_results(image_list): first = True for i, arg in enumerate(image_list): if arg is not None: if first: im = copy_image(arg[0]) im.data *= arg[1] sumwt = arg[1] first = False else: im.data += arg[1] * arg[0].data sumwt += arg[1] assert not first, "No invert results" if numpy.sum(sumwt) > 0.0: im = normalize_sumwt(im, sumwt) return im, sumwt def invert_ignore_None(vis, model, *args, **kwargs): if vis is not None: return invert(vis, model, *args, **kwargs) else: return create_empty_image_like(model), 0.0 # Graph to combine the images from different vis_graphs. 
Do this on the outer loop to cut down on # traffic image_graph_list = list() for vis_graph in vis_graph_list: if vis_graph is not None: scatter_graph_list = list() scatter_vis_graph_list = delayed(scatter, nout=vis_slices)(vis_graph, vis_slices=vis_slices, **kwargs) for scatter_vis_graph in scatter_vis_graph_list: scatter_graph_list.append(delayed(invert_ignore_None, pure=True, nout=2)(scatter_vis_graph, template_model_graph, dopsf=dopsf, normalize=normalize, **kwargs)) image_graph_list.append(delayed(sum_invert_results)(scatter_graph_list)) return delayed(sum_invert_results)(image_graph_list) def create_invert_wstack_graph(vis_graph_list, template_model_graph: delayed, vis_slices, dopsf=False, normalize=True, **kwargs) -> delayed: """ Sum invert results using wstacking, iterating over the vis_graph_list and w :param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param vis_slices: Number of visibility slices in w stacking :param dopsf: Make psf (False) :param kwargs: Parameters for functions in graphs :return: delayed for invert """ return create_invert_vis_scatter_graph(vis_graph_list, template_model_graph, scatter=visibility_scatter_w, vis_slices=vis_slices, dopsf=dopsf, normalize=normalize, invert=invert_wstack_single, **kwargs) def create_invert_timeslice_graph(vis_graph_list, template_model_graph: delayed, vis_slices, dopsf=False, normalize=True, **kwargs) -> delayed: """ Sum invert results using timeslice, iterating over the vis_graph_list and time wprojection is available with kernel='wprojection', wstep=some_number. This corresponds to the default SKA approach wsnapshots. 
:param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param vis_slices: Number of visibility slices in w stacking :param dopsf: Make psf (False) :param kwargs: Parameters for functions in graphs :return: delayed for invert """ return create_invert_vis_scatter_graph(vis_graph_list, template_model_graph, scatter=visibility_scatter_time, vis_slices=vis_slices, dopsf=dopsf, normalize=normalize, invert=invert_timeslice_single, **kwargs) def create_invert_facet_graph(vis_graph_list, template_model_graph: delayed, dopsf=False, normalize=True, facets=1, **kwargs) -> delayed: """ Sum results from invert, iterating over the vis_graph_list, allows faceting :param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param vis_slices: Number of visibility slices in w stacking :param kwargs: Parameters for functions in graphs :return: delayed for invert """ def gather_invert_results(results, template_model, facets, **kwargs): # Results contains the images for each facet, after adding across vis_graphs image_results = create_empty_image_like(template_model) image_results = image_gather_facets([result[0] for result in results], image_results, facets=facets) # For the gather, assume all are the same weight sumwt = results[0][1] return image_results, sumwt # Scatter the model in facets model_graphs = delayed(image_scatter_facets, nout=facets ** 2, pure=True)(template_model_graph, facets=facets) # For each facet, invert over the vis_graph results = [create_invert_graph(vis_graph_list, model_graph, dopsf=dopsf, normalize=normalize, **kwargs) for model_graph in model_graphs] # Now we have a list containing the facet images added over vis_graph. 
We can now # gather those images into one image return delayed(gather_invert_results, nout=2, pure=True)(results, template_model_graph, facets=facets, **kwargs) def create_invert_facet_vis_scatter_graph(vis_graph_list, template_model_graph: delayed, c_invert_vis_scatter_graph=create_invert_vis_scatter_graph, dopsf=False, normalize=True, facets=1, **kwargs) -> delayed: """ Sum results from invert, iterating over the scattered image and vis_graph_list :param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param c_invert_vis_scatter_graph: Function to create invert graphs :param dopsf: Make the PSF instead of the dirty image :param facets: Number of facets :param kwargs: Parameters for functions in graphs :return: delayed for invert """ def gather_invert_results(results, template_model, facets, **kwargs): # Results contains the images for each facet, after adding across vis_graphs image_results = create_empty_image_like(template_model) image_results = image_gather_facets([result[0] for result in results], image_results, facets=facets) # For the gather, assume all are the same weight sumwt = results[0][1] return image_results, sumwt # Scatter the model in facets model_graphs = delayed(image_scatter_facets, nout=facets ** 2, pure=True)(template_model_graph, facets=facets) # For each facet, invert over the vis_graph results = [c_invert_vis_scatter_graph(vis_graph_list, model_graph, dopsf=dopsf, normalize=normalize, **kwargs) for model_graph in model_graphs] # Now we have a list containing the facet images added over vis_graph. 
We can now # gather those images into one image return delayed(gather_invert_results, nout=2, pure=True)(results, template_model_graph, facets=facets, **kwargs) def create_invert_facet_wstack_graph(vis_graph_list, template_model_graph: delayed, dopsf=False, normalize=True, facets=1, **kwargs) -> delayed: """ Sum results from invert, iterating over the vis_graph_list, allows faceting :param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param facets: Number of facets per x, y axis) :param kwargs: Parameters for functions in graphs :return: delayed for invert """ return create_invert_facet_vis_scatter_graph(vis_graph_list, template_model_graph, dopsf=dopsf, c_invert_vis_scatter_graph=create_invert_wstack_graph, normalize=normalize, facets=facets, **kwargs) def create_invert_facet_timeslice_graph(vis_graph_list, template_model_graph: delayed, dopsf=False, normalize=True, facets=1, **kwargs) -> delayed: """ Sum results from invert, iterating over the vis_graph_list, allows faceting :param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param facets: Number of facets per x, y axis) :param kwargs: Parameters for functions in graphs :return: delayed for invert """ return create_invert_facet_vis_scatter_graph(vis_graph_list, template_model_graph, dopsf=dopsf, c_invert_vis_scatter_graph=create_invert_timeslice_graph, normalize=normalize, facets=facets, **kwargs) def create_predict_graph(vis_graph_list, model_graph: delayed, predict=predict_2d, **kwargs): """Predict from model_graph, iterating over the vis_graph_list :param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param facets: Number of facets per x, y axis) :param predict: Predict function to be used (predict_2d) :param kwargs: Parameters for functions in graphs Parameters for functions in graphs :return: List of vis_graphs """ def predict_and_sum(vis, model, **kwargs): if vis is not None: predicted 
= copy_visibility(vis) predicted = predict(predicted, model, **kwargs) return predicted else: return None return [delayed(predict_and_sum, pure=True, nout=1)(v, model_graph, **kwargs) for v in vis_graph_list] def create_predict_facet_graph(vis_graph_list, model_graph: delayed, predict=predict_2d, facets=2, **kwargs): """ Predict visibility from a model using facets :param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param facets: Number of facets per x, y axis) :param predict: Predict function to be used (predict_2d) :param kwargs: Parameters for functions in graphs :return: List of vis_graphs """ def predict_facets_and_accumulate(vis, model, **kwargs): if vis is not None: predicted = copy_visibility(vis) predicted = predict(predicted, model, **kwargs) vis.data['vis'] += predicted.data['vis'] return vis else: return None # Note that we need to know the number of facets in order to define the size of facet_model_graphs facet_model_graphs = delayed(image_scatter_facets, nout=facets ** 2, pure=True)(model_graph, facets=facets) accumulate_vis_graphs = list() for vis_graph in vis_graph_list: for ifacet, facet_model_graph in enumerate(facet_model_graphs): # There is a dependency issue here so we chain the predicts accumulate_vis_graph = None if ifacet == 0: accumulate_vis_graph = delayed(predict_facets_and_accumulate, pure=True, nout=1)(vis_graph, facet_model_graph, **kwargs) else: accumulate_vis_graph = delayed(predict_facets_and_accumulate, pure=True, nout=1)( accumulate_vis_graph, facet_model_graph, **kwargs) accumulate_vis_graphs.append(accumulate_vis_graph) return accumulate_vis_graphs def create_predict_vis_scatter_graph(vis_graph_list, model_graph: delayed, vis_slices, predict, scatter, gather, **kwargs): """Predict, iterating over the scattered vis_graph_list :param vis_graph_list: :param template_model_graph: Model used to determine image parameters :param vis_slices: Number of vis slices (w stack or timeslice) :param 
predict: Predict function :param scatter: Scatter function e.g. visibility_scatter_w :param gather: Gatherer function e.g. visibility_gather_w :param kwargs: Parameters for functions in graphs :return: List of vis_graphs """ def predict_and_accumulate(vis, model, **kwargs): if vis is not None: predicted = copy_visibility(vis) predicted = predict(predicted, model, **kwargs) return predicted else: return None predicted_vis_list = list() for vis_graph in vis_graph_list: scatter_vis_graphs = delayed(scatter, nout=vis_slices)(vis_graph, vis_slices=vis_slices, **kwargs) predict_list = list() for scatter_vis_graph in scatter_vis_graphs: predict_list.append(delayed(predict_and_accumulate, pure=True, nout=1)(scatter_vis_graph, model_graph, **kwargs)) predicted_vis_list.append(delayed(gather, nout=1)(predict_list, vis_graph, vis_slices=vis_slices, **kwargs)) return predicted_vis_list def create_predict_wstack_graph(vis_graph_list, model_graph: delayed, vis_slices, **kwargs): """Predict using wstacking, iterating over the vis_graph_list and w :param vis_graph_list: :param model_graph: Model used to determine image parameters :param vis_slices: Number of vis slices (w stack or timeslice) :param kwargs: Parameters for functions in graphs :return: List of vis_graphs """ return create_predict_vis_scatter_graph(vis_graph_list, model_graph, vis_slices, scatter=visibility_scatter_w, gather=visibility_gather_w, predict=predict_wstack_single, **kwargs) def create_predict_timeslice_graph(vis_graph_list, model_graph: delayed, vis_slices, **kwargs): """Predict using timeslicing, iterating over the vis_graph_list and time wprojection is available with kernel='wprojection', wstep=some_number. This corresponds to the default SKA approach wsnapshots. 
:param vis_graph_list: :param model_graph: Model used to determine image parameters :param vis_slices: Number of vis slices (w stack or timeslice) :param kwargs: Parameters for functions in graphs :return: List of vis_graphs """ return create_predict_vis_scatter_graph(vis_graph_list, model_graph, vis_slices, scatter=visibility_scatter_time, gather=visibility_gather_time, predict=predict_timeslice_single, **kwargs) def create_predict_facet_vis_scatter_graph(vis_graph_list, model_graph: delayed, vis_slices, facets, predict, vis_scatter, vis_gather, **kwargs): """Predict, iterating over the scattered vis_graph_list and image :param vis_graph_list: :param model_graph: Model used to determine image parameters :param vis_slices: Number of vis slices (w stack or timeslice) :param predict: Predict function :param vis_scatter: Scatter function e.g. visibility_scatter_w :param vis_gather: Gatherer function e.g. visibility_gather_w :param kwargs: Parameters for functions in graphs :return: List of vis_graphs """ def predict_facets_and_accumulate(vis, model, **kwargs): if vis is not None: predicted = copy_visibility(vis) predicted = predict(predicted, model, **kwargs) return predicted else: return None # Note that we need to know the number of facets in order to define the size of facet_model_graphs facet_model_graphs = delayed(image_scatter_facets, nout=facets ** 2, pure=True)(model_graph, facets=facets) predicted_vis_list = list() for vis_graph in vis_graph_list: scatter_vis_graphs = delayed(vis_scatter, nout=vis_slices)(vis_graph, vis_slices=vis_slices, **kwargs) accumulate_vis_graphs = list() for scatter_vis_graph in scatter_vis_graphs: for ifacet, facet_model_graph in enumerate(facet_model_graphs): # if ifacet == 0: # accumulate_vis_graph = delayed(predict_facets_and_accumulate, # pure=True, nout=1)(scatter_vis_graph, facet_model_graphs[0], # **kwargs) # else: # accumulate_vis_graph = delayed(predict_facets_and_accumulate, # pure=True, nout=1)(accumulate_vis_graph, 
facet_model_graph, # **kwargs) accumulate_vis_graph = delayed(predict_facets_and_accumulate, pure=True, nout=1)(scatter_vis_graph, facet_model_graphs[ifacet], **kwargs) accumulate_vis_graphs.append(accumulate_vis_graph) predicted_vis_list.append(delayed(vis_gather, nout=1)(accumulate_vis_graphs, vis_graph, vis_slices=vis_slices, **kwargs)) return predicted_vis_list def create_predict_facet_wstack_graph(vis_graph_list, model_graph: delayed, vis_slices, facets, **kwargs): """Predict using wstacking, iterating over the vis_graph_list and w :param vis_graph_list: :param model_graph: Model used to determine image parameters :param vis_slices: Number of vis slices (w stack or timeslice) :param facets: Number of facets (in both x and y axes) :param kwargs: Parameters for functions in graphs :return: List of vis_graphs """ return create_predict_facet_vis_scatter_graph(vis_graph_list, model_graph, vis_slices=vis_slices, facets=facets, predict=predict_wstack_single, vis_scatter=visibility_scatter_w, vis_gather=visibility_gather_w, **kwargs) def create_predict_facet_timeslice_graph(vis_graph_list, model_graph: delayed, vis_slices, facets, **kwargs): """Predict using wstacking, iterating over the vis_graph_list and w :param vis_graph_list: :param model_graph: Model used to determine image parameters :param vis_slices: Number of vis slices in timeslice :param facets: Number of facets (in both x and y axes) :param kwargs: Parameters for functions in graphs :return: List of vis_graphs """ return create_predict_facet_vis_scatter_graph(vis_graph_list, model_graph, vis_slices=vis_slices, facets=facets, predict=predict_timeslice_single, vis_scatter=visibility_scatter_time, vis_gather=visibility_gather_time, **kwargs) def create_residual_graph(vis_graph_list, model_graph: delayed, **kwargs) -> delayed: """ Create a graph to calculate residual image using facets :param vis_graph_list: :param model_graph: Model used to determine image parameters :param kwargs: Parameters for functions 
in graphs :return: """ model_vis_graph_list = create_zero_vis_graph_list(vis_graph_list) model_vis_graph_list = create_predict_graph(model_vis_graph_list, model_graph, **kwargs) residual_vis_graph_list = create_subtract_vis_graph_list(vis_graph_list, model_vis_graph_list) return create_invert_graph(residual_vis_graph_list, model_graph, dopsf=False, normalize=True, **kwargs) def create_residual_facet_graph(vis_graph_list, model_graph: delayed, **kwargs) -> delayed: """ Create a graph to calculate residual image using facets :param vis_graph_list: :param model_graph: Model used to determine image parameters :param facets: Number of facets (in both x and y axes) :param kwargs: Parameters for functions in graphs :return: """ model_vis_graph_list = create_zero_vis_graph_list(vis_graph_list) model_vis_graph_list = create_predict_facet_graph(model_vis_graph_list, model_graph, **kwargs) residual_vis_graph_list = create_subtract_vis_graph_list(vis_graph_list, model_vis_graph_list) return create_invert_facet_graph(residual_vis_graph_list, model_graph, dopsf=False, normalize=True, **kwargs) def create_residual_wstack_graph(vis_graph_list, model_graph: delayed, **kwargs) -> delayed: """ Create a graph to calculate residual image using w stacking :param vis_graph_list: :param model_graph: Model used to determine image parameters :param vis_slices: Number of vis slices (w stack or timeslice) :param kwargs: Parameters for functions in graphs :return: """ model_vis_graph_list = create_zero_vis_graph_list(vis_graph_list) model_vis_graph_list = create_predict_wstack_graph(model_vis_graph_list, model_graph, **kwargs) residual_vis_graph_list = create_subtract_vis_graph_list(vis_graph_list, model_vis_graph_list) return create_invert_wstack_graph(residual_vis_graph_list, model_graph, dopsf=False, normalize=True, **kwargs) def create_residual_timeslice_graph(vis_graph_list, model_graph: delayed, **kwargs) -> delayed: """ Create a graph to calculate residual image using timeslicing :param 
vis_graph_list: :param model_graph: Model used to determine image parameters :param kwargs: Parameters for functions in graphs :return: """ model_vis_graph_list = create_zero_vis_graph_list(vis_graph_list) model_vis_graph_list = create_predict_timeslice_graph(model_vis_graph_list, model_graph, **kwargs) residual_vis_graph_list = create_subtract_vis_graph_list(vis_graph_list, model_vis_graph_list) return create_invert_timeslice_graph(residual_vis_graph_list, model_graph, dopsf=False, normalize=True, **kwargs) def create_residual_facet_wstack_graph(vis_graph_list, model_graph: delayed, **kwargs) -> delayed: """ Create a graph to calculate residual image using w stacking and faceting :param vis_graph_list: :param model_graph: Model used to determine image parameters :param vis_graph_list: :param model_graph: Model used to determine image parameters :param vis_slices: Number of vis slices (w stack or timeslice) :param facets: Number of facets (in both x and y axes) :param kwargs: Parameters for functions in graphs :return: """ model_vis_graph_list = create_zero_vis_graph_list(vis_graph_list) model_vis_graph_list = create_predict_facet_wstack_graph(model_vis_graph_list, model_graph, **kwargs) residual_vis_graph_list = create_subtract_vis_graph_list(vis_graph_list, model_vis_graph_list) return create_invert_facet_wstack_graph(residual_vis_graph_list, model_graph, dopsf=False, normalize=True, **kwargs) def create_deconvolve_graph(dirty_graph: delayed, psf_graph: delayed, model_graph: delayed, **kwargs) -> delayed: """Create a graph for deconvolution, adding to the model :param dirty_graph: :param psf_graph: :param model_graph: :param kwargs: Parameters for functions in graphs :return: """ def deconvolve(dirty, psf, model, **kwargs): result = deconvolve_cube(dirty, psf, **kwargs) result[0].data += model.data return result[0] return delayed(deconvolve, pure=True, nout=2)(dirty_graph[0], psf_graph[0], model_graph, **kwargs) def create_deconvolve_scatter_graph(dirty_graph: 
delayed, psf_graph: delayed, model_graph: delayed, subimages=1, image_scatter=image_scatter_facets, image_gather=image_gather_facets, **kwargs) -> delayed: """Create a graph for deconvolution by subimages, adding to the model Does deconvolution subimage by subimage. Currently does nothing very sensible about the edges. :param dirty_graph: :param psf_graph: :param model_graph: Current model :param subimages: Number of subimages :param kwargs: Parameters for functions in graphs :return: """ def deconvolve_subimage(dirty, psf, **kwargs): assert type(dirty) == Image assert type(psf) == Image result = deconvolve_cube(dirty, psf, **kwargs) return result[0] def add_model(output, model): assert type(output) == Image assert type(model) == Image output.data += model.data return output output = delayed(create_empty_image_like, nout=1, pure=True)(model_graph) dirty_graphs = delayed(image_scatter, nout=subimages, pure=True)(dirty_graph[0], subimages=subimages) results = [delayed(deconvolve_subimage)(dirty_graph, psf_graph[0], **kwargs) for dirty_graph in dirty_graphs] result = delayed(image_gather, nout=1, pure=True)(results, output, subimages=subimages) return delayed(add_model, nout=1, pure=True)(result, model_graph) def create_deconvolve_facet_graph(dirty_graph: delayed, psf_graph: delayed, model_graph: delayed, facets=1, **kwargs) -> delayed: """Create a graph for deconvolution by facets, adding to the model Does deconvolution facet-by-facet. Currently does nothing very sensible about the edges. 
:param dirty_graph: :param psf_graph: Must be the size of a facet :param model_graph: Current model :param facets: Number of facets on each axis :param kwargs: Parameters for functions in graphs :return: """ return create_deconvolve_scatter_graph(dirty_graph, psf_graph, model_graph, subimages=facets, facets=facets, image_scatter=image_scatter_facets, image_gather=image_gather_facets, **kwargs) def create_deconvolve_channel_graph(dirty_graph: delayed, psf_graph: delayed, model_graph: delayed, subimages, **kwargs) -> delayed: """Create a graph for deconvolution by channels, adding to the model Does deconvolution channel by channel. :param dirty_graph: :param psf_graph: Must be the size of a facet :param model_graph: Current model :param facets: Number of facets on each axis :param kwargs: Parameters for functions in graphs :return: """ return create_deconvolve_scatter_graph(dirty_graph, psf_graph, model_graph, subimages=subimages, image_scatter=image_scatter_channels, image_gather=image_gather_channels, **kwargs) def create_selfcal_graph_list(vis_graph_list, model_graph: delayed, c_predict_graph, vis_slices, global_solution=True, **kwargs): """ Create a set of graphs for (optionally global) selfcalibration of a list of visibilities If global solution is true then visibilities are gathered to a single visibility data set which is then self-calibrated. The resulting gaintable is then effectively scattered out for application to each visibility set. If global solution is false then the solutions are performed locally. 
:param vis_graph_list: :param model_graph: :param c_predict_graph: Function to create prediction graphs :param vis_slices: :param global_solution: Solve for global gains :param kwargs: Parameters for functions in graphs :return: """ model_vis_graph_list = create_zero_vis_graph_list(vis_graph_list) model_vis_graph_list = c_predict_graph(model_vis_graph_list, model_graph, vis_slices=vis_slices, **kwargs) if global_solution: point_vis_graph_list = [delayed(divide_visibility, nout=len(vis_graph_list))(vis_graph_list[i], model_vis_graph_list[i]) for i, _ in enumerate(vis_graph_list)] global_point_vis_graph = delayed(visibility_gather_channel, nout=1)(point_vis_graph_list) global_point_vis_graph = delayed(integrate_visibility_by_channel, nout=1)(global_point_vis_graph) gt_graph = delayed(solve_gaintable, pure=True, nout=1)(global_point_vis_graph, **kwargs) return [delayed(apply_gaintable, nout=len(vis_graph_list))(v, gt_graph, inverse=True, **kwargs) for v in vis_graph_list] else: gt_graph = delayed(solve_gaintable, pure=True, nout=1)(vis_graph_list, model_vis_graph_list, **kwargs) return [delayed(apply_gaintable, nout=len(vis_graph_list))(v, gt_graph, inverse=True, **kwargs) for v in vis_graph_list]
46.840652
117
0.653541
4,613
37,332
5.018209
0.071971
0.069117
0.083978
0.033781
0.766556
0.734503
0.713249
0.701153
0.670353
0.649531
0
0.003905
0.279733
37,332
796
118
46.899497
0.857005
0.344423
0
0.476879
0
0
0.004058
0
0
0
0
0
0.017341
1
0.132948
false
0
0.040462
0
0.33815
0.00289
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4d1e4c353e8f2e3cfa0ebc33614a3634810d6779
111
py
Python
kapture/converter/opensfm/__init__.py
v-mehta/kapture
b95a15b83032d667282ab96fa5be5327b2c99ec7
[ "BSD-3-Clause" ]
264
2020-07-21T14:48:33.000Z
2022-03-16T17:05:21.000Z
kapture/converter/opensfm/__init__.py
v-mehta/kapture
b95a15b83032d667282ab96fa5be5327b2c99ec7
[ "BSD-3-Clause" ]
30
2020-08-31T19:27:26.000Z
2022-03-11T08:50:23.000Z
kapture/converter/opensfm/__init__.py
v-mehta/kapture
b95a15b83032d667282ab96fa5be5327b2c99ec7
[ "BSD-3-Clause" ]
49
2020-07-30T06:11:22.000Z
2022-03-22T13:46:06.000Z
# Copyright 2020-present NAVER Corp. Under BSD 3-clause license """ OpenSfM to kapture import and export. """
18.5
63
0.738739
16
111
5.125
1
0
0
0
0
0
0
0
0
0
0
0.053763
0.162162
111
5
64
22.2
0.827957
0.900901
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
4d3fd6548ae2e84f2f5b9fc8ffb1c2cb9dd425bf
90
py
Python
python/989.add-to-array-form-of-integer.py
stavanmehta/leetcode
1224e43ce29430c840e65daae3b343182e24709c
[ "Apache-2.0" ]
null
null
null
python/989.add-to-array-form-of-integer.py
stavanmehta/leetcode
1224e43ce29430c840e65daae3b343182e24709c
[ "Apache-2.0" ]
null
null
null
python/989.add-to-array-form-of-integer.py
stavanmehta/leetcode
1224e43ce29430c840e65daae3b343182e24709c
[ "Apache-2.0" ]
null
null
null
class Solution: def addToArrayForm(self, A: List[int], K: int) -> List[int]:
22.5
64
0.588889
12
90
4.416667
0.75
0.264151
0
0
0
0
0
0
0
0
0
0
0.255556
90
3
65
30
0.791045
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
4d501fcca3e7ec8c68bb45fb03d847e6c65baeb9
88
py
Python
mayan/apps/file_caching/literals.py
nattangwiwat/Mayan-EDMS-recitation
fcf16afb56eae812fb99144d65ae1ae6749de0b7
[ "Apache-2.0" ]
343
2015-01-05T14:19:35.000Z
2018-12-10T19:07:48.000Z
mayan/apps/file_caching/literals.py
nattangwiwat/Mayan-EDMS-recitation
fcf16afb56eae812fb99144d65ae1ae6749de0b7
[ "Apache-2.0" ]
191
2015-01-03T00:48:19.000Z
2018-11-30T09:10:25.000Z
mayan/apps/file_caching/literals.py
nattangwiwat/Mayan-EDMS-recitation
fcf16afb56eae812fb99144d65ae1ae6749de0b7
[ "Apache-2.0" ]
257
2019-05-14T10:26:37.000Z
2022-03-30T03:37:36.000Z
DEFAULT_MAXIMUM_FAILED_PRUNE_ATTEMPTS = 100 DEFAULT_MAXIMUM_NORMAL_PRUNE_ATTEMPTS = 100
29.333333
43
0.909091
12
88
6
0.583333
0.388889
0.444444
0
0
0
0
0
0
0
0
0.073171
0.068182
88
2
44
44
0.804878
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4d50b410bc5a94c93a564e9ce0c9e5494dfb5d39
225
py
Python
rplugin/python3/denite/modules/models/controller_file.py
thedelchop/denite-rails
9afceb803a6c46a24b070b3cf1ff7dd1dbee534e
[ "MIT" ]
16
2017-03-12T08:41:24.000Z
2019-11-03T07:46:00.000Z
rplugin/python3/denite/modules/models/controller_file.py
sakuma/denite-rails
0029de49b10496ba647e28f66416faab55128081
[ "MIT" ]
3
2017-09-14T00:57:48.000Z
2018-03-02T03:34:23.000Z
rplugin/python3/denite/modules/models/controller_file.py
sakuma/denite-rails
0029de49b10496ba647e28f66416faab55128081
[ "MIT" ]
4
2017-06-29T08:11:32.000Z
2018-05-07T14:50:51.000Z
import re import os from file_base import FileBase class ControllerFile(FileBase): def remove_base_directory(self, filename, root_path): return re.sub(os.path.join(root_path, 'app/controllers/'), '', filename)
22.5
80
0.742222
31
225
5.225806
0.677419
0.098765
0
0
0
0
0
0
0
0
0
0
0.151111
225
9
81
25
0.848168
0
0
0
0
0
0.071111
0
0
0
0
0
0
1
0.166667
false
0
0.5
0.166667
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
4
4d8f1a88a0395f40c9713e5e0902ee11107cfa7d
113
py
Python
Module2/Python_Data_Analysis_code/Chapter 2/joblib/dautil/data/get_countries/func_code.py
vijaysharmapc/Python-End-to-end-Data-Analysis
a00f2d5d1547993e000b2551ec6a1360240885ba
[ "MIT" ]
119
2016-08-24T20:12:01.000Z
2022-03-23T03:59:30.000Z
Module2/Python_Data_Analysis_code/Chapter 2/joblib/dautil/data/get_countries/func_code.py
vijaysharmapc/Python-End-to-end-Data-Analysis
a00f2d5d1547993e000b2551ec6a1360240885ba
[ "MIT" ]
3
2016-10-18T03:49:11.000Z
2020-11-03T12:41:29.000Z
Module2/Python_Data_Analysis_code/Chapter 2/joblib/dautil/data/get_countries/func_code.py
vijaysharmapc/Python-End-to-end-Data-Analysis
a00f2d5d1547993e000b2551ec6a1360240885ba
[ "MIT" ]
110
2016-08-19T01:57:35.000Z
2022-02-18T17:02:17.000Z
# first line: 158 def get_countries(self, *args, **kwargs): return wb.get_countries(*args, **kwargs)
28.25
48
0.646018
15
113
4.733333
0.733333
0.338028
0
0
0
0
0
0
0
0
0
0.033333
0.20354
113
3
49
37.666667
0.755556
0.132743
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
4d9fafd9cf07cc339ad208b154e9abb521e78084
61
py
Python
model_history/__init__.py
shamanis/django-model-history
be6a825ba8aae669beeb4722da71f8699db8faa5
[ "MIT" ]
6
2015-11-17T16:22:39.000Z
2017-03-17T06:10:29.000Z
model_history/__init__.py
shamanis/django-model-history
be6a825ba8aae669beeb4722da71f8699db8faa5
[ "MIT" ]
null
null
null
model_history/__init__.py
shamanis/django-model-history
be6a825ba8aae669beeb4722da71f8699db8faa5
[ "MIT" ]
null
null
null
default_app_config = 'model_history.apps.ModelHistoryConfig'
30.5
60
0.868852
7
61
7.142857
1
0
0
0
0
0
0
0
0
0
0
0
0.04918
61
1
61
61
0.862069
0
0
0
0
0
0.606557
0.606557
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4dd2b29cf48da33e82ca5a9fc8d39442ebb9a13f
129
py
Python
Scripts/ict/demo/helloworld/urls.py
mspgeek/Client_Portal
0267168bb90e8e9c85aecdd715972b9622b82384
[ "MIT" ]
4
2020-04-08T01:13:48.000Z
2020-08-15T17:12:07.000Z
Scripts/ict/demo/helloworld/urls.py
mspgeek/Client_Portal
0267168bb90e8e9c85aecdd715972b9622b82384
[ "MIT" ]
1
2021-04-12T12:55:24.000Z
2021-04-12T12:55:24.000Z
Scripts/ict/demo/helloworld/urls.py
mspgeek/Client_Portal
0267168bb90e8e9c85aecdd715972b9622b82384
[ "MIT" ]
null
null
null
from viewflow.flow.viewset import FlowViewSet from .flows import HelloWorldFlow urlpatterns = FlowViewSet(HelloWorldFlow).urls
21.5
46
0.844961
14
129
7.785714
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.100775
129
5
47
25.8
0.939655
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
127cc279f01b4ea505bb5e993d53d244e1ca4823
1,420
py
Python
allennlp/modules/token_embedders/__init__.py
justindujardin/allennlp
c4559f3751775aa8bc018db417edc119d29d8051
[ "Apache-2.0" ]
2
2021-04-27T19:56:28.000Z
2021-08-19T05:34:37.000Z
allennlp/modules/token_embedders/__init__.py
justindujardin/allennlp
c4559f3751775aa8bc018db417edc119d29d8051
[ "Apache-2.0" ]
5
2021-05-03T14:40:33.000Z
2021-05-03T14:40:34.000Z
allennlp/modules/token_embedders/__init__.py
justindujardin/allennlp
c4559f3751775aa8bc018db417edc119d29d8051
[ "Apache-2.0" ]
null
null
null
""" A `TokenEmbedder` is a `Module` that embeds one-hot-encoded tokens as vectors. """ from allennlp.modules.token_embedders.token_embedder import TokenEmbedder from allennlp.modules.token_embedders.embedding import Embedding from allennlp.modules.token_embedders.token_characters_encoder import TokenCharactersEncoder from allennlp.modules.token_embedders.elmo_token_embedder import ElmoTokenEmbedder from allennlp.modules.token_embedders.elmo_token_embedder_multilang import ( ElmoTokenEmbedderMultiLang, ) from allennlp.modules.token_embedders.empty_embedder import EmptyEmbedder from allennlp.modules.token_embedders.bert_token_embedder import ( BertEmbedder, PretrainedBertEmbedder, ) from allennlp.modules.token_embedders.bidirectional_language_model_token_embedder import ( BidirectionalLanguageModelTokenEmbedder, ) from allennlp.modules.token_embedders.language_model_token_embedder import ( LanguageModelTokenEmbedder, ) from allennlp.modules.token_embedders.bag_of_word_counts_token_embedder import ( BagOfWordCountsTokenEmbedder, ) from allennlp.modules.token_embedders.pass_through_token_embedder import PassThroughTokenEmbedder from allennlp.modules.token_embedders.pretrained_transformer_embedder import ( PretrainedTransformerEmbedder, ) from allennlp.modules.token_embedders.pretrained_transformer_mismatched_embedder import ( PretrainedTransformerMismatchedEmbedder, )
41.764706
97
0.86831
149
1,420
7.979866
0.33557
0.131203
0.207738
0.262405
0.486964
0.238856
0.174937
0.084104
0
0
0
0
0.079577
1,420
33
98
43.030303
0.909717
0.05493
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.035714
0.464286
0
0.464286
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
12af6330353d647556ed1b4f5871418b63165be4
302
py
Python
backend/src/exceptions/db_error.py
yubowen0525/graceful-blog
1b2481a774c36b6b90f8accdc012bf99438d7643
[ "Apache-2.0" ]
null
null
null
backend/src/exceptions/db_error.py
yubowen0525/graceful-blog
1b2481a774c36b6b90f8accdc012bf99438d7643
[ "Apache-2.0" ]
null
null
null
backend/src/exceptions/db_error.py
yubowen0525/graceful-blog
1b2481a774c36b6b90f8accdc012bf99438d7643
[ "Apache-2.0" ]
null
null
null
from fastapi import HTTPException from starlette.requests import Request from starlette.responses import JSONResponse from sqlalchemy.exc import SQLAlchemyError async def db_error_handler(_: Request, exc: SQLAlchemyError) -> JSONResponse: return JSONResponse({"errors": str(exc)}, status_code=500)
43.142857
77
0.821192
36
302
6.777778
0.638889
0.106557
0
0
0
0
0
0
0
0
0
0.011111
0.10596
302
7
78
43.142857
0.892593
0
0
0
0
0
0.019802
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.833333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
12d72b8676c2cb821617f961361d85e4599ddb01
126
py
Python
wsgi.py
miguel-osuna/Quotes-API
cf4373fb2d303bfd36c1d3472cfde77e3612e6c6
[ "MIT" ]
null
null
null
wsgi.py
miguel-osuna/Quotes-API
cf4373fb2d303bfd36c1d3472cfde77e3612e6c6
[ "MIT" ]
null
null
null
wsgi.py
miguel-osuna/Quotes-API
cf4373fb2d303bfd36c1d3472cfde77e3612e6c6
[ "MIT" ]
null
null
null
import os from quotes_api.app import create_app app = create_app(configuration=os.getenv("APP_CONFIGURATION", "production"))
25.2
76
0.809524
18
126
5.444444
0.555556
0.183673
0
0
0
0
0
0
0
0
0
0
0.087302
126
4
77
31.5
0.852174
0
0
0
0
0
0.214286
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
12d9f03bb5791f1adfbb8a43296f9a7c75e4c7b1
1,482
py
Python
home/migrations/0002_automation.py
maknetwork/flybox
8bab5979253011d2392658f4bb9ccd9a989dfad7
[ "PostgreSQL", "Unlicense", "MIT" ]
1
2020-04-21T10:54:54.000Z
2020-04-21T10:54:54.000Z
home/migrations/0002_automation.py
maknetwork/flybox
8bab5979253011d2392658f4bb9ccd9a989dfad7
[ "PostgreSQL", "Unlicense", "MIT" ]
5
2021-03-19T00:46:55.000Z
2021-06-10T18:38:22.000Z
home/migrations/0002_automation.py
maknetwork/flybox
8bab5979253011d2392658f4bb9ccd9a989dfad7
[ "PostgreSQL", "Unlicense", "MIT" ]
null
null
null
# Generated by Django 3.0.4 on 2020-03-10 18:13 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('home', '0001_initial'), ] operations = [ migrations.CreateModel( name='Automation', fields=[ ('flybox', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='automataflyboxa', serialize=False, to='home.Flyboxset')), ('mondaycd', models.BooleanField(default=False)), ('tuesdaycd', models.BooleanField(default=False)), ('wednesdaycd', models.BooleanField(default=False)), ('thursdaycd', models.BooleanField(default=False)), ('fridaycd', models.BooleanField(default=False)), ('saturdaycd', models.BooleanField(default=False)), ('sundaycd', models.BooleanField(default=False)), ('mondaypd', models.BooleanField(default=False)), ('tuesdaypd', models.BooleanField(default=False)), ('wednesdaypd', models.BooleanField(default=False)), ('thursdaypd', models.BooleanField(default=False)), ('fridaypd', models.BooleanField(default=False)), ('saturdaypd', models.BooleanField(default=False)), ('sundaypd', models.BooleanField(default=False)), ], ), ]
42.342857
179
0.598516
128
1,482
6.898438
0.453125
0.285391
0.396376
0.475651
0
0
0
0
0
0
0
0.017352
0.261134
1,482
34
180
43.588235
0.789041
0.030364
0
0
1
0
0.131707
0
0
0
0
0
0
1
0
false
0
0.071429
0
0.178571
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
12f0799776a7bc0635763031e6a622812a27ca5c
1,104
py
Python
BachelorETL/ETL/models.py
Athanar/BachelorProject
b2867aab55dab0c793fb5eb993850f13bb9e64fa
[ "MIT" ]
null
null
null
BachelorETL/ETL/models.py
Athanar/BachelorProject
b2867aab55dab0c793fb5eb993850f13bb9e64fa
[ "MIT" ]
null
null
null
BachelorETL/ETL/models.py
Athanar/BachelorProject
b2867aab55dab0c793fb5eb993850f13bb9e64fa
[ "MIT" ]
null
null
null
from django.db import models class Connection(models.Model): name = models.CharField(max_length=30) dialect = models.CharField(max_length=300) username = models.CharField(max_length=300) password = models.CharField(max_length=300) host = models.CharField(max_length=300) database = models.CharField(max_length=300) schema = models.CharField(max_length=300) class Tables(models.Model): connection_id = models.IntegerField() name = models.CharField(max_length=300) target_name = models.CharField(max_length=300) enabled = models.BooleanField(default=True) class Columns(models.Model): table_id = models.IntegerField() name = models.CharField(max_length=300) target_name = models.CharField(max_length=300) data_type = models.CharField(max_length=300) length = models.CharField(default='', max_length=300) is_key = models.BooleanField(default=False) enabled = models.BooleanField(default=True)
42.461538
66
0.652174
123
1,104
5.699187
0.284553
0.278174
0.308131
0.410842
0.663338
0.25107
0.25107
0.25107
0.25107
0.25107
0
0.046061
0.252717
1,104
25
67
44.16
0.803636
0
0
0.272727
0
0
0
0
0
0
0
0
0
1
0
false
0.045455
0.045455
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
12fd33914646d2c72ce5a9d4c873a85b337545d0
54
py
Python
db/conditions/expression.py
hiscaler/data-access-object-for-python
1234b315c4aedaad6577f7928ee6edc5c99801e5
[ "BSD-2-Clause" ]
null
null
null
db/conditions/expression.py
hiscaler/data-access-object-for-python
1234b315c4aedaad6577f7928ee6edc5c99801e5
[ "BSD-2-Clause" ]
null
null
null
db/conditions/expression.py
hiscaler/data-access-object-for-python
1234b315c4aedaad6577f7928ee6edc5c99801e5
[ "BSD-2-Clause" ]
null
null
null
# encoding=utf-8 class Expression(object): pass
9
25
0.685185
7
54
5.285714
1
0
0
0
0
0
0
0
0
0
0
0.023256
0.203704
54
5
26
10.8
0.837209
0.259259
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
422d3cf43cb8153343118d4240f401725d069a32
117
py
Python
32.operacoes_com_tuplas/3.1.soma.py
robinson-1985/python-zero-dnc
df510d67e453611fcd320df1397cdb9ca47fecb8
[ "MIT" ]
null
null
null
32.operacoes_com_tuplas/3.1.soma.py
robinson-1985/python-zero-dnc
df510d67e453611fcd320df1397cdb9ca47fecb8
[ "MIT" ]
null
null
null
32.operacoes_com_tuplas/3.1.soma.py
robinson-1985/python-zero-dnc
df510d67e453611fcd320df1397cdb9ca47fecb8
[ "MIT" ]
null
null
null
tupla_1 = (1,2,4,7,5,6,(4,3),1,2,1) tupla_2 = ("oi","tchau","boa tarde") tupla_3 = (tupla_1 + tupla_2) print(tupla_3)
29.25
36
0.623932
28
117
2.392857
0.464286
0.179104
0.208955
0
0
0
0
0
0
0
0
0.161905
0.102564
117
4
37
29.25
0.47619
0
0
0
0
0
0.135593
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
424f3ec79461488d078c2bc8e489c706afd73f77
948
py
Python
win/devkit/other/pymel/extras/completion/py/maya/app/general/UVSnapPosSection.py
leegoonz/Maya-devkit
b81fe799b58e854e4ef16435426d60446e975871
[ "ADSL" ]
10
2018-03-30T16:09:02.000Z
2021-12-07T07:29:19.000Z
win/devkit/other/pymel/extras/completion/py/maya/app/general/UVSnapPosSection.py
leegoonz/Maya-devkit
b81fe799b58e854e4ef16435426d60446e975871
[ "ADSL" ]
null
null
null
win/devkit/other/pymel/extras/completion/py/maya/app/general/UVSnapPosSection.py
leegoonz/Maya-devkit
b81fe799b58e854e4ef16435426d60446e975871
[ "ADSL" ]
9
2018-06-02T09:18:49.000Z
2021-12-20T09:24:35.000Z
import re from . import UVGenericSection as _UVGenericSection import maya.mel as mel import sys import maya.cmds as cmds import os from PySide.QtCore import * from PySide.QtGui import * from random import randint from maya.app.general.UVGenericSection import UVGenericSection class UVSnapPosSection(UVGenericSection): def __init__(self): pass def createLayout(self): pass def snapCenter(self): pass def snapDown(self): pass def snapDownLeft(self): pass def snapDownRight(self): pass def snapLeft(self): """ #BUTTON SLOT FUNCTIONS """ pass def snapRight(self): pass def snapUp(self): pass def snapUpLeft(self): pass def snapUpRight(self): pass staticMetaObject = None
14.149254
62
0.563291
92
948
5.75
0.413043
0.151229
0.187146
0
0
0
0
0
0
0
0
0
0.379747
948
66
63
14.363636
0.89966
0.023207
0
0.323529
0
0
0
0
0
0
0
0
0
1
0.323529
false
0.323529
0.294118
0
0.676471
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
427f374f2b47040e38c3b1287ffe839cb8eebc29
171
py
Python
{{cookiecutter.project_slug}}/backend/apps/utils/apps.py
paranambu/django-boilerplate
aecdd3e4a7ae48150eef09733649319b8eba8dfa
[ "Unlicense" ]
null
null
null
{{cookiecutter.project_slug}}/backend/apps/utils/apps.py
paranambu/django-boilerplate
aecdd3e4a7ae48150eef09733649319b8eba8dfa
[ "Unlicense" ]
null
null
null
{{cookiecutter.project_slug}}/backend/apps/utils/apps.py
paranambu/django-boilerplate
aecdd3e4a7ae48150eef09733649319b8eba8dfa
[ "Unlicense" ]
1
2020-01-23T04:23:20.000Z
2020-01-23T04:23:20.000Z
from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class UtilsConfig(AppConfig): name = 'utils' verbose_name = _('Utils')
21.375
55
0.754386
21
171
5.952381
0.666667
0.16
0
0
0
0
0
0
0
0
0
0
0.163743
171
7
56
24.428571
0.874126
0
0
0
0
0
0.05848
0
0
0
0
0
0
1
0
false
0
0.4
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
42984eeaba716957063c6627548fd8acbb38019f
4,539
py
Python
shared.py
arminbahl/random_dot_motion
c54917f08cdb71bdca7a92106475170846ed71f2
[ "MIT" ]
null
null
null
shared.py
arminbahl/random_dot_motion
c54917f08cdb71bdca7a92106475170846ed71f2
[ "MIT" ]
null
null
null
shared.py
arminbahl/random_dot_motion
c54917f08cdb71bdca7a92106475170846ed71f2
[ "MIT" ]
1
2021-02-03T14:47:26.000Z
2021-02-03T14:47:26.000Z
from multiprocessing import Value, sharedctypes, RawArray import numpy as np import ctypes import time from stimulus_module import StimulusModule import pickle class Shared(): def __init__(self): self.window_properties_x = Value('i', 0) self.window_properties_y = Value('i', 0) self.window_properties_width = Value('i', 800) self.window_properties_height = Value('i', 800) self.window_properties_background = Value('d', 0) self.window_properties_radius = Value('d', 1.4) self.control_window_position_x = Value('i', 100) self.control_window_position_y = Value('i', 100) self.window_properties_update_requested = Value('b', 0) self.stimulus_properties_number_of_dots = Value('i', 1000) self.stimulus_properties_size_of_dots = Value('d', 0.1) self.stimulus_properties_speed_of_dots = Value('d', 0.3) self.stimulus_properties_direction_of_dots = Value('d', 0.0) self.stimulus_properties_coherence_of_dots = Value('d', 50) self.stimulus_properties_lifetime_of_dots = Value('d', 0.2) self.stimulus_properties_brightness_of_dots = Value('d', 1.0) self.stimulus_properties_update_requested = Value('b', 0) self.running = Value('b', 1) def load_values(self): try: values = pickle.load(open("values.pickle", "rb")) self.window_properties_x.value = values["window_properties_x"] self.window_properties_y.value = values["window_properties_y"] self.window_properties_width.value = values["window_properties_width"] self.window_properties_height.value = values["window_properties_height"] self.window_properties_radius.value = values["window_properties_radius"] self.window_properties_background.value = values["window_properties_background"] self.control_window_position_x.value = values["control_window_position_x"] self.control_window_position_y.value = values["control_window_position_y"] self.stimulus_properties_number_of_dots.value = values["stimulus_properties_number_of_dots"] self.stimulus_properties_size_of_dots.value = values["stimulus_properties_size_of_dots"] 
self.stimulus_properties_speed_of_dots.value = values["stimulus_properties_speed_of_dots"] self.stimulus_properties_direction_of_dots.value = values["stimulus_properties_direction_of_dots"] self.stimulus_properties_coherence_of_dots.value = values["stimulus_properties_coherence_of_dots"] self.stimulus_properties_lifetime_of_dots.value = values["stimulus_properties_lifetime_of_dots"] self.stimulus_properties_brightness_of_dots.value = values["stimulus_properties_brightness_of_dots"] except Exception as e: print(e) def save_values(self): try: values = dict({}) values["window_properties_x"] = self.window_properties_x.value values["window_properties_y"] = self.window_properties_y.value values["window_properties_width"] = self.window_properties_width.value values["window_properties_height"] = self.window_properties_height.value values["window_properties_radius"] = self.window_properties_radius.value values["window_properties_background"] = self.window_properties_background.value values["control_window_position_x"] = self.control_window_position_x.value values["control_window_position_y"] = self.control_window_position_y.value values["stimulus_properties_number_of_dots"] = self.stimulus_properties_number_of_dots.value values["stimulus_properties_size_of_dots"] = self.stimulus_properties_size_of_dots.value values["stimulus_properties_speed_of_dots"] = self.stimulus_properties_speed_of_dots.value values["stimulus_properties_coherence_of_dots"] = self.stimulus_properties_coherence_of_dots.value values["stimulus_properties_direction_of_dots"] = self.stimulus_properties_direction_of_dots.value values["stimulus_properties_lifetime_of_dots"] = self.stimulus_properties_lifetime_of_dots.value values["stimulus_properties_brightness_of_dots"] = self.stimulus_properties_brightness_of_dots.value pickle.dump(values, open("values.pickle", "wb")) except Exception as e: print(e) def start_threads(self): StimulusModule(self).start()
53.4
112
0.724389
553
4,539
5.504521
0.124774
0.212878
0.159001
0.133377
0.87615
0.847569
0.761827
0.601183
0.4159
0.376478
0
0.009759
0.187266
4,539
84
113
54.035714
0.815397
0
0
0.086957
0
0
0.201807
0.174488
0
0
0
0
0
1
0.057971
false
0
0.086957
0
0.15942
0.028986
0
0
0
null
1
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
42aef0bb0fada372219db79ea88d95eaafc8ee74
155
py
Python
scripts/mididumphw.py
b0rkestra/python-midi
98e8f167b7f3437ad8616fc45d86b729c7722776
[ "MIT" ]
1,344
2015-01-17T18:11:42.000Z
2022-03-24T08:42:47.000Z
scripts/mididumphw.py
b0rkestra/python-midi
98e8f167b7f3437ad8616fc45d86b729c7722776
[ "MIT" ]
130
2015-01-11T09:25:53.000Z
2022-01-16T17:54:24.000Z
scripts/mididumphw.py
b0rkestra/python-midi
98e8f167b7f3437ad8616fc45d86b729c7722776
[ "MIT" ]
403
2015-01-06T21:37:06.000Z
2022-03-31T21:07:43.000Z
#!/usr/bin/env python """ Print a description of the available devices. """ import midi.sequencer as sequencer s = sequencer.SequencerHardware() print s
15.5
45
0.748387
21
155
5.52381
0.809524
0
0
0
0
0
0
0
0
0
0
0
0.141935
155
9
46
17.222222
0.87218
0.129032
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.333333
null
null
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
c40817471dde50ce76e6acd1e03abb451d52306e
200
py
Python
Algorithms/Sorting/BubbleSort/bubble_sort.py
Nalhin/AlgorithmsAndDataStructures
2d2c87d0572e107c993c3c8866b8beefd4d22082
[ "MIT" ]
1
2021-11-16T13:02:25.000Z
2021-11-16T13:02:25.000Z
Algorithms/Sorting/BubbleSort/bubble_sort.py
Nalhin/AlgorithmsAndDataStructures
2d2c87d0572e107c993c3c8866b8beefd4d22082
[ "MIT" ]
null
null
null
Algorithms/Sorting/BubbleSort/bubble_sort.py
Nalhin/AlgorithmsAndDataStructures
2d2c87d0572e107c993c3c8866b8beefd4d22082
[ "MIT" ]
null
null
null
def bubble_sort(array): for i in range(len(array)): for j in range(len(array) - 1): if array[j] > array[j + 1]: array[j], array[j + 1] = array[j + 1], array[j]
33.333333
63
0.495
33
200
2.969697
0.363636
0.367347
0.214286
0.367347
0.397959
0.326531
0.326531
0
0
0
0
0.030075
0.335
200
5
64
40
0.706767
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0
0
0.2
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
c40852fcbc1ff05c06bbd84acee7738808198e7f
130
py
Python
classgrade/gradapp/apps.py
classgrade/classgrade
144dcfc9579e6858ff4aa79835c76b9611ed73b2
[ "MIT" ]
5
2016-11-15T17:46:27.000Z
2022-01-10T08:06:17.000Z
classgrade/gradapp/apps.py
classgrade/classgrade
144dcfc9579e6858ff4aa79835c76b9611ed73b2
[ "MIT" ]
21
2016-11-07T14:58:22.000Z
2021-02-02T21:41:12.000Z
classgrade/gradapp/apps.py
classgrade/classgrade
144dcfc9579e6858ff4aa79835c76b9611ed73b2
[ "MIT" ]
null
null
null
from __future__ import unicode_literals from django.apps import AppConfig class GradappConfig(AppConfig): name = 'gradapp'
16.25
39
0.792308
15
130
6.533333
0.8
0
0
0
0
0
0
0
0
0
0
0
0.153846
130
7
40
18.571429
0.890909
0
0
0
0
0
0.053846
0
0
0
0
0
0
1
0
false
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
c40e96ef508460c744a45f93d707199d4792a6ab
290
py
Python
podcasting/utils/twitter.py
boatcoder/django-podcasting
4f4ab9094d6b6b8010ac0b7d6c3158c2413f755a
[ "BSD-3-Clause" ]
41
2015-01-01T14:04:02.000Z
2022-02-20T19:31:02.000Z
podcasting/utils/twitter.py
boatcoder/django-podcasting
4f4ab9094d6b6b8010ac0b7d6c3158c2413f755a
[ "BSD-3-Clause" ]
14
2015-04-03T18:11:05.000Z
2020-09-19T13:32:43.000Z
podcasting/utils/twitter.py
boatcoder/django-podcasting
4f4ab9094d6b6b8010ac0b7d6c3158c2413f755a
[ "BSD-3-Clause" ]
25
2015-02-12T12:07:32.000Z
2022-01-09T21:26:25.000Z
from django.conf import settings try: import twitter except ImportError: twitter = None # noqa def can_tweet(): creds_available = (hasattr(settings, "TWITTER_USERNAME") and hasattr(settings, "TWITTER_PASSWORD")) return twitter and creds_available
22.307692
64
0.686207
32
290
6.0625
0.65625
0.14433
0.226804
0
0
0
0
0
0
0
0
0
0.241379
290
12
65
24.166667
0.881818
0.013793
0
0
0
0
0.112676
0
0
0
0
0
0
1
0.111111
false
0.111111
0.333333
0
0.555556
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
4
c438041ebd500019b8f7444077d455a2b55f66c7
37
py
Python
jogos/jogo_da_forca.py
rafaelpuyau/scripts_em_python
0a70827084425ca5a47650573d4a794e7afac2a2
[ "MIT" ]
null
null
null
jogos/jogo_da_forca.py
rafaelpuyau/scripts_em_python
0a70827084425ca5a47650573d4a794e7afac2a2
[ "MIT" ]
null
null
null
jogos/jogo_da_forca.py
rafaelpuyau/scripts_em_python
0a70827084425ca5a47650573d4a794e7afac2a2
[ "MIT" ]
null
null
null
''' Ainda vou implementar o jogo '''
9.25
28
0.648649
5
37
4.8
1
0
0
0
0
0
0
0
0
0
0
0
0.189189
37
3
29
12.333333
0.8
0.756757
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
c44225c3bd42a0139525267f294f628f8506d348
219
py
Python
easy_pil/__init__.py
madphysicist/easy-pil
4c9ec02ce88792224d940f4fd2634458d2432a18
[ "MIT" ]
22
2021-09-23T18:17:12.000Z
2022-03-29T22:23:26.000Z
easy_pil/__init__.py
madphysicist/easy-pil
4c9ec02ce88792224d940f4fd2634458d2432a18
[ "MIT" ]
7
2021-10-14T16:31:24.000Z
2022-03-27T13:12:49.000Z
easy_pil/__init__.py
madphysicist/easy-pil
4c9ec02ce88792224d940f4fd2634458d2432a18
[ "MIT" ]
6
2021-08-24T10:20:32.000Z
2022-03-31T17:53:23.000Z
from ._version import __version__, version_info from .canvas import Canvas from .editor import Editor from .font import Font from .text import Text from .utils import load_image, load_image_async, run_in_executor
31.285714
65
0.808219
33
219
5.030303
0.454545
0.108434
0
0
0
0
0
0
0
0
0
0
0.150685
219
6
66
36.5
0.892473
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
c45590ae59869f5342652148ead1fcc972f6f929
342
py
Python
trajectron/environment/__init__.py
Vision-CAIR/UnlikelihoodMotionForecasting
556d6a3ed3e4e0e2d88108d7dbb48933313b58aa
[ "MIT" ]
1
2022-02-23T13:20:58.000Z
2022-02-23T13:20:58.000Z
trajectron/environment/__init__.py
Vision-CAIR/UnlikelihoodMotionForecasting
556d6a3ed3e4e0e2d88108d7dbb48933313b58aa
[ "MIT" ]
null
null
null
trajectron/environment/__init__.py
Vision-CAIR/UnlikelihoodMotionForecasting
556d6a3ed3e4e0e2d88108d7dbb48933313b58aa
[ "MIT" ]
null
null
null
from .data_structures import RingBuffer, SingleHeaderNumpyArray, DoubleHeaderNumpyArray from .scene import Scene from .node import Node from .scene_graph import TemporalSceneGraph, SceneGraph from .environment import Environment from .node_type import NodeTypeEnum from .data_utils import derivative_of from .map import GeometricMap, SafeMap
38
87
0.859649
41
342
7.04878
0.512195
0.055363
0
0
0
0
0
0
0
0
0
0
0.105263
342
8
88
42.75
0.944444
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
c489f2d6aee790faf76357c8686fc200f526c407
3,403
py
Python
rzepa/movies/migrations/0001_initial.py
mpiskore/rzepa
be9b8454daa87954d6004a62f0740769bf080640
[ "MIT" ]
null
null
null
rzepa/movies/migrations/0001_initial.py
mpiskore/rzepa
be9b8454daa87954d6004a62f0740769bf080640
[ "MIT" ]
null
null
null
rzepa/movies/migrations/0001_initial.py
mpiskore/rzepa
be9b8454daa87954d6004a62f0740769bf080640
[ "MIT" ]
null
null
null
# Generated by Django 2.1.2 on 2018-10-13 21:37 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="Movie", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("title", models.CharField(db_index=True, max_length=128, unique=True)), ("actors", models.CharField(blank=True, default="", max_length=256)), ("awards", models.CharField(blank=True, default="", max_length=256)), ("box_office", models.CharField(blank=True, default="", max_length=32)), ("country", models.CharField(blank=True, default="", max_length=32)), ("dvd", models.CharField(blank=True, default="", max_length=32)), ("director", models.CharField(blank=True, default="", max_length=32)), ("genre", models.CharField(blank=True, default="", max_length=32)), ("language", models.CharField(blank=True, default="", max_length=64)), ("metascore", models.CharField(blank=True, default="", max_length=16)), ("plot", models.TextField(blank=True, default="")), ("poster", models.CharField(blank=True, default="", max_length=256)), ("production", models.CharField(blank=True, default="", max_length=64)), ("rated", models.CharField(blank=True, default="", max_length=16)), ("released", models.CharField(blank=True, default="", max_length=32)), ("runtime", models.CharField(blank=True, default="", max_length=16)), ("type", models.CharField(blank=True, default="", max_length=16)), ("website", models.CharField(blank=True, default="", max_length=128)), ("writer", models.CharField(blank=True, default="", max_length=32)), ("year", models.CharField(blank=True, default="", max_length=16)), ("imdbID", models.CharField(blank=True, default="", max_length=16)), ("imdbRating", models.CharField(blank=True, default="", max_length=16)), ("imdbVotes", models.CharField(blank=True, default="", max_length=16)), ], ), migrations.CreateModel( 
name="Rating", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("source", models.CharField(max_length=64)), ("value", models.CharField(max_length=16)), ( "movie", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="ratings", to="movies.Movie", ), ), ], ), ]
44.776316
88
0.494857
304
3,403
5.427632
0.273026
0.218182
0.213333
0.305455
0.630909
0.630909
0.630909
0.606667
0.095758
0.095758
0
0.031207
0.359683
3,403
75
89
45.373333
0.726021
0.013224
0
0.382353
1
0
0.061681
0
0
0
0
0
0
1
0
false
0
0.029412
0
0.088235
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
671ebed2f88f0d31aeed22731ebefccb664dc306
1,018
py
Python
app/rooms/migrations/0003_alter_amenity_options_alter_facility_options_and_more.py
gurnitha/2022-django4-clone-airbnb
169060e3da0abf91f4ba25740c7bf6d2bea01750
[ "Unlicense" ]
null
null
null
app/rooms/migrations/0003_alter_amenity_options_alter_facility_options_and_more.py
gurnitha/2022-django4-clone-airbnb
169060e3da0abf91f4ba25740c7bf6d2bea01750
[ "Unlicense" ]
null
null
null
app/rooms/migrations/0003_alter_amenity_options_alter_facility_options_and_more.py
gurnitha/2022-django4-clone-airbnb
169060e3da0abf91f4ba25740c7bf6d2bea01750
[ "Unlicense" ]
null
null
null
# Generated by Django 4.0.2 on 2022-02-11 09:56 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('rooms', '0002_initial'), ] operations = [ migrations.AlterModelOptions( name='amenity', options={'verbose_name_plural': 'Amenities'}, ), migrations.AlterModelOptions( name='facility', options={'verbose_name_plural': 'Facilities'}, ), migrations.AlterModelOptions( name='houserule', options={'verbose_name_plural': 'House rules'}, ), migrations.AlterModelOptions( name='photo', options={'verbose_name_plural': 'Photos'}, ), migrations.AlterModelOptions( name='room', options={'verbose_name_plural': 'Rooms'}, ), migrations.AlterModelOptions( name='roomtype', options={'verbose_name_plural': 'Room types'}, ), ]
26.789474
59
0.555992
82
1,018
6.743902
0.487805
0.292948
0.336347
0.260398
0
0
0
0
0
0
0
0.027417
0.319253
1,018
37
60
27.513514
0.770563
0.044204
0
0.387097
1
0
0.22966
0
0
0
0
0
0
1
0
false
0
0.032258
0
0.129032
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
672378128a8153d631cd0540314880dbb84152df
33
py
Python
pacote-download/Python/modulo01/python01/aula14.py
fabiosabariego/curso-python
a4ffff53ff9e92b5ef0de637e9bcce25f7feebd9
[ "MIT" ]
null
null
null
pacote-download/Python/modulo01/python01/aula14.py
fabiosabariego/curso-python
a4ffff53ff9e92b5ef0de637e9bcce25f7feebd9
[ "MIT" ]
null
null
null
pacote-download/Python/modulo01/python01/aula14.py
fabiosabariego/curso-python
a4ffff53ff9e92b5ef0de637e9bcce25f7feebd9
[ "MIT" ]
null
null
null
""" LAÇOS DE REPETIÇÃO WHILE """
8.25
24
0.636364
4
33
5.25
1
0
0
0
0
0
0
0
0
0
0
0
0.181818
33
3
25
11
0.777778
0.727273
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
672cc53fc468f24699d6d967db305bed930280fc
224
py
Python
mporm/__init__.py
Mivinci/mporm
f67d656c749c2622d32f4917a402499b53686ead
[ "MIT" ]
9
2019-08-25T08:42:09.000Z
2019-09-30T05:09:33.000Z
mporm/__init__.py
Mivinci/tsorm
f67d656c749c2622d32f4917a402499b53686ead
[ "MIT" ]
1
2018-10-23T14:49:15.000Z
2018-10-23T14:49:15.000Z
mporm/__init__.py
Mivinci/tsorm
f67d656c749c2622d32f4917a402499b53686ead
[ "MIT" ]
1
2019-08-25T11:39:21.000Z
2019-08-25T11:39:21.000Z
from mporm.oper import Operator from mporm.schema import Schema from mporm.model import Model from mporm.expr import Expr from mporm.dsn import DSN from mporm.sql import ORM from mporm.fields import * __version__ = "0.0.2"
22.4
31
0.799107
38
224
4.605263
0.394737
0.36
0
0
0
0
0
0
0
0
0
0.015625
0.142857
224
9
32
24.888889
0.895833
0
0
0
0
0
0.022321
0
0
0
0
0
0
1
0
false
0
0.875
0
0.875
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4