# Problem description
# Two 1-D integer arrays a and b of equal length are given as parameters.
# Complete the solution function so that it returns the dot product of a and b.
# The dot product of a and b is a[0]*b[0] + a[1]*b[1] + ... + a[n-1]*b[n-1],
# where n is the length of a and b.

# Constraints
# The length of a and b is between 1 and 1,000.
# Every number in a and b is between -1,000 and 1,000.

# Example inputs and outputs
# a           b            result
# [1,2,3,4]   [-3,-1,0,2]  3
# [-1,0,1]    [1,0,-1]     -2

# Example #1
# The dot product of a and b is 1*(-3) + 2*(-1) + 3*0 + 4*2 = 3.
# Example #2
# The dot product of a and b is (-1)*1 + 0*0 + 1*(-1) = -2.

def solution(a, b):
    answer = 0
    for i in range(len(a)):
        answer += a[i] * b[i]
    return answer


# Using a list comprehension
def solution2(a, b):
    answer = [a[i] * b[i] for i in range(len(a))]
    return sum(answer)


# Using a lambda
def solution3(a, b):
    return sum(map(lambda x, y: x * y, a, b))


# Using zip
def solution4(a, b):
    answer = 0
    for i, j in zip(a, b):
        answer += i * j
    return answer


# Using zip + a list comprehension
def solution5(a, b):
    answer = sum([i * j for i, j in zip(a, b)])
    return answer
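A quick sanity check against the worked examples from the problem statement; all five variants should agree on both expected outputs:

for f in (solution, solution2, solution3, solution4, solution5):
    assert f([1, 2, 3, 4], [-3, -1, 0, 2]) == 3
    assert f([-1, 0, 1], [1, 0, -1]) == -2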
from data.dataframe_sequence_multi import DataFrameSequenceMulti
from metrics import Metrics
from models.models_ts_multi import lstm_model_multi
import threading
import sys
from keras import optimizers
from data.data_helper import plot_history

epochs = 100
start = 6
end = 18

res = []
sets = []
min_vals = []
min_loss = []


def run_final_all_days():
    # onsite
    # data = DataFrameSequenceMulti(False, True, False, False)
    # onsite & img
    # data = DataFrameSequenceMulti(False, True, True, False)
    # all data
    data = DataFrameSequenceMulti(False, True, True, True)
    data.build_ts_df(start, end, [7, 8, 9, 10, 11, 12], 5)
    data.scale_mega(model='lstm')

    name_time = '_sqnc_' + str(5)
    name_data = 'data_' + 'all'
    name_epoch = 'epochs_' + str(epochs)

    lstm = lstm_model_multi.LSTM_predictor(data, epochs,
                                           'LSTM_SEQUENCE_MULTI alldays' + name_epoch + name_time + name_data)
    lstm.set_days(data.get_all_test_days())
    lstm.run_experiment()


def run_final_test_days():
    # sqs = [5, 10]
    sqs = [5]
    cams = [1]
    permutations = [(True, True, True)]
    # permutations = [(True, True, True), (True, False, False), (False, True, False)]
    # permutations_names = ['all data', 'onsite_only', 'img only']
    permutations_names = ['all data perez']
    for pidx, p in enumerate(permutations):
        for s in sqs:
            for c in cams:
                data = DataFrameSequenceMulti(False, p[0], p[1], p[2])
                data.build_ts_df(start, end, [7, 8, 9, 10, 11, 12], s, cams=c, clear_sky_label=True)
                # data.normalize_mega_df()
                data.scale_mega(model='lstm')

                name_time = '_sqnc_' + str(s)
                name_data = 'data_' + permutations_names[pidx]
                name_epoch = '_epochs_' + str(epochs)
                name_cam = '_cams_' + str(c)

                lstm = lstm_model_multi.LSTM_predictor(data, epochs,
                                                       'LSTM_TSET GRAD prz' + name_time + name_data + name_cam,
                                                       pred_csi=True)
                lstm.set_days(data.get_thesis_test_days())
                lstm.run_experiment()


def run_lstm_experiment(set='test'):
    sqs = [5]
    cams = [1]
    permutations = [(True, True, False)]
    permutations_names = ['pxl_onsite']

    for pidx, p in enumerate(permutations):
        for s in sqs:
            for c in cams:
                data = DataFrameSequenceMulti(False, p[0], p[1], p[2])
                if set == 'test':
                    # data.load_prev_mega_df()
                    data.build_ts_df(start, end, [7, 8, 9, 10, 11, 12], s, cams=c, clear_sky_label=False)
                    # data.save_df()
                else:
                    data.build_ts_df(start, end, [7, 8, 9, 10], s, cams=c, clear_sky_label=False)
                data.scale_mega('lstm')
                name_time = '_sqnc_' + str(s)
                name_data = 'data_' + permutations_names[pidx]
                name_epoch = '_epochs_' + str(epochs)
                name_cam = '_cams_' + str(c)
                if set == 'test':
                    lstm = lstm_model_multi.LSTM_predictor(data, 100,
                                                           'LSTM_TEST_PXL' + name_epoch + name_time + name_data + name_cam)
                    lstm.set_days(data.get_thesis_test_days())
                else:
                    lstm = lstm_model_multi.LSTM_predictor(data, 100,
                                                           'LSTM_PREM2_PXL' + name_epoch + name_time + name_data + name_cam)
                    lstm.set_days(data.get_prem_days())

                lstm.run_experiment()


def LSTM_test():
    data = DataFrameSequenceMulti(False, True, True, False)
    # data.build_ts_df(6, 19, [7, 8, 9, 10], 5)
    data.load_prev_mega_df()
    lstm = lstm_model_multi.LSTM_predictor(data, 100, 'LSTM_TEST')
    data.split_data_set_EXPRMTL(9, 15, 3)
    data.scale_mega(model='lstm')
    data.flatten_data_set_to_3d()

    lstm.get_model()
    lstm.train(100)
    y_pred, rmse = lstm.predict()
    # plot_history('s1', 1, lstm.history)

    # import matplotlib.pyplot as plt
    # plt.plot(lstm.history.history['loss'])
    # plt.plot(lstm.history.history['val_loss'], linestyle=':')
    # ymin = min(lstm.history.history['val_loss'])
    # xpos = lstm.history.history['val_loss'].index(ymin)
    # xmin = lstm.history.history['val_loss'][xpos]
    # plt.annotate('Minimum validation loss', size=20, xy=(xpos, ymin), xytext=(xpos, ymin + 30000),
    #              arrowprops=dict(facecolor='black', shrink=0.05, width=5, headwidth=20),
    #              horizontalalignment='center', verticalalignment='top',
    #              )
    # plt.ylim(0, 100000)
    # plt.title('LSTM M 5 all data', size=20)
    # plt.ylabel('Mean squared error', size=20)
    # plt.xlabel('Epochs', size=20)
    # plt.legend(['train', 'validation'], loc='upper left')
    # plt.show()
    #
    # Metrics.write_results_multi('LSTM_TEST_MULTI', data.test_x_df.reshape(
    #     (data.test_x_df.shape[0],
    #      data.sequence_len_minutes,
    #      data.number_of_features)),
    #     data.test_y_df, y_pred)

    print(rmse)


def optimize():
    # Full grid previously explored:
    # seq_l = [3, 5, 10]
    # nodes = [(50, 25, 10), (60, 30, 15), (80, 40, 20)]
    # activations = ['relu', 'sigmoid']
    # opts = ['Adam', 'RMSprop']
    # learning_rate = [0.001, 0.01, 0.1]

    seq_l = [5]
    nodes = [(50, 25, 10)]
    activations = ['relu']
    opts = ['Adam']
    learning_rate = [0.001]

    data = DataFrameSequenceMulti(False, True, True, True)
    lstm = lstm_model_multi.LSTM_predictor(data, 50, 'LSTM_TEST')
    num = 0
    for s in seq_l:
        data.build_ts_df(6, 19, [7, 8, 9, 10, 11, 12], s, 1)
        data.normalize_mega_df()
        data.split_data_set(10, 15)
        data.flatten_data_set_to_3d()
        for n in nodes:
            for a in activations:
                for o in opts:
                    for lr in learning_rate:
                        if o == 'Adam':
                            opt = optimizers.Adam(lr=lr)
                        else:
                            opt = optimizers.RMSprop(lr=lr)

                        lstm.set_model(n, a, opt)
                        out = lstm.train(100)
                        res.append(out)
                        settings = 'nodes: ' + str(n) + ' activation: ' + str(a) + ' optimizer: ' + str(o) + \
                                   ' lr: ' + str(lr) + ' seq_l: ' + str(s)
                        sets.append(settings)
                        plot_history(settings, num, out)
                        min_loss.append(min(out.history['loss']))
                        min_vals.append(min(out.history['val_loss']))
                        num = num + 1

    best_val_loss = min_vals.index(min(min_vals))
    print('BEST VAL LOSS: ')
    print(sets[best_val_loss])
    print('val loss: ' + str(min(min_vals)))
    print('epoch: ')
    print(res[best_val_loss].history['val_loss'].index(min(res[best_val_loss].history['val_loss'])))

    best_loss = min_loss.index(min(min_loss))
    print('BEST Train LOSS: ')
    print(sets[best_loss])
    print('train loss: ' + str(min(min_loss)))
    print('epoch: ')
    print(res[best_loss].history['loss'].index(min(res[best_loss].history['loss'])))


run_lstm_experiment(set='test')
# run_final_test_days()
# run_final_all_days()
# LSTM_test()
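A minimal, standalone sketch of the bookkeeping optimize() uses to pick the best run, assuming each entry mimics a Keras History.history dict; the loss values here are made up purely for illustration:

histories = [
    {'loss': [0.9, 0.5, 0.4], 'val_loss': [1.0, 0.7, 0.8]},   # made-up run 1
    {'loss': [0.8, 0.6, 0.3], 'val_loss': [0.9, 0.6, 0.65]},  # made-up run 2
]
settings = ['nodes=(50,25,10) lr=0.001', 'nodes=(60,30,15) lr=0.01']
min_vals = [min(h['val_loss']) for h in histories]
best = min_vals.index(min(min_vals))
print(settings[best], min_vals[best],
      histories[best]['val_loss'].index(min_vals[best]))  # best settings, loss, and epoch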
"""tables

Revision ID: 35f6815c3112
Revises: None
Create Date: 2013-07-28 21:15:38.385006

"""

# revision identifiers, used by Alembic.
revision = '35f6815c3112'
down_revision = None

from alembic import op
import sqlalchemy as sa


def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('firstname', sa.String(length=64), nullable=True),
    sa.Column('lastname', sa.String(length=64), nullable=True),
    sa.Column('email', sa.String(length=120), nullable=True),
    sa.Column('password', sa.String(length=64), nullable=True),
    sa.Column('address', sa.String(length=120), nullable=True),
    sa.Column('city', sa.String(length=64), nullable=True),
    sa.Column('state', sa.String(length=64), nullable=True),
    sa.Column('zipcode', sa.String(length=64), nullable=True),
    sa.Column('country', sa.String(length=64), nullable=True),
    sa.Column('role', sa.Integer(), nullable=True),
    sa.Column('dob', sa.DateTime(), nullable=True),
    sa.Column('gender', sa.String(length=64), nullable=True),
    sa.Column('fitness', sa.Integer(), nullable=True),
    sa.Column('experience', sa.Integer(), nullable=True),
    sa.Column('willing_teamLeader', sa.Boolean(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('health_types',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('issue', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('users_health',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('health_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['health_id'], ['health_types.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('positions',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('position_type', sa.String(length=64), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###


def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('positions')
    op.drop_table('users_health')
    op.drop_table('health_types')
    op.drop_table('users')
    ### end Alembic commands ###
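A minimal sketch of applying this revision programmatically through Alembic's command API, assuming an alembic.ini at the project root that points at the target database:

from alembic.config import Config
from alembic import command

cfg = Config('alembic.ini')
command.upgrade(cfg, '35f6815c3112')  # creates users, health_types, users_health, positions
# command.downgrade(cfg, 'base')      # revert: drops the four tables again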
from rest_framework import serializers
from urlshortner.models import UrlShortnerModel
from urlshortner.constants import HOST


class UrlShortRequest(serializers.Serializer):
    url = serializers.CharField(required=True, max_length=255)  # Long Url
    expiry = serializers.DateTimeField(required=False)


class UrlLongRequest(serializers.Serializer):
    url = serializers.CharField(required=True, max_length=64)  # Short Url

    def validate_url(self, url):
        if url.startswith(HOST):
            return url
        else:
            # DRF field validators must raise (not return) the error
            raise serializers.ValidationError("Invalid short URL")


class UrlShortResponse(serializers.ModelSerializer):
    class Meta:
        model = UrlShortnerModel
        fields = "__all__"
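A short usage sketch, assuming HOST is something like 'https://sho.rt/'. With the raise in place, an invalid short URL now surfaces through is_valid()/errors instead of silently passing validation:

req = UrlLongRequest(data={'url': 'https://elsewhere.example/abc'})
print(req.is_valid())  # False, because validate_url raises
print(req.errors)      # roughly {'url': ['Invalid short URL']}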
import json
import boto3
import os
import datetime

regionName = os.environ['AWS_REGION']
BUCKET_PATH = os.environ['BUCKET_PATH']
SENSITIVIT = os.environ['SENSITIVIT']

s3_client = boto3.client('s3', region_name=regionName)
ddb_resource = boto3.resource('dynamodb', region_name=regionName)


def lambda_handler(event, context):
    # body = json.loads(event['body'])
    body = event
    videoPath = str(body['videoPath'])
    templatePath = str(body['templatePath'])
    facePath = str(body['facePath'])
    targetPeople = str(body['targetPeople'])

    # Download the Rekognition face-search output from S3
    FACES_BUCKET = facePath.split('/')[2]
    FACES_OBJECT = '/'.join(facePath.split('/')[3:])
    s3_client.download_file(FACES_BUCKET, FACES_OBJECT, '/tmp/faces.json')
    facesJson = open('/tmp/faces.json', 'r')
    facesData = json.load(facesJson)
    FRAME_RATE = int(facesData['VideoMetadata']['FrameRate'])
    PEOPLE = targetPeople.split(',')

    timeStamps = []
    scenesTime = []

    # Collect every timestamp (ms) where one of the target people was matched
    i = 0
    while i < len(facesData['Persons']):
        try:
            for target in PEOPLE:
                if facesData['Persons'][i]['FaceMatches'] == []:
                    pass
                elif facesData['Persons'][i]['FaceMatches'][0]['Face']['ExternalImageId'] == target.strip():
                    timeStamps.append(facesData['Persons'][i]['Timestamp'])
        except IndexError:
            pass
        i = i + 1

    # Group timestamps that are at most 1 second apart into one scene
    timeCollection = [[timeStamps[0]]]
    i = 1
    j = 0
    while i < len(timeStamps):
        if timeStamps[i] - timeCollection[j][-1] <= 1000:
            timeCollection[j].append(timeStamps[i])
            i = i + 1
        else:
            j = j + 1
            timeCollection.append([timeStamps[i]])

    # Convert each scene lasting at least 1 second into HH:MM:SS:FF timecodes
    for collection in timeCollection:
        if collection[-1] - collection[0] >= 1000:
            if collection[0] % 1000 == 0:
                start = datetime.datetime.utcfromtimestamp(collection[0] // 1000).strftime("%H:%M:%S") + ':00'
            elif int(collection[0] % 1000 / 1000 * FRAME_RATE) < 10:
                start = datetime.datetime.utcfromtimestamp(collection[0] // 1000).strftime("%H:%M:%S") + ':0' + str(int(collection[0] % 1000 / 1000 * FRAME_RATE))
            else:
                start = datetime.datetime.utcfromtimestamp(collection[0] // 1000).strftime("%H:%M:%S") + ':' + str(int(collection[0] % 1000 / 1000 * FRAME_RATE))
            if collection[-1] % 1000 == 0:
                end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000).strftime("%H:%M:%S") + ':00'
            elif int(collection[-1] % 1000 / 1000 * FRAME_RATE) < 10:
                end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000).strftime("%H:%M:%S") + ':0' + str(int(collection[-1] % 1000 / 1000 * FRAME_RATE))
            else:
                end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000).strftime("%H:%M:%S") + ':' + str(int(collection[-1] % 1000 / 1000 * FRAME_RATE))
            scenesTime.append((start, end))
        else:
            pass

    # Download the MediaConvert job template and fill in the detected scenes
    JOB_BUCKET = templatePath.split('/')[2]
    JOB_OBJECT = '/'.join(templatePath.split('/')[3:])
    s3_client.download_file(JOB_BUCKET, JOB_OBJECT, '/tmp/job-template.json')

    finalName = []
    for people in PEOPLE:
        finalName.append(people.strip())
    OUTPUT_NAME = '-' + '-'.join(finalName)

    with open('/tmp/job-template.json', 'r') as r:
        template = json.load(r)
        for scene in scenesTime:
            template['Settings']['Inputs'][0]['InputClippings'].append({'StartTimecode': scene[0], 'EndTimecode': scene[-1]})
        template['Settings']['Inputs'][0]['FileInput'] = videoPath
        template['Settings']['OutputGroups'][0]['Outputs'][0]['NameModifier'] = OUTPUT_NAME
        template['Settings']['OutputGroups'][0]['OutputGroupSettings']['FileGroupSettings']['Destination'] = BUCKET_PATH
        with open('/tmp/job-all.json', 'w') as w:
            json.dump(template, w, indent=2)
            w.close()
        r.close()

    # MediaConvert requires the account-specific endpoint for job submission
    mediaconvert_client = boto3.client('mediaconvert', region_name=regionName)
    response = mediaconvert_client.describe_endpoints(Mode='DEFAULT')
    mediaURL = response['Endpoints'][0]['Url']
    mediaconvert_client = boto3.client('mediaconvert', endpoint_url=mediaURL)
    with open("/tmp/job-all.json", "r") as jsonfile:
        job_object = json.load(jsonfile)
    mediaconvert_client.create_job(**job_object)

    output = {'videoPath': videoPath,
              'templatePath': templatePath,
              'facePath': facePath,
              'targetPerson': targetPeople,
              'Frame Rate': FRAME_RATE
              }

    return {
        'statusCode': 200,
        'body': json.dumps(output)
    }
normal
{ "blob_id": "8c96c38a67c2eb97e30b325e4917ba4888731118", "index": 7349, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef lambda_handler(event, context):\n body = event\n videoPath = str(body['videoPath'])\n templatePath = str(body['templatePath'])\n facePath = str(body['facePath'])\n targetPeople = str(body['targetPeople'])\n FACES_BUCKET = facePath.split('/')[2]\n FACES_OBJECT = '/'.join(facePath.split('/')[3:])\n s3_client.download_file(FACES_BUCKET, FACES_OBJECT, '/tmp/faces.json')\n facesJson = open('/tmp/faces.json', 'r')\n facesData = json.load(facesJson)\n FRAME_RATE = int(facesData['VideoMetadata']['FrameRate'])\n PEOPLE = targetPeople.split(',')\n timeStamps = []\n scenesTime = []\n i = 0\n while i < len(facesData['Persons']):\n try:\n for target in PEOPLE:\n if facesData['Persons'][i]['FaceMatches'] == []:\n pass\n elif facesData['Persons'][i]['FaceMatches'][0]['Face'][\n 'ExternalImageId'] == target.strip():\n timeStamps.append(facesData['Persons'][i]['Timestamp'])\n except IndexError:\n pass\n i = i + 1\n timeCollection = [[timeStamps[0]]]\n i = 1\n j = 0\n while i < len(timeStamps):\n if timeStamps[i] - timeCollection[j][-1] <= 1000:\n timeCollection[j].append(timeStamps[i])\n i = i + 1\n else:\n j = j + 1\n timeCollection.append([timeStamps[i]])\n for collection in timeCollection:\n if collection[-1] - collection[0] >= 1000:\n if collection[0] % 1000 == 0:\n start = datetime.datetime.utcfromtimestamp(collection[0] //\n 1000).strftime('%H:%M:%S') + ':00'\n elif int(collection[0] % 1000 / 1000 * FRAME_RATE) < 10:\n start = datetime.datetime.utcfromtimestamp(collection[0] //\n 1000).strftime('%H:%M:%S') + ':0' + str(int(collection[\n 0] % 1000 / 1000 * FRAME_RATE))\n else:\n start = datetime.datetime.utcfromtimestamp(collection[0] //\n 1000).strftime('%H:%M:%S') + ':' + str(int(collection[0\n ] % 1000 / 1000 * FRAME_RATE))\n if collection[-1] % 1000 == 0:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000\n ).strftime('%H:%M:%S') + ':00'\n elif int(collection[-1] % 1000 / 1000 * FRAME_RATE) < 10:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000\n ).strftime('%H:%M:%S') + ':0' + str(int(collection[-1] %\n 1000 / 1000 * FRAME_RATE))\n else:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000\n ).strftime('%H:%M:%S') + ':' + str(int(collection[-1] %\n 1000 / 1000 * FRAME_RATE))\n scenesTime.append((start, end))\n else:\n pass\n JOB_BUCKET = templatePath.split('/')[2]\n JOB_OBJECT = '/'.join(templatePath.split('/')[3:])\n s3_client.download_file(JOB_BUCKET, JOB_OBJECT, '/tmp/job-template.json')\n finalName = []\n for people in PEOPLE:\n finalName.append(people.strip())\n OUTPUT_NAME = '-' + '-'.join(finalName)\n with open('/tmp/job-template.json', 'r') as r:\n template = json.load(r)\n for scene in scenesTime:\n template['Settings']['Inputs'][0]['InputClippings'].append({\n 'StartTimecode': scene[0], 'EndTimecode': scene[-1]})\n template['Settings']['Inputs'][0]['FileInput'] = videoPath\n template['Settings']['OutputGroups'][0]['Outputs'][0]['NameModifier'\n ] = OUTPUT_NAME\n template['Settings']['OutputGroups'][0]['OutputGroupSettings'][\n 'FileGroupSettings']['Destination'] = BUCKET_PATH\n with open('/tmp/job-all.json', 'w') as w:\n json.dump(template, w, indent=2)\n w.close()\n r.close()\n mediaconvert_client = boto3.client('mediaconvert', region_name=regionName)\n response = mediaconvert_client.describe_endpoints(Mode='DEFAULT')\n mediaURL = response['Endpoints'][0]['Url']\n mediaconvert_client = 
boto3.client('mediaconvert', endpoint_url=mediaURL)\n with open('/tmp/job-all.json', 'r') as jsonfile:\n job_object = json.load(jsonfile)\n mediaconvert_client.create_job(**job_object)\n output = {'videoPath': videoPath, 'templatePath': templatePath,\n 'facePath': facePath, 'targetPerson': targetPeople, 'Frame Rate':\n FRAME_RATE}\n return {'statusCode': 200, 'body': json.dumps(output)}\n", "step-3": "<mask token>\nregionName = os.environ['AWS_REGION']\nBUCKET_PATH = os.environ['BUCKET_PATH']\nSENSITIVIT = os.environ['SENSITIVIT']\ns3_client = boto3.client('s3', region_name=regionName)\nddb_resource = boto3.resource('dynamodb', region_name=regionName)\n\n\ndef lambda_handler(event, context):\n body = event\n videoPath = str(body['videoPath'])\n templatePath = str(body['templatePath'])\n facePath = str(body['facePath'])\n targetPeople = str(body['targetPeople'])\n FACES_BUCKET = facePath.split('/')[2]\n FACES_OBJECT = '/'.join(facePath.split('/')[3:])\n s3_client.download_file(FACES_BUCKET, FACES_OBJECT, '/tmp/faces.json')\n facesJson = open('/tmp/faces.json', 'r')\n facesData = json.load(facesJson)\n FRAME_RATE = int(facesData['VideoMetadata']['FrameRate'])\n PEOPLE = targetPeople.split(',')\n timeStamps = []\n scenesTime = []\n i = 0\n while i < len(facesData['Persons']):\n try:\n for target in PEOPLE:\n if facesData['Persons'][i]['FaceMatches'] == []:\n pass\n elif facesData['Persons'][i]['FaceMatches'][0]['Face'][\n 'ExternalImageId'] == target.strip():\n timeStamps.append(facesData['Persons'][i]['Timestamp'])\n except IndexError:\n pass\n i = i + 1\n timeCollection = [[timeStamps[0]]]\n i = 1\n j = 0\n while i < len(timeStamps):\n if timeStamps[i] - timeCollection[j][-1] <= 1000:\n timeCollection[j].append(timeStamps[i])\n i = i + 1\n else:\n j = j + 1\n timeCollection.append([timeStamps[i]])\n for collection in timeCollection:\n if collection[-1] - collection[0] >= 1000:\n if collection[0] % 1000 == 0:\n start = datetime.datetime.utcfromtimestamp(collection[0] //\n 1000).strftime('%H:%M:%S') + ':00'\n elif int(collection[0] % 1000 / 1000 * FRAME_RATE) < 10:\n start = datetime.datetime.utcfromtimestamp(collection[0] //\n 1000).strftime('%H:%M:%S') + ':0' + str(int(collection[\n 0] % 1000 / 1000 * FRAME_RATE))\n else:\n start = datetime.datetime.utcfromtimestamp(collection[0] //\n 1000).strftime('%H:%M:%S') + ':' + str(int(collection[0\n ] % 1000 / 1000 * FRAME_RATE))\n if collection[-1] % 1000 == 0:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000\n ).strftime('%H:%M:%S') + ':00'\n elif int(collection[-1] % 1000 / 1000 * FRAME_RATE) < 10:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000\n ).strftime('%H:%M:%S') + ':0' + str(int(collection[-1] %\n 1000 / 1000 * FRAME_RATE))\n else:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000\n ).strftime('%H:%M:%S') + ':' + str(int(collection[-1] %\n 1000 / 1000 * FRAME_RATE))\n scenesTime.append((start, end))\n else:\n pass\n JOB_BUCKET = templatePath.split('/')[2]\n JOB_OBJECT = '/'.join(templatePath.split('/')[3:])\n s3_client.download_file(JOB_BUCKET, JOB_OBJECT, '/tmp/job-template.json')\n finalName = []\n for people in PEOPLE:\n finalName.append(people.strip())\n OUTPUT_NAME = '-' + '-'.join(finalName)\n with open('/tmp/job-template.json', 'r') as r:\n template = json.load(r)\n for scene in scenesTime:\n template['Settings']['Inputs'][0]['InputClippings'].append({\n 'StartTimecode': scene[0], 'EndTimecode': scene[-1]})\n template['Settings']['Inputs'][0]['FileInput'] = videoPath\n 
template['Settings']['OutputGroups'][0]['Outputs'][0]['NameModifier'\n ] = OUTPUT_NAME\n template['Settings']['OutputGroups'][0]['OutputGroupSettings'][\n 'FileGroupSettings']['Destination'] = BUCKET_PATH\n with open('/tmp/job-all.json', 'w') as w:\n json.dump(template, w, indent=2)\n w.close()\n r.close()\n mediaconvert_client = boto3.client('mediaconvert', region_name=regionName)\n response = mediaconvert_client.describe_endpoints(Mode='DEFAULT')\n mediaURL = response['Endpoints'][0]['Url']\n mediaconvert_client = boto3.client('mediaconvert', endpoint_url=mediaURL)\n with open('/tmp/job-all.json', 'r') as jsonfile:\n job_object = json.load(jsonfile)\n mediaconvert_client.create_job(**job_object)\n output = {'videoPath': videoPath, 'templatePath': templatePath,\n 'facePath': facePath, 'targetPerson': targetPeople, 'Frame Rate':\n FRAME_RATE}\n return {'statusCode': 200, 'body': json.dumps(output)}\n", "step-4": "import json\nimport boto3\nimport os\nimport datetime\nregionName = os.environ['AWS_REGION']\nBUCKET_PATH = os.environ['BUCKET_PATH']\nSENSITIVIT = os.environ['SENSITIVIT']\ns3_client = boto3.client('s3', region_name=regionName)\nddb_resource = boto3.resource('dynamodb', region_name=regionName)\n\n\ndef lambda_handler(event, context):\n body = event\n videoPath = str(body['videoPath'])\n templatePath = str(body['templatePath'])\n facePath = str(body['facePath'])\n targetPeople = str(body['targetPeople'])\n FACES_BUCKET = facePath.split('/')[2]\n FACES_OBJECT = '/'.join(facePath.split('/')[3:])\n s3_client.download_file(FACES_BUCKET, FACES_OBJECT, '/tmp/faces.json')\n facesJson = open('/tmp/faces.json', 'r')\n facesData = json.load(facesJson)\n FRAME_RATE = int(facesData['VideoMetadata']['FrameRate'])\n PEOPLE = targetPeople.split(',')\n timeStamps = []\n scenesTime = []\n i = 0\n while i < len(facesData['Persons']):\n try:\n for target in PEOPLE:\n if facesData['Persons'][i]['FaceMatches'] == []:\n pass\n elif facesData['Persons'][i]['FaceMatches'][0]['Face'][\n 'ExternalImageId'] == target.strip():\n timeStamps.append(facesData['Persons'][i]['Timestamp'])\n except IndexError:\n pass\n i = i + 1\n timeCollection = [[timeStamps[0]]]\n i = 1\n j = 0\n while i < len(timeStamps):\n if timeStamps[i] - timeCollection[j][-1] <= 1000:\n timeCollection[j].append(timeStamps[i])\n i = i + 1\n else:\n j = j + 1\n timeCollection.append([timeStamps[i]])\n for collection in timeCollection:\n if collection[-1] - collection[0] >= 1000:\n if collection[0] % 1000 == 0:\n start = datetime.datetime.utcfromtimestamp(collection[0] //\n 1000).strftime('%H:%M:%S') + ':00'\n elif int(collection[0] % 1000 / 1000 * FRAME_RATE) < 10:\n start = datetime.datetime.utcfromtimestamp(collection[0] //\n 1000).strftime('%H:%M:%S') + ':0' + str(int(collection[\n 0] % 1000 / 1000 * FRAME_RATE))\n else:\n start = datetime.datetime.utcfromtimestamp(collection[0] //\n 1000).strftime('%H:%M:%S') + ':' + str(int(collection[0\n ] % 1000 / 1000 * FRAME_RATE))\n if collection[-1] % 1000 == 0:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000\n ).strftime('%H:%M:%S') + ':00'\n elif int(collection[-1] % 1000 / 1000 * FRAME_RATE) < 10:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000\n ).strftime('%H:%M:%S') + ':0' + str(int(collection[-1] %\n 1000 / 1000 * FRAME_RATE))\n else:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000\n ).strftime('%H:%M:%S') + ':' + str(int(collection[-1] %\n 1000 / 1000 * FRAME_RATE))\n scenesTime.append((start, end))\n else:\n pass\n JOB_BUCKET = 
templatePath.split('/')[2]\n JOB_OBJECT = '/'.join(templatePath.split('/')[3:])\n s3_client.download_file(JOB_BUCKET, JOB_OBJECT, '/tmp/job-template.json')\n finalName = []\n for people in PEOPLE:\n finalName.append(people.strip())\n OUTPUT_NAME = '-' + '-'.join(finalName)\n with open('/tmp/job-template.json', 'r') as r:\n template = json.load(r)\n for scene in scenesTime:\n template['Settings']['Inputs'][0]['InputClippings'].append({\n 'StartTimecode': scene[0], 'EndTimecode': scene[-1]})\n template['Settings']['Inputs'][0]['FileInput'] = videoPath\n template['Settings']['OutputGroups'][0]['Outputs'][0]['NameModifier'\n ] = OUTPUT_NAME\n template['Settings']['OutputGroups'][0]['OutputGroupSettings'][\n 'FileGroupSettings']['Destination'] = BUCKET_PATH\n with open('/tmp/job-all.json', 'w') as w:\n json.dump(template, w, indent=2)\n w.close()\n r.close()\n mediaconvert_client = boto3.client('mediaconvert', region_name=regionName)\n response = mediaconvert_client.describe_endpoints(Mode='DEFAULT')\n mediaURL = response['Endpoints'][0]['Url']\n mediaconvert_client = boto3.client('mediaconvert', endpoint_url=mediaURL)\n with open('/tmp/job-all.json', 'r') as jsonfile:\n job_object = json.load(jsonfile)\n mediaconvert_client.create_job(**job_object)\n output = {'videoPath': videoPath, 'templatePath': templatePath,\n 'facePath': facePath, 'targetPerson': targetPeople, 'Frame Rate':\n FRAME_RATE}\n return {'statusCode': 200, 'body': json.dumps(output)}\n", "step-5": "import json\nimport boto3\nimport os\nimport datetime\n\n\nregionName = os.environ['AWS_REGION']\nBUCKET_PATH = os.environ['BUCKET_PATH']\nSENSITIVIT = os.environ['SENSITIVIT']\n\ns3_client = boto3.client('s3', region_name=regionName)\nddb_resource = boto3.resource('dynamodb', region_name=regionName)\n\ndef lambda_handler(event, context):\n# body = json.loads(event['body'])\n body = event\n videoPath = str(body['videoPath'])\n templatePath = str(body['templatePath'])\n facePath = str(body['facePath'])\n targetPeople = str(body['targetPeople'])\n \n FACES_BUCKET = facePath.split('/')[2]\n FACES_OBJECT = '/'.join(facePath.split('/')[3:])\n \n s3_client.download_file(FACES_BUCKET, FACES_OBJECT, '/tmp/faces.json')\n facesJson = open('/tmp/faces.json', 'r')\n facesData = json.load(facesJson)\n \n FRAME_RATE = int(facesData['VideoMetadata']['FrameRate'])\n \n PEOPLE = targetPeople.split(',')\n \n timeStamps = []\n scenesTime = []\n \n i = 0\n while i < len(facesData['Persons']):\n try:\n for target in PEOPLE:\n if facesData['Persons'][i]['FaceMatches'] == []:\n pass\n elif facesData['Persons'][i]['FaceMatches'][0]['Face']['ExternalImageId'] == target.strip():\n timeStamps.append(facesData['Persons'][i]['Timestamp'])\n except IndexError:\n pass\n i = i+1\n \n timeCollection = [[timeStamps[0]]]\n i = 1\n j = 0\n while i < len(timeStamps):\n if timeStamps[i] - timeCollection[j][-1] <= 1000:\n timeCollection[j].append(timeStamps[i])\n i = i+1\n else:\n j = j+1\n timeCollection.append([timeStamps[i]])\n \n for collection in timeCollection:\n if collection[-1] - collection[0] >= 1000:\n if collection[0] % 1000 == 0:\n start = datetime.datetime.utcfromtimestamp(collection[0]//1000).strftime(\"%H:%M:%S\") + ':00'\n elif int(collection[0] % 1000 / 1000 * FRAME_RATE) < 10:\n start = datetime.datetime.utcfromtimestamp(collection[0] // 1000).strftime(\"%H:%M:%S\") + ':0' + str(int(collection[0] % 1000 / 1000 * FRAME_RATE))\n else:\n start = datetime.datetime.utcfromtimestamp(collection[0]//1000).strftime(\"%H:%M:%S\") + ':' + str(int(collection[0] % 
1000 / 1000 * FRAME_RATE))\n if collection[-1] % 1000 == 0:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000).strftime(\"%H:%M:%S\") + ':00'\n elif int(collection[-1] % 1000 / 1000 * FRAME_RATE) < 10:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000).strftime(\"%H:%M:%S\") + ':0' + str(int(collection[-1] % 1000 / 1000 * FRAME_RATE))\n else:\n end = datetime.datetime.utcfromtimestamp(collection[-1] // 1000).strftime(\"%H:%M:%S\") + ':' + str(int(collection[-1] % 1000 / 1000 * FRAME_RATE))\n scenesTime.append((start,end))\n else:\n pass\n \n JOB_BUCKET = templatePath.split('/')[2]\n JOB_OBJECT = '/'.join(templatePath.split('/')[3:])\n s3_client.download_file(JOB_BUCKET, JOB_OBJECT, '/tmp/job-template.json')\n \n finalName = []\n for people in PEOPLE:\n finalName.append(people.strip())\n \n OUTPUT_NAME = '-'+'-'.join(finalName)\n \n with open('/tmp/job-template.json', 'r') as r:\n template = json.load(r)\n for scene in scenesTime:\n template['Settings']['Inputs'][0]['InputClippings'].append({'StartTimecode': scene[0], 'EndTimecode': scene[-1]})\n template['Settings']['Inputs'][0]['FileInput'] = videoPath\n template['Settings']['OutputGroups'][0]['Outputs'][0]['NameModifier'] = OUTPUT_NAME\n template['Settings']['OutputGroups'][0]['OutputGroupSettings']['FileGroupSettings']['Destination'] = BUCKET_PATH\n with open('/tmp/job-all.json', 'w') as w:\n json.dump(template, w, indent=2)\n w.close()\n r.close()\n \n mediaconvert_client = boto3.client('mediaconvert', region_name=regionName)\n\n response = mediaconvert_client.describe_endpoints(Mode='DEFAULT')\n \n mediaURL = response['Endpoints'][0]['Url']\n \n mediaconvert_client = boto3.client('mediaconvert',endpoint_url=mediaURL)\n \n with open(\"/tmp/job-all.json\", \"r\") as jsonfile:\n job_object = json.load(jsonfile)\n \n mediaconvert_client.create_job(**job_object)\n \n \n output = {'videoPath': videoPath,\n 'templatePath': templatePath,\n 'facePath': facePath,\n 'targetPerson': targetPeople,\n 'Frame Rate': FRAME_RATE\n }\n return {\n 'statusCode': 200,\n 'body': json.dumps(output)\n }\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# RUSH HOUR
m = int(input('Enter number of males:'))
f = int(input('Enter number of females:'))

if m%20 == 0:
    m2 = m//20
    c = 20
else:
    m2 = m//20+1
    c = m%20

f2 = f - 10*m2

if f2 <= 0 or f2-(20-c) <=0:
    print('Number of trains needed: '+str(m2))
else:
    print('Number of trains needed: '+str(1+(f2-(20-c))//30+m2))
normal
{ "blob_id": "3c6ef57501e01da79f894b36726a93a3a5e0a8f6", "index": 8068, "step-1": "<mask token>\n", "step-2": "<mask token>\nif m % 20 == 0:\n m2 = m // 20\n c = 20\nelse:\n m2 = m // 20 + 1\n c = m % 20\n<mask token>\nif f2 <= 0 or f2 - (20 - c) <= 0:\n print('Number of trains needed: ' + str(m2))\nelse:\n print('Number of trains needed: ' + str(1 + (f2 - (20 - c)) // 30 + m2))\n", "step-3": "m = int(input('Enter number of males:'))\nf = int(input('Enter number of females:'))\nif m % 20 == 0:\n m2 = m // 20\n c = 20\nelse:\n m2 = m // 20 + 1\n c = m % 20\nf2 = f - 10 * m2\nif f2 <= 0 or f2 - (20 - c) <= 0:\n print('Number of trains needed: ' + str(m2))\nelse:\n print('Number of trains needed: ' + str(1 + (f2 - (20 - c)) // 30 + m2))\n", "step-4": "# RUSH HOUR\r\nm = int(input('Enter number of males:'))\r\nf = int(input('Enter number of females:'))\r\n\r\n\r\nif m%20 == 0:\r\n m2 = m//20\r\n c = 20\r\nelse:\r\n m2 = m//20+1\r\n c = m%20\r\n\r\n\r\nf2 = f - 10*m2\r\n\r\nif f2 <= 0 or f2-(20-c) <=0:\r\n print('Number of trains needed: '+str(m2))\r\nelse:\r\n print('Number of trains needed: '+str(1+(f2-(20-c))//30+m2))\r\n\r\n\r\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
def main():
    ' main entry point for module execution\n    '
    argument_spec = dict(src=dict(type='path'), replace_src=dict(), lines=dict(aliases=['commands'], type='list'), parents=dict(type='list'), before=dict(type='list'), after=dict(type='list'), match=dict(default='line', choices=['line', 'strict', 'exact', 'none']), replace=dict(default='line', choices=['line', 'block', 'config']), running_config=dict(aliases=['config']), intended_config=dict(), defaults=dict(type='bool', default=False), backup=dict(type='bool', default=False), save_when=dict(choices=['always', 'never', 'modified'], default='never'), diff_against=dict(choices=['running', 'startup', 'intended']), diff_ignore_lines=dict(type='list'), save=dict(default=False, type='bool', removed_in_version='2.4'), force=dict(default=False, type='bool', removed_in_version='2.2'))
    argument_spec.update(nxos_argument_spec)
    mutually_exclusive = [('lines', 'src', 'replace_src'), ('parents', 'src'), ('save', 'save_when')]
    required_if = [('match', 'strict', ['lines']), ('match', 'exact', ['lines']), ('replace', 'block', ['lines']), ('replace', 'config', ['replace_src']), ('diff_against', 'intended', ['intended_config'])]
    module = AnsibleModule(argument_spec=argument_spec, mutually_exclusive=mutually_exclusive, required_if=required_if, supports_check_mode=True)
    warnings = list()
    nxos_check_args(module, warnings)
    result = {
        'changed': False,
        'warnings': warnings,
    }
    config = None
    info = get_capabilities(module).get('device_info', {})
    os_platform = info.get('network_os_platform', '')
    if (module.params['replace'] == 'config'):
        if ('9K' not in os_platform):
            module.fail_json(msg='replace: config is supported only for Nexus 9K series switches')
    if module.params['replace_src']:
        if (module.params['replace'] != 'config'):
            module.fail_json(msg='replace: config is required with replace_src')
    if (module.params['backup'] or (module._diff and (module.params['diff_against'] == 'running'))):
        contents = get_config(module)
        config = NetworkConfig(indent=2, contents=contents)
        if module.params['backup']:
            result['__backup__'] = contents
    if any((module.params['src'], module.params['lines'], module.params['replace_src'])):
        match = module.params['match']
        replace = module.params['replace']
        candidate = get_candidate(module)
        if ((match != 'none') and (replace != 'config')):
            config = get_running_config(module, config)
            path = module.params['parents']
            configobjs = candidate.difference(config, match=match, replace=replace, path=path)
        else:
            configobjs = candidate.items
        if configobjs:
            commands = dumps(configobjs, 'commands').split('\n')
            if module.params['before']:
                commands[:0] = module.params['before']
            if module.params['after']:
                commands.extend(module.params['after'])
            result['commands'] = commands
            result['updates'] = commands
            if (not module.check_mode):
                load_config(module, commands)
            result['changed'] = True
    running_config = None
    startup_config = None
    diff_ignore_lines = module.params['diff_ignore_lines']
    if module.params['save']:
        module.params['save_when'] = 'always'
    if (module.params['save_when'] != 'never'):
        output = execute_show_commands(module, ['show running-config', 'show startup-config'])
        running_config = NetworkConfig(indent=1, contents=output[0], ignore_lines=diff_ignore_lines)
        startup_config = NetworkConfig(indent=1, contents=output[1], ignore_lines=diff_ignore_lines)
        if ((running_config.sha1 != startup_config.sha1) or (module.params['save_when'] == 'always')):
            result['changed'] = True
            if (not module.check_mode):
                cmd = {
                    'command': 'copy running-config startup-config',
                    'output': 'text',
                }
                run_commands(module, [cmd])
            else:
                module.warn('Skipping command `copy running-config startup-config` due to check_mode. Configuration not copied to non-volatile storage')
    if module._diff:
        if (not running_config):
            output = execute_show_commands(module, 'show running-config')
            contents = output[0]
        else:
            contents = running_config.config_text
        running_config = NetworkConfig(indent=1, contents=contents, ignore_lines=diff_ignore_lines)
        if (module.params['diff_against'] == 'running'):
            if module.check_mode:
                module.warn('unable to perform diff against running-config due to check mode')
                contents = None
            else:
                contents = config.config_text
        elif (module.params['diff_against'] == 'startup'):
            if (not startup_config):
                output = execute_show_commands(module, 'show startup-config')
                contents = output[0]
            else:
                contents = output[0]
                contents = startup_config.config_text
        elif (module.params['diff_against'] == 'intended'):
            contents = module.params['intended_config']
        if (contents is not None):
            base_config = NetworkConfig(indent=1, contents=contents, ignore_lines=diff_ignore_lines)
            if (running_config.sha1 != base_config.sha1):
                if (module.params['diff_against'] == 'intended'):
                    before = running_config
                    after = base_config
                elif (module.params['diff_against'] in ('startup', 'running')):
                    before = base_config
                    after = running_config
                result.update({
                    'changed': True,
                    'diff': {
                        'before': str(before),
                        'after': str(after),
                    },
                })
    module.exit_json(**result)
normal
{ "blob_id": "99b5ac74da95dff399c31d58e19bac65e538a34b", "index": 8012, "step-1": "<mask token>\n", "step-2": "def main():\n \"\"\" main entry point for module execution\n \"\"\"\n argument_spec = dict(src=dict(type='path'), replace_src=dict(), lines=\n dict(aliases=['commands'], type='list'), parents=dict(type='list'),\n before=dict(type='list'), after=dict(type='list'), match=dict(\n default='line', choices=['line', 'strict', 'exact', 'none']),\n replace=dict(default='line', choices=['line', 'block', 'config']),\n running_config=dict(aliases=['config']), intended_config=dict(),\n defaults=dict(type='bool', default=False), backup=dict(type='bool',\n default=False), save_when=dict(choices=['always', 'never',\n 'modified'], default='never'), diff_against=dict(choices=['running',\n 'startup', 'intended']), diff_ignore_lines=dict(type='list'), save=\n dict(default=False, type='bool', removed_in_version='2.4'), force=\n dict(default=False, type='bool', removed_in_version='2.2'))\n argument_spec.update(nxos_argument_spec)\n mutually_exclusive = [('lines', 'src', 'replace_src'), ('parents',\n 'src'), ('save', 'save_when')]\n required_if = [('match', 'strict', ['lines']), ('match', 'exact', [\n 'lines']), ('replace', 'block', ['lines']), ('replace', 'config', [\n 'replace_src']), ('diff_against', 'intended', ['intended_config'])]\n module = AnsibleModule(argument_spec=argument_spec, mutually_exclusive=\n mutually_exclusive, required_if=required_if, supports_check_mode=True)\n warnings = list()\n nxos_check_args(module, warnings)\n result = {'changed': False, 'warnings': warnings}\n config = None\n info = get_capabilities(module).get('device_info', {})\n os_platform = info.get('network_os_platform', '')\n if module.params['replace'] == 'config':\n if '9K' not in os_platform:\n module.fail_json(msg=\n 'replace: config is supported only for Nexus 9K series switches'\n )\n if module.params['replace_src']:\n if module.params['replace'] != 'config':\n module.fail_json(msg='replace: config is required with replace_src'\n )\n if module.params['backup'] or module._diff and module.params['diff_against'\n ] == 'running':\n contents = get_config(module)\n config = NetworkConfig(indent=2, contents=contents)\n if module.params['backup']:\n result['__backup__'] = contents\n if any((module.params['src'], module.params['lines'], module.params[\n 'replace_src'])):\n match = module.params['match']\n replace = module.params['replace']\n candidate = get_candidate(module)\n if match != 'none' and replace != 'config':\n config = get_running_config(module, config)\n path = module.params['parents']\n configobjs = candidate.difference(config, match=match, replace=\n replace, path=path)\n else:\n configobjs = candidate.items\n if configobjs:\n commands = dumps(configobjs, 'commands').split('\\n')\n if module.params['before']:\n commands[:0] = module.params['before']\n if module.params['after']:\n commands.extend(module.params['after'])\n result['commands'] = commands\n result['updates'] = commands\n if not module.check_mode:\n load_config(module, commands)\n result['changed'] = True\n running_config = None\n startup_config = None\n diff_ignore_lines = module.params['diff_ignore_lines']\n if module.params['save']:\n module.params['save_when'] = 'always'\n if module.params['save_when'] != 'never':\n output = execute_show_commands(module, ['show running-config',\n 'show startup-config'])\n running_config = NetworkConfig(indent=1, contents=output[0],\n ignore_lines=diff_ignore_lines)\n startup_config = NetworkConfig(indent=1, 
contents=output[1],\n ignore_lines=diff_ignore_lines)\n if running_config.sha1 != startup_config.sha1 or module.params[\n 'save_when'] == 'always':\n result['changed'] = True\n if not module.check_mode:\n cmd = {'command': 'copy running-config startup-config',\n 'output': 'text'}\n run_commands(module, [cmd])\n else:\n module.warn(\n 'Skipping command `copy running-config startup-config` due to check_mode. Configuration not copied to non-volatile storage'\n )\n if module._diff:\n if not running_config:\n output = execute_show_commands(module, 'show running-config')\n contents = output[0]\n else:\n contents = running_config.config_text\n running_config = NetworkConfig(indent=1, contents=contents,\n ignore_lines=diff_ignore_lines)\n if module.params['diff_against'] == 'running':\n if module.check_mode:\n module.warn(\n 'unable to perform diff against running-config due to check mode'\n )\n contents = None\n else:\n contents = config.config_text\n elif module.params['diff_against'] == 'startup':\n if not startup_config:\n output = execute_show_commands(module, 'show startup-config')\n contents = output[0]\n else:\n contents = output[0]\n contents = startup_config.config_text\n elif module.params['diff_against'] == 'intended':\n contents = module.params['intended_config']\n if contents is not None:\n base_config = NetworkConfig(indent=1, contents=contents,\n ignore_lines=diff_ignore_lines)\n if running_config.sha1 != base_config.sha1:\n if module.params['diff_against'] == 'intended':\n before = running_config\n after = base_config\n elif module.params['diff_against'] in ('startup', 'running'):\n before = base_config\n after = running_config\n result.update({'changed': True, 'diff': {'before': str(\n before), 'after': str(after)}})\n module.exit_json(**result)\n", "step-3": "def main():\n ' main entry point for module execution\\n '\n argument_spec = dict(src=dict(type='path'), replace_src=dict(), lines=dict(aliases=['commands'], type='list'), parents=dict(type='list'), before=dict(type='list'), after=dict(type='list'), match=dict(default='line', choices=['line', 'strict', 'exact', 'none']), replace=dict(default='line', choices=['line', 'block', 'config']), running_config=dict(aliases=['config']), intended_config=dict(), defaults=dict(type='bool', default=False), backup=dict(type='bool', default=False), save_when=dict(choices=['always', 'never', 'modified'], default='never'), diff_against=dict(choices=['running', 'startup', 'intended']), diff_ignore_lines=dict(type='list'), save=dict(default=False, type='bool', removed_in_version='2.4'), force=dict(default=False, type='bool', removed_in_version='2.2'))\n argument_spec.update(nxos_argument_spec)\n mutually_exclusive = [('lines', 'src', 'replace_src'), ('parents', 'src'), ('save', 'save_when')]\n required_if = [('match', 'strict', ['lines']), ('match', 'exact', ['lines']), ('replace', 'block', ['lines']), ('replace', 'config', ['replace_src']), ('diff_against', 'intended', ['intended_config'])]\n module = AnsibleModule(argument_spec=argument_spec, mutually_exclusive=mutually_exclusive, required_if=required_if, supports_check_mode=True)\n warnings = list()\n nxos_check_args(module, warnings)\n result = {\n 'changed': False,\n 'warnings': warnings,\n }\n config = None\n info = get_capabilities(module).get('device_info', {\n \n })\n os_platform = info.get('network_os_platform', '')\n if (module.params['replace'] == 'config'):\n if ('9K' not in os_platform):\n module.fail_json(msg='replace: config is supported only for Nexus 9K series switches')\n if 
module.params['replace_src']:\n if (module.params['replace'] != 'config'):\n module.fail_json(msg='replace: config is required with replace_src')\n if (module.params['backup'] or (module._diff and (module.params['diff_against'] == 'running'))):\n contents = get_config(module)\n config = NetworkConfig(indent=2, contents=contents)\n if module.params['backup']:\n result['__backup__'] = contents\n if any((module.params['src'], module.params['lines'], module.params['replace_src'])):\n match = module.params['match']\n replace = module.params['replace']\n candidate = get_candidate(module)\n if ((match != 'none') and (replace != 'config')):\n config = get_running_config(module, config)\n path = module.params['parents']\n configobjs = candidate.difference(config, match=match, replace=replace, path=path)\n else:\n configobjs = candidate.items\n if configobjs:\n commands = dumps(configobjs, 'commands').split('\\n')\n if module.params['before']:\n commands[:0] = module.params['before']\n if module.params['after']:\n commands.extend(module.params['after'])\n result['commands'] = commands\n result['updates'] = commands\n if (not module.check_mode):\n load_config(module, commands)\n result['changed'] = True\n running_config = None\n startup_config = None\n diff_ignore_lines = module.params['diff_ignore_lines']\n if module.params['save']:\n module.params['save_when'] = 'always'\n if (module.params['save_when'] != 'never'):\n output = execute_show_commands(module, ['show running-config', 'show startup-config'])\n running_config = NetworkConfig(indent=1, contents=output[0], ignore_lines=diff_ignore_lines)\n startup_config = NetworkConfig(indent=1, contents=output[1], ignore_lines=diff_ignore_lines)\n if ((running_config.sha1 != startup_config.sha1) or (module.params['save_when'] == 'always')):\n result['changed'] = True\n if (not module.check_mode):\n cmd = {\n 'command': 'copy running-config startup-config',\n 'output': 'text',\n }\n run_commands(module, [cmd])\n else:\n module.warn('Skipping command `copy running-config startup-config` due to check_mode. Configuration not copied to non-volatile storage')\n if module._diff:\n if (not running_config):\n output = execute_show_commands(module, 'show running-config')\n contents = output[0]\n else:\n contents = running_config.config_text\n running_config = NetworkConfig(indent=1, contents=contents, ignore_lines=diff_ignore_lines)\n if (module.params['diff_against'] == 'running'):\n if module.check_mode:\n module.warn('unable to perform diff against running-config due to check mode')\n contents = None\n else:\n contents = config.config_text\n elif (module.params['diff_against'] == 'startup'):\n if (not startup_config):\n output = execute_show_commands(module, 'show startup-config')\n contents = output[0]\n else:\n contents = output[0]\n contents = startup_config.config_text\n elif (module.params['diff_against'] == 'intended'):\n contents = module.params['intended_config']\n if (contents is not None):\n base_config = NetworkConfig(indent=1, contents=contents, ignore_lines=diff_ignore_lines)\n if (running_config.sha1 != base_config.sha1):\n if (module.params['diff_against'] == 'intended'):\n before = running_config\n after = base_config\n elif (module.params['diff_against'] in ('startup', 'running')):\n before = base_config\n after = running_config\n result.update({\n 'changed': True,\n 'diff': {\n 'before': str(before),\n 'after': str(after),\n },\n })\n module.exit_json(**result)", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
# Generated by Django 2.1.5 on 2021-06-01 19:16

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('fotbal', '0008_auto_20210601_2109'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='komenty',
            name='jmeno',
        ),
        migrations.DeleteModel(
            name='Komenty',
        ),
    ]
normal
{ "blob_id": "71ffad81bcbc480dc0a750680bc72e1d5c48556a", "index": 3619, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('fotbal', '0008_auto_20210601_2109')]\n operations = [migrations.RemoveField(model_name='komenty', name='jmeno'\n ), migrations.DeleteModel(name='Komenty')]\n", "step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('fotbal', '0008_auto_20210601_2109')]\n operations = [migrations.RemoveField(model_name='komenty', name='jmeno'\n ), migrations.DeleteModel(name='Komenty')]\n", "step-5": "# Generated by Django 2.1.5 on 2021-06-01 19:16\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('fotbal', '0008_auto_20210601_2109'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='komenty',\n name='jmeno',\n ),\n migrations.DeleteModel(\n name='Komenty',\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> class Meetup: <|reserved_special_token_0|> def check_if_meetup_exists(self, topic): query = 'SELECT topic from meetups WHERE topic=%s;' cur.execute(query, (topic,)) meetup = cur.fetchone() if meetup: return True <|reserved_special_token_0|> <|reserved_special_token_0|> @staticmethod def get_all_meetups(): """Method to fetch all meetups""" query = 'SELECT * from meetups;' cur.execute(query) meetups = cur.fetchall() return meetups @staticmethod def get_meetup_by_id(meetup_id): """ Fetch a specific meetup using meetup_id""" query = 'SELECT * from meetups where meetup_id=%s;' cur.execute(query, (meetup_id,)) meetup = cur.fetchone() return meetup <|reserved_special_token_1|> <|reserved_special_token_0|> class Meetup: def __init__(self, topic, location, tags, happening_on): self.topic = topic self.location = location self.tags = tags self.happening_on = happening_on self.created_on = now def check_if_meetup_exists(self, topic): query = 'SELECT topic from meetups WHERE topic=%s;' cur.execute(query, (topic,)) meetup = cur.fetchone() if meetup: return True def create_meetup(self): if self.check_if_meetup_exists(self.topic): return False query = ( 'INSERT INTO meetups (topic, location, tags, happening_on, created_on) values (%s, %s, %s, %s, %s) RETURNING meetup_id, topic, location, tags, happening_on, created_on;' ) cur.execute(query, (self.topic, self.location, self.tags, self. happening_on, self.created_on)) meetup = cur.fetchone() db.conn.commit() return meetup def delete_meetup(meetup_id): """Delete a single Meetup""" query = "DELETE FROM meetups WHERE meetup_id= '{}';".format(meetup_id) cur.execute(query) db.conn.commit() @staticmethod def get_all_meetups(): """Method to fetch all meetups""" query = 'SELECT * from meetups;' cur.execute(query) meetups = cur.fetchall() return meetups @staticmethod def get_meetup_by_id(meetup_id): """ Fetch a specific meetup using meetup_id""" query = 'SELECT * from meetups where meetup_id=%s;' cur.execute(query, (meetup_id,)) meetup = cur.fetchone() return meetup <|reserved_special_token_1|> <|reserved_special_token_0|> now = datetime.datetime.now() db = Database() cur = db.cur class Meetup: def __init__(self, topic, location, tags, happening_on): self.topic = topic self.location = location self.tags = tags self.happening_on = happening_on self.created_on = now def check_if_meetup_exists(self, topic): query = 'SELECT topic from meetups WHERE topic=%s;' cur.execute(query, (topic,)) meetup = cur.fetchone() if meetup: return True def create_meetup(self): if self.check_if_meetup_exists(self.topic): return False query = ( 'INSERT INTO meetups (topic, location, tags, happening_on, created_on) values (%s, %s, %s, %s, %s) RETURNING meetup_id, topic, location, tags, happening_on, created_on;' ) cur.execute(query, (self.topic, self.location, self.tags, self. 
happening_on, self.created_on)) meetup = cur.fetchone() db.conn.commit() return meetup def delete_meetup(meetup_id): """Delete a single Meetup""" query = "DELETE FROM meetups WHERE meetup_id= '{}';".format(meetup_id) cur.execute(query) db.conn.commit() @staticmethod def get_all_meetups(): """Method to fetch all meetups""" query = 'SELECT * from meetups;' cur.execute(query) meetups = cur.fetchall() return meetups @staticmethod def get_meetup_by_id(meetup_id): """ Fetch a specific meetup using meetup_id""" query = 'SELECT * from meetups where meetup_id=%s;' cur.execute(query, (meetup_id,)) meetup = cur.fetchone() return meetup <|reserved_special_token_1|> import datetime from app.api.v2.models.db import Database now = datetime.datetime.now() db = Database() cur = db.cur class Meetup: def __init__(self, topic, location, tags, happening_on): self.topic = topic self.location = location self.tags = tags self.happening_on = happening_on self.created_on = now def check_if_meetup_exists(self, topic): query = 'SELECT topic from meetups WHERE topic=%s;' cur.execute(query, (topic,)) meetup = cur.fetchone() if meetup: return True def create_meetup(self): if self.check_if_meetup_exists(self.topic): return False query = ( 'INSERT INTO meetups (topic, location, tags, happening_on, created_on) values (%s, %s, %s, %s, %s) RETURNING meetup_id, topic, location, tags, happening_on, created_on;' ) cur.execute(query, (self.topic, self.location, self.tags, self. happening_on, self.created_on)) meetup = cur.fetchone() db.conn.commit() return meetup def delete_meetup(meetup_id): """Delete a single Meetup""" query = "DELETE FROM meetups WHERE meetup_id= '{}';".format(meetup_id) cur.execute(query) db.conn.commit() @staticmethod def get_all_meetups(): """Method to fetch all meetups""" query = 'SELECT * from meetups;' cur.execute(query) meetups = cur.fetchall() return meetups @staticmethod def get_meetup_by_id(meetup_id): """ Fetch a specific meetup using meetup_id""" query = 'SELECT * from meetups where meetup_id=%s;' cur.execute(query, (meetup_id,)) meetup = cur.fetchone() return meetup <|reserved_special_token_1|> import datetime from app.api.v2.models.db import Database now = datetime.datetime.now() db = Database() cur = db.cur class Meetup(): #meetup constructor def __init__(self, topic, location, tags, happening_on): self.topic = topic self.location = location self.tags = tags self.happening_on = happening_on self.created_on = now def check_if_meetup_exists(self, topic): query = "SELECT topic from meetups WHERE topic=%s;" cur.execute(query, (topic,)) meetup = cur.fetchone() if meetup: return True def create_meetup(self): if self.check_if_meetup_exists(self.topic): return False query = "INSERT INTO meetups (topic, location, tags, happening_on, created_on) values (%s, %s, %s, %s, %s) \ RETURNING meetup_id, topic, location, tags, happening_on, created_on;" cur.execute( query, (self.topic, self.location, self.tags, self.happening_on, self.created_on)) meetup = cur.fetchone() db.conn.commit() return meetup def delete_meetup(meetup_id): """Delete a single Meetup""" query = "DELETE FROM meetups WHERE meetup_id= '{}';".format(meetup_id) cur.execute(query) db.conn.commit() @staticmethod def get_all_meetups(): '''Method to fetch all meetups''' query = "SELECT * from meetups;" cur.execute(query) meetups = cur.fetchall() return meetups @staticmethod def get_meetup_by_id(meetup_id): """ Fetch a specific meetup using meetup_id""" query = "SELECT * from meetups where meetup_id=%s;" cur.execute(query, (meetup_id,)) meetup = 
cur.fetchone() return meetup
flexible
{ "blob_id": "275f8b6ac31792a9e4bb823b61366f868e45ef4e", "index": 6521, "step-1": "<mask token>\n\n\nclass Meetup:\n <mask token>\n\n def check_if_meetup_exists(self, topic):\n query = 'SELECT topic from meetups WHERE topic=%s;'\n cur.execute(query, (topic,))\n meetup = cur.fetchone()\n if meetup:\n return True\n <mask token>\n <mask token>\n\n @staticmethod\n def get_all_meetups():\n \"\"\"Method to fetch all meetups\"\"\"\n query = 'SELECT * from meetups;'\n cur.execute(query)\n meetups = cur.fetchall()\n return meetups\n\n @staticmethod\n def get_meetup_by_id(meetup_id):\n \"\"\" Fetch a specific meetup using meetup_id\"\"\"\n query = 'SELECT * from meetups where meetup_id=%s;'\n cur.execute(query, (meetup_id,))\n meetup = cur.fetchone()\n return meetup\n", "step-2": "<mask token>\n\n\nclass Meetup:\n\n def __init__(self, topic, location, tags, happening_on):\n self.topic = topic\n self.location = location\n self.tags = tags\n self.happening_on = happening_on\n self.created_on = now\n\n def check_if_meetup_exists(self, topic):\n query = 'SELECT topic from meetups WHERE topic=%s;'\n cur.execute(query, (topic,))\n meetup = cur.fetchone()\n if meetup:\n return True\n\n def create_meetup(self):\n if self.check_if_meetup_exists(self.topic):\n return False\n query = (\n 'INSERT INTO meetups (topic, location, tags, happening_on, created_on) values (%s, %s, %s, %s, %s) RETURNING meetup_id, topic, location, tags, happening_on, created_on;'\n )\n cur.execute(query, (self.topic, self.location, self.tags, self.\n happening_on, self.created_on))\n meetup = cur.fetchone()\n db.conn.commit()\n return meetup\n\n def delete_meetup(meetup_id):\n \"\"\"Delete a single Meetup\"\"\"\n query = \"DELETE FROM meetups WHERE meetup_id= '{}';\".format(meetup_id)\n cur.execute(query)\n db.conn.commit()\n\n @staticmethod\n def get_all_meetups():\n \"\"\"Method to fetch all meetups\"\"\"\n query = 'SELECT * from meetups;'\n cur.execute(query)\n meetups = cur.fetchall()\n return meetups\n\n @staticmethod\n def get_meetup_by_id(meetup_id):\n \"\"\" Fetch a specific meetup using meetup_id\"\"\"\n query = 'SELECT * from meetups where meetup_id=%s;'\n cur.execute(query, (meetup_id,))\n meetup = cur.fetchone()\n return meetup\n", "step-3": "<mask token>\nnow = datetime.datetime.now()\ndb = Database()\ncur = db.cur\n\n\nclass Meetup:\n\n def __init__(self, topic, location, tags, happening_on):\n self.topic = topic\n self.location = location\n self.tags = tags\n self.happening_on = happening_on\n self.created_on = now\n\n def check_if_meetup_exists(self, topic):\n query = 'SELECT topic from meetups WHERE topic=%s;'\n cur.execute(query, (topic,))\n meetup = cur.fetchone()\n if meetup:\n return True\n\n def create_meetup(self):\n if self.check_if_meetup_exists(self.topic):\n return False\n query = (\n 'INSERT INTO meetups (topic, location, tags, happening_on, created_on) values (%s, %s, %s, %s, %s) RETURNING meetup_id, topic, location, tags, happening_on, created_on;'\n )\n cur.execute(query, (self.topic, self.location, self.tags, self.\n happening_on, self.created_on))\n meetup = cur.fetchone()\n db.conn.commit()\n return meetup\n\n def delete_meetup(meetup_id):\n \"\"\"Delete a single Meetup\"\"\"\n query = \"DELETE FROM meetups WHERE meetup_id= '{}';\".format(meetup_id)\n cur.execute(query)\n db.conn.commit()\n\n @staticmethod\n def get_all_meetups():\n \"\"\"Method to fetch all meetups\"\"\"\n query = 'SELECT * from meetups;'\n cur.execute(query)\n meetups = cur.fetchall()\n return meetups\n\n @staticmethod\n def 
get_meetup_by_id(meetup_id):\n \"\"\" Fetch a specific meetup using meetup_id\"\"\"\n query = 'SELECT * from meetups where meetup_id=%s;'\n cur.execute(query, (meetup_id,))\n meetup = cur.fetchone()\n return meetup\n", "step-4": "import datetime\nfrom app.api.v2.models.db import Database\nnow = datetime.datetime.now()\ndb = Database()\ncur = db.cur\n\n\nclass Meetup:\n\n def __init__(self, topic, location, tags, happening_on):\n self.topic = topic\n self.location = location\n self.tags = tags\n self.happening_on = happening_on\n self.created_on = now\n\n def check_if_meetup_exists(self, topic):\n query = 'SELECT topic from meetups WHERE topic=%s;'\n cur.execute(query, (topic,))\n meetup = cur.fetchone()\n if meetup:\n return True\n\n def create_meetup(self):\n if self.check_if_meetup_exists(self.topic):\n return False\n query = (\n 'INSERT INTO meetups (topic, location, tags, happening_on, created_on) values (%s, %s, %s, %s, %s) RETURNING meetup_id, topic, location, tags, happening_on, created_on;'\n )\n cur.execute(query, (self.topic, self.location, self.tags, self.\n happening_on, self.created_on))\n meetup = cur.fetchone()\n db.conn.commit()\n return meetup\n\n def delete_meetup(meetup_id):\n \"\"\"Delete a single Meetup\"\"\"\n query = \"DELETE FROM meetups WHERE meetup_id= '{}';\".format(meetup_id)\n cur.execute(query)\n db.conn.commit()\n\n @staticmethod\n def get_all_meetups():\n \"\"\"Method to fetch all meetups\"\"\"\n query = 'SELECT * from meetups;'\n cur.execute(query)\n meetups = cur.fetchall()\n return meetups\n\n @staticmethod\n def get_meetup_by_id(meetup_id):\n \"\"\" Fetch a specific meetup using meetup_id\"\"\"\n query = 'SELECT * from meetups where meetup_id=%s;'\n cur.execute(query, (meetup_id,))\n meetup = cur.fetchone()\n return meetup\n", "step-5": "import datetime\nfrom app.api.v2.models.db import Database\n\nnow = datetime.datetime.now()\ndb = Database()\ncur = db.cur\n\nclass Meetup():\n\n #meetup constructor\n def __init__(self, topic, location, tags, happening_on):\n self.topic = topic\n self.location = location\n self.tags = tags\n self.happening_on = happening_on\n self.created_on = now\n\n def check_if_meetup_exists(self, topic):\n query = \"SELECT topic from meetups WHERE topic=%s;\"\n cur.execute(query, (topic,))\n meetup = cur.fetchone()\n if meetup:\n return True\n\n def create_meetup(self):\n if self.check_if_meetup_exists(self.topic):\n return False\n query = \"INSERT INTO meetups (topic, location, tags, happening_on, created_on) values (%s, %s, %s, %s, %s) \\\n RETURNING meetup_id, topic, location, tags, happening_on, created_on;\"\n cur.execute(\n query,\n (self.topic,\n self.location,\n self.tags,\n self.happening_on,\n self.created_on))\n meetup = cur.fetchone()\n db.conn.commit()\n return meetup\n\n def delete_meetup(meetup_id):\n \"\"\"Delete a single Meetup\"\"\"\n query = \"DELETE FROM meetups WHERE meetup_id= '{}';\".format(meetup_id)\n cur.execute(query)\n db.conn.commit()\n\n @staticmethod\n def get_all_meetups():\n '''Method to fetch all meetups'''\n query = \"SELECT * from meetups;\"\n cur.execute(query)\n meetups = cur.fetchall()\n return meetups\n\n @staticmethod\n def get_meetup_by_id(meetup_id):\n \"\"\" Fetch a specific meetup using meetup_id\"\"\"\n query = \"SELECT * from meetups where meetup_id=%s;\"\n cur.execute(query, (meetup_id,))\n meetup = cur.fetchone()\n return meetup\n\n", "step-ids": [ 4, 7, 8, 9, 10 ] }
[ 4, 7, 8, 9, 10 ]
import re

def make_slug(string):
    print(re.sub(r'[^\w]', '', string))

make_slug('#$gejcb#$evnk?.kjb')
normal
{ "blob_id": "41e981e2192b600cdf9c9b515fe9f397cd1b8826", "index": 5788, "step-1": "import re\n\ndef make_slug(string):\n print(re.sub(^'\\w','',string))\n \nmake_slug('#$gejcb#$evnk?.kjb')\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
class _ProtectedClass:
    pass


class MyClass:
    pass


class OtherClass(MyClass):
    pass


def _protected_fun() -> MyClass:
    return variable  # noqa: F821


def my_fun() -> MyClass:
    return variable  # noqa: F821


def my_fun2() -> MyClass:
    return variable  # noqa: F821


variable: MyClass
variable_with_value: MyClass = MyClass()


__all__ = [  # noqa: F822
    "OtherClass",
    "my_fun2",
    "variable",
]
normal
{ "blob_id": "b5949b40d731178bdbab776af8877921dcdfbf15", "index": 3215, "step-1": "class _ProtectedClass:\n pass\n\n\nclass MyClass:\n pass\n\n\nclass OtherClass(MyClass):\n pass\n\n\ndef _protected_fun() ->MyClass:\n return variable\n\n\n<mask token>\n\n\ndef my_fun2() ->MyClass:\n return variable\n\n\n<mask token>\n", "step-2": "class _ProtectedClass:\n pass\n\n\nclass MyClass:\n pass\n\n\nclass OtherClass(MyClass):\n pass\n\n\ndef _protected_fun() ->MyClass:\n return variable\n\n\ndef my_fun() ->MyClass:\n return variable\n\n\ndef my_fun2() ->MyClass:\n return variable\n\n\n<mask token>\n", "step-3": "class _ProtectedClass:\n pass\n\n\nclass MyClass:\n pass\n\n\nclass OtherClass(MyClass):\n pass\n\n\ndef _protected_fun() ->MyClass:\n return variable\n\n\ndef my_fun() ->MyClass:\n return variable\n\n\ndef my_fun2() ->MyClass:\n return variable\n\n\nvariable: MyClass\nvariable_with_value: MyClass = MyClass()\n<mask token>\n", "step-4": "class _ProtectedClass:\n pass\n\n\nclass MyClass:\n pass\n\n\nclass OtherClass(MyClass):\n pass\n\n\ndef _protected_fun() ->MyClass:\n return variable\n\n\ndef my_fun() ->MyClass:\n return variable\n\n\ndef my_fun2() ->MyClass:\n return variable\n\n\nvariable: MyClass\nvariable_with_value: MyClass = MyClass()\n__all__ = ['OtherClass', 'my_fun2', 'variable']\n", "step-5": "class _ProtectedClass:\n pass\n\n\nclass MyClass:\n pass\n\n\nclass OtherClass(MyClass):\n pass\n\n\ndef _protected_fun() -> MyClass:\n return variable # noqa: F821\n\n\ndef my_fun() -> MyClass:\n return variable # noqa: F821\n\n\ndef my_fun2() -> MyClass:\n return variable # noqa: F821\n\n\nvariable: MyClass\nvariable_with_value: MyClass = MyClass()\n\n\n__all__ = [ # noqa: F822\n \"OtherClass\",\n \"my_fun2\",\n \"variable\",\n]\n", "step-ids": [ 5, 6, 7, 8, 9 ] }
[ 5, 6, 7, 8, 9 ]
from binaryninja import *
import yara


def get_yara_rule_path():
    return get_open_filename_input("Open YARA rule", "YARA rules (*.yar *.yara)")


def get_markdown_result(matches):
    entry_fmt = "| {} | {} | {} |\n"
    md_text = """# YARA - Scan results

| Rule Name | Function | Strings offsets |
|-----------|----------|-----------------|
"""

    for m in matches:
        rule = m['rule']
        func = '-'
        if 'funcs' in m and len(m['funcs']) > 0:
            func = " ".join(['[{:name}](binaryninja://?expr={:name})'.format(name=f.name) for f in m['funcs']])

        # 'strings': [(81L, '$a', 'abc'), (141L, '$b', 'def')]
        s = " ".join(['["{}"](binaryninja://?expr=0x{:x})'.format(s[2].decode('utf-8'), s[0]) for s in m['strings']])
        md_text += entry_fmt.format(rule, func, s)

    return md_text


def plugin_search_file(bv):
    matches = []

    def yara_callback(data):
        """
            {
            'tags': ['foo', 'bar'],
            'matches': True,
            'namespace': 'default',
            'rule': 'my_rule',
            'meta': {},
            'strings': [(81L, '$a', 'abc'), (141L, '$b', 'def')]
            }
        """
        if data['matches']:
            funcs = []
            for addr, _, _ in data['strings']:
                funcs += bv.get_functions_containing(addr)
            data['funcs'] = funcs

            matches.append(data)
        yara.CALLBACK_CONTINUE

    yara_path = get_yara_rule_path()

    # user closed message prompt
    if yara_path is None:
        return

    try:
        rules = yara.compile(filepath=yara_path.decode('utf-8'))
        rules.match(bv.file.original_filename, callback=yara_callback)
    except Exception as e:
        log_error("[YARA] Exception: {}".format(str(e)))
        show_message_box("Error", "Check logs for details", icon=MessageBoxIcon.ErrorIcon)

    if len(matches) > 0:
        bv.show_markdown_report("YARA", get_markdown_result(matches))
    else:
        log_info("[YARA] No matches")


def plugin_search_functions(bv):
    show_message_box("Not implemented", "This feature is not implemented yet")
    # TODO implement Background task maybe?


PluginCommand.register("[YARA] Scan file with yara rule...", "Scan file with yara rule", plugin_search_file)
# PluginCommand.register('[YARA] Scan functions with yara rule...', "Scan all functions with yara rules (might be slower)", plugin_search_functions)
normal
{ "blob_id": "56d4532b633242f34f7a6ed86a35290836861f67", "index": 4201, "step-1": "<mask token>\n\n\ndef get_markdown_result(matches):\n entry_fmt = '| {} | {} | {} |\\n'\n md_text = \"\"\"# YARA - Scan results\n\n| Rule Name | Function | Strings offsets |\n|-----------|----------|-----------------|\n\"\"\"\n for m in matches:\n rule = m['rule']\n func = '-'\n if 'funcs' in m and len(m['funcs']) > 0:\n func = ' '.join(['[{:name}](binaryninja://?expr={:name})'.\n format(name=f.name) for f in m['funcs']])\n s = ' '.join(['[\"{}\"](binaryninja://?expr=0x{:x})'.format(s[2].\n decode('utf-8'), s[0]) for s in m['strings']])\n md_text += entry_fmt.format(rule, func, s)\n return md_text\n\n\ndef plugin_search_file(bv):\n matches = []\n\n def yara_callback(data):\n \"\"\"\n\t\t\t{\n\t\t\t'tags': ['foo', 'bar'],\n\t\t\t'matches': True,\n\t\t\t'namespace': 'default',\n\t\t\t'rule': 'my_rule',\n\t\t\t'meta': {},\n\t\t\t'strings': [(81L, '$a', 'abc'), (141L, '$b', 'def')]\n\t\t\t}\n\t\t\"\"\"\n if data['matches']:\n funcs = []\n for addr, _, _ in data['strings']:\n funcs += bv.get_functions_containing(addr)\n data['funcs'] = funcs\n matches.append(data)\n yara.CALLBACK_CONTINUE\n yara_path = get_yara_rule_path()\n if yara_path is None:\n return\n try:\n rules = yara.compile(filepath=yara_path.decode('utf-8'))\n rules.match(bv.file.original_filename, callback=yara_callback)\n except Exception as e:\n log_error('[YARA] Exception: {}'.format(str(e)))\n show_message_box('Error', 'Check logs for details', icon=\n MessageBoxIcon.ErrorIcon)\n if len(matches) > 0:\n bv.show_markdown_report('YARA', get_markdown_result(matches))\n else:\n log_info('[YARA] No matches')\n\n\ndef plugin_search_functions(bv):\n show_message_box('Not implemented', 'This feature is not implemented yet')\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef get_yara_rule_path():\n return get_open_filename_input('Open YARA rule',\n 'YARA rules (*.yar *.yara)')\n\n\ndef get_markdown_result(matches):\n entry_fmt = '| {} | {} | {} |\\n'\n md_text = \"\"\"# YARA - Scan results\n\n| Rule Name | Function | Strings offsets |\n|-----------|----------|-----------------|\n\"\"\"\n for m in matches:\n rule = m['rule']\n func = '-'\n if 'funcs' in m and len(m['funcs']) > 0:\n func = ' '.join(['[{:name}](binaryninja://?expr={:name})'.\n format(name=f.name) for f in m['funcs']])\n s = ' '.join(['[\"{}\"](binaryninja://?expr=0x{:x})'.format(s[2].\n decode('utf-8'), s[0]) for s in m['strings']])\n md_text += entry_fmt.format(rule, func, s)\n return md_text\n\n\ndef plugin_search_file(bv):\n matches = []\n\n def yara_callback(data):\n \"\"\"\n\t\t\t{\n\t\t\t'tags': ['foo', 'bar'],\n\t\t\t'matches': True,\n\t\t\t'namespace': 'default',\n\t\t\t'rule': 'my_rule',\n\t\t\t'meta': {},\n\t\t\t'strings': [(81L, '$a', 'abc'), (141L, '$b', 'def')]\n\t\t\t}\n\t\t\"\"\"\n if data['matches']:\n funcs = []\n for addr, _, _ in data['strings']:\n funcs += bv.get_functions_containing(addr)\n data['funcs'] = funcs\n matches.append(data)\n yara.CALLBACK_CONTINUE\n yara_path = get_yara_rule_path()\n if yara_path is None:\n return\n try:\n rules = yara.compile(filepath=yara_path.decode('utf-8'))\n rules.match(bv.file.original_filename, callback=yara_callback)\n except Exception as e:\n log_error('[YARA] Exception: {}'.format(str(e)))\n show_message_box('Error', 'Check logs for details', icon=\n MessageBoxIcon.ErrorIcon)\n if len(matches) > 0:\n bv.show_markdown_report('YARA', get_markdown_result(matches))\n else:\n log_info('[YARA] No matches')\n\n\ndef 
plugin_search_functions(bv):\n show_message_box('Not implemented', 'This feature is not implemented yet')\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef get_yara_rule_path():\n return get_open_filename_input('Open YARA rule',\n 'YARA rules (*.yar *.yara)')\n\n\ndef get_markdown_result(matches):\n entry_fmt = '| {} | {} | {} |\\n'\n md_text = \"\"\"# YARA - Scan results\n\n| Rule Name | Function | Strings offsets |\n|-----------|----------|-----------------|\n\"\"\"\n for m in matches:\n rule = m['rule']\n func = '-'\n if 'funcs' in m and len(m['funcs']) > 0:\n func = ' '.join(['[{:name}](binaryninja://?expr={:name})'.\n format(name=f.name) for f in m['funcs']])\n s = ' '.join(['[\"{}\"](binaryninja://?expr=0x{:x})'.format(s[2].\n decode('utf-8'), s[0]) for s in m['strings']])\n md_text += entry_fmt.format(rule, func, s)\n return md_text\n\n\ndef plugin_search_file(bv):\n matches = []\n\n def yara_callback(data):\n \"\"\"\n\t\t\t{\n\t\t\t'tags': ['foo', 'bar'],\n\t\t\t'matches': True,\n\t\t\t'namespace': 'default',\n\t\t\t'rule': 'my_rule',\n\t\t\t'meta': {},\n\t\t\t'strings': [(81L, '$a', 'abc'), (141L, '$b', 'def')]\n\t\t\t}\n\t\t\"\"\"\n if data['matches']:\n funcs = []\n for addr, _, _ in data['strings']:\n funcs += bv.get_functions_containing(addr)\n data['funcs'] = funcs\n matches.append(data)\n yara.CALLBACK_CONTINUE\n yara_path = get_yara_rule_path()\n if yara_path is None:\n return\n try:\n rules = yara.compile(filepath=yara_path.decode('utf-8'))\n rules.match(bv.file.original_filename, callback=yara_callback)\n except Exception as e:\n log_error('[YARA] Exception: {}'.format(str(e)))\n show_message_box('Error', 'Check logs for details', icon=\n MessageBoxIcon.ErrorIcon)\n if len(matches) > 0:\n bv.show_markdown_report('YARA', get_markdown_result(matches))\n else:\n log_info('[YARA] No matches')\n\n\ndef plugin_search_functions(bv):\n show_message_box('Not implemented', 'This feature is not implemented yet')\n\n\nPluginCommand.register('[YARA] Scan file with yara rule...',\n 'Scan file with yara rule', plugin_search_file)\n", "step-4": "from binaryninja import *\nimport yara\n\n\ndef get_yara_rule_path():\n return get_open_filename_input('Open YARA rule',\n 'YARA rules (*.yar *.yara)')\n\n\ndef get_markdown_result(matches):\n entry_fmt = '| {} | {} | {} |\\n'\n md_text = \"\"\"# YARA - Scan results\n\n| Rule Name | Function | Strings offsets |\n|-----------|----------|-----------------|\n\"\"\"\n for m in matches:\n rule = m['rule']\n func = '-'\n if 'funcs' in m and len(m['funcs']) > 0:\n func = ' '.join(['[{:name}](binaryninja://?expr={:name})'.\n format(name=f.name) for f in m['funcs']])\n s = ' '.join(['[\"{}\"](binaryninja://?expr=0x{:x})'.format(s[2].\n decode('utf-8'), s[0]) for s in m['strings']])\n md_text += entry_fmt.format(rule, func, s)\n return md_text\n\n\ndef plugin_search_file(bv):\n matches = []\n\n def yara_callback(data):\n \"\"\"\n\t\t\t{\n\t\t\t'tags': ['foo', 'bar'],\n\t\t\t'matches': True,\n\t\t\t'namespace': 'default',\n\t\t\t'rule': 'my_rule',\n\t\t\t'meta': {},\n\t\t\t'strings': [(81L, '$a', 'abc'), (141L, '$b', 'def')]\n\t\t\t}\n\t\t\"\"\"\n if data['matches']:\n funcs = []\n for addr, _, _ in data['strings']:\n funcs += bv.get_functions_containing(addr)\n data['funcs'] = funcs\n matches.append(data)\n yara.CALLBACK_CONTINUE\n yara_path = get_yara_rule_path()\n if yara_path is None:\n return\n try:\n rules = yara.compile(filepath=yara_path.decode('utf-8'))\n rules.match(bv.file.original_filename, callback=yara_callback)\n except Exception as e:\n 
log_error('[YARA] Exception: {}'.format(str(e)))\n show_message_box('Error', 'Check logs for details', icon=\n MessageBoxIcon.ErrorIcon)\n if len(matches) > 0:\n bv.show_markdown_report('YARA', get_markdown_result(matches))\n else:\n log_info('[YARA] No matches')\n\n\ndef plugin_search_functions(bv):\n show_message_box('Not implemented', 'This feature is not implemented yet')\n\n\nPluginCommand.register('[YARA] Scan file with yara rule...',\n 'Scan file with yara rule', plugin_search_file)\n", "step-5": "from binaryninja import *\nimport yara\n\ndef get_yara_rule_path():\n\treturn get_open_filename_input(\"Open YARA rule\", \"YARA rules (*.yar *.yara)\")\n\ndef get_markdown_result(matches):\n\tentry_fmt = \"| {} | {} | {} |\\n\"\n\tmd_text = \"\"\"# YARA - Scan results\n\n| Rule Name | Function | Strings offsets |\n|-----------|----------|-----------------|\n\"\"\"\n\tfor m in matches:\n\t\trule = m['rule']\n\t\tfunc = '-'\n\t\tif 'funcs' in m and len(m['funcs']) > 0:\n\t\t\tfunc = \" \".join(['[{:name}](binaryninja://?expr={:name})'.format(name=f.name) for f in m['funcs']])\n\t\t\n\t\t# 'strings': [(81L, '$a', 'abc'), (141L, '$b', 'def')]\n\t\ts = \" \".join(['[\"{}\"](binaryninja://?expr=0x{:x})'.format(s[2].decode('utf-8'), s[0]) for s in m['strings']])\n\t\tmd_text += entry_fmt.format(rule, func, s)\n\treturn md_text\n\ndef plugin_search_file(bv):\n\tmatches = []\n\t\n\tdef yara_callback(data):\n\t\t\"\"\"\n\t\t\t{\n\t\t\t'tags': ['foo', 'bar'],\n\t\t\t'matches': True,\n\t\t\t'namespace': 'default',\n\t\t\t'rule': 'my_rule',\n\t\t\t'meta': {},\n\t\t\t'strings': [(81L, '$a', 'abc'), (141L, '$b', 'def')]\n\t\t\t}\n\t\t\"\"\"\n\t\tif data['matches']:\n\t\t\tfuncs = []\n\t\t\tfor addr, _, _ in data['strings']:\n\t\t\t\tfuncs += bv.get_functions_containing(addr)\n\t\t\tdata['funcs'] = funcs\n\t\t\tmatches.append(data)\n\n\t\tyara.CALLBACK_CONTINUE\n\n\tyara_path = get_yara_rule_path()\n\t\n\t# user closed message prompt\n\tif yara_path is None:\n\t\treturn\n\n\ttry:\n\t\trules = yara.compile(filepath=yara_path.decode('utf-8'))\n\t\trules.match(bv.file.original_filename, callback=yara_callback)\n\n\texcept Exception as e:\n\t\tlog_error(\"[YARA] Exception: {}\".format(str(e)))\n\t\tshow_message_box(\"Error\", \"Check logs for details\", icon=MessageBoxIcon.ErrorIcon)\n\n\tif len(matches) > 0:\n\t\tbv.show_markdown_report(\"YARA\", get_markdown_result(matches))\n\telse:\n\t\tlog_info(\"[YARA] No matches\")\n\ndef plugin_search_functions(bv):\n\tshow_message_box(\"Not implemented\", \"This feature is not implemented yet\")\n\t # TODO implement Background task maybe?\n\nPluginCommand.register(\"[YARA] Scan file with yara rule...\", \"Scan file with yara rule\", plugin_search_file)\n# PluginCommand.register('[YARA] Scan functions with yara rule...', \"Scan all functions with yara rules (might be slower)\", plugin_search_functions)\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Migration(migrations.Migration): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Migration(migrations.Migration): dependencies = [('chat', '0005_user_image')] operations = [migrations.AlterField(model_name='user', name= 'first_name', field=models.CharField(max_length=255, verbose_name= 'Имя')), migrations.AlterField(model_name='user', name='last_name', field=models.CharField(max_length=255, verbose_name='Фамилия'))] <|reserved_special_token_1|> from django.db import migrations, models class Migration(migrations.Migration): dependencies = [('chat', '0005_user_image')] operations = [migrations.AlterField(model_name='user', name= 'first_name', field=models.CharField(max_length=255, verbose_name= 'Имя')), migrations.AlterField(model_name='user', name='last_name', field=models.CharField(max_length=255, verbose_name='Фамилия'))] <|reserved_special_token_1|> # Generated by Django 3.2.6 on 2021-08-19 22:01 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('chat', '0005_user_image'), ] operations = [ migrations.AlterField( model_name='user', name='first_name', field=models.CharField(max_length=255, verbose_name='Имя'), ), migrations.AlterField( model_name='user', name='last_name', field=models.CharField(max_length=255, verbose_name='Фамилия'), ), ]
flexible
{ "blob_id": "fac60a8967354e4f306b95fdb5c75d02dc2c1455", "index": 2247, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('chat', '0005_user_image')]\n operations = [migrations.AlterField(model_name='user', name=\n 'first_name', field=models.CharField(max_length=255, verbose_name=\n 'Имя')), migrations.AlterField(model_name='user', name='last_name',\n field=models.CharField(max_length=255, verbose_name='Фамилия'))]\n", "step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('chat', '0005_user_image')]\n operations = [migrations.AlterField(model_name='user', name=\n 'first_name', field=models.CharField(max_length=255, verbose_name=\n 'Имя')), migrations.AlterField(model_name='user', name='last_name',\n field=models.CharField(max_length=255, verbose_name='Фамилия'))]\n", "step-5": "# Generated by Django 3.2.6 on 2021-08-19 22:01\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('chat', '0005_user_image'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='user',\n name='first_name',\n field=models.CharField(max_length=255, verbose_name='Имя'),\n ),\n migrations.AlterField(\n model_name='user',\n name='last_name',\n field=models.CharField(max_length=255, verbose_name='Фамилия'),\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def proper_parenthetics(string): """Return if parentheses are matching or not.""" if isinstance(string, str): paren_q = Q() for i in range(len(string)): paren_q.enqueue(string[i]) opening_parens = 0 closing_parens = 0 while paren_q.size() > 0 and paren_q.queue.head is not None: i = paren_q.dequeue() if i != '(' and i != ')': raise TypeError('proper_parenthetics takes only parentheses.') if i == '(' and closing_parens == 0: opening_parens += 1 elif i == '(' and closing_parens > 0: closing_parens -= 1 elif i == ')' and opening_parens == 0: return -1 elif i == ')' and opening_parens > 0: opening_parens -= 1 if opening_parens - closing_parens == 0: return 0 if opening_parens - closing_parens > 0: return 1 raise TypeError('proper_parenthetics takes only strings') <|reserved_special_token_1|> <|reserved_special_token_0|> from _que_structure import Q def proper_parenthetics(string): """Return if parentheses are matching or not.""" if isinstance(string, str): paren_q = Q() for i in range(len(string)): paren_q.enqueue(string[i]) opening_parens = 0 closing_parens = 0 while paren_q.size() > 0 and paren_q.queue.head is not None: i = paren_q.dequeue() if i != '(' and i != ')': raise TypeError('proper_parenthetics takes only parentheses.') if i == '(' and closing_parens == 0: opening_parens += 1 elif i == '(' and closing_parens > 0: closing_parens -= 1 elif i == ')' and opening_parens == 0: return -1 elif i == ')' and opening_parens > 0: opening_parens -= 1 if opening_parens - closing_parens == 0: return 0 if opening_parens - closing_parens > 0: return 1 raise TypeError('proper_parenthetics takes only strings') <|reserved_special_token_1|> """Proper parenthetics extra credit kata.""" from _que_structure import Q def proper_parenthetics(string): """Return if parentheses are matching or not.""" if isinstance(string, str): paren_q = Q() for i in range(len(string)): paren_q.enqueue(string[i]) opening_parens = 0 closing_parens = 0 while paren_q.size() > 0 and paren_q.queue.head is not None: i = paren_q.dequeue() if i != '(' and i != ')': raise TypeError('proper_parenthetics takes only parentheses.') if i == '(' and closing_parens == 0: opening_parens += 1 elif i == '(' and closing_parens > 0: closing_parens -= 1 elif i == ')' and opening_parens == 0: return -1 elif i == ')' and opening_parens > 0: opening_parens -= 1 if opening_parens - closing_parens == 0: return 0 if opening_parens - closing_parens > 0: return 1 raise TypeError('proper_parenthetics takes only strings')
flexible
{ "blob_id": "a28ece0db9bf0d4c3ab26207216b1da45f7aaa0f", "index": 7582, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef proper_parenthetics(string):\n \"\"\"Return if parentheses are matching or not.\"\"\"\n if isinstance(string, str):\n paren_q = Q()\n for i in range(len(string)):\n paren_q.enqueue(string[i])\n opening_parens = 0\n closing_parens = 0\n while paren_q.size() > 0 and paren_q.queue.head is not None:\n i = paren_q.dequeue()\n if i != '(' and i != ')':\n raise TypeError('proper_parenthetics takes only parentheses.')\n if i == '(' and closing_parens == 0:\n opening_parens += 1\n elif i == '(' and closing_parens > 0:\n closing_parens -= 1\n elif i == ')' and opening_parens == 0:\n return -1\n elif i == ')' and opening_parens > 0:\n opening_parens -= 1\n if opening_parens - closing_parens == 0:\n return 0\n if opening_parens - closing_parens > 0:\n return 1\n raise TypeError('proper_parenthetics takes only strings')\n", "step-3": "<mask token>\nfrom _que_structure import Q\n\n\ndef proper_parenthetics(string):\n \"\"\"Return if parentheses are matching or not.\"\"\"\n if isinstance(string, str):\n paren_q = Q()\n for i in range(len(string)):\n paren_q.enqueue(string[i])\n opening_parens = 0\n closing_parens = 0\n while paren_q.size() > 0 and paren_q.queue.head is not None:\n i = paren_q.dequeue()\n if i != '(' and i != ')':\n raise TypeError('proper_parenthetics takes only parentheses.')\n if i == '(' and closing_parens == 0:\n opening_parens += 1\n elif i == '(' and closing_parens > 0:\n closing_parens -= 1\n elif i == ')' and opening_parens == 0:\n return -1\n elif i == ')' and opening_parens > 0:\n opening_parens -= 1\n if opening_parens - closing_parens == 0:\n return 0\n if opening_parens - closing_parens > 0:\n return 1\n raise TypeError('proper_parenthetics takes only strings')\n", "step-4": "\"\"\"Proper parenthetics extra credit kata.\"\"\"\n\nfrom _que_structure import Q\n\n\ndef proper_parenthetics(string):\n \"\"\"Return if parentheses are matching or not.\"\"\"\n if isinstance(string, str):\n paren_q = Q()\n for i in range(len(string)):\n paren_q.enqueue(string[i])\n opening_parens = 0\n closing_parens = 0\n while paren_q.size() > 0 and paren_q.queue.head is not None:\n i = paren_q.dequeue()\n if i != '(' and i != ')':\n raise TypeError('proper_parenthetics takes only parentheses.')\n if i == '(' and closing_parens == 0:\n opening_parens += 1\n elif i == '(' and closing_parens > 0:\n closing_parens -= 1\n elif i == ')' and opening_parens == 0:\n return -1\n elif i == ')' and opening_parens > 0:\n opening_parens -= 1\n if opening_parens - closing_parens == 0:\n return 0\n if opening_parens - closing_parens > 0:\n return 1\n raise TypeError('proper_parenthetics takes only strings')\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
import json
import requests
import itertools
import logging
from shared_code.config.setting import Settings
from TailwindTraderFunc.cognitiveservices import CognitiveServices
from shared_code.storage.storage import BlobStorageService


class TailwindTraders():
    """Reads the skill request, resolves blob URLs, calls Cognitive Services and builds the response."""

    def __init__(self, req):
        self._settings = Settings()
        self._cs = CognitiveServices()
        self._storage = BlobStorageService(self._settings.get_storage_connection_string())
        self._reqbody = req.get_json()

    def readRequest(self):
        # first record of the request payload
        content = self._reqbody["values"][0]["data"]["content"]
        return content

    def getBlobUrlById(self, image_id):
        # resolve '<image_id>.jpg' inside the configured container to a blob URL
        image = list(self._storage.list_blobs(self._settings.get_storage_container_name(),
                                              prefix=f'{image_id}.jpg'))
        image_url = self._storage.make_blob_url(self._settings.get_storage_container_name(),
                                                image[0].name)
        return image_url

    def getVisualFeaturesByImage(self, image_url):
        # run both the analyze (description/tags) and OCR endpoints on the image
        response_analyze = self._cs.getVisualFeaturesByImage(image_url, "analyze", {'visualFeatures': 'Description, Tags'})
        response_ocr = self._cs.getOCRByImage(image_url, "recognizeText")
        return {"analyze": response_analyze, "ocr": response_ocr}

    def updateItemField(self, item, content):
        # copy tags, caption and recognized text lines onto the item
        item["Tags"] = content["analyze"]["tags"]
        item["VisualDetail"] = content["analyze"]["description"]
        recognition_result = content["ocr"]["recognitionResult"]
        item["OCRText"] = [line["text"] for line in recognition_result["lines"]]

    def generateResult(self, content):
        # shape the enriched items into the record format the caller expects
        result = {"values": [{"recordId": self._reqbody["values"][0]["recordId"],
                              "data": {"Items": content["Items"]}}]}
        result = json.dumps(result, sort_keys=True, indent=4, separators=(',', ': '))
        return result
normal
{ "blob_id": "75ba2448897bed8388a7b8d876827461e1bc9dd7", "index": 2809, "step-1": "<mask token>\n\n\nclass TailwindTraders:\n\n def __init__(self, req):\n self._settings = Settings()\n self._cs = CognitiveServices()\n self._storage = BlobStorageService(self._settings.\n get_storage_connection_string())\n self._reqbody = req.get_json()\n <mask token>\n\n def getBlobUrlById(self, image_id):\n image = list(self._storage.list_blobs(self._settings.\n get_storage_container_name(), prefix=f'{image_id}.jpg'))\n image_url = self._storage.make_blob_url(self._settings.\n get_storage_container_name(), image[0].name)\n return image_url\n\n def getVisualFeaturesByImage(self, image_url):\n response_analyze = self._cs.getVisualFeaturesByImage(image_url,\n 'analyze', {'visualFeatures': 'Description, Tags'})\n response_ocr = self._cs.getOCRByImage(image_url, 'recognizeText')\n return {'analyze': response_analyze, 'ocr': response_ocr}\n\n def updateItemField(self, item, content):\n item['Tags'] = content['analyze']['tags']\n item['VisualDetail'] = content['analyze']['description']\n recognition_result = content['ocr']['recognitionResult']\n item['OCRText'] = [line['text'] for line in recognition_result['lines']\n ]\n <mask token>\n", "step-2": "<mask token>\n\n\nclass TailwindTraders:\n\n def __init__(self, req):\n self._settings = Settings()\n self._cs = CognitiveServices()\n self._storage = BlobStorageService(self._settings.\n get_storage_connection_string())\n self._reqbody = req.get_json()\n\n def readRequest(self):\n content = self._reqbody['values'][0]['data']['content']\n return content\n\n def getBlobUrlById(self, image_id):\n image = list(self._storage.list_blobs(self._settings.\n get_storage_container_name(), prefix=f'{image_id}.jpg'))\n image_url = self._storage.make_blob_url(self._settings.\n get_storage_container_name(), image[0].name)\n return image_url\n\n def getVisualFeaturesByImage(self, image_url):\n response_analyze = self._cs.getVisualFeaturesByImage(image_url,\n 'analyze', {'visualFeatures': 'Description, Tags'})\n response_ocr = self._cs.getOCRByImage(image_url, 'recognizeText')\n return {'analyze': response_analyze, 'ocr': response_ocr}\n\n def updateItemField(self, item, content):\n item['Tags'] = content['analyze']['tags']\n item['VisualDetail'] = content['analyze']['description']\n recognition_result = content['ocr']['recognitionResult']\n item['OCRText'] = [line['text'] for line in recognition_result['lines']\n ]\n <mask token>\n", "step-3": "<mask token>\n\n\nclass TailwindTraders:\n\n def __init__(self, req):\n self._settings = Settings()\n self._cs = CognitiveServices()\n self._storage = BlobStorageService(self._settings.\n get_storage_connection_string())\n self._reqbody = req.get_json()\n\n def readRequest(self):\n content = self._reqbody['values'][0]['data']['content']\n return content\n\n def getBlobUrlById(self, image_id):\n image = list(self._storage.list_blobs(self._settings.\n get_storage_container_name(), prefix=f'{image_id}.jpg'))\n image_url = self._storage.make_blob_url(self._settings.\n get_storage_container_name(), image[0].name)\n return image_url\n\n def getVisualFeaturesByImage(self, image_url):\n response_analyze = self._cs.getVisualFeaturesByImage(image_url,\n 'analyze', {'visualFeatures': 'Description, Tags'})\n response_ocr = self._cs.getOCRByImage(image_url, 'recognizeText')\n return {'analyze': response_analyze, 'ocr': response_ocr}\n\n def updateItemField(self, item, content):\n item['Tags'] = content['analyze']['tags']\n item['VisualDetail'] = 
content['analyze']['description']\n recognition_result = content['ocr']['recognitionResult']\n item['OCRText'] = [line['text'] for line in recognition_result['lines']\n ]\n\n def generateResult(self, content):\n result = {'values': [{'recordId': self._reqbody['values'][0][\n 'recordId'], 'data': {'Items': content['Items']}}]}\n result = json.dumps(result, sort_keys=True, indent=4, separators=(\n ',', ': '))\n return result\n", "step-4": "import json\nimport requests\nimport itertools\nimport logging\nfrom shared_code.config.setting import Settings\nfrom TailwindTraderFunc.cognitiveservices import CognitiveServices\nfrom shared_code.storage.storage import BlobStorageService\n\n\nclass TailwindTraders:\n\n def __init__(self, req):\n self._settings = Settings()\n self._cs = CognitiveServices()\n self._storage = BlobStorageService(self._settings.\n get_storage_connection_string())\n self._reqbody = req.get_json()\n\n def readRequest(self):\n content = self._reqbody['values'][0]['data']['content']\n return content\n\n def getBlobUrlById(self, image_id):\n image = list(self._storage.list_blobs(self._settings.\n get_storage_container_name(), prefix=f'{image_id}.jpg'))\n image_url = self._storage.make_blob_url(self._settings.\n get_storage_container_name(), image[0].name)\n return image_url\n\n def getVisualFeaturesByImage(self, image_url):\n response_analyze = self._cs.getVisualFeaturesByImage(image_url,\n 'analyze', {'visualFeatures': 'Description, Tags'})\n response_ocr = self._cs.getOCRByImage(image_url, 'recognizeText')\n return {'analyze': response_analyze, 'ocr': response_ocr}\n\n def updateItemField(self, item, content):\n item['Tags'] = content['analyze']['tags']\n item['VisualDetail'] = content['analyze']['description']\n recognition_result = content['ocr']['recognitionResult']\n item['OCRText'] = [line['text'] for line in recognition_result['lines']\n ]\n\n def generateResult(self, content):\n result = {'values': [{'recordId': self._reqbody['values'][0][\n 'recordId'], 'data': {'Items': content['Items']}}]}\n result = json.dumps(result, sort_keys=True, indent=4, separators=(\n ',', ': '))\n return result\n", "step-5": "import json\nimport requests\nimport itertools\nimport logging\nfrom shared_code.config.setting import Settings\nfrom TailwindTraderFunc.cognitiveservices import CognitiveServices\nfrom shared_code.storage.storage import BlobStorageService\n\n\nclass TailwindTraders():\n\n def __init__(self, req):\n self._settings = Settings()\n self._cs = CognitiveServices()\n self._storage = BlobStorageService(self._settings.get_storage_connection_string())\n self._reqbody = req.get_json()\n\n def readRequest(self):\n content = self._reqbody[\"values\"][0][\"data\"][\"content\"]\n return content\n\n def getBlobUrlById(self, image_id):\n image = list(self._storage.list_blobs(self._settings.get_storage_container_name(),\n prefix=f'{image_id}.jpg'))\n image_url = self._storage.make_blob_url(self._settings.get_storage_container_name(),\n image[0].name)\n return image_url\n\n def getVisualFeaturesByImage(self, image_url):\n response_analyze = self._cs.getVisualFeaturesByImage(image_url, \"analyze\", {'visualFeatures': 'Description, Tags'})\n response_ocr = self._cs.getOCRByImage(image_url, \"recognizeText\")\n return {\"analyze\":response_analyze, \"ocr\":response_ocr}\n \n def updateItemField(self, item, content):\n item[\"Tags\"] = content[\"analyze\"][\"tags\"]\n item[\"VisualDetail\"] = content[\"analyze\"][\"description\"]\n recognition_result = content[\"ocr\"][\"recognitionResult\"]\n 
item[\"OCRText\"] = [line[\"text\"] for line in recognition_result[\"lines\"]]\n\n def generateResult(self, content):\n result = {\"values\": [{\"recordId\": self._reqbody[\"values\"][0][\"recordId\"],\n \"data\" : {\"Items\": content[\"Items\"]}}]}\n result = json.dumps(result, sort_keys=True, indent=4, separators=(',', ': '))\n return result", "step-ids": [ 5, 6, 7, 8, 9 ] }
[ 5, 6, 7, 8, 9 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> admin.site.register(models.Comentario) <|reserved_special_token_1|> from django.contrib import admin from . import models admin.site.register(models.Comentario) <|reserved_special_token_1|> from django.contrib import admin from . import models admin.site.register(models.Comentario) # Register your models here.
flexible
{ "blob_id": "d7d94cfed0b819297069c3434c70359a327403cd", "index": 718, "step-1": "<mask token>\n", "step-2": "<mask token>\nadmin.site.register(models.Comentario)\n", "step-3": "from django.contrib import admin\nfrom . import models\nadmin.site.register(models.Comentario)\n", "step-4": "from django.contrib import admin\nfrom . import models\n\nadmin.site.register(models.Comentario)\n\n# Register your models here.\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> class Solution: <|reserved_special_token_0|> <|reserved_special_token_1|> class Solution: def uncommonFromSentences(self, A: str, B: str) ->List[str]: word_count = {} A = A.split() B = B.split() whole = A + B for word in whole: if word not in word_count: word_count[word] = 1 else: word_count[word] += 1 return [word for word in word_count if word_count[word] == 1]
flexible
{ "blob_id": "09420360ddcf2f74c2e130b4e09ae2a959e42e50", "index": 8305, "step-1": "<mask token>\n", "step-2": "class Solution:\n <mask token>\n", "step-3": "class Solution:\n\n def uncommonFromSentences(self, A: str, B: str) ->List[str]:\n word_count = {}\n A = A.split()\n B = B.split()\n whole = A + B\n for word in whole:\n if word not in word_count:\n word_count[word] = 1\n else:\n word_count[word] += 1\n return [word for word in word_count if word_count[word] == 1]\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def index(request): book_list = book.objects.all() c = Context({'book_list': book_list}) return render_to_response('index.html', c) <|reserved_special_token_1|> from django.http import HttpResponse from django.shortcuts import render_to_response from django.template import Context from books.models import book, Author def index(request): book_list = book.objects.all() c = Context({'book_list': book_list}) return render_to_response('index.html', c) <|reserved_special_token_1|> from django.http import HttpResponse from django.shortcuts import render_to_response from django.template import Context from books.models import book,Author def index(request): book_list=book.objects.all() c=Context({"book_list":book_list}) return render_to_response("index.html",c)
flexible
{ "blob_id": "441d224c37e0eae531c17db0e903b3344c570516", "index": 9867, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef index(request):\n book_list = book.objects.all()\n c = Context({'book_list': book_list})\n return render_to_response('index.html', c)\n", "step-3": "from django.http import HttpResponse\nfrom django.shortcuts import render_to_response\nfrom django.template import Context\nfrom books.models import book, Author\n\n\ndef index(request):\n book_list = book.objects.all()\n c = Context({'book_list': book_list})\n return render_to_response('index.html', c)\n", "step-4": "from django.http import HttpResponse\nfrom django.shortcuts import render_to_response\nfrom django.template import Context\nfrom books.models import book,Author\ndef index(request):\n book_list=book.objects.all()\n c=Context({\"book_list\":book_list})\n return render_to_response(\"index.html\",c)\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
# -*- snakemake -*- # # CENTIPEDE: Transcription factor footprinting and binding site prediction # install.packages("CENTIPEDE", repos="http://R-Forge.R-project.org") # # http://centipede.uchicago.edu/ # include: '../ngs.settings.smk' config_default = { 'bio.ngs.motif.centipede' : { 'options' : '', }, } update_config(config_default, config) config = config_default
normal
{ "blob_id": "4620b52a43f2469ff0350d8ef6548de3a7fe1b55", "index": 5019, "step-1": "<mask token>\n", "step-2": "include: '../ngs.settings.smk'\n<mask token>\nupdate_config(config_default, config)\n<mask token>\n", "step-3": "include: '../ngs.settings.smk'\nconfig_default = {'bio.ngs.motif.centipede': {'options': ''}}\nupdate_config(config_default, config)\nconfig = config_default\n", "step-4": "# -*- snakemake -*-\n#\n# CENTIPEDE: Transcription factor footprinting and binding site prediction\n# install.packages(\"CENTIPEDE\", repos=\"http://R-Forge.R-project.org\") \n# \n# http://centipede.uchicago.edu/\n#\ninclude: '../ngs.settings.smk'\n\nconfig_default = {\n 'bio.ngs.motif.centipede' : {\n 'options' : '',\n },\n}\n\nupdate_config(config_default, config)\nconfig = config_default\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def main(): tools.unpack.main() util.files.main() util.dark.main() util.flat.main() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def main(): tools.unpack.main() util.files.main() util.dark.main() util.flat.main() if __name__ == '__main__': main() <|reserved_special_token_1|> <|reserved_special_token_0|> import tools.unpack import util.files import util.dark import util.flat def main(): tools.unpack.main() util.files.main() util.dark.main() util.flat.main() if __name__ == '__main__': main() <|reserved_special_token_1|> """ Unpacks and preprocesses all of the data from the tarball of partial data, which includes the flats and dark frames. """ import tools.unpack import util.files import util.dark import util.flat def main(): tools.unpack.main() util.files.main() util.dark.main() util.flat.main() if __name__ == '__main__': main()
flexible
{ "blob_id": "3667651697ac1c093d48fe2c4baa4b4dbdf20f8a", "index": 6832, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef main():\n tools.unpack.main()\n util.files.main()\n util.dark.main()\n util.flat.main()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef main():\n tools.unpack.main()\n util.files.main()\n util.dark.main()\n util.flat.main()\n\n\nif __name__ == '__main__':\n main()\n", "step-4": "<mask token>\nimport tools.unpack\nimport util.files\nimport util.dark\nimport util.flat\n\n\ndef main():\n tools.unpack.main()\n util.files.main()\n util.dark.main()\n util.flat.main()\n\n\nif __name__ == '__main__':\n main()\n", "step-5": "\"\"\"\nUnpacks and preprocesses all of the data from the tarball of partial data,\nwhich includes the flats and dark frames.\n\"\"\"\n\nimport tools.unpack\nimport util.files\nimport util.dark\nimport util.flat\n\ndef main():\n tools.unpack.main()\n util.files.main()\n util.dark.main()\n util.flat.main()\n\n\nif __name__ == '__main__':\n main()\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# -*- coding: utf-8 -*- """ Created on Mon Mar 6 12:20:45 2017 @author: 7 """ from os import listdir from PIL import Image as PImage from scipy import misc import numpy as np from Image_loader import LoadImages """ def LoadImages(path): # return array of images imagesList = listdir(path) loadedImages = [] for image in imagesList: img = misc.imread(path + image) loadedImages.append(img) return loadedImages """ def ModifyImages(path,path1): # modify images to same scale imagesList = listdir(path) for image in imagesList: old_img = PImage.open(path + image) old_size = old_img.size new_size = (540,420) new_img = PImage.new("L", new_size) new_img.paste(old_img,((new_size[0]-old_size[0])//2,(new_size[1]-old_size[1])//2)) new_img.save(path1 + image) """ path = "train\\" path1 = "train_modified\\" ModifyImages(path,path1) imgs = LoadImages(path1) a = np.array( imgs ) print (a.shape) print("finished") path = "test\\" path1 = "test_modified\\" ModifyImages(path,path1) imgs = LoadImages(path1) a = np.array( imgs ) print (a.shape) print("finished") path = "train_cleaned\\" path1 = "train_cleaned_modified\\" ModifyImages(path,path1) imgs = LoadImages(path1) a = np.array( imgs ) print (a.shape) print("finished") """
normal
{ "blob_id": "9cad36de6231f310ef9022f16f6ed0da83a003b3", "index": 9757, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef ModifyImages(path, path1):\n imagesList = listdir(path)\n for image in imagesList:\n old_img = PImage.open(path + image)\n old_size = old_img.size\n new_size = 540, 420\n new_img = PImage.new('L', new_size)\n new_img.paste(old_img, ((new_size[0] - old_size[0]) // 2, (new_size\n [1] - old_size[1]) // 2))\n new_img.save(path1 + image)\n\n\n<mask token>\n", "step-3": "<mask token>\nfrom os import listdir\nfrom PIL import Image as PImage\nfrom scipy import misc\nimport numpy as np\nfrom Image_loader import LoadImages\n<mask token>\n\n\ndef ModifyImages(path, path1):\n imagesList = listdir(path)\n for image in imagesList:\n old_img = PImage.open(path + image)\n old_size = old_img.size\n new_size = 540, 420\n new_img = PImage.new('L', new_size)\n new_img.paste(old_img, ((new_size[0] - old_size[0]) // 2, (new_size\n [1] - old_size[1]) // 2))\n new_img.save(path1 + image)\n\n\n<mask token>\n", "step-4": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Mon Mar 6 12:20:45 2017\r\n\r\n@author: 7\r\n\"\"\"\r\n\r\nfrom os import listdir\r\nfrom PIL import Image as PImage\r\nfrom scipy import misc\r\nimport numpy as np\r\nfrom Image_loader import LoadImages\r\n\"\"\"\r\ndef LoadImages(path):\r\n # return array of images\r\n imagesList = listdir(path)\r\n loadedImages = []\r\n for image in imagesList:\r\n img = misc.imread(path + image)\r\n loadedImages.append(img)\r\n return loadedImages\r\n\"\"\"\r\n\r\n\r\ndef ModifyImages(path,path1):\r\n # modify images to same scale\r\n\r\n imagesList = listdir(path)\r\n for image in imagesList:\r\n old_img = PImage.open(path + image)\r\n old_size = old_img.size\r\n new_size = (540,420)\r\n new_img = PImage.new(\"L\", new_size) \r\n new_img.paste(old_img,((new_size[0]-old_size[0])//2,(new_size[1]-old_size[1])//2))\r\n new_img.save(path1 + image)\r\n\r\n\"\"\"\r\npath = \"train\\\\\"\r\npath1 = \"train_modified\\\\\"\r\nModifyImages(path,path1)\r\nimgs = LoadImages(path1)\r\na = np.array( imgs )\r\nprint (a.shape)\r\nprint(\"finished\")\r\n\r\n\r\npath = \"test\\\\\"\r\npath1 = \"test_modified\\\\\"\r\n\r\nModifyImages(path,path1)\r\nimgs = LoadImages(path1)\r\na = np.array( imgs )\r\nprint (a.shape)\r\nprint(\"finished\")\r\n\r\npath = \"train_cleaned\\\\\"\r\npath1 = \"train_cleaned_modified\\\\\"\r\n\r\nModifyImages(path,path1)\r\nimgs = LoadImages(path1)\r\na = np.array( imgs )\r\nprint (a.shape)\r\nprint(\"finished\")\r\n\"\"\"", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
#coding: utf-8 import logging from threading import Thread from ldap import SCOPE_BASE from seafevents.ldap_syncer.ldap_conn import LdapConn from seafevents.ldap_syncer.utils import bytes2str, add_group_uuid_pair from seaserv import get_group_dn_pairs logger = logging.getLogger(__name__) def migrate_dn_pairs(settings): grp_dn_pairs = get_group_dn_pairs() if grp_dn_pairs is None: logger.warning('get group dn pairs from db failed when migrate dn pairs.') return grp_dn_pairs.reverse() for grp_dn_pair in grp_dn_pairs: for config in settings.ldap_configs: search_filter = '(objectClass=*)' ldap_conn = LdapConn(config.host, config.user_dn, config.passwd, config.follow_referrals) ldap_conn.create_conn() if not ldap_conn.conn: logger.warning('connect ldap server [%s] failed.' % config.user_dn) return if config.use_page_result: results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE, search_filter, [config.group_uuid_attr]) else: results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE, search_filter, [config.group_uuid_attr]) ldap_conn.unbind_conn() results = bytes2str(results) if not results: continue else: uuid = results[0][1][config.group_uuid_attr][0] add_group_uuid_pair(grp_dn_pair.group_id, uuid) class LdapSync(Thread): def __init__(self, settings): Thread.__init__(self) self.settings = settings def run(self): if self.settings.enable_group_sync: migrate_dn_pairs(settings=self.settings) self.start_sync() self.show_sync_result() def show_sync_result(self): pass def start_sync(self): data_ldap = self.get_data_from_ldap() if data_ldap is None: return data_db = self.get_data_from_db() if data_db is None: return self.sync_data(data_db, data_ldap) def get_data_from_db(self): return None def get_data_from_ldap(self): ret = {} for config in self.settings.ldap_configs: cur_ret = self.get_data_from_ldap_by_server(config) # If get data from one server failed, then the result is failed if cur_ret is None: return None for key in cur_ret.keys(): if key not in ret: ret[key] = cur_ret[key] ret[key].config = config return ret def get_data_from_ldap_by_server(self, config): return None def sync_data(self, data_db, data_ldap): pass
normal
{ "blob_id": "8cc0393082448bb8f61068b5c96e89ef3aee77ed", "index": 235, "step-1": "<mask token>\n\n\nclass LdapSync(Thread):\n\n def __init__(self, settings):\n Thread.__init__(self)\n self.settings = settings\n\n def run(self):\n if self.settings.enable_group_sync:\n migrate_dn_pairs(settings=self.settings)\n self.start_sync()\n self.show_sync_result()\n\n def show_sync_result(self):\n pass\n\n def start_sync(self):\n data_ldap = self.get_data_from_ldap()\n if data_ldap is None:\n return\n data_db = self.get_data_from_db()\n if data_db is None:\n return\n self.sync_data(data_db, data_ldap)\n\n def get_data_from_db(self):\n return None\n\n def get_data_from_ldap(self):\n ret = {}\n for config in self.settings.ldap_configs:\n cur_ret = self.get_data_from_ldap_by_server(config)\n if cur_ret is None:\n return None\n for key in cur_ret.keys():\n if key not in ret:\n ret[key] = cur_ret[key]\n ret[key].config = config\n return ret\n\n def get_data_from_ldap_by_server(self, config):\n return None\n\n def sync_data(self, data_db, data_ldap):\n pass\n", "step-2": "<mask token>\n\n\ndef migrate_dn_pairs(settings):\n grp_dn_pairs = get_group_dn_pairs()\n if grp_dn_pairs is None:\n logger.warning(\n 'get group dn pairs from db failed when migrate dn pairs.')\n return\n grp_dn_pairs.reverse()\n for grp_dn_pair in grp_dn_pairs:\n for config in settings.ldap_configs:\n search_filter = '(objectClass=*)'\n ldap_conn = LdapConn(config.host, config.user_dn, config.passwd,\n config.follow_referrals)\n ldap_conn.create_conn()\n if not ldap_conn.conn:\n logger.warning('connect ldap server [%s] failed.' % config.\n user_dn)\n return\n if config.use_page_result:\n results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n else:\n results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n ldap_conn.unbind_conn()\n results = bytes2str(results)\n if not results:\n continue\n else:\n uuid = results[0][1][config.group_uuid_attr][0]\n add_group_uuid_pair(grp_dn_pair.group_id, uuid)\n\n\nclass LdapSync(Thread):\n\n def __init__(self, settings):\n Thread.__init__(self)\n self.settings = settings\n\n def run(self):\n if self.settings.enable_group_sync:\n migrate_dn_pairs(settings=self.settings)\n self.start_sync()\n self.show_sync_result()\n\n def show_sync_result(self):\n pass\n\n def start_sync(self):\n data_ldap = self.get_data_from_ldap()\n if data_ldap is None:\n return\n data_db = self.get_data_from_db()\n if data_db is None:\n return\n self.sync_data(data_db, data_ldap)\n\n def get_data_from_db(self):\n return None\n\n def get_data_from_ldap(self):\n ret = {}\n for config in self.settings.ldap_configs:\n cur_ret = self.get_data_from_ldap_by_server(config)\n if cur_ret is None:\n return None\n for key in cur_ret.keys():\n if key not in ret:\n ret[key] = cur_ret[key]\n ret[key].config = config\n return ret\n\n def get_data_from_ldap_by_server(self, config):\n return None\n\n def sync_data(self, data_db, data_ldap):\n pass\n", "step-3": "<mask token>\nlogger = logging.getLogger(__name__)\n\n\ndef migrate_dn_pairs(settings):\n grp_dn_pairs = get_group_dn_pairs()\n if grp_dn_pairs is None:\n logger.warning(\n 'get group dn pairs from db failed when migrate dn pairs.')\n return\n grp_dn_pairs.reverse()\n for grp_dn_pair in grp_dn_pairs:\n for config in settings.ldap_configs:\n search_filter = '(objectClass=*)'\n ldap_conn = LdapConn(config.host, config.user_dn, config.passwd,\n config.follow_referrals)\n ldap_conn.create_conn()\n if 
not ldap_conn.conn:\n logger.warning('connect ldap server [%s] failed.' % config.\n user_dn)\n return\n if config.use_page_result:\n results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n else:\n results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n ldap_conn.unbind_conn()\n results = bytes2str(results)\n if not results:\n continue\n else:\n uuid = results[0][1][config.group_uuid_attr][0]\n add_group_uuid_pair(grp_dn_pair.group_id, uuid)\n\n\nclass LdapSync(Thread):\n\n def __init__(self, settings):\n Thread.__init__(self)\n self.settings = settings\n\n def run(self):\n if self.settings.enable_group_sync:\n migrate_dn_pairs(settings=self.settings)\n self.start_sync()\n self.show_sync_result()\n\n def show_sync_result(self):\n pass\n\n def start_sync(self):\n data_ldap = self.get_data_from_ldap()\n if data_ldap is None:\n return\n data_db = self.get_data_from_db()\n if data_db is None:\n return\n self.sync_data(data_db, data_ldap)\n\n def get_data_from_db(self):\n return None\n\n def get_data_from_ldap(self):\n ret = {}\n for config in self.settings.ldap_configs:\n cur_ret = self.get_data_from_ldap_by_server(config)\n if cur_ret is None:\n return None\n for key in cur_ret.keys():\n if key not in ret:\n ret[key] = cur_ret[key]\n ret[key].config = config\n return ret\n\n def get_data_from_ldap_by_server(self, config):\n return None\n\n def sync_data(self, data_db, data_ldap):\n pass\n", "step-4": "import logging\nfrom threading import Thread\nfrom ldap import SCOPE_BASE\nfrom seafevents.ldap_syncer.ldap_conn import LdapConn\nfrom seafevents.ldap_syncer.utils import bytes2str, add_group_uuid_pair\nfrom seaserv import get_group_dn_pairs\nlogger = logging.getLogger(__name__)\n\n\ndef migrate_dn_pairs(settings):\n grp_dn_pairs = get_group_dn_pairs()\n if grp_dn_pairs is None:\n logger.warning(\n 'get group dn pairs from db failed when migrate dn pairs.')\n return\n grp_dn_pairs.reverse()\n for grp_dn_pair in grp_dn_pairs:\n for config in settings.ldap_configs:\n search_filter = '(objectClass=*)'\n ldap_conn = LdapConn(config.host, config.user_dn, config.passwd,\n config.follow_referrals)\n ldap_conn.create_conn()\n if not ldap_conn.conn:\n logger.warning('connect ldap server [%s] failed.' 
% config.\n user_dn)\n return\n if config.use_page_result:\n results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n else:\n results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n ldap_conn.unbind_conn()\n results = bytes2str(results)\n if not results:\n continue\n else:\n uuid = results[0][1][config.group_uuid_attr][0]\n add_group_uuid_pair(grp_dn_pair.group_id, uuid)\n\n\nclass LdapSync(Thread):\n\n def __init__(self, settings):\n Thread.__init__(self)\n self.settings = settings\n\n def run(self):\n if self.settings.enable_group_sync:\n migrate_dn_pairs(settings=self.settings)\n self.start_sync()\n self.show_sync_result()\n\n def show_sync_result(self):\n pass\n\n def start_sync(self):\n data_ldap = self.get_data_from_ldap()\n if data_ldap is None:\n return\n data_db = self.get_data_from_db()\n if data_db is None:\n return\n self.sync_data(data_db, data_ldap)\n\n def get_data_from_db(self):\n return None\n\n def get_data_from_ldap(self):\n ret = {}\n for config in self.settings.ldap_configs:\n cur_ret = self.get_data_from_ldap_by_server(config)\n if cur_ret is None:\n return None\n for key in cur_ret.keys():\n if key not in ret:\n ret[key] = cur_ret[key]\n ret[key].config = config\n return ret\n\n def get_data_from_ldap_by_server(self, config):\n return None\n\n def sync_data(self, data_db, data_ldap):\n pass\n", "step-5": "#coding: utf-8\nimport logging\nfrom threading import Thread\n\nfrom ldap import SCOPE_BASE\nfrom seafevents.ldap_syncer.ldap_conn import LdapConn\nfrom seafevents.ldap_syncer.utils import bytes2str, add_group_uuid_pair\n\nfrom seaserv import get_group_dn_pairs\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef migrate_dn_pairs(settings):\n grp_dn_pairs = get_group_dn_pairs()\n if grp_dn_pairs is None:\n logger.warning('get group dn pairs from db failed when migrate dn pairs.')\n return\n\n grp_dn_pairs.reverse()\n for grp_dn_pair in grp_dn_pairs:\n for config in settings.ldap_configs:\n search_filter = '(objectClass=*)'\n ldap_conn = LdapConn(config.host, config.user_dn, config.passwd, config.follow_referrals)\n ldap_conn.create_conn()\n if not ldap_conn.conn:\n logger.warning('connect ldap server [%s] failed.' 
% config.user_dn)\n return\n\n if config.use_page_result:\n results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter,\n [config.group_uuid_attr])\n else:\n results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter,\n [config.group_uuid_attr])\n ldap_conn.unbind_conn()\n results = bytes2str(results)\n\n if not results:\n continue\n else:\n uuid = results[0][1][config.group_uuid_attr][0]\n add_group_uuid_pair(grp_dn_pair.group_id, uuid)\n\n\nclass LdapSync(Thread):\n def __init__(self, settings):\n Thread.__init__(self)\n self.settings = settings\n\n def run(self):\n if self.settings.enable_group_sync:\n migrate_dn_pairs(settings=self.settings)\n self.start_sync()\n self.show_sync_result()\n\n def show_sync_result(self):\n pass\n\n def start_sync(self):\n data_ldap = self.get_data_from_ldap()\n if data_ldap is None:\n return\n\n data_db = self.get_data_from_db()\n if data_db is None:\n return\n\n self.sync_data(data_db, data_ldap)\n\n def get_data_from_db(self):\n return None\n\n def get_data_from_ldap(self):\n ret = {}\n\n for config in self.settings.ldap_configs:\n cur_ret = self.get_data_from_ldap_by_server(config)\n # If get data from one server failed, then the result is failed\n if cur_ret is None:\n return None\n for key in cur_ret.keys():\n if key not in ret:\n ret[key] = cur_ret[key]\n ret[key].config = config\n\n return ret\n\n def get_data_from_ldap_by_server(self, config):\n return None\n\n def sync_data(self, data_db, data_ldap):\n pass\n", "step-ids": [ 9, 10, 11, 12, 13 ] }
[ 9, 10, 11, 12, 13 ]
""" Authentication views. login() Flask view to log a user in. """ import functools from typing import Any, Callable, cast, Dict from flask import Blueprint, make_response, request, session from werkzeug.security import check_password_hash as _check_password_hash from .accesscontrol import PERMISSIONS from .api import APIError, UserSchema from .db.models import User bp = Blueprint("auth", __name__, url_prefix="/api/v1/auth") _CHECK_HASH_ANYWAY = "pbkdf2:sha256:150000$tRQtwnYW$80442246fe5dbd649c8a90cd0209f7a3751e8a0ec1327f88f6b331f929642050" # pylint: disable=line-too-long check_password_hash: Callable[[str, str], bool] = cast( Callable[[str, str], bool], _check_password_hash ) @bp.route("/login", methods=("POST",)) def login() -> Any: """Flask view for logging a user in.""" user_dict = UserSchema().load( request.json, partial=("id", "qualifications") + PERMISSIONS ) username = user_dict["username"] password = user_dict["password"] if is_password_correct(username, password): user = fetch_user(username) session["user_id"] = user["id"] response = make_response(user) response.set_cookie("is_authenticated", "1") return response raise APIError(reason="invalid_user_or_password", status_code=403) @bp.route("/logout", methods=("POST",)) def logout() -> Any: """Flask view to log a user out.""" if "user_id" in session: del session["user_id"] response = make_response({"success": True}) response.set_cookie("is_authenticated", max_age=0, expires=0) return response def is_password_correct(username: str, password: str) -> bool: """Checks whether password is valid for user, tries to avoid timing attacks.""" user = User.query.filter_by(username=username).first() if user is None: # We need to prevent timing-based side-channel attacks # that could be exploited for user enumeration password_hash = _CHECK_HASH_ANYWAY else: password_hash = user.password return check_password_hash(password_hash, password) and user is not None def fetch_user(username: str) -> Dict[str, Any]: """Look up a user as a dictionary from the DB.""" user = User.query.filter_by(username=username).first() return cast(Dict[str, Any], UserSchema().dump(user)) def authentication_required(to_be_wrapped: Callable[..., Any]) -> Callable[..., Any]: """Wraps a view with a check for whether the user is authenticated.""" @functools.wraps(to_be_wrapped) def wrapper(*args: Any, **kwargs: Any) -> Any: user_id = session.get("user_id") if user_id is None or User.query.get(user_id) is None: if "user_id" in session: del session["user_id"] response = make_response({"reason": "authentication_required"}, 403) response.set_cookie("is_authenticated", max_age=0, expires=0) return response return to_be_wrapped(*args, **kwargs) return wrapper
normal
{ "blob_id": "2d36ae916ad257615016ed6c0bc67e506ee313c9", "index": 1528, "step-1": "<mask token>\n\n\n@bp.route('/login', methods=('POST',))\ndef login() ->Any:\n \"\"\"Flask view for logging a user in.\"\"\"\n user_dict = UserSchema().load(request.json, partial=('id',\n 'qualifications') + PERMISSIONS)\n username = user_dict['username']\n password = user_dict['password']\n if is_password_correct(username, password):\n user = fetch_user(username)\n session['user_id'] = user['id']\n response = make_response(user)\n response.set_cookie('is_authenticated', '1')\n return response\n raise APIError(reason='invalid_user_or_password', status_code=403)\n\n\n@bp.route('/logout', methods=('POST',))\ndef logout() ->Any:\n \"\"\"Flask view to log a user out.\"\"\"\n if 'user_id' in session:\n del session['user_id']\n response = make_response({'success': True})\n response.set_cookie('is_authenticated', max_age=0, expires=0)\n return response\n\n\ndef is_password_correct(username: str, password: str) ->bool:\n \"\"\"Checks whether password is valid for user, tries to avoid timing attacks.\"\"\"\n user = User.query.filter_by(username=username).first()\n if user is None:\n password_hash = _CHECK_HASH_ANYWAY\n else:\n password_hash = user.password\n return check_password_hash(password_hash, password) and user is not None\n\n\n<mask token>\n\n\ndef authentication_required(to_be_wrapped: Callable[..., Any]) ->Callable[\n ..., Any]:\n \"\"\"Wraps a view with a check for whether the user is authenticated.\"\"\"\n\n @functools.wraps(to_be_wrapped)\n def wrapper(*args: Any, **kwargs: Any) ->Any:\n user_id = session.get('user_id')\n if user_id is None or User.query.get(user_id) is None:\n if 'user_id' in session:\n del session['user_id']\n response = make_response({'reason': 'authentication_required'}, 403\n )\n response.set_cookie('is_authenticated', max_age=0, expires=0)\n return response\n return to_be_wrapped(*args, **kwargs)\n return wrapper\n", "step-2": "<mask token>\ncheck_password_hash: Callable[[str, str], bool] = cast(Callable[[str, str],\n bool], _check_password_hash)\n\n\n@bp.route('/login', methods=('POST',))\ndef login() ->Any:\n \"\"\"Flask view for logging a user in.\"\"\"\n user_dict = UserSchema().load(request.json, partial=('id',\n 'qualifications') + PERMISSIONS)\n username = user_dict['username']\n password = user_dict['password']\n if is_password_correct(username, password):\n user = fetch_user(username)\n session['user_id'] = user['id']\n response = make_response(user)\n response.set_cookie('is_authenticated', '1')\n return response\n raise APIError(reason='invalid_user_or_password', status_code=403)\n\n\n@bp.route('/logout', methods=('POST',))\ndef logout() ->Any:\n \"\"\"Flask view to log a user out.\"\"\"\n if 'user_id' in session:\n del session['user_id']\n response = make_response({'success': True})\n response.set_cookie('is_authenticated', max_age=0, expires=0)\n return response\n\n\ndef is_password_correct(username: str, password: str) ->bool:\n \"\"\"Checks whether password is valid for user, tries to avoid timing attacks.\"\"\"\n user = User.query.filter_by(username=username).first()\n if user is None:\n password_hash = _CHECK_HASH_ANYWAY\n else:\n password_hash = user.password\n return check_password_hash(password_hash, password) and user is not None\n\n\ndef fetch_user(username: str) ->Dict[str, Any]:\n \"\"\"Look up a user as a dictionary from the DB.\"\"\"\n user = User.query.filter_by(username=username).first()\n return cast(Dict[str, Any], UserSchema().dump(user))\n\n\ndef 
"""
Authentication views.

login()
    Flask view to log a user in.
"""

import functools
from typing import Any, Callable, cast, Dict

from flask import Blueprint, make_response, request, session
from werkzeug.security import check_password_hash as _check_password_hash

from .accesscontrol import PERMISSIONS
from .api import APIError, UserSchema
from .db.models import User


bp = Blueprint("auth", __name__, url_prefix="/api/v1/auth")

_CHECK_HASH_ANYWAY = "pbkdf2:sha256:150000$tRQtwnYW$80442246fe5dbd649c8a90cd0209f7a3751e8a0ec1327f88f6b331f929642050"  # pylint: disable=line-too-long

check_password_hash: Callable[[str, str], bool] = cast(
    Callable[[str, str], bool], _check_password_hash
)


@bp.route("/login", methods=("POST",))
def login() -> Any:
    """Flask view for logging a user in."""
    user_dict = UserSchema().load(
        request.json, partial=("id", "qualifications") + PERMISSIONS
    )
    username = user_dict["username"]
    password = user_dict["password"]

    if is_password_correct(username, password):
        user = fetch_user(username)
        session["user_id"] = user["id"]
        response = make_response(user)
        response.set_cookie("is_authenticated", "1")
        return response

    raise APIError(reason="invalid_user_or_password", status_code=403)


@bp.route("/logout", methods=("POST",))
def logout() -> Any:
    """Flask view to log a user out."""
    if "user_id" in session:
        del session["user_id"]
    response = make_response({"success": True})
    response.set_cookie("is_authenticated", max_age=0, expires=0)
    return response


def is_password_correct(username: str, password: str) -> bool:
    """Checks whether password is valid for user, tries to avoid timing attacks."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        # We need to prevent timing-based side-channel attacks
        # that could be exploited for user enumeration
        password_hash = _CHECK_HASH_ANYWAY
    else:
        password_hash = user.password

    return check_password_hash(password_hash, password) and user is not None


def fetch_user(username: str) -> Dict[str, Any]:
    """Look up a user as a dictionary from the DB."""
    user = User.query.filter_by(username=username).first()
    return cast(Dict[str, Any], UserSchema().dump(user))


def authentication_required(to_be_wrapped: Callable[..., Any]) -> Callable[..., Any]:
    """Wraps a view with a check for whether the user is authenticated."""

    @functools.wraps(to_be_wrapped)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        user_id = session.get("user_id")
        if user_id is None or User.query.get(user_id) is None:
            if "user_id" in session:
                del session["user_id"]
            response = make_response({"reason": "authentication_required"}, 403)
            response.set_cookie("is_authenticated", max_age=0, expires=0)
            return response
        return to_be_wrapped(*args, **kwargs)

    return wrapper
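# Usage sketch (not part of the original module): the decorator is meant to sit
# under a route decorator on any view that should reject anonymous requests.
# The "/whoami" endpoint and its payload below are hypothetical.
@bp.route("/whoami", methods=("GET",))
@authentication_required
def whoami() -> Any:
    # Only reached when session["user_id"] refers to an existing User.
    return make_response({"user_id": session["user_id"]})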
def fibonacci(quantidade):
    resultado = [1, 2]
    # Rather than an open-ended "while True", iterate with "for" over a range
    # starting at 2 (the list already holds the first two terms) and running
    # up to the count requested in "quantidade".
    for _ in range(2, quantidade):
        # Each pass appends the sum of the last two elements until the list
        # holds "quantidade" terms.
        # The loop variable is named "_" to signal that it is unused.
        resultado.append(sum(resultado[-2:]))
    return resultado


for fib in fibonacci(20):
    print(fib)
import os
import json
import random


# Experiment configuration
chapter_mode = True
setname = 'test_other'
use_chapter = '_chapter'
minlen = 1000
maxlen = 1000
context = '_1000'

info_json = 'bookinfo{}_{}{}.json'.format(use_chapter, setname, context)

# Map (speaker, book) -> list of recording IDs, parsed from speaker_book.txt
book_ID_mapping = {}
with open('speaker_book.txt') as fin:
    for line in fin:
        elems = line.split('|')
        ID = elems[0].lstrip().strip()
        speaker = elems[1].lstrip().strip()
        subset = elems[3].lstrip().strip()
        book = elems[5].lstrip().strip()
        if (speaker, book) not in book_ID_mapping:
            book_ID_mapping[(speaker, book)] = [ID]
        else:
            book_ID_mapping[(speaker, book)].append(ID)

with open(info_json) as fin:
    spk_bookwords = json.load(fin)

# Rare-word list and corpus word frequencies
worddict = set()
with open('../all_rare_words.txt') as fin:
    for line in fin:
        word = line.strip()
        worddict.add(word)

worddict_full = {}
with open('word_freq.txt') as fin:
    for line in fin:
        word, freq = line.split()
        worddict_full[word] = int(freq)

# Collect (word, frequency) pairs per recording/chapter ID, keeping only rare words
spk_book_KB = {}
KBfulllist = set()

for speaker, books in spk_bookwords.items():
    # spk_book_KB[speaker] = {}
    for book, content in books.items():
        speaker_book_IDs = book_ID_mapping[(speaker, book)] if 'chapter' not in info_json else [speaker]
        for speaker_book_ID in speaker_book_IDs:
            spk_book_KB[speaker_book_ID] = []
            bookwords = content['bookwords']
            oovwords = content['oovwords']
            for word in bookwords:
                if word in worddict:
                    spk_book_KB[speaker_book_ID].append((word, worddict_full[word] if word in worddict_full else 0))
                    if word not in KBfulllist:
                        KBfulllist.add(word)
            for word in oovwords:
                if word in worddict:
                    spk_book_KB[speaker_book_ID].append((word, worddict_full[word] if word in worddict_full else 0))
                    if word not in KBfulllist:
                        KBfulllist.add(word)

full_wordlist = list(KBfulllist)
output_path = 'LibriKB{}{}all_{}'.format(use_chapter[1:], context, maxlen)
os.system('mkdir -p {}'.format(output_path))

# Pad each KB with random rare words up to minlen, then write out at most
# maxlen entries, sorted by ascending corpus frequency.
worddict = list(worddict)
for ID, KB in spk_book_KB.items():
    random.shuffle(worddict)
    count = 0
    while len(KB) < minlen and count < len(worddict):
        word = worddict[count]
        freq = worddict_full[word] if word in worddict_full else 0
        if (word, freq) not in KB:
            KB.append((word, freq))
        count += 1
    KB.sort(key=lambda tup: tup[1])
    with open(os.path.join(output_path, ID), 'w') as fout:
        for word, freq in KB[:maxlen]:
            fout.write(word + '\n')
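# The nesting that the loop above expects from the bookinfo JSON can be read
# off the code; only the structure below is taken from the code, while the
# speaker/book keys and word lists are invented placeholders.
spk_bookwords_example = {
    "1089": {                              # speaker (or chapter ID in chapter mode)
        "some_book_title": {               # book, matched against speaker_book.txt
            "bookwords": ["word_a", "word_b"],
            "oovwords": ["word_c"],
        },
    },
}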
import numpy as np


def layer_forward(x, w):
    """
    input:
    - inputs (x): (N, d_1, ..., d_k),
    - weights (w): (D, M)
    """
    # intermediate value (z)
    z = None
    output = []
    cache = (x, w, z, output)

    return output, cache


def layer_backward(d_output, cache):
    """ Receive derivative of loss with respect
    to outputs and cache, and compute derivative
    with respect to inputs
    """

    # Unpack cache values
    x, w, z, output = cache

    # Compute derivatives (gradients)
    d_x, d_w = None, None

    return d_x, d_w


def affine_forward(x, w, b):
    """
    A simple linear feedforward (affine)
    input:
    - inputs (x): (N, d_1, ..., d_k),
    - weights (w): (D, M)
    - bias (b): (M,)
    return:
    - output: (N, M)
    - cache: (x, w, b)
    """
    N = x.shape[0]

    # reshape input into rows
    output = x.reshape([N, -1]).dot(w) + b
    cache = (x, w, b)

    return output, cache


def affine_backward(d_output, cache):
    """
    input:
    - upstream derivative (d_output): (N, M)
    - cache (cache): (x, w, b)
    return:
    - gradients (d_x, d_w, d_b): ((N, d_1, ..., d_k), (D, M), (M,))
    """

    # Unpack cache values
    x, w, b = cache

    N = d_output.shape[0]
    d_x = d_output.dot(w.T).reshape(x.shape)
    d_w = x.reshape([N, -1]).T.dot(d_output)
    d_b = np.sum(d_output, axis=0)

    return d_x, d_w, d_b


def relu_forward(x):
    """
    input:
    - inputs (x): (N, d_1, ..., d_k)
    return:
    - output: (N, d_1, ..., d_k)
    - cache: x
    """
    output = np.fmax(x, 0)
    cache = x

    return output, cache


def relu_backward(d_output, cache):
    """
    input:
    - upstream derivative (d_output): (N, d_1, ..., d_k)
    - cache for x (cache): (N, d_1, ..., d_k)
    return:
    - d_x: gradient with respect to x
    """
    x = cache
    d_x = np.sign(np.fmax(x, 0)) * d_output

    return d_x
__author__ = "Prikly Grayp" __license__ = "MIT" __version__ = "1.0.0" __email__ = "priklygrayp@gmail.com" __status__ = "Development" from contextlib import closing class RefrigeratorRaider: '''Raid a refrigerator''' def open(self): print('Open fridge door.') def take(self, food): print('Finding {}...'.format(food)) if food == 'deep fried pizza': raise RuntimeError('Health warning!') print('Taking {}'.format(food)) def close(self): print('Close fridg door.') def raid(food): with closing(RefrigeratorRaider()) as r: r.open() r.take(food) raid('bacon') raid('deep fried pizza')
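# Hypothetical guarded variant of the failing call above (not in the original):
# 'Close fridge door.' is printed by closing() before the RuntimeError reaches
# this handler, which is the point of using closing() in raid().
try:
    raid('deep fried pizza')
except RuntimeError as error:
    print('Raid aborted:', error)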
import sys
import os
import json
from collections import OrderedDict
from config import folder, portfolio_value
from datetime import datetime
import logging

# Logger setup
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)


def valid_date(datestring):
    """ Determine if something is a valid date """
    try:
        datetime.strptime(datestring, '%Y-%m-%d')
        return True
    except ValueError as e:
        logger.info('not a valid date: %s', e)
        return False


def portfolio_value_on_date(date):
    """ Retrieve the total portfolio value on a given date """
    if valid_date(date):
        try:
            with open(folder + 'portfolio_balance.json', encoding='utf-8') as read_file:
                data = json.loads(read_file.read(),
                                  object_pairs_hook=OrderedDict)
                return data[date]['daily_value']
        except Exception:
            logger.critical('couldnt read portfolio.json')
            return 'something went horribly wrong trying to open the portfolio.json'
    else:
        return 'error on date format or date not in range'


def net_gain_loss_percentage():
    """ Retrieve the net gain percentage in total value of portfolio at the end of the backtest """
    try:
        with open(folder + 'portfolio_balance.json', encoding='utf-8') as read_file:
            data = json.loads(read_file.read(),
                              object_pairs_hook=OrderedDict)
            # Percentage change relative to the starting portfolio value
            net_gain_loss = (data['final_portfolio'] - portfolio_value) / portfolio_value * 100
            logger.info('net gain loss is %s', net_gain_loss)
            if net_gain_loss > 0:
                return 'Your net gain is ' + str(net_gain_loss) + '%'
            elif net_gain_loss == 0:
                return 'You broke even'
            else:
                return 'Your net loss is ' + str(net_gain_loss) + '%'
    except Exception:
        logger.critical('couldnt read portfolio.json')
        return 'something went horribly wrong trying to open the portfolio.json'


def max_drawdown():
    """ Maximum percentage drawdown experienced in the backtest """
    try:
        with open(folder + 'portfolio_balance.json', encoding='utf-8') as read_file:
            data = json.loads(read_file.read(),
                              object_pairs_hook=OrderedDict)

            def daily_price():
                """ Yield the daily portfolio values """
                for item in data:
                    if valid_date(item):
                        yield data[item]['daily_value']

            # since the daily portfolio is already a running tally
            # we just need the highest and lowest daily values
            max_price = max(daily_price())
            min_price = min(daily_price())
            draw = (max_price - min_price) / max_price * 100
            logger.info('draw percent: %s', draw)
            return 'Max Drawdown is ' + str(draw) + '%'
    except Exception:
        logger.critical('couldnt read portfolio.json')
        return 'something went horribly wrong trying to open the portfolio.json'
flexible
{ "blob_id": "0bc72a558b9bd3b5f74ce5dfce586dd66c579710", "index": 5776, "step-1": "<mask token>\n\n\ndef valid_date(datestring):\n \"\"\" Determine if something is a valid date \"\"\"\n try:\n datetime.strptime(datestring, '%Y-%m-%d')\n return True\n except ValueError as e:\n logger.info('not a valid date: ' + e)\n return False\n\n\ndef portfolio_value_on_date(date):\n \"\"\" Retrieve the total portfolio value on a given data \"\"\"\n if valid_date(date):\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=\n OrderedDict)\n return data[date]['daily_value']\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json'\n )\n else:\n return 'error on date format or date not in range'\n\n\ndef net_gain_loss_percentage():\n \"\"\" Retrieve the net gain percentage in total value of portfolio at the end of the backtest \"\"\"\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=OrderedDict)\n net_gain_loss = data['final_portfolio'] / portfolio_value\n logger.info('net gain loss is ' + net_gain_loss)\n if net_gain_loss > 0:\n return 'Your net gain is ' + str(net_gain_loss) + '%'\n elif net_gain_loss == 0:\n return 'You broke even'\n else:\n return 'Your net loss is ' + str(net_gain_loss) + '%'\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json')\n\n\ndef max_drawdown():\n \"\"\" Maximum percentage drawdown experienced in the backtest \"\"\"\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=OrderedDict)\n\n def daily_price():\n \"\"\" Record daily volume in a generator \"\"\"\n for item in data:\n if valid_date(item):\n yield data[item]['daily_value']\n max_price = max(daily_price())\n min_price = min(daily_price())\n draw = max_price / min_price\n logger.info('draw percent: ' + draw)\n return 'Max Drawdown is ' + str(draw) + '%'\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json')\n", "step-2": "<mask token>\nlogging.basicConfig(level=logging.INFO)\n\n\ndef valid_date(datestring):\n \"\"\" Determine if something is a valid date \"\"\"\n try:\n datetime.strptime(datestring, '%Y-%m-%d')\n return True\n except ValueError as e:\n logger.info('not a valid date: ' + e)\n return False\n\n\ndef portfolio_value_on_date(date):\n \"\"\" Retrieve the total portfolio value on a given data \"\"\"\n if valid_date(date):\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=\n OrderedDict)\n return data[date]['daily_value']\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json'\n )\n else:\n return 'error on date format or date not in range'\n\n\ndef net_gain_loss_percentage():\n \"\"\" Retrieve the net gain percentage in total value of portfolio at the end of the backtest \"\"\"\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=OrderedDict)\n net_gain_loss = data['final_portfolio'] / 
portfolio_value\n logger.info('net gain loss is ' + net_gain_loss)\n if net_gain_loss > 0:\n return 'Your net gain is ' + str(net_gain_loss) + '%'\n elif net_gain_loss == 0:\n return 'You broke even'\n else:\n return 'Your net loss is ' + str(net_gain_loss) + '%'\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json')\n\n\ndef max_drawdown():\n \"\"\" Maximum percentage drawdown experienced in the backtest \"\"\"\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=OrderedDict)\n\n def daily_price():\n \"\"\" Record daily volume in a generator \"\"\"\n for item in data:\n if valid_date(item):\n yield data[item]['daily_value']\n max_price = max(daily_price())\n min_price = min(daily_price())\n draw = max_price / min_price\n logger.info('draw percent: ' + draw)\n return 'Max Drawdown is ' + str(draw) + '%'\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json')\n", "step-3": "<mask token>\nlogger = logging.getLogger(__name__)\nlogging.basicConfig(level=logging.INFO)\n\n\ndef valid_date(datestring):\n \"\"\" Determine if something is a valid date \"\"\"\n try:\n datetime.strptime(datestring, '%Y-%m-%d')\n return True\n except ValueError as e:\n logger.info('not a valid date: ' + e)\n return False\n\n\ndef portfolio_value_on_date(date):\n \"\"\" Retrieve the total portfolio value on a given data \"\"\"\n if valid_date(date):\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=\n OrderedDict)\n return data[date]['daily_value']\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json'\n )\n else:\n return 'error on date format or date not in range'\n\n\ndef net_gain_loss_percentage():\n \"\"\" Retrieve the net gain percentage in total value of portfolio at the end of the backtest \"\"\"\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=OrderedDict)\n net_gain_loss = data['final_portfolio'] / portfolio_value\n logger.info('net gain loss is ' + net_gain_loss)\n if net_gain_loss > 0:\n return 'Your net gain is ' + str(net_gain_loss) + '%'\n elif net_gain_loss == 0:\n return 'You broke even'\n else:\n return 'Your net loss is ' + str(net_gain_loss) + '%'\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json')\n\n\ndef max_drawdown():\n \"\"\" Maximum percentage drawdown experienced in the backtest \"\"\"\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=OrderedDict)\n\n def daily_price():\n \"\"\" Record daily volume in a generator \"\"\"\n for item in data:\n if valid_date(item):\n yield data[item]['daily_value']\n max_price = max(daily_price())\n min_price = min(daily_price())\n draw = max_price / min_price\n logger.info('draw percent: ' + draw)\n return 'Max Drawdown is ' + str(draw) + '%'\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json')\n", "step-4": "import 
sys\nimport os\nimport json\nfrom collections import OrderedDict\nfrom config import folder, portfolio_value\nfrom datetime import datetime\nimport logging\nlogger = logging.getLogger(__name__)\nlogging.basicConfig(level=logging.INFO)\n\n\ndef valid_date(datestring):\n \"\"\" Determine if something is a valid date \"\"\"\n try:\n datetime.strptime(datestring, '%Y-%m-%d')\n return True\n except ValueError as e:\n logger.info('not a valid date: ' + e)\n return False\n\n\ndef portfolio_value_on_date(date):\n \"\"\" Retrieve the total portfolio value on a given data \"\"\"\n if valid_date(date):\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=\n OrderedDict)\n return data[date]['daily_value']\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json'\n )\n else:\n return 'error on date format or date not in range'\n\n\ndef net_gain_loss_percentage():\n \"\"\" Retrieve the net gain percentage in total value of portfolio at the end of the backtest \"\"\"\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=OrderedDict)\n net_gain_loss = data['final_portfolio'] / portfolio_value\n logger.info('net gain loss is ' + net_gain_loss)\n if net_gain_loss > 0:\n return 'Your net gain is ' + str(net_gain_loss) + '%'\n elif net_gain_loss == 0:\n return 'You broke even'\n else:\n return 'Your net loss is ' + str(net_gain_loss) + '%'\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json')\n\n\ndef max_drawdown():\n \"\"\" Maximum percentage drawdown experienced in the backtest \"\"\"\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8'\n ) as read_file:\n data = json.loads(read_file.read(), object_pairs_hook=OrderedDict)\n\n def daily_price():\n \"\"\" Record daily volume in a generator \"\"\"\n for item in data:\n if valid_date(item):\n yield data[item]['daily_value']\n max_price = max(daily_price())\n min_price = min(daily_price())\n draw = max_price / min_price\n logger.info('draw percent: ' + draw)\n return 'Max Drawdown is ' + str(draw) + '%'\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return (\n 'something went horribly wrong trying to open the portfolio.json')\n", "step-5": "import sys\nimport os\nimport json\nfrom collections import OrderedDict\nfrom config import folder, portfolio_value\nfrom datetime import datetime\nimport logging\n# Logger setup\nlogger = logging.getLogger(__name__)\nlogging.basicConfig(level=logging.INFO)\n\n\ndef valid_date(datestring):\n \"\"\" Determine if something is a valid date \"\"\"\n try:\n datetime.strptime(datestring, '%Y-%m-%d')\n return True\n except ValueError as e:\n logger.info('not a valid date: ' + e)\n return False\n\n\ndef portfolio_value_on_date(date):\n \"\"\" Retrieve the total portfolio value on a given data \"\"\"\n if valid_date(date):\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8') as read_file:\n data = json.loads(read_file.read(),\n object_pairs_hook=OrderedDict)\n return data[date]['daily_value']\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return 'something went horribly wrong trying to open the portfolio.json'\n else:\n return 'error on date format or date not in range'\n\n\ndef 
net_gain_loss_percentage():\n \"\"\" Retrieve the net gain percentage in total value of portfolio at the end of the backtest \"\"\"\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8') as read_file:\n data = json.loads(read_file.read(),\n object_pairs_hook=OrderedDict)\n net_gain_loss = data['final_portfolio'] / portfolio_value\n logger.info('net gain loss is ' + net_gain_loss)\n if net_gain_loss > 0:\n return 'Your net gain is ' + str(net_gain_loss) + '%'\n elif net_gain_loss == 0:\n return 'You broke even'\n else:\n return 'Your net loss is ' + str(net_gain_loss) + '%'\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return 'something went horribly wrong trying to open the portfolio.json'\n\n\ndef max_drawdown():\n \"\"\" Maximum percentage drawdown experienced in the backtest \"\"\"\n try:\n with open(folder + 'portfolio_balance.json', encoding='utf-8') as read_file:\n data = json.loads(read_file.read(),\n object_pairs_hook=OrderedDict)\n\n def daily_price():\n \"\"\" Record daily volume in a generator \"\"\"\n for item in data:\n if valid_date(item):\n yield data[item]['daily_value']\n\n # since the daily portfolio is already a running tally\n # we just need to find the max and the min between them\n max_price = max(daily_price())\n min_price = min(daily_price())\n draw = max_price / min_price\n logger.info('draw percent: ' + draw)\n return 'Max Drawdown is ' + str(draw) + '%'\n except Exception:\n logger.critical('couldnt read portfolio.json')\n return 'something went horribly wrong trying to open the portfolio.json'\n", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
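The max_drawdown in this record divides the global maximum by the global minimum, which ignores ordering: a drawdown only counts when the trough comes after the peak. A sketch of the conventional running-peak computation, assuming the same portfolio_balance.json layout as the record (date keys mapping to dicts with a 'daily_value' field, plus non-date keys such as 'final_portfolio'); the function name is illustrative, not part of the record.

import json
from collections import OrderedDict


def max_drawdown_running_peak(path):
    """ Largest peak-to-trough decline in percent, with the peak occurring first """
    with open(path, encoding='utf-8') as read_file:
        data = json.loads(read_file.read(), object_pairs_hook=OrderedDict)
    peak = None
    worst = 0.0
    for key, entry in data.items():
        if not isinstance(entry, dict) or 'daily_value' not in entry:
            continue  # skip non-date keys such as 'final_portfolio'
        value = entry['daily_value']
        if peak is None or value > peak:
            peak = value  # update the running peak before measuring the decline
        worst = max(worst, (peak - value) / peak)
    return worst * 100

Iterating in insertion order matters here, which is why the record's object_pairs_hook=OrderedDict (or a plain dict on Python 3.7+) is kept.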
# coding=utf8 from __future__ import print_function from application.controllers import * from application.models import board def __return__(): return render_template('board/board.html', lecturers = board.Lecturer.query.all(), disciplines = board.Discipline.query.all()) def __return_modal__(id): lecturer = board.Lecturer.query.get(id) print("esdasd"+lecturer.description) return render_template("board/modal.html", lecturer = lecturer)
normal
{ "blob_id": "f87c036c1eb5026e088bed62fbc330cfd2ea1952", "index": 7500, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef __return_modal__(id):\n lecturer = board.Lecturer.query.get(id)\n print('esdasd' + lecturer.description)\n return render_template('board/modal.html', lecturer=lecturer)\n", "step-3": "<mask token>\n\n\ndef __return__():\n return render_template('board/board.html', lecturers=board.Lecturer.\n query.all(), disciplines=board.Discipline.query.all())\n\n\ndef __return_modal__(id):\n lecturer = board.Lecturer.query.get(id)\n print('esdasd' + lecturer.description)\n return render_template('board/modal.html', lecturer=lecturer)\n", "step-4": "from __future__ import print_function\nfrom application.controllers import *\nfrom application.models import board\n\n\ndef __return__():\n return render_template('board/board.html', lecturers=board.Lecturer.\n query.all(), disciplines=board.Discipline.query.all())\n\n\ndef __return_modal__(id):\n lecturer = board.Lecturer.query.get(id)\n print('esdasd' + lecturer.description)\n return render_template('board/modal.html', lecturer=lecturer)\n", "step-5": "# coding=utf8\nfrom __future__ import print_function\nfrom application.controllers import *\n\nfrom application.models import board\n\ndef __return__():\n return render_template('board/board.html', \n lecturers = board.Lecturer.query.all(), disciplines = board.Discipline.query.all())\n\ndef __return_modal__(id):\n lecturer = board.Lecturer.query.get(id)\n print(\"esdasd\"+lecturer.description)\n return render_template(\"board/modal.html\", lecturer = lecturer)", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
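In __return_modal__ above, Lecturer.query.get(id) returns None when no row matches, so lecturer.description raises AttributeError for an unknown id. A guarded variant, a sketch assuming the Flask-SQLAlchemy-style board module from the record:

from flask import abort, render_template
from application.models import board


def __return_modal__(id):
    lecturer = board.Lecturer.query.get(id)
    if lecturer is None:
        abort(404)  # unknown id: answer with 404 instead of raising AttributeError
    return render_template('board/modal.html', lecturer=lecturer)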
from app.api import app def main(): app.run(host='0.0.0.0', port=5001) if __name__ == '__main__': main()
normal
{ "blob_id": "b49e5b40ce1e16f1b7c0bd9509daf94f36c51256", "index": 6726, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef main():\n app.run(host='0.0.0.0', port=5001)\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef main():\n app.run(host='0.0.0.0', port=5001)\n\n\nif __name__ == '__main__':\n main()\n", "step-4": "from app.api import app\n\n\ndef main():\n app.run(host='0.0.0.0', port=5001)\n\n\nif __name__ == '__main__':\n main()\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
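app.run() starts Flask's built-in development server, and binding it to 0.0.0.0 exposes that debug-oriented server to the whole network. A sketch that keeps the wide bind but gates the debugger behind an environment flag (API_DEBUG is an assumed variable name, not part of the record):

import os
from app.api import app


def main():
    # enable the reloader/debugger only on explicit opt-in
    debug = os.environ.get('API_DEBUG', '0') == '1'
    app.run(host='0.0.0.0', port=5001, debug=debug)


if __name__ == '__main__':
    main()

In production the same app object is typically handed to a WSGI server instead, e.g. gunicorn -b 0.0.0.0:5001 app.api:app.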
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(num1)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
num1 = random.randint(50, 150)
print(num1)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import random
num1 = random.randint(50, 150)
print(num1)
<|reserved_special_token_1|>
'''
Using the random module, how do you produce a number between 50 and 150?
'''
import random
num1 = random.randint(50,150)
print(num1)
flexible
{ "blob_id": "7d3355ee775f759412308ab68a7aa409b9c74b20", "index": 708, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(num1)\n", "step-3": "<mask token>\nnum1 = random.randint(50, 151)\nprint(num1)\n", "step-4": "<mask token>\nimport random\nnum1 = random.randint(50, 151)\nprint(num1)\n", "step-5": "'''\r\n使用random模块,如何产生 50~150之间的数?\r\n'''\r\n\r\n\r\nimport random\r\n\r\nnum1 = random.randint(50,151)\r\nprint(num1)", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
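random.randint(a, b) is inclusive at both ends, so the 50-150 range asked for in the docstring corresponds to randint(50, 150); the half-open random.randrange(50, 151) spells the same range. A quick self-check sketch:

import random

random.seed(0)  # deterministic run for the check
samples = [random.randint(50, 150) for _ in range(10000)]
assert 50 <= min(samples) and max(samples) <= 150

# randrange uses a half-open interval, so the same range takes an upper bound of 151
samples = [random.randrange(50, 151) for _ in range(10000)]
assert 50 <= min(samples) and max(samples) <= 150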
import time import os import psutil start = time.time() from queue import Queue from copy import copy process = psutil.Process(os.getpid()) class Node: object_id = 0 weight = 0 value = 0 def __init__(self,object_id,weight,value): self.object_id=object_id self.weight=weight self.value=value """ First we need to extract values from the file""" def read_file(file): f = open(file, "r") f.seek(0) queue=Queue(maxsize=0) list_elements=[] nodes=[] for line in f: id=int(line.split(".", 1)[0]) value= int(line.split(" ", 3)[1]) weight=int(line.split(" ", 3)[2].split('\n', 2)[0]) nodes.append(Node(id,weight,value)) list_elements.append(-1) list_elements.append(0) list_elements.append(0) queue.put(list_elements) res=go_explore(queue,nodes) for i in range(0,len(res)-2): if(res[i]==1): node=nodes[i] res[i]={"id":node.object_id,"weight":node.weight,"value":node.value} res=list(filter(lambda x: x != 0, res)) value=len(res)-1 weight=len(res)-2 res[value]={"total value":res[value]} res[weight]={"total weight":res[weight]} return res def go_explore(queue,nodes): best_value = 0 res=[] while not queue.empty(): q=copy(queue.get()) for i in range(len(q)): if q[i] is -1: weight = q[len(q)-2] value = q[len(q)-1] if weight<=420: if value > best_value: res = q best_value=value q[i]=0 queue.put(q) q_positive= copy(q) q_positive[len(q_positive)-1]=value+nodes[i].value q_positive[len(q_positive)-2]=weight+nodes[i].weight q_positive[i]=1 queue.put(q_positive) break elif i == len(q)-1: weight = q[len(q)-2] value = q[len(q)-1] if weight<=420: if value > best_value: res = q best_value=value return res solution=read_file('Knapsack/data_knapsack') for data in solution: print(data) end = time.time() print(end - start) print(process.memory_info().rss)
normal
{ "blob_id": "be408b349e2795101b525ad8d948dbf52cab81bf", "index": 4281, "step-1": "<mask token>\n\n\nclass Node:\n object_id = 0\n weight = 0\n value = 0\n\n def __init__(self, object_id, weight, value):\n self.object_id = object_id\n self.weight = weight\n self.value = value\n\n\n<mask token>\n\n\ndef read_file(file):\n f = open(file, 'r')\n f.seek(0)\n queue = Queue(maxsize=0)\n list_elements = []\n nodes = []\n for line in f:\n id = int(line.split('.', 1)[0])\n value = int(line.split(' ', 3)[1])\n weight = int(line.split(' ', 3)[2].split('\\n', 2)[0])\n nodes.append(Node(id, weight, value))\n list_elements.append(-1)\n list_elements.append(0)\n list_elements.append(0)\n queue.put(list_elements)\n res = go_explore(queue, nodes)\n for i in range(0, len(res) - 2):\n if res[i] == 1:\n node = nodes[i]\n res[i] = {'id': node.object_id, 'weight': node.weight, 'value':\n node.value}\n res = list(filter(lambda x: x != 0, res))\n value = len(res) - 1\n weight = len(res) - 2\n res[value] = {'total value': res[value]}\n res[weight] = {'total weight': res[weight]}\n return res\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Node:\n object_id = 0\n weight = 0\n value = 0\n\n def __init__(self, object_id, weight, value):\n self.object_id = object_id\n self.weight = weight\n self.value = value\n\n\n<mask token>\n\n\ndef read_file(file):\n f = open(file, 'r')\n f.seek(0)\n queue = Queue(maxsize=0)\n list_elements = []\n nodes = []\n for line in f:\n id = int(line.split('.', 1)[0])\n value = int(line.split(' ', 3)[1])\n weight = int(line.split(' ', 3)[2].split('\\n', 2)[0])\n nodes.append(Node(id, weight, value))\n list_elements.append(-1)\n list_elements.append(0)\n list_elements.append(0)\n queue.put(list_elements)\n res = go_explore(queue, nodes)\n for i in range(0, len(res) - 2):\n if res[i] == 1:\n node = nodes[i]\n res[i] = {'id': node.object_id, 'weight': node.weight, 'value':\n node.value}\n res = list(filter(lambda x: x != 0, res))\n value = len(res) - 1\n weight = len(res) - 2\n res[value] = {'total value': res[value]}\n res[weight] = {'total weight': res[weight]}\n return res\n\n\ndef go_explore(queue, nodes):\n best_value = 0\n res = []\n while not queue.empty():\n q = copy(queue.get())\n for i in range(len(q)):\n if q[i] is -1:\n weight = q[len(q) - 2]\n value = q[len(q) - 1]\n if weight <= 420:\n if value > best_value:\n res = q\n best_value = value\n q[i] = 0\n queue.put(q)\n q_positive = copy(q)\n q_positive[len(q_positive) - 1] = value + nodes[i].value\n q_positive[len(q_positive) - 2] = weight + nodes[i].weight\n q_positive[i] = 1\n queue.put(q_positive)\n break\n elif i == len(q) - 1:\n weight = q[len(q) - 2]\n value = q[len(q) - 1]\n if weight <= 420:\n if value > best_value:\n res = q\n best_value = value\n return res\n\n\n<mask token>\n", "step-3": "<mask token>\nstart = time.time()\n<mask token>\nprocess = psutil.Process(os.getpid())\n\n\nclass Node:\n object_id = 0\n weight = 0\n value = 0\n\n def __init__(self, object_id, weight, value):\n self.object_id = object_id\n self.weight = weight\n self.value = value\n\n\n<mask token>\n\n\ndef read_file(file):\n f = open(file, 'r')\n f.seek(0)\n queue = Queue(maxsize=0)\n list_elements = []\n nodes = []\n for line in f:\n id = int(line.split('.', 1)[0])\n value = int(line.split(' ', 3)[1])\n weight = int(line.split(' ', 3)[2].split('\\n', 2)[0])\n nodes.append(Node(id, weight, value))\n list_elements.append(-1)\n list_elements.append(0)\n list_elements.append(0)\n queue.put(list_elements)\n res = go_explore(queue, nodes)\n for i 
in range(0, len(res) - 2):\n if res[i] == 1:\n node = nodes[i]\n res[i] = {'id': node.object_id, 'weight': node.weight, 'value':\n node.value}\n res = list(filter(lambda x: x != 0, res))\n value = len(res) - 1\n weight = len(res) - 2\n res[value] = {'total value': res[value]}\n res[weight] = {'total weight': res[weight]}\n return res\n\n\ndef go_explore(queue, nodes):\n best_value = 0\n res = []\n while not queue.empty():\n q = copy(queue.get())\n for i in range(len(q)):\n if q[i] is -1:\n weight = q[len(q) - 2]\n value = q[len(q) - 1]\n if weight <= 420:\n if value > best_value:\n res = q\n best_value = value\n q[i] = 0\n queue.put(q)\n q_positive = copy(q)\n q_positive[len(q_positive) - 1] = value + nodes[i].value\n q_positive[len(q_positive) - 2] = weight + nodes[i].weight\n q_positive[i] = 1\n queue.put(q_positive)\n break\n elif i == len(q) - 1:\n weight = q[len(q) - 2]\n value = q[len(q) - 1]\n if weight <= 420:\n if value > best_value:\n res = q\n best_value = value\n return res\n\n\nsolution = read_file('Knapsack/data_knapsack')\nfor data in solution:\n print(data)\nend = time.time()\nprint(end - start)\nprint(process.memory_info().rss)\n", "step-4": "import time\nimport os\nimport psutil\nstart = time.time()\nfrom queue import Queue\nfrom copy import copy\nprocess = psutil.Process(os.getpid())\n\n\nclass Node:\n object_id = 0\n weight = 0\n value = 0\n\n def __init__(self, object_id, weight, value):\n self.object_id = object_id\n self.weight = weight\n self.value = value\n\n\n<mask token>\n\n\ndef read_file(file):\n f = open(file, 'r')\n f.seek(0)\n queue = Queue(maxsize=0)\n list_elements = []\n nodes = []\n for line in f:\n id = int(line.split('.', 1)[0])\n value = int(line.split(' ', 3)[1])\n weight = int(line.split(' ', 3)[2].split('\\n', 2)[0])\n nodes.append(Node(id, weight, value))\n list_elements.append(-1)\n list_elements.append(0)\n list_elements.append(0)\n queue.put(list_elements)\n res = go_explore(queue, nodes)\n for i in range(0, len(res) - 2):\n if res[i] == 1:\n node = nodes[i]\n res[i] = {'id': node.object_id, 'weight': node.weight, 'value':\n node.value}\n res = list(filter(lambda x: x != 0, res))\n value = len(res) - 1\n weight = len(res) - 2\n res[value] = {'total value': res[value]}\n res[weight] = {'total weight': res[weight]}\n return res\n\n\ndef go_explore(queue, nodes):\n best_value = 0\n res = []\n while not queue.empty():\n q = copy(queue.get())\n for i in range(len(q)):\n if q[i] is -1:\n weight = q[len(q) - 2]\n value = q[len(q) - 1]\n if weight <= 420:\n if value > best_value:\n res = q\n best_value = value\n q[i] = 0\n queue.put(q)\n q_positive = copy(q)\n q_positive[len(q_positive) - 1] = value + nodes[i].value\n q_positive[len(q_positive) - 2] = weight + nodes[i].weight\n q_positive[i] = 1\n queue.put(q_positive)\n break\n elif i == len(q) - 1:\n weight = q[len(q) - 2]\n value = q[len(q) - 1]\n if weight <= 420:\n if value > best_value:\n res = q\n best_value = value\n return res\n\n\nsolution = read_file('Knapsack/data_knapsack')\nfor data in solution:\n print(data)\nend = time.time()\nprint(end - start)\nprint(process.memory_info().rss)\n", "step-5": "import time\nimport os\nimport psutil\nstart = time.time()\nfrom queue import Queue\nfrom copy import copy\n\nprocess = psutil.Process(os.getpid())\n\nclass Node:\n object_id = 0\n weight = 0\n value = 0\n \n def __init__(self,object_id,weight,value):\n self.object_id=object_id\n self.weight=weight\n self.value=value\n\n\n\n\"\"\" First we need to extract values from the file\"\"\"\ndef 
read_file(file):\n f = open(file, \"r\")\n f.seek(0)\n queue=Queue(maxsize=0) \n list_elements=[]\n nodes=[]\n for line in f:\n id=int(line.split(\".\", 1)[0])\n value= int(line.split(\" \", 3)[1])\n weight=int(line.split(\" \", 3)[2].split('\\n', 2)[0])\n nodes.append(Node(id,weight,value))\n list_elements.append(-1)\n list_elements.append(0)\n list_elements.append(0)\n\n queue.put(list_elements)\n res=go_explore(queue,nodes) \n for i in range(0,len(res)-2):\n if(res[i]==1):\n node=nodes[i]\n res[i]={\"id\":node.object_id,\"weight\":node.weight,\"value\":node.value}\n res=list(filter(lambda x: x != 0, res))\n \n value=len(res)-1\n weight=len(res)-2\n res[value]={\"total value\":res[value]}\n res[weight]={\"total weight\":res[weight]}\n return res\n\n\n\ndef go_explore(queue,nodes):\n best_value = 0\n res=[]\n while not queue.empty():\n q=copy(queue.get())\n for i in range(len(q)):\n if q[i] is -1:\n weight = q[len(q)-2]\n value = q[len(q)-1]\n if weight<=420:\n if value > best_value:\n res = q\n best_value=value\n q[i]=0\n queue.put(q)\n q_positive= copy(q)\n q_positive[len(q_positive)-1]=value+nodes[i].value\n q_positive[len(q_positive)-2]=weight+nodes[i].weight\n q_positive[i]=1\n queue.put(q_positive)\n break\n elif i == len(q)-1:\n weight = q[len(q)-2]\n value = q[len(q)-1]\n if weight<=420:\n if value > best_value:\n res = q\n best_value=value\n \n return res \n\n\n \n \nsolution=read_file('Knapsack/data_knapsack')\n\n\n\nfor data in solution:\n print(data)\n\nend = time.time()\nprint(end - start)\nprint(process.memory_info().rss)\n", "step-ids": [ 4, 5, 7, 8, 9 ] }
[ 4, 5, 7, 8, 9 ]
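The queue-based search in this record explores partial assignments one item at a time, so it can touch on the order of 2**n states (and its q[i] is -1 sentinel test only works because CPython interns small integers; q[i] == -1 is the portable comparison). For integer capacities, the classic dynamic program runs in O(n * capacity); a sketch, assuming the record's Node objects and its 420 weight limit:

def knapsack_dp(nodes, capacity=420):
    # best[w] = best value achievable with total weight at most w
    best = [0] * (capacity + 1)
    chosen = [[] for _ in range(capacity + 1)]
    for node in nodes:
        # walk weights downwards so each item is used at most once
        for w in range(capacity, node.weight - 1, -1):
            candidate = best[w - node.weight] + node.value
            if candidate > best[w]:
                best[w] = candidate
                chosen[w] = chosen[w - node.weight] + [node.object_id]
    return best[capacity], chosen[capacity]

knapsack_dp(nodes) returns the optimal value plus the ids of the chosen items without materialising a queue of partial assignments.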
#!/usr/bin/python
# -*- coding:utf-8 -*-
################################################################
# Server application
################################################################
import json
import time
import traceback
from flask import Flask, abort, render_template, redirect, send_from_directory, request, make_response
from flask.ext.bootstrap import Bootstrap
from tools.http_tools import WeiboHandler
from tools.db_operation.db_tools import save_user_log_info, get_user_log_info, batch_put_info, CONTENT_INFO, SCRAP_INFO, put_info, get_info, put_scrap_info, get_scraped_weibo_info
from tools.__init__ import debug_flag
from tools.scrap_tools import scrap_user
from multiprocessing import Process
global log_handler
global search_user_list
log_handler = {}
search_user_list = {}
process_list = []


server = Flask(__name__)
bootstrap = Bootstrap(server)


def read_wh(username):
    if log_handler.get(username) is None:
        log_handler[username] = WeiboHandler(username, '', 'flask_server/static/png/')
    return log_handler[username]


def read_cookie():
    username = request.cookies.get('username')
    if username is None:
        user_list = []
    else:
        user_list = [{'username': username}]
    return user_list


@server.route('/')
def index():
    user_list = read_cookie()
    return render_template('index.html', user_list=user_list)


@server.route('/signup')
def sign_up():
    return redirect('http://weibo.com/signup/signup.php')


@server.route('/login', methods=['POST'])
def log_in():
    username = request.form['id']
    wh = read_wh(username)
    wh.passwd = request.form['passwd']
    vercode = request.form['vercode']
    log_flag = request.form['logflag']
    if log_flag == '1':
        resp = make_response(json.dumps({'stat': '200', 'furl': request.form['ip']}))
        resp.set_cookie('username', username)
        return resp
    # log_handler.prelog_data = get_user_log_info(username)
    data2, replace_url = wh.do_log_req(vercode)
    if int(data2['retcode'][0]) == 0:
        wh.final_log_req(replace_url)
        resp = make_response(json.dumps({'stat': '200', 'furl': request.form['ip']}))
        resp.set_cookie('username', username)
        return resp
    print 'Log in failed ... retcode:', data2['retcode'][0], ', reason:', data2['reason'][0].decode('gbk')
    no = wh.get_vercode()
    return json.dumps({'stat': '502', 'reason': data2['reason'][0].decode('gbk'), 'vercode_no': no})


@server.route('/check_log', methods=['POST'])
def check_log():
    username = request.form['id']
    wh = read_wh(username)
    wh.check_log_status(wh.open_weibo_page())
    if wh.log_flag:
        return json.dumps({'stat': '200'})
    prelog = wh.prelog()
    # save_user_log_info(username, prelog)
    try:
        if prelog['showpin'] == 1:
            no = wh.get_vercode()
            return json.dumps({'stat': '502', 'vercode_no': no})
        return json.dumps({'stat': '501'})
    except Exception, e:
        return json.dumps({'stat': '501'})


@server.route('/logout')
def log_out():
    resp = make_response(redirect('/'))
    resp.set_cookie('username', '', expires=0)
    return resp


@server.route('/static/<path:path>')
def send_static_file(path):
    return send_from_directory('static', path)


@server.route('/search_user/<word>')
def search_user(word):
    username = request.cookies.get('username')
    wh = read_wh(username)
    if username is None:
        return {'stat': '404'}
    search_user_list[username] = wh.get_user_list(word)
    if debug_flag:
        print search_user_list
    return json.dumps({'stat': '200', 'result': search_user_list[username]})


@server.route('/scrap/<user_no>')
def to_scrap(user_no):
    username = request.cookies.get('username')
    if username is None:
        return render_template('index.html')
    user = search_user_list[username][int(user_no)]
    last_record = get_info(SCRAP_INFO, cond=' 1=1 order by id desc limit 1')
    scrap_id = 0 if len(last_record) == 0 else (int(last_record[0]['id']) + 1)
    put_scrap_info(scrap_id, username, user['user_id'], 'Starting to scrape all weibo posts of %s...' % user['title'])
    sp = Process(target=scrap_process, name='%s_%s_%s' % (username, user['user_id'], scrap_id), args=(username, user, scrap_id))
    sp.start()
    process_list.append(sp)
    return redirect('/scrap_listen?d=%s' % scrap_id)


@server.route('/scrap_listen', methods=['GET'])
def scrap_listen():
    scrap_id = request.args.get('d')
    if debug_flag:
        print scrap_id
    user_list = read_cookie()
    return render_template('scrap_listen.html', scrap_id=scrap_id, user_list=user_list)


@server.route('/read_scrap/<scrap_id>/<last_message_id>')
def read_scrap(scrap_id, last_message_id):
    data = get_info(SCRAP_INFO, cond=' scrap_id=%s and id > %s ' % (scrap_id, last_message_id))
    return json.dumps(data)


def scrap_process(username, user, scrap_id):
    try:
        wh = read_wh(username)
        data_list = scrap_user(wh, user, scrap_id, 0)
        batch_put_info(CONTENT_INFO, data_list)
        put_scrap_info(scrap_id, username, user['user_id'], 'Scraping %s finished! %s weibo posts saved to the database...' % (user['title'], len(data_list)), 1)
    except Exception, e:
        traceback.print_exc()
        put_scrap_info(scrap_id, username, user['user_id'], 'An exception occurred and the data was not saved. Please scrape again!', -1)


@server.route('/search')
def search_scrap_result():
    user_list = read_cookie()
    return render_template('/search.html', user_list=user_list)


@server.route('/search_scraped_weibo/<username>', methods=['GET'])
def search_scraped_weibo(username):
    print 'here'
    keyword = request.args.get('keyword')
    print 'there'
    if keyword is None:
        weibo_list = get_scraped_weibo_info(username)
    else:
        weibo_list = get_scraped_weibo_info(username, keyword)
    return json.dumps({'stat': '200', 'result': weibo_list})
normal
{ "blob_id": "2c89f12d633da8da4d500dca910662d351b0958f", "index": 4509, "step-1": "#!/usr/bin/python\n# -*- coding:utf-8 -*-\n################################################################\n# 服务器程序\n################################################################\nimport json\nimport time\nimport traceback\nfrom flask import Flask, abort, render_template, redirect, send_from_directory, request, make_response\nfrom flask.ext.bootstrap import Bootstrap\nfrom tools.http_tools import WeiboHandler\nfrom tools.db_operation.db_tools import save_user_log_info, get_user_log_info, batch_put_info, CONTENT_INFO, SCRAP_INFO, put_info, get_info, put_scrap_info, get_scraped_weibo_info\nfrom tools.__init__ import debug_flag\nfrom tools.scrap_tools import scrap_user\nfrom multiprocessing import Process\nglobal log_handler\nglobal search_user_list\nlog_handler = {}\nsearch_user_list = {}\nprocess_list = []\n\n\nserver = Flask(__name__)\nbootstrap = Bootstrap(server)\n\n\ndef read_wh(username):\n if log_handler.get(username) is None:\n log_handler[username] = WeiboHandler(username, '', 'flask_server/static/png/')\n return log_handler[username]\n\n\ndef read_cookie():\n username = request.cookies.get('username')\n if username is None:\n user_list = []\n else:\n user_list = [{'username': username}]\n return user_list\n\n\n@server.route('/')\ndef index():\n user_list = read_cookie()\n return render_template('index.html', user_list=user_list)\n\n\n@server.route('/signup')\ndef sign_up():\n return redirect('http://weibo.com/signup/signup.php')\n\n\n@server.route('/login', methods=['POST'])\ndef log_in():\n username = request.form['id']\n wh = read_wh(username)\n wh.passwd = request.form['passwd']\n vercode = request.form['vercode']\n log_flag = request.form['logflag']\n if log_flag == '1':\n resp = make_response(json.dumps({'stat': '200', 'furl': request.form['ip']}))\n resp.set_cookie('username', username)\n return resp\n # log_handler.prelog_data = get_user_log_info(username)\n data2, replace_url = wh.do_log_req(vercode)\n if int(data2['retcode'][0]) == 0:\n wh.final_log_req(replace_url)\n resp = make_response(json.dumps({'stat': '200', 'furl': request.form['ip']}))\n resp.set_cookie('username', username)\n return resp\n print 'Log in failed ... 
retcode:', data2['retcode'][0], ', reason:', data2['reason'][0].decode('gbk')\n no = wh.get_vercode()\n return json.dumps({'stat': '502', 'reason': data2['reason'][0].decode('gbk'), 'vercode_no': no})\n\n\n@server.route('/check_log', methods=['POST'])\ndef check_log():\n username = request.form['id']\n wh = read_wh(username)\n wh.check_log_status(wh.open_weibo_page())\n if wh.log_flag:\n return json.dumps({'stat': '200'})\n prelog = wh.prelog()\n # save_user_log_info(username, prelog)\n try:\n if prelog['showpin'] == 1:\n no = wh.get_vercode()\n return json.dumps({'stat': '502', 'vercode_no': no})\n return json.dumps({'stat': '501'})\n except Exception, e:\n return json.dumps({'stat': '501'})\n\n\n@server.route('/logout')\ndef log_out():\n resp = make_response(redirect('/'))\n resp.set_cookie('username', '', expires=0)\n return resp\n\n\n@server.route('/static/<path:path>')\ndef send_static_file(path):\n return send_from_directory('static', path)\n\n\n@server.route('/search_user/<word>')\ndef search_user(word):\n username = request.cookies.get('username')\n wh = read_wh(username)\n if username is None:\n return {'stat': '404'}\n search_user_list[username] = wh.get_user_list(word)\n if debug_flag:\n print search_user_list\n return json.dumps({'stat': '200', 'result': search_user_list[username]})\n\n\n@server.route('/scrap/<user_no>')\ndef to_scrap(user_no):\n username = request.cookies.get('username')\n if username is None:\n return render_template('index.html')\n user = search_user_list[username][int(user_no)]\n last_record = get_info(SCRAP_INFO, cond=' 1=1 order by id desc limit 1')\n scrap_id = 0 if len(last_record) == 0 else (int(last_record[0]['id']) + 1)\n put_scrap_info(scrap_id, username, user['user_id'], '开始爬取%s的所有微博内容...' % user['title'])\n sp = Process(target=scrap_process, name='%s_%s_%s' % (username, user['user_id'], scrap_id), args=(username, user, scrap_id))\n sp.start()\n process_list.append(sp)\n return redirect('/scrap_listen?d=%s' % scrap_id)\n\n\n@server.route('/scrap_listen', methods=['GET'])\ndef scrap_listen():\n scrap_id = request.args.get('d')\n if debug_flag:\n print scrap_id\n user_list = read_cookie()\n return render_template('scrap_listen.html', scrap_id=scrap_id, user_list=user_list)\n\n\n@server.route('/read_scrap/<scrap_id>/<last_message_id>')\ndef read_scrap(scrap_id, last_message_id):\n data = get_info(SCRAP_INFO, cond=' scrap_id=%s and id > %s ' % (scrap_id, last_message_id))\n return json.dumps(data)\n\n\ndef scrap_process(username, user, scrap_id):\n try:\n wh = read_wh(username)\n data_list = scrap_user(wh, user, scrap_id, 0)\n batch_put_info(CONTENT_INFO, data_list)\n put_scrap_info(scrap_id, username, user['user_id'], '爬取完毕!共爬取%s%s条微博.保存至数据库....' % (user['title'], len(data_list)), 1)\n except Exception, e:\n traceback.print_exc()\n put_scrap_info(scrap_id, username, user['user_id'], '出现异常,数据未保存,请重新爬取数据!', -1)\n\n\n@server.route('/search')\ndef search_scrap_result():\n user_list = read_cookie()\n return render_template('/search.html', user_list=user_list)\n\n\n@server.route('/search_scraped_weibo/<username>', methods=['GET'])\ndef search_scraped_weibo(username):\n print 'here'\n keyword = request.args.get('keyword')\n print 'there'\n if keyword is None:\n weibo_list = get_scraped_weibo_info(username)\n else:\n weibo_list = get_scraped_weibo_info(username, keyword)\n return json.dumps({'stat': '200', 'result': weibo_list})\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
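The routes in this record trust a plain username cookie, which any client can rewrite to impersonate another user. Flask's signed session cookie keeps the same flow but detects tampering; a minimal sketch (the secret key literal is a placeholder, not a value from the record):

from flask import Flask, session

server = Flask(__name__)
server.secret_key = 'replace-with-a-random-secret'  # placeholder; load from config in practice


def remember_user(username):
    session['username'] = username  # stored in a cookie signed with secret_key


def read_cookie():
    username = session.get('username')
    if username is None:
        return []
    return [{'username': username}]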
def convertEnEntier(nombre): result = ""; if (nombre == 4): result = "IV" if (nombre == 3): result = "III" if (nombre == 2): result = "II" if (nombre == 1): result = "I" return result print (convertEnEntier(1)) print (convertEnEntier(2)) print (convertEnEntier(3))
normal
{ "blob_id": "ef7fad5019e79950e8fad56404e9ba5d302cfe1c", "index": 7596, "step-1": "<mask token>\n", "step-2": "def convertEnEntier(nombre):\n result = ''\n if nombre == 4:\n result = 'IV'\n if nombre == 3:\n result = 'III'\n if nombre == 2:\n result = 'II'\n if nombre == 1:\n result = 'I'\n return result\n\n\n<mask token>\n", "step-3": "def convertEnEntier(nombre):\n result = ''\n if nombre == 4:\n result = 'IV'\n if nombre == 3:\n result = 'III'\n if nombre == 2:\n result = 'II'\n if nombre == 1:\n result = 'I'\n return result\n\n\nprint(convertEnEntier(1))\nprint(convertEnEntier(2))\nprint(convertEnEntier(3))\n", "step-4": "\r\ndef convertEnEntier(nombre):\r\n\r\n result = \"\";\r\n if (nombre == 4):\r\n result = \"IV\"\r\n if (nombre == 3):\r\n result = \"III\"\r\n if (nombre == 2):\r\n result = \"II\"\r\n if (nombre == 1):\r\n result = \"I\"\r\n\r\n return result\r\n\r\n\r\nprint (convertEnEntier(1))\r\nprint (convertEnEntier(2))\r\nprint (convertEnEntier(3))\r\n\r\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
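convertEnEntier covers only 1 through 4 and, despite the name (French for "convert to integer"), maps an integer to a Roman numeral. A general greedy version for 1-3999, a sketch using the usual value/symbol table:

def to_roman(n):
    # greedy conversion for 1 <= n <= 3999
    table = [(1000, 'M'), (900, 'CM'), (500, 'D'), (400, 'CD'),
             (100, 'C'), (90, 'XC'), (50, 'L'), (40, 'XL'),
             (10, 'X'), (9, 'IX'), (5, 'V'), (4, 'IV'), (1, 'I')]
    result = ''
    for value, symbol in table:
        while n >= value:
            result += symbol
            n -= value
    return result

print(to_roman(4))     # IV
print(to_roman(1987))  # MCMLXXXVII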
<|reserved_special_token_0|> <|reserved_special_token_1|> def isPrime(x): if x == 1: return False for d in range(1, int(x ** 0.5)): if x == d + 1: continue if x % (d + 1) == 0: return False else: return True <|reserved_special_token_0|> <|reserved_special_token_1|> def isPrime(x): if x == 1: return False for d in range(1, int(x ** 0.5)): if x == d + 1: continue if x % (d + 1) == 0: return False else: return True <|reserved_special_token_0|> for x in range(N, M + 1): if isPrime(x): sum += x if min > x: min = x if sum == 0: print(-1) else: print(sum) print(min) <|reserved_special_token_1|> def isPrime(x): if x == 1: return False for d in range(1, int(x ** 0.5)): if x == d + 1: continue if x % (d + 1) == 0: return False else: return True N = int(input()) M = int(input()) sum = 0 min = 10001 for x in range(N, M + 1): if isPrime(x): sum += x if min > x: min = x if sum == 0: print(-1) else: print(sum) print(min) <|reserved_special_token_1|> #https://www.acmicpc.net/problem/2581 def isPrime(x): if x==1: return False for d in range(1,int(x**0.5)): if x==d+1: continue if x%(d+1)==0: return False else: return True N=int(input()) M=int(input()) sum=0 min=10001 for x in range(N,M+1): if isPrime(x): sum+=x if min>x: min=x if sum==0: print(-1) else: print(sum) print(min)
flexible
{ "blob_id": "37d465043eddd34c4453fd7e31b08d0ba58b725f", "index": 4351, "step-1": "<mask token>\n", "step-2": "def isPrime(x):\n if x == 1:\n return False\n for d in range(1, int(x ** 0.5)):\n if x == d + 1:\n continue\n if x % (d + 1) == 0:\n return False\n else:\n return True\n\n\n<mask token>\n", "step-3": "def isPrime(x):\n if x == 1:\n return False\n for d in range(1, int(x ** 0.5)):\n if x == d + 1:\n continue\n if x % (d + 1) == 0:\n return False\n else:\n return True\n\n\n<mask token>\nfor x in range(N, M + 1):\n if isPrime(x):\n sum += x\n if min > x:\n min = x\nif sum == 0:\n print(-1)\nelse:\n print(sum)\n print(min)\n", "step-4": "def isPrime(x):\n if x == 1:\n return False\n for d in range(1, int(x ** 0.5)):\n if x == d + 1:\n continue\n if x % (d + 1) == 0:\n return False\n else:\n return True\n\n\nN = int(input())\nM = int(input())\nsum = 0\nmin = 10001\nfor x in range(N, M + 1):\n if isPrime(x):\n sum += x\n if min > x:\n min = x\nif sum == 0:\n print(-1)\nelse:\n print(sum)\n print(min)\n", "step-5": "#https://www.acmicpc.net/problem/2581\n\ndef isPrime(x):\n if x==1:\n return False\n for d in range(1,int(x**0.5)):\n if x==d+1:\n continue\n if x%(d+1)==0:\n return False\n else:\n return True\n\nN=int(input())\nM=int(input())\nsum=0\nmin=10001\nfor x in range(N,M+1):\n if isPrime(x):\n sum+=x\n if min>x:\n min=x\nif sum==0:\n print(-1)\nelse:\n print(sum)\n print(min)\n \n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
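isPrime in this record walks divisors d + 1 from 2 up to int(x ** 0.5), with a special case so x is never compared against itself; the bound reads more directly when written over the divisor. A cleaner trial-division sketch, plus a sieve that answers the whole [N, M] range at once (the 10001 sentinel in the record suggests M <= 10000):

def is_prime(x):
    if x < 2:
        return False
    d = 2
    while d * d <= x:
        if x % d == 0:
            return False
        d += 1
    return True


def primes_between(n, m):
    # sieve of Eratosthenes over [0, m]
    sieve = [True] * (m + 1)
    sieve[0:2] = [False, False]
    for d in range(2, int(m ** 0.5) + 1):
        if sieve[d]:
            sieve[d * d::d] = [False] * len(sieve[d * d::d])
    return [x for x in range(n, m + 1) if sieve[x]]

With primes_between(N, M), the required sum and minimum fall out of a single pass over the returned list.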
<|reserved_special_token_0|> class CotisationComputes: @staticmethod def current_year(): now = datetime.datetime.now() if now.month > 8: return now.year + 1 return now.year @staticmethod def registration_current_year(): now = datetime.datetime.now() if now.month > 8: return now.year return now.year - 1 <|reserved_special_token_0|> @staticmethod def is_valid_month(month): now = datetime.datetime.now() if now.month > 8: return (month >= now.month and month < 13 or month >= 1 and month < 9) else: return month >= now.month and month < 9 @staticmethod def price_to_pay(year_price, month_price, already_paid, number_months_to_pay): months_price = number_months_to_pay * month_price BrieLogging.get().debug('already paid : ' + str(already_paid)) BrieLogging.get().debug('months price : ' + str(months_price)) if already_paid + months_price > year_price: months_price = max(0, year_price - already_paid) return months_price @staticmethod def anniversary_from_ldap_items(ldap_cotisations): result = [] months = [] SORT_ORDER = {(9): 0, (10): 1, (11): 2, (12): 3, (1): 4, (2): 5, (3 ): 6, (4): 7, (5): 8, (6): 9, (7): 10, (8): 11} for cotisation in ldap_cotisations: cotisation_months = [] anniversary_data = cotisation.get('x-time').first() anniversary_datetime = datetime.datetime.strptime(anniversary_data, '%Y-%m-%d %H:%M:%S.%f') for month in cotisation.get('x-validMonth').all(): months.append(int(month)) cotisation_months.append(int(month)) cotisation_months.sort(key=lambda val: SORT_ORDER[val]) result.append((anniversary_datetime, cotisation_months)) anniversary = 0 result = sorted(result) previousMonth = -1 months.sort(key=lambda val: SORT_ORDER[val]) for resultat in result: if previousMonth != -1 and (resultat[1][0] == 1 and previousMonth == 12 or resultat[1][0] == previousMonth + 1): previousMonth = resultat[1][-1] continue else: previousMonth = resultat[1][-1] anniversary_day = resultat[0].day anniversary_month = months[-1] + 1 if anniversary_month == 13: anniversary_month = 1 if anniversary_month > 9 or resultat[0].month < 9: anniversary_year = resultat[0].year else: anniversary_year = resultat[0].year + 1 anniversary = datetime.datetime.strptime(str(anniversary_year) + '-' + str(anniversary_month) + '-1 0:0', '%Y-%m-%d %H:%M' ) + datetime.timedelta(days=anniversary_day - 1) if anniversary == 0: anniversary = datetime.datetime(1999, 1, 31, 12, 0) return anniversary @staticmethod def generate_new_anniversary_from_ldap_items(ldap_cotisations): anniversary = CotisationComputes.anniversary_from_ldap_items( ldap_cotisations) now = datetime.datetime.now() if anniversary == 0 or (now - anniversary).days > 30: return now else: return anniversary <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> @staticmethod def is_cotisation_paid(member_dn, user_session, residence_dn, cotisations=None, anniversary=None, verification_old_member=True): if verification_old_member and CotisationComputes.is_old_member( member_dn, user_session, residence_dn, cotisations): return False if cotisations is None: current_year = CotisationComputes.current_year() now = datetime.datetime.now() if cotisations is None: cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year) if anniversary is None: anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) delta = now - anniversary if (delta.days > 7 and now.month == 9 and CotisationComputes. 
is_cotisation_was_paid_last_year(member_dn, user_session, residence_dn)): return True return delta.days <= 7 @staticmethod def is_cotisation_late(member_dn, user_session, residence_dn, cotisations=None, anniversary=None, verification_old_member=True): if verification_old_member and CotisationComputes.is_old_member( member_dn, user_session, residence_dn, cotisations): return False if cotisations is None: current_year = CotisationComputes.current_year() now = datetime.datetime.now() if cotisations is None: cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year) if anniversary is None: anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) delta = now - anniversary return delta.days <= 30 and delta.days > 7 <|reserved_special_token_0|> @staticmethod def is_no_cotisation(member_dn, user_session, residence_dn, cotisations =None, anniversary=None, verification_old_member=True): if verification_old_member and CotisationComputes.is_old_member( member_dn, user_session, residence_dn, cotisations): return False if cotisations is None: current_year = CotisationComputes.current_year() now = datetime.datetime.now() if cotisations is None: cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year) if anniversary is None: anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) delta = now - anniversary return delta.days > 30 <|reserved_special_token_0|> @staticmethod def members_status_from_list_cotisations(user_session, residence_dn, cotisations): members_dict = dict() for cotisation in cotisations: cotisation_dn = cotisation.dn.split(',') member_dn = '' for i in range(3, len(cotisation_dn)): if i != 3: member_dn += ',' member_dn += cotisation_dn[i] if not member_dn in members_dict: members_dict[member_dn] = [cotisation] else: members_dict[member_dn].append(cotisation) BrieLogging.get().debug(str(datetime.datetime.now()) + 'members_status_from_list_cotisations:' + str(len(members_dict))) old_members = [] cotisation_paid_members = [] cotisation_late_members = [] no_cotisation_members = [] for member_dn, cotisations in members_dict.iteritems(): old_members.append(member_dn) rooms = Room.get_rooms(user_session, residence_dn) for room in rooms: if room.has('x-memberIn') and room.get('x-memberIn').first( ) in members_dict and room.get('x-memberIn').first( ) in old_members: old_members.remove(room.get('x-memberIn').first()) for member_dn, cotisations in members_dict.iteritems(): anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) if CotisationComputes.is_cotisation_paid(member_dn, user_session, residence_dn, cotisations, anniversary, False): cotisation_paid_members.append(member_dn) elif CotisationComputes.is_cotisation_late(member_dn, user_session, residence_dn, cotisations, anniversary, False): cotisation_late_members.append(member_dn) elif CotisationComputes.is_no_cotisation(member_dn, user_session, residence_dn, cotisations, anniversary, False): no_cotisation_members.append(member_dn) else: BrieLogging.get().debug( 'aurore_helper:393 : member with weird status !') return dict(old_members=old_members, cotisation_paid_members= cotisation_paid_members, cotisation_late_members= cotisation_late_members, no_cotisation_members= no_cotisation_members) <|reserved_special_token_1|> <|reserved_special_token_0|> class CotisationComputes: @staticmethod def current_year(): now = datetime.datetime.now() if now.month > 8: return now.year + 1 return now.year @staticmethod def registration_current_year(): 
now = datetime.datetime.now() if now.month > 8: return now.year return now.year - 1 <|reserved_special_token_0|> @staticmethod def is_valid_month(month): now = datetime.datetime.now() if now.month > 8: return (month >= now.month and month < 13 or month >= 1 and month < 9) else: return month >= now.month and month < 9 @staticmethod def price_to_pay(year_price, month_price, already_paid, number_months_to_pay): months_price = number_months_to_pay * month_price BrieLogging.get().debug('already paid : ' + str(already_paid)) BrieLogging.get().debug('months price : ' + str(months_price)) if already_paid + months_price > year_price: months_price = max(0, year_price - already_paid) return months_price @staticmethod def anniversary_from_ldap_items(ldap_cotisations): result = [] months = [] SORT_ORDER = {(9): 0, (10): 1, (11): 2, (12): 3, (1): 4, (2): 5, (3 ): 6, (4): 7, (5): 8, (6): 9, (7): 10, (8): 11} for cotisation in ldap_cotisations: cotisation_months = [] anniversary_data = cotisation.get('x-time').first() anniversary_datetime = datetime.datetime.strptime(anniversary_data, '%Y-%m-%d %H:%M:%S.%f') for month in cotisation.get('x-validMonth').all(): months.append(int(month)) cotisation_months.append(int(month)) cotisation_months.sort(key=lambda val: SORT_ORDER[val]) result.append((anniversary_datetime, cotisation_months)) anniversary = 0 result = sorted(result) previousMonth = -1 months.sort(key=lambda val: SORT_ORDER[val]) for resultat in result: if previousMonth != -1 and (resultat[1][0] == 1 and previousMonth == 12 or resultat[1][0] == previousMonth + 1): previousMonth = resultat[1][-1] continue else: previousMonth = resultat[1][-1] anniversary_day = resultat[0].day anniversary_month = months[-1] + 1 if anniversary_month == 13: anniversary_month = 1 if anniversary_month > 9 or resultat[0].month < 9: anniversary_year = resultat[0].year else: anniversary_year = resultat[0].year + 1 anniversary = datetime.datetime.strptime(str(anniversary_year) + '-' + str(anniversary_month) + '-1 0:0', '%Y-%m-%d %H:%M' ) + datetime.timedelta(days=anniversary_day - 1) if anniversary == 0: anniversary = datetime.datetime(1999, 1, 31, 12, 0) return anniversary @staticmethod def generate_new_anniversary_from_ldap_items(ldap_cotisations): anniversary = CotisationComputes.anniversary_from_ldap_items( ldap_cotisations) now = datetime.datetime.now() if anniversary == 0 or (now - anniversary).days > 30: return now else: return anniversary <|reserved_special_token_0|> @staticmethod def is_member_to_delete(member, user_session, residence_dn): current_year = CotisationComputes.current_year() cotisations_this_year = Cotisation.cotisations_of_member(user_session, member.dn, current_year) cotisations_previous_year = Cotisation.cotisations_of_member( user_session, member.dn, current_year - 1) if cotisations_this_year == [] and cotisations_previous_year == []: return True now = datetime.datetime.now() if now.month < 9: last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0) else: last_year = datetime.datetime(now.year, 8, 31, 12, 0) anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations_previous_year) delta = last_year - anniversary return cotisations_this_year == [] and (delta.days > 7 or Room. 
get_by_member_dn(user_session, residence_dn, member.dn) == None) <|reserved_special_token_0|> @staticmethod def is_cotisation_paid(member_dn, user_session, residence_dn, cotisations=None, anniversary=None, verification_old_member=True): if verification_old_member and CotisationComputes.is_old_member( member_dn, user_session, residence_dn, cotisations): return False if cotisations is None: current_year = CotisationComputes.current_year() now = datetime.datetime.now() if cotisations is None: cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year) if anniversary is None: anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) delta = now - anniversary if (delta.days > 7 and now.month == 9 and CotisationComputes. is_cotisation_was_paid_last_year(member_dn, user_session, residence_dn)): return True return delta.days <= 7 @staticmethod def is_cotisation_late(member_dn, user_session, residence_dn, cotisations=None, anniversary=None, verification_old_member=True): if verification_old_member and CotisationComputes.is_old_member( member_dn, user_session, residence_dn, cotisations): return False if cotisations is None: current_year = CotisationComputes.current_year() now = datetime.datetime.now() if cotisations is None: cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year) if anniversary is None: anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) delta = now - anniversary return delta.days <= 30 and delta.days > 7 <|reserved_special_token_0|> @staticmethod def is_no_cotisation(member_dn, user_session, residence_dn, cotisations =None, anniversary=None, verification_old_member=True): if verification_old_member and CotisationComputes.is_old_member( member_dn, user_session, residence_dn, cotisations): return False if cotisations is None: current_year = CotisationComputes.current_year() now = datetime.datetime.now() if cotisations is None: cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year) if anniversary is None: anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) delta = now - anniversary return delta.days > 30 <|reserved_special_token_0|> @staticmethod def members_status_from_list_cotisations(user_session, residence_dn, cotisations): members_dict = dict() for cotisation in cotisations: cotisation_dn = cotisation.dn.split(',') member_dn = '' for i in range(3, len(cotisation_dn)): if i != 3: member_dn += ',' member_dn += cotisation_dn[i] if not member_dn in members_dict: members_dict[member_dn] = [cotisation] else: members_dict[member_dn].append(cotisation) BrieLogging.get().debug(str(datetime.datetime.now()) + 'members_status_from_list_cotisations:' + str(len(members_dict))) old_members = [] cotisation_paid_members = [] cotisation_late_members = [] no_cotisation_members = [] for member_dn, cotisations in members_dict.iteritems(): old_members.append(member_dn) rooms = Room.get_rooms(user_session, residence_dn) for room in rooms: if room.has('x-memberIn') and room.get('x-memberIn').first( ) in members_dict and room.get('x-memberIn').first( ) in old_members: old_members.remove(room.get('x-memberIn').first()) for member_dn, cotisations in members_dict.iteritems(): anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) if CotisationComputes.is_cotisation_paid(member_dn, user_session, residence_dn, cotisations, anniversary, False): cotisation_paid_members.append(member_dn) elif CotisationComputes.is_cotisation_late(member_dn, 
user_session, residence_dn, cotisations, anniversary, False): cotisation_late_members.append(member_dn) elif CotisationComputes.is_no_cotisation(member_dn, user_session, residence_dn, cotisations, anniversary, False): no_cotisation_members.append(member_dn) else: BrieLogging.get().debug( 'aurore_helper:393 : member with weird status !') return dict(old_members=old_members, cotisation_paid_members= cotisation_paid_members, cotisation_late_members= cotisation_late_members, no_cotisation_members= no_cotisation_members) <|reserved_special_token_1|> <|reserved_special_token_0|> class CotisationComputes: @staticmethod def current_year(): now = datetime.datetime.now() if now.month > 8: return now.year + 1 return now.year @staticmethod def registration_current_year(): now = datetime.datetime.now() if now.month > 8: return now.year return now.year - 1 @staticmethod def get_available_months(start, end, paid_months=[]): next_months_available = [] if start > 12 or end > 12: raise Exception('invalid start or end') if start > 8 and end > 8: next_months_available = range(start, end + 1) elif start <= 8 and end < 9: next_months_available = range(start, end + 1) elif start > 8: next_months_available = range(start, 13) + range(1, end + 1) else: raise Exception('invalid start and end') if paid_months == []: return next_months_available BrieLogging.get().debug(next_months_available) available_months = [month for month in next_months_available if month not in paid_months] return available_months @staticmethod def is_valid_month(month): now = datetime.datetime.now() if now.month > 8: return (month >= now.month and month < 13 or month >= 1 and month < 9) else: return month >= now.month and month < 9 @staticmethod def price_to_pay(year_price, month_price, already_paid, number_months_to_pay): months_price = number_months_to_pay * month_price BrieLogging.get().debug('already paid : ' + str(already_paid)) BrieLogging.get().debug('months price : ' + str(months_price)) if already_paid + months_price > year_price: months_price = max(0, year_price - already_paid) return months_price @staticmethod def anniversary_from_ldap_items(ldap_cotisations): result = [] months = [] SORT_ORDER = {(9): 0, (10): 1, (11): 2, (12): 3, (1): 4, (2): 5, (3 ): 6, (4): 7, (5): 8, (6): 9, (7): 10, (8): 11} for cotisation in ldap_cotisations: cotisation_months = [] anniversary_data = cotisation.get('x-time').first() anniversary_datetime = datetime.datetime.strptime(anniversary_data, '%Y-%m-%d %H:%M:%S.%f') for month in cotisation.get('x-validMonth').all(): months.append(int(month)) cotisation_months.append(int(month)) cotisation_months.sort(key=lambda val: SORT_ORDER[val]) result.append((anniversary_datetime, cotisation_months)) anniversary = 0 result = sorted(result) previousMonth = -1 months.sort(key=lambda val: SORT_ORDER[val]) for resultat in result: if previousMonth != -1 and (resultat[1][0] == 1 and previousMonth == 12 or resultat[1][0] == previousMonth + 1): previousMonth = resultat[1][-1] continue else: previousMonth = resultat[1][-1] anniversary_day = resultat[0].day anniversary_month = months[-1] + 1 if anniversary_month == 13: anniversary_month = 1 if anniversary_month > 9 or resultat[0].month < 9: anniversary_year = resultat[0].year else: anniversary_year = resultat[0].year + 1 anniversary = datetime.datetime.strptime(str(anniversary_year) + '-' + str(anniversary_month) + '-1 0:0', '%Y-%m-%d %H:%M' ) + datetime.timedelta(days=anniversary_day - 1) if anniversary == 0: anniversary = datetime.datetime(1999, 1, 31, 12, 0) return 
anniversary @staticmethod def generate_new_anniversary_from_ldap_items(ldap_cotisations): anniversary = CotisationComputes.anniversary_from_ldap_items( ldap_cotisations) now = datetime.datetime.now() if anniversary == 0 or (now - anniversary).days > 30: return now else: return anniversary <|reserved_special_token_0|> @staticmethod def is_member_to_delete(member, user_session, residence_dn): current_year = CotisationComputes.current_year() cotisations_this_year = Cotisation.cotisations_of_member(user_session, member.dn, current_year) cotisations_previous_year = Cotisation.cotisations_of_member( user_session, member.dn, current_year - 1) if cotisations_this_year == [] and cotisations_previous_year == []: return True now = datetime.datetime.now() if now.month < 9: last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0) else: last_year = datetime.datetime(now.year, 8, 31, 12, 0) anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations_previous_year) delta = last_year - anniversary return cotisations_this_year == [] and (delta.days > 7 or Room. get_by_member_dn(user_session, residence_dn, member.dn) == None) @staticmethod def is_cotisation_was_paid_last_year(member_dn, user_session, residence_dn, cotisations=None, anniversary=None): if cotisations is None: current_year = CotisationComputes.current_year() - 1 now = datetime.datetime.now() if now.month < 9: last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0) else: last_year = datetime.datetime(now.year, 8, 31, 12, 0) if cotisations is None: cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year) if anniversary is None: anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) delta = last_year - anniversary return delta.days <= 7 @staticmethod def is_cotisation_paid(member_dn, user_session, residence_dn, cotisations=None, anniversary=None, verification_old_member=True): if verification_old_member and CotisationComputes.is_old_member( member_dn, user_session, residence_dn, cotisations): return False if cotisations is None: current_year = CotisationComputes.current_year() now = datetime.datetime.now() if cotisations is None: cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year) if anniversary is None: anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) delta = now - anniversary if (delta.days > 7 and now.month == 9 and CotisationComputes. 
is_cotisation_was_paid_last_year(member_dn, user_session, residence_dn)): return True return delta.days <= 7 @staticmethod def is_cotisation_late(member_dn, user_session, residence_dn, cotisations=None, anniversary=None, verification_old_member=True): if verification_old_member and CotisationComputes.is_old_member( member_dn, user_session, residence_dn, cotisations): return False if cotisations is None: current_year = CotisationComputes.current_year() now = datetime.datetime.now() if cotisations is None: cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year) if anniversary is None: anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) delta = now - anniversary return delta.days <= 30 and delta.days > 7 def cotisation_late_reminder(member_dn, user_session, residence_dn): member = Member.get_by_dn(user_session, member_dn) from_mail = 'noreply@fede-aurore.net' to_mail = [member.mail.first()] residence_name = Residences.get_name_by_dn(user_session, residence_dn ).decode('utf-8').encode('ascii', 'ignore') @staticmethod def is_no_cotisation(member_dn, user_session, residence_dn, cotisations =None, anniversary=None, verification_old_member=True): if verification_old_member and CotisationComputes.is_old_member( member_dn, user_session, residence_dn, cotisations): return False if cotisations is None: current_year = CotisationComputes.current_year() now = datetime.datetime.now() if cotisations is None: cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year) if anniversary is None: anniversary = CotisationComputes.anniversary_from_ldap_items( cotisations) delta = now - anniversary return delta.days > 30 @staticmethod def members_status_from_residence(user_session, residence_dn): members = Member.get_all(user_session, residence_dn) old_members = [] cotisation_paid_members = [] cotisation_late_members = [] no_cotisation_members = [] for member in members: current_year = CotisationComputes.current_year() cotisations = Cotisation.cotisations_of_member(user_session, member.dn, current_year) if CotisationComputes.is_old_member(member.dn, user_session, residence_dn, cotisations): old_members.append(member) elif CotisationComputes.is_cotisation_paid(member.dn, user_session, residence_dn, cotisations): cotisation_paid_members.append(member) elif CotisationComputes.is_cotisation_late(member.dn, user_session, residence_dn, cotisations): cotisation_late_members.append(member) elif CotisationComputes.is_no_cotisation(member.dn, user_session, residence_dn, cotisations): no_cotisation_members.append(member) else: BrieLogging.get().warn( 'aurore_helper:336 member with weird status !') return dict(old_members=old_members, cotisation_paid_members= cotisation_paid_members, cotisation_late_members= cotisation_late_members, no_cotisation_members= no_cotisation_members) @staticmethod def members_status_from_list_cotisations(user_session, residence_dn, cotisations): members_dict = dict() for cotisation in cotisations: cotisation_dn = cotisation.dn.split(',') member_dn = '' for i in range(3, len(cotisation_dn)): if i != 3: member_dn += ',' member_dn += cotisation_dn[i] if not member_dn in members_dict: members_dict[member_dn] = [cotisation] else: members_dict[member_dn].append(cotisation) BrieLogging.get().debug(str(datetime.datetime.now()) + 'members_status_from_list_cotisations:' + str(len(members_dict))) old_members = [] cotisation_paid_members = [] cotisation_late_members = [] no_cotisation_members = [] for member_dn, cotisations in 
from brie.config import ldap_config
from brie.model.ldap import *
from brie.lib.log_helper import BrieLogging
import datetime
import smtplib

class Residences:

    @staticmethod
    def get_dn_by_name(user_session, name):
        result = user_session.ldap_bind.search_first(ldap_config.liste_residence_dn, "(cn=" + name + ")")

        if result is None:
            return None
        #end if

        return result.uniqueMember.first()
    #end def

    @staticmethod
    def get_name_by_dn(user_session, dn):
        result = user_session.ldap_bind.search_first(ldap_config.liste_residence_dn, "(uniqueMember=" + dn + ")")

        if result is None:
            return None
        #end if

        return result.cn.first()
    #end def

    @staticmethod
    def get_residences(user_session):
        return user_session.ldap_bind.search(ldap_config.liste_residence_dn, "(objectClass=groupOfUniqueNames)")
    #end def
#end class


class CotisationComputes:

    @staticmethod
    def current_year():
        now = datetime.datetime.now()
        if now.month > 8:
            return now.year + 1

        return now.year
    #end def

    @staticmethod
    def registration_current_year():
        now = datetime.datetime.now()
        if now.month > 8:
            return now.year

        return now.year - 1
    #end def

    @staticmethod
    def get_available_months(start, end, paid_months = []):
        next_months_available = []

        if start > 12 or end > 12:
            raise Exception("invalid start or end")

        if start > 8 and end > 8:
            next_months_available = range(start, end + 1)
        elif start <= 8 and end < 9:
            next_months_available = range(start, end + 1)
        elif start > 8:
            # the academic year wraps: September..December, then January..August
            next_months_available = range(start, 13) + range(1, end + 1)
        else:
            raise Exception("invalid start and end")
        #end if

        if paid_months == []:
            return next_months_available

        BrieLogging.get().debug(next_months_available)
        available_months = [
            month
            for month in next_months_available
            if month not in paid_months
        ]

        return available_months
    #end def

    @staticmethod
    def is_valid_month(month):
        now = datetime.datetime.now()
        if now.month > 8:
            return (month >= now.month and month < 13) or (month >= 1 and month < 9)
        else:
            return month >= now.month and month < 9
        #end if
    #end def

    @staticmethod
    def price_to_pay(year_price, month_price, already_paid, number_months_to_pay):
        months_price = number_months_to_pay * month_price
        BrieLogging.get().debug("already paid : " + str(already_paid))
        BrieLogging.get().debug("months price : " + str(months_price))
        if already_paid + months_price > year_price:
            months_price = max(0, year_price - already_paid)

        return months_price
    #end def

    @staticmethod
    def anniversary_from_ldap_items(ldap_cotisations):
        result = []
        months = []
        SORT_ORDER = {9: 0, 10: 1, 11: 2, 12: 3, 1: 4, 2: 5, 3: 6, 4: 7, 5: 8, 6: 9, 7: 10, 8: 11}
        for cotisation in ldap_cotisations:
            cotisation_months = []
            anniversary_data = cotisation.get("x-time").first()
            anniversary_datetime = datetime.datetime.strptime(anniversary_data, "%Y-%m-%d %H:%M:%S.%f")
            for month in cotisation.get("x-validMonth").all():
                months.append(int(month))
                cotisation_months.append(int(month))
            #end for
            cotisation_months.sort(key=lambda val: SORT_ORDER[val])
            result.append((anniversary_datetime, cotisation_months))
        #end for

        anniversary = 0
        # sort by registration date
        result = sorted(result)
        previousMonth = -1
        months.sort(key=lambda val: SORT_ORDER[val])
        # scan each cotisation
        for resultat in result:
            # if this is not the first cotisation and the cotisations run without
            # interruption (no missing months), the anniversary date stays the same
            if previousMonth != -1 and ( (resultat[1][0] == 1 and previousMonth == 12) or (resultat[1][0] == previousMonth + 1) ):
                previousMonth = resultat[1][-1]
                continue
            # otherwise recompute the anniversary date
            else:
                previousMonth = resultat[1][-1]
            #end if
            anniversary_day = resultat[0].day
            anniversary_month = months[-1] + 1
            if anniversary_month == 13:
                anniversary_month = 1
            if anniversary_month > 9 or resultat[0].month < 9:
                # keep the anniversary year equal to the cotisation year when the
                # anniversary falls between September and December, and also when
                # the cotisation was paid between January and August
                anniversary_year = resultat[0].year
            else:
                # otherwise the cotisation was paid between September and December
                # and runs into the following year, hence the +1
                anniversary_year = resultat[0].year + 1
            anniversary = datetime.datetime.strptime(str(anniversary_year) + "-" + str(anniversary_month) + "-1 0:0", "%Y-%m-%d %H:%M") + datetime.timedelta(days=(anniversary_day - 1))
        #end for

        if anniversary == 0:
            anniversary = datetime.datetime(1999, 1, 31, 12, 0)

        return anniversary
    #end def

    @staticmethod
    # returns the anniversary date: the current date when no cotisation has been
    # paid for more than 30 days, otherwise the current anniversary date
    def generate_new_anniversary_from_ldap_items(ldap_cotisations):
        anniversary = CotisationComputes.anniversary_from_ldap_items(ldap_cotisations)
        now = datetime.datetime.now()
        if anniversary == 0 or (now - anniversary).days > 30:
            return now
        else:
            return anniversary
        #end if
    #end def

    @staticmethod
    # old = no room assigned or no cotisation this year
    def is_old_member(member_dn, user_session, residence_dn, cotisations = None):
        if cotisations is None:
            current_year = CotisationComputes.current_year()
            cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year)
        #end if
        # in September, members are never flagged as old, so that returning
        # members are not disconnected
        return Room.get_by_member_dn(user_session, residence_dn, member_dn) == None or (datetime.datetime.now().month != 9 and cotisations == [])
    #end def

    @staticmethod
    # no cotisation for the new year, and the member had not paid through August last year
    def is_member_to_delete(member, user_session, residence_dn):
        current_year = CotisationComputes.current_year()
        cotisations_this_year = Cotisation.cotisations_of_member(user_session, member.dn, current_year)
        cotisations_previous_year = Cotisation.cotisations_of_member(user_session, member.dn, current_year - 1)

        if cotisations_this_year == [] and cotisations_previous_year == []:
            return True

        now = datetime.datetime.now()
        if now.month < 9:
            last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0)
        else:
            last_year = datetime.datetime(now.year, 8, 31, 12, 0)
        #end if

        anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations_previous_year)

        # no cotisation this year, and the previous one did not run until the end
        # of August last year
        delta = (last_year - anniversary)
        return cotisations_this_year == [] and (delta.days > 7 or Room.get_by_member_dn(user_session, residence_dn, member.dn) == None)
    #end def

    @staticmethod
    # 7 days grace period
    def is_cotisation_was_paid_last_year(member_dn, user_session, residence_dn, cotisations = None, anniversary = None):
        if cotisations is None:
            current_year = CotisationComputes.current_year() - 1
        #end if

        now = datetime.datetime.now()
        if now.month < 9:
            last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0)
        else:
            last_year = datetime.datetime(now.year, 8, 31, 12, 0)
        #end if

        if cotisations is None:
            cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year)
        #end if

        if anniversary is None:
            anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations)
        #end if

        delta = (last_year - anniversary)
        return delta.days <= 7
    #end def

    @staticmethod
    # 7 days grace period
    def is_cotisation_paid(member_dn, user_session, residence_dn, cotisations = None, anniversary = None, verification_old_member = True):
        if verification_old_member and CotisationComputes.is_old_member(member_dn, user_session, residence_dn, cotisations):
            return False
        if cotisations is None:
            current_year = CotisationComputes.current_year()
        #end if
        now = datetime.datetime.now()

        if cotisations is None:
            cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year)
        #end if

        if anniversary is None:
            anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations)
        #end if

        delta = (now - anniversary)
        if delta.days > 7 and now.month == 9 and CotisationComputes.is_cotisation_was_paid_last_year(member_dn, user_session, residence_dn):
            # the member was up to date in August: grant a payment grace period
            # in September - no disconnection
            return True
        #end if

        return delta.days <= 7
    #end def

    @staticmethod
    # less than a month late but more than a week
    def is_cotisation_late(member_dn, user_session, residence_dn, cotisations = None, anniversary = None, verification_old_member = True):
        if verification_old_member and CotisationComputes.is_old_member(member_dn, user_session, residence_dn, cotisations):
            return False
        if cotisations is None:
            current_year = CotisationComputes.current_year()
        #end if
        now = datetime.datetime.now()

        if cotisations is None:
            cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year)
        #end if

        if anniversary is None:
            anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations)
        #end if

        delta = (now - anniversary)
        # print("[DEBUG] late cotisation for user " + member_dn + " now=" + str(now) + " anniversary=" + str(anniversary) + " delta=" + str(delta))
        return delta.days <= 30 and delta.days > 7
    #end def

    @staticmethod
    # reminder e-mail stub: only gathers sender, recipient and residence name;
    # building and sending the message is not implemented yet
    def cotisation_late_reminder(member_dn, user_session, residence_dn):
        member = Member.get_by_dn(user_session, member_dn)
        from_mail = "noreply@fede-aurore.net"
        to_mail = [member.mail.first()]
        residence_name = Residences.get_name_by_dn(user_session, residence_dn).decode("utf-8").encode("ascii", "ignore")
    #end def

    @staticmethod
    # more than a month late
    def is_no_cotisation(member_dn, user_session, residence_dn, cotisations = None, anniversary = None, verification_old_member = True):
        if verification_old_member and CotisationComputes.is_old_member(member_dn, user_session, residence_dn, cotisations):
            return False
        if cotisations is None:
            current_year = CotisationComputes.current_year()
        #end if
        now = datetime.datetime.now()

        if cotisations is None:
            cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year)
        #end if

        if anniversary is None:
            anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations)
        #end if

        delta = (now - anniversary)
        return delta.days > 30
    #end def

    @staticmethod
    def members_status_from_residence(user_session, residence_dn):
        members = Member.get_all(user_session, residence_dn)
        old_members = []
        cotisation_paid_members = []
        cotisation_late_members = []
        no_cotisation_members = []
        for member in members:
            current_year = CotisationComputes.current_year()
            cotisations = Cotisation.cotisations_of_member(user_session, member.dn, current_year)
            if CotisationComputes.is_old_member(member.dn, user_session, residence_dn, cotisations):
                old_members.append(member)
            elif CotisationComputes.is_cotisation_paid(member.dn, user_session, residence_dn, cotisations):
                cotisation_paid_members.append(member)
            elif CotisationComputes.is_cotisation_late(member.dn, user_session, residence_dn, cotisations):
                cotisation_late_members.append(member)
                # print("[DEBUG] late cotisation for user " + member.dn)
            elif CotisationComputes.is_no_cotisation(member.dn, user_session, residence_dn, cotisations):
                no_cotisation_members.append(member)
            else:
                BrieLogging.get().warn("aurore_helper:336 member with weird status !")
            #end if
        #end for
        return dict(old_members=old_members, cotisation_paid_members=cotisation_paid_members, cotisation_late_members=cotisation_late_members, no_cotisation_members=no_cotisation_members)
    #end def

    @staticmethod
    def members_status_from_list_cotisations(user_session, residence_dn, cotisations):
        members_dict = dict()
        for cotisation in cotisations:
            cotisation_dn = cotisation.dn.split(",")
            member_dn = ""
            for i in range(3, len(cotisation_dn)):
                if i != 3:
                    member_dn += ","
                #end if
                member_dn += cotisation_dn[i]
            #end for
            if not member_dn in members_dict:
                members_dict[member_dn] = [cotisation]
            else:
                members_dict[member_dn].append(cotisation)
            #end if
        #end for
        BrieLogging.get().debug(str(datetime.datetime.now()) + "members_status_from_list_cotisations:" + str(len(members_dict)))
        old_members = []
        cotisation_paid_members = []
        cotisation_late_members = []
        no_cotisation_members = []
        # first, tentatively mark every member as an old member
        for member_dn, cotisations in members_dict.iteritems():
            old_members.append(member_dn)
        #end for
        rooms = Room.get_rooms(user_session, residence_dn)
        for room in rooms:
            # for each room, check whether it has an owner and whether that owner
            # has cotisations
            if room.has("x-memberIn") and room.get("x-memberIn").first() in members_dict and room.get("x-memberIn").first() in old_members:
                # if so, remove the owner from old_members
                old_members.remove(room.get("x-memberIn").first())
            #end if
        #end for
        for member_dn, cotisations in members_dict.iteritems():
            anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations)
            if CotisationComputes.is_cotisation_paid(member_dn, user_session, residence_dn, cotisations, anniversary, False):
                cotisation_paid_members.append(member_dn)
            elif CotisationComputes.is_cotisation_late(member_dn, user_session, residence_dn, cotisations, anniversary, False):
                cotisation_late_members.append(member_dn)
                # print("[DEBUG] late cotisation for user " + member_dn)
            elif CotisationComputes.is_no_cotisation(member_dn, user_session, residence_dn, cotisations, anniversary, False):
                no_cotisation_members.append(member_dn)
            else:
                BrieLogging.get().debug("aurore_helper:393 : member with weird status !")
            #end if
        #end for
        return dict(old_members=old_members, cotisation_paid_members=cotisation_paid_members, cotisation_late_members=cotisation_late_members, no_cotisation_members=no_cotisation_members)
    #end def
#end class
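A minimal usage sketch of the helpers above, for reference. It is illustrative only and rests on a few assumptions: Python 2 (the module relies on dict.iteritems and on range returning a list), the brie package being importable so BrieLogging works, and the module being importable as aurore_helper (a name suggested by its own log strings, not confirmed). The LDAP-backed methods need a live directory, so the anniversary demo substitutes hypothetical stand-in objects (_Attr, _FakeCotisation) for the LDAP attribute wrappers.

# Hedged usage sketch - module name and stand-in classes are assumptions.
from aurore_helper import CotisationComputes  # assumed module name

# Academic years run September..August: in October 2024, current_year() is 2025.
print(CotisationComputes.current_year())

# From October (10) through February (2) the month range wraps the year
# boundary; with October and November already paid, December..February remain.
print(CotisationComputes.get_available_months(10, 2, [10, 11]))  # [12, 1, 2]

# price_to_pay caps monthly payments at the yearly price: 3 months at 5 units
# would overshoot a 50-unit cap after 45 already paid, so only 5 units are due.
print(CotisationComputes.price_to_pay(50, 5, 45, 3))  # 5

class _Attr(object):
    # hypothetical stand-in for the LDAP attribute wrapper the module expects
    def __init__(self, values):
        self._values = values
    def first(self):
        return self._values[0]
    def all(self):
        return self._values

class _FakeCotisation(object):
    # hypothetical stand-in exposing get("x-time") and get("x-validMonth")
    def __init__(self, time, valid_months):
        self._data = {"x-time": _Attr([time]), "x-validMonth": _Attr(valid_months)}
    def get(self, key):
        return self._data[key]

# One cotisation registered on 2013-10-02 covering October..December: the
# anniversary lands on day 2 of the month following the last paid month.
fake = _FakeCotisation("2013-10-02 12:00:00.000000", ["10", "11", "12"])
print(CotisationComputes.anniversary_from_ldap_items([fake]))  # 2014-01-02 00:00:00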
flexible
{ "blob_id": "d726e468a9df26f1bcb8a016812b87fad7b41aa8", "index": 8089, "step-1": "<mask token>\n\n\nclass CotisationComputes:\n\n @staticmethod\n def current_year():\n now = datetime.datetime.now()\n if now.month > 8:\n return now.year + 1\n return now.year\n\n @staticmethod\n def registration_current_year():\n now = datetime.datetime.now()\n if now.month > 8:\n return now.year\n return now.year - 1\n <mask token>\n\n @staticmethod\n def is_valid_month(month):\n now = datetime.datetime.now()\n if now.month > 8:\n return (month >= now.month and month < 13 or month >= 1 and \n month < 9)\n else:\n return month >= now.month and month < 9\n\n @staticmethod\n def price_to_pay(year_price, month_price, already_paid,\n number_months_to_pay):\n months_price = number_months_to_pay * month_price\n BrieLogging.get().debug('already paid : ' + str(already_paid))\n BrieLogging.get().debug('months price : ' + str(months_price))\n if already_paid + months_price > year_price:\n months_price = max(0, year_price - already_paid)\n return months_price\n\n @staticmethod\n def anniversary_from_ldap_items(ldap_cotisations):\n result = []\n months = []\n SORT_ORDER = {(9): 0, (10): 1, (11): 2, (12): 3, (1): 4, (2): 5, (3\n ): 6, (4): 7, (5): 8, (6): 9, (7): 10, (8): 11}\n for cotisation in ldap_cotisations:\n cotisation_months = []\n anniversary_data = cotisation.get('x-time').first()\n anniversary_datetime = datetime.datetime.strptime(anniversary_data,\n '%Y-%m-%d %H:%M:%S.%f')\n for month in cotisation.get('x-validMonth').all():\n months.append(int(month))\n cotisation_months.append(int(month))\n cotisation_months.sort(key=lambda val: SORT_ORDER[val])\n result.append((anniversary_datetime, cotisation_months))\n anniversary = 0\n result = sorted(result)\n previousMonth = -1\n months.sort(key=lambda val: SORT_ORDER[val])\n for resultat in result:\n if previousMonth != -1 and (resultat[1][0] == 1 and \n previousMonth == 12 or resultat[1][0] == previousMonth + 1):\n previousMonth = resultat[1][-1]\n continue\n else:\n previousMonth = resultat[1][-1]\n anniversary_day = resultat[0].day\n anniversary_month = months[-1] + 1\n if anniversary_month == 13:\n anniversary_month = 1\n if anniversary_month > 9 or resultat[0].month < 9:\n anniversary_year = resultat[0].year\n else:\n anniversary_year = resultat[0].year + 1\n anniversary = datetime.datetime.strptime(str(anniversary_year) +\n '-' + str(anniversary_month) + '-1 0:0', '%Y-%m-%d %H:%M'\n ) + datetime.timedelta(days=anniversary_day - 1)\n if anniversary == 0:\n anniversary = datetime.datetime(1999, 1, 31, 12, 0)\n return anniversary\n\n @staticmethod\n def generate_new_anniversary_from_ldap_items(ldap_cotisations):\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n ldap_cotisations)\n now = datetime.datetime.now()\n if anniversary == 0 or (now - anniversary).days > 30:\n return now\n else:\n return anniversary\n <mask token>\n <mask token>\n <mask token>\n\n @staticmethod\n def is_cotisation_paid(member_dn, user_session, residence_dn,\n cotisations=None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n 
cotisations)\n delta = now - anniversary\n if (delta.days > 7 and now.month == 9 and CotisationComputes.\n is_cotisation_was_paid_last_year(member_dn, user_session,\n residence_dn)):\n return True\n return delta.days <= 7\n\n @staticmethod\n def is_cotisation_late(member_dn, user_session, residence_dn,\n cotisations=None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n return delta.days <= 30 and delta.days > 7\n <mask token>\n\n @staticmethod\n def is_no_cotisation(member_dn, user_session, residence_dn, cotisations\n =None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n return delta.days > 30\n <mask token>\n\n @staticmethod\n def members_status_from_list_cotisations(user_session, residence_dn,\n cotisations):\n members_dict = dict()\n for cotisation in cotisations:\n cotisation_dn = cotisation.dn.split(',')\n member_dn = ''\n for i in range(3, len(cotisation_dn)):\n if i != 3:\n member_dn += ','\n member_dn += cotisation_dn[i]\n if not member_dn in members_dict:\n members_dict[member_dn] = [cotisation]\n else:\n members_dict[member_dn].append(cotisation)\n BrieLogging.get().debug(str(datetime.datetime.now()) +\n 'members_status_from_list_cotisations:' + str(len(members_dict)))\n old_members = []\n cotisation_paid_members = []\n cotisation_late_members = []\n no_cotisation_members = []\n for member_dn, cotisations in members_dict.iteritems():\n old_members.append(member_dn)\n rooms = Room.get_rooms(user_session, residence_dn)\n for room in rooms:\n if room.has('x-memberIn') and room.get('x-memberIn').first(\n ) in members_dict and room.get('x-memberIn').first(\n ) in old_members:\n old_members.remove(room.get('x-memberIn').first())\n for member_dn, cotisations in members_dict.iteritems():\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n if CotisationComputes.is_cotisation_paid(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n cotisation_paid_members.append(member_dn)\n elif CotisationComputes.is_cotisation_late(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n cotisation_late_members.append(member_dn)\n elif CotisationComputes.is_no_cotisation(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n no_cotisation_members.append(member_dn)\n else:\n BrieLogging.get().debug(\n 'aurore_helper:393 : member with weird status !')\n return dict(old_members=old_members, cotisation_paid_members=\n cotisation_paid_members, cotisation_late_members=\n cotisation_late_members, no_cotisation_members=\n 
no_cotisation_members)\n", "step-2": "<mask token>\n\n\nclass CotisationComputes:\n\n @staticmethod\n def current_year():\n now = datetime.datetime.now()\n if now.month > 8:\n return now.year + 1\n return now.year\n\n @staticmethod\n def registration_current_year():\n now = datetime.datetime.now()\n if now.month > 8:\n return now.year\n return now.year - 1\n <mask token>\n\n @staticmethod\n def is_valid_month(month):\n now = datetime.datetime.now()\n if now.month > 8:\n return (month >= now.month and month < 13 or month >= 1 and \n month < 9)\n else:\n return month >= now.month and month < 9\n\n @staticmethod\n def price_to_pay(year_price, month_price, already_paid,\n number_months_to_pay):\n months_price = number_months_to_pay * month_price\n BrieLogging.get().debug('already paid : ' + str(already_paid))\n BrieLogging.get().debug('months price : ' + str(months_price))\n if already_paid + months_price > year_price:\n months_price = max(0, year_price - already_paid)\n return months_price\n\n @staticmethod\n def anniversary_from_ldap_items(ldap_cotisations):\n result = []\n months = []\n SORT_ORDER = {(9): 0, (10): 1, (11): 2, (12): 3, (1): 4, (2): 5, (3\n ): 6, (4): 7, (5): 8, (6): 9, (7): 10, (8): 11}\n for cotisation in ldap_cotisations:\n cotisation_months = []\n anniversary_data = cotisation.get('x-time').first()\n anniversary_datetime = datetime.datetime.strptime(anniversary_data,\n '%Y-%m-%d %H:%M:%S.%f')\n for month in cotisation.get('x-validMonth').all():\n months.append(int(month))\n cotisation_months.append(int(month))\n cotisation_months.sort(key=lambda val: SORT_ORDER[val])\n result.append((anniversary_datetime, cotisation_months))\n anniversary = 0\n result = sorted(result)\n previousMonth = -1\n months.sort(key=lambda val: SORT_ORDER[val])\n for resultat in result:\n if previousMonth != -1 and (resultat[1][0] == 1 and \n previousMonth == 12 or resultat[1][0] == previousMonth + 1):\n previousMonth = resultat[1][-1]\n continue\n else:\n previousMonth = resultat[1][-1]\n anniversary_day = resultat[0].day\n anniversary_month = months[-1] + 1\n if anniversary_month == 13:\n anniversary_month = 1\n if anniversary_month > 9 or resultat[0].month < 9:\n anniversary_year = resultat[0].year\n else:\n anniversary_year = resultat[0].year + 1\n anniversary = datetime.datetime.strptime(str(anniversary_year) +\n '-' + str(anniversary_month) + '-1 0:0', '%Y-%m-%d %H:%M'\n ) + datetime.timedelta(days=anniversary_day - 1)\n if anniversary == 0:\n anniversary = datetime.datetime(1999, 1, 31, 12, 0)\n return anniversary\n\n @staticmethod\n def generate_new_anniversary_from_ldap_items(ldap_cotisations):\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n ldap_cotisations)\n now = datetime.datetime.now()\n if anniversary == 0 or (now - anniversary).days > 30:\n return now\n else:\n return anniversary\n <mask token>\n\n @staticmethod\n def is_member_to_delete(member, user_session, residence_dn):\n current_year = CotisationComputes.current_year()\n cotisations_this_year = Cotisation.cotisations_of_member(user_session,\n member.dn, current_year)\n cotisations_previous_year = Cotisation.cotisations_of_member(\n user_session, member.dn, current_year - 1)\n if cotisations_this_year == [] and cotisations_previous_year == []:\n return True\n now = datetime.datetime.now()\n if now.month < 9:\n last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0)\n else:\n last_year = datetime.datetime(now.year, 8, 31, 12, 0)\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n 
cotisations_previous_year)\n delta = last_year - anniversary\n return cotisations_this_year == [] and (delta.days > 7 or Room.\n get_by_member_dn(user_session, residence_dn, member.dn) == None)\n <mask token>\n\n @staticmethod\n def is_cotisation_paid(member_dn, user_session, residence_dn,\n cotisations=None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n if (delta.days > 7 and now.month == 9 and CotisationComputes.\n is_cotisation_was_paid_last_year(member_dn, user_session,\n residence_dn)):\n return True\n return delta.days <= 7\n\n @staticmethod\n def is_cotisation_late(member_dn, user_session, residence_dn,\n cotisations=None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n return delta.days <= 30 and delta.days > 7\n <mask token>\n\n @staticmethod\n def is_no_cotisation(member_dn, user_session, residence_dn, cotisations\n =None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n return delta.days > 30\n <mask token>\n\n @staticmethod\n def members_status_from_list_cotisations(user_session, residence_dn,\n cotisations):\n members_dict = dict()\n for cotisation in cotisations:\n cotisation_dn = cotisation.dn.split(',')\n member_dn = ''\n for i in range(3, len(cotisation_dn)):\n if i != 3:\n member_dn += ','\n member_dn += cotisation_dn[i]\n if not member_dn in members_dict:\n members_dict[member_dn] = [cotisation]\n else:\n members_dict[member_dn].append(cotisation)\n BrieLogging.get().debug(str(datetime.datetime.now()) +\n 'members_status_from_list_cotisations:' + str(len(members_dict)))\n old_members = []\n cotisation_paid_members = []\n cotisation_late_members = []\n no_cotisation_members = []\n for member_dn, cotisations in members_dict.iteritems():\n old_members.append(member_dn)\n rooms = Room.get_rooms(user_session, residence_dn)\n for room in rooms:\n if room.has('x-memberIn') and room.get('x-memberIn').first(\n ) in members_dict and room.get('x-memberIn').first(\n ) in old_members:\n old_members.remove(room.get('x-memberIn').first())\n for member_dn, cotisations in members_dict.iteritems():\n anniversary = 
CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n if CotisationComputes.is_cotisation_paid(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n cotisation_paid_members.append(member_dn)\n elif CotisationComputes.is_cotisation_late(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n cotisation_late_members.append(member_dn)\n elif CotisationComputes.is_no_cotisation(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n no_cotisation_members.append(member_dn)\n else:\n BrieLogging.get().debug(\n 'aurore_helper:393 : member with weird status !')\n return dict(old_members=old_members, cotisation_paid_members=\n cotisation_paid_members, cotisation_late_members=\n cotisation_late_members, no_cotisation_members=\n no_cotisation_members)\n", "step-3": "<mask token>\n\n\nclass CotisationComputes:\n\n @staticmethod\n def current_year():\n now = datetime.datetime.now()\n if now.month > 8:\n return now.year + 1\n return now.year\n\n @staticmethod\n def registration_current_year():\n now = datetime.datetime.now()\n if now.month > 8:\n return now.year\n return now.year - 1\n\n @staticmethod\n def get_available_months(start, end, paid_months=[]):\n next_months_available = []\n if start > 12 or end > 12:\n raise Exception('invalid start or end')\n if start > 8 and end > 8:\n next_months_available = range(start, end + 1)\n elif start <= 8 and end < 9:\n next_months_available = range(start, end + 1)\n elif start > 8:\n next_months_available = range(start, 13) + range(1, end + 1)\n else:\n raise Exception('invalid start and end')\n if paid_months == []:\n return next_months_available\n BrieLogging.get().debug(next_months_available)\n available_months = [month for month in next_months_available if \n month not in paid_months]\n return available_months\n\n @staticmethod\n def is_valid_month(month):\n now = datetime.datetime.now()\n if now.month > 8:\n return (month >= now.month and month < 13 or month >= 1 and \n month < 9)\n else:\n return month >= now.month and month < 9\n\n @staticmethod\n def price_to_pay(year_price, month_price, already_paid,\n number_months_to_pay):\n months_price = number_months_to_pay * month_price\n BrieLogging.get().debug('already paid : ' + str(already_paid))\n BrieLogging.get().debug('months price : ' + str(months_price))\n if already_paid + months_price > year_price:\n months_price = max(0, year_price - already_paid)\n return months_price\n\n @staticmethod\n def anniversary_from_ldap_items(ldap_cotisations):\n result = []\n months = []\n SORT_ORDER = {(9): 0, (10): 1, (11): 2, (12): 3, (1): 4, (2): 5, (3\n ): 6, (4): 7, (5): 8, (6): 9, (7): 10, (8): 11}\n for cotisation in ldap_cotisations:\n cotisation_months = []\n anniversary_data = cotisation.get('x-time').first()\n anniversary_datetime = datetime.datetime.strptime(anniversary_data,\n '%Y-%m-%d %H:%M:%S.%f')\n for month in cotisation.get('x-validMonth').all():\n months.append(int(month))\n cotisation_months.append(int(month))\n cotisation_months.sort(key=lambda val: SORT_ORDER[val])\n result.append((anniversary_datetime, cotisation_months))\n anniversary = 0\n result = sorted(result)\n previousMonth = -1\n months.sort(key=lambda val: SORT_ORDER[val])\n for resultat in result:\n if previousMonth != -1 and (resultat[1][0] == 1 and \n previousMonth == 12 or resultat[1][0] == previousMonth + 1):\n previousMonth = resultat[1][-1]\n continue\n else:\n previousMonth = resultat[1][-1]\n anniversary_day = resultat[0].day\n anniversary_month = 
months[-1] + 1\n if anniversary_month == 13:\n anniversary_month = 1\n if anniversary_month > 9 or resultat[0].month < 9:\n anniversary_year = resultat[0].year\n else:\n anniversary_year = resultat[0].year + 1\n anniversary = datetime.datetime.strptime(str(anniversary_year) +\n '-' + str(anniversary_month) + '-1 0:0', '%Y-%m-%d %H:%M'\n ) + datetime.timedelta(days=anniversary_day - 1)\n if anniversary == 0:\n anniversary = datetime.datetime(1999, 1, 31, 12, 0)\n return anniversary\n\n @staticmethod\n def generate_new_anniversary_from_ldap_items(ldap_cotisations):\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n ldap_cotisations)\n now = datetime.datetime.now()\n if anniversary == 0 or (now - anniversary).days > 30:\n return now\n else:\n return anniversary\n <mask token>\n\n @staticmethod\n def is_member_to_delete(member, user_session, residence_dn):\n current_year = CotisationComputes.current_year()\n cotisations_this_year = Cotisation.cotisations_of_member(user_session,\n member.dn, current_year)\n cotisations_previous_year = Cotisation.cotisations_of_member(\n user_session, member.dn, current_year - 1)\n if cotisations_this_year == [] and cotisations_previous_year == []:\n return True\n now = datetime.datetime.now()\n if now.month < 9:\n last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0)\n else:\n last_year = datetime.datetime(now.year, 8, 31, 12, 0)\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations_previous_year)\n delta = last_year - anniversary\n return cotisations_this_year == [] and (delta.days > 7 or Room.\n get_by_member_dn(user_session, residence_dn, member.dn) == None)\n\n @staticmethod\n def is_cotisation_was_paid_last_year(member_dn, user_session,\n residence_dn, cotisations=None, anniversary=None):\n if cotisations is None:\n current_year = CotisationComputes.current_year() - 1\n now = datetime.datetime.now()\n if now.month < 9:\n last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0)\n else:\n last_year = datetime.datetime(now.year, 8, 31, 12, 0)\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = last_year - anniversary\n return delta.days <= 7\n\n @staticmethod\n def is_cotisation_paid(member_dn, user_session, residence_dn,\n cotisations=None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n if (delta.days > 7 and now.month == 9 and CotisationComputes.\n is_cotisation_was_paid_last_year(member_dn, user_session,\n residence_dn)):\n return True\n return delta.days <= 7\n\n @staticmethod\n def is_cotisation_late(member_dn, user_session, residence_dn,\n cotisations=None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = 
datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n return delta.days <= 30 and delta.days > 7\n\n def cotisation_late_reminder(member_dn, user_session, residence_dn):\n member = Member.get_by_dn(user_session, member_dn)\n from_mail = 'noreply@fede-aurore.net'\n to_mail = [member.mail.first()]\n residence_name = Residences.get_name_by_dn(user_session, residence_dn\n ).decode('utf-8').encode('ascii', 'ignore')\n\n @staticmethod\n def is_no_cotisation(member_dn, user_session, residence_dn, cotisations\n =None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n return delta.days > 30\n\n @staticmethod\n def members_status_from_residence(user_session, residence_dn):\n members = Member.get_all(user_session, residence_dn)\n old_members = []\n cotisation_paid_members = []\n cotisation_late_members = []\n no_cotisation_members = []\n for member in members:\n current_year = CotisationComputes.current_year()\n cotisations = Cotisation.cotisations_of_member(user_session,\n member.dn, current_year)\n if CotisationComputes.is_old_member(member.dn, user_session,\n residence_dn, cotisations):\n old_members.append(member)\n elif CotisationComputes.is_cotisation_paid(member.dn,\n user_session, residence_dn, cotisations):\n cotisation_paid_members.append(member)\n elif CotisationComputes.is_cotisation_late(member.dn,\n user_session, residence_dn, cotisations):\n cotisation_late_members.append(member)\n elif CotisationComputes.is_no_cotisation(member.dn,\n user_session, residence_dn, cotisations):\n no_cotisation_members.append(member)\n else:\n BrieLogging.get().warn(\n 'aurore_helper:336 member with weird status !')\n return dict(old_members=old_members, cotisation_paid_members=\n cotisation_paid_members, cotisation_late_members=\n cotisation_late_members, no_cotisation_members=\n no_cotisation_members)\n\n @staticmethod\n def members_status_from_list_cotisations(user_session, residence_dn,\n cotisations):\n members_dict = dict()\n for cotisation in cotisations:\n cotisation_dn = cotisation.dn.split(',')\n member_dn = ''\n for i in range(3, len(cotisation_dn)):\n if i != 3:\n member_dn += ','\n member_dn += cotisation_dn[i]\n if not member_dn in members_dict:\n members_dict[member_dn] = [cotisation]\n else:\n members_dict[member_dn].append(cotisation)\n BrieLogging.get().debug(str(datetime.datetime.now()) +\n 'members_status_from_list_cotisations:' + str(len(members_dict)))\n old_members = []\n cotisation_paid_members = []\n cotisation_late_members = []\n no_cotisation_members = []\n for member_dn, cotisations in members_dict.iteritems():\n old_members.append(member_dn)\n rooms = Room.get_rooms(user_session, residence_dn)\n for room in rooms:\n if room.has('x-memberIn') and room.get('x-memberIn').first(\n ) in members_dict and room.get('x-memberIn').first(\n ) in old_members:\n 
old_members.remove(room.get('x-memberIn').first())\n for member_dn, cotisations in members_dict.iteritems():\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n if CotisationComputes.is_cotisation_paid(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n cotisation_paid_members.append(member_dn)\n elif CotisationComputes.is_cotisation_late(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n cotisation_late_members.append(member_dn)\n elif CotisationComputes.is_no_cotisation(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n no_cotisation_members.append(member_dn)\n else:\n BrieLogging.get().debug(\n 'aurore_helper:393 : member with weird status !')\n return dict(old_members=old_members, cotisation_paid_members=\n cotisation_paid_members, cotisation_late_members=\n cotisation_late_members, no_cotisation_members=\n no_cotisation_members)\n", "step-4": "<mask token>\n\n\nclass CotisationComputes:\n\n @staticmethod\n def current_year():\n now = datetime.datetime.now()\n if now.month > 8:\n return now.year + 1\n return now.year\n\n @staticmethod\n def registration_current_year():\n now = datetime.datetime.now()\n if now.month > 8:\n return now.year\n return now.year - 1\n\n @staticmethod\n def get_available_months(start, end, paid_months=[]):\n next_months_available = []\n if start > 12 or end > 12:\n raise Exception('invalid start or end')\n if start > 8 and end > 8:\n next_months_available = range(start, end + 1)\n elif start <= 8 and end < 9:\n next_months_available = range(start, end + 1)\n elif start > 8:\n next_months_available = range(start, 13) + range(1, end + 1)\n else:\n raise Exception('invalid start and end')\n if paid_months == []:\n return next_months_available\n BrieLogging.get().debug(next_months_available)\n available_months = [month for month in next_months_available if \n month not in paid_months]\n return available_months\n\n @staticmethod\n def is_valid_month(month):\n now = datetime.datetime.now()\n if now.month > 8:\n return (month >= now.month and month < 13 or month >= 1 and \n month < 9)\n else:\n return month >= now.month and month < 9\n\n @staticmethod\n def price_to_pay(year_price, month_price, already_paid,\n number_months_to_pay):\n months_price = number_months_to_pay * month_price\n BrieLogging.get().debug('already paid : ' + str(already_paid))\n BrieLogging.get().debug('months price : ' + str(months_price))\n if already_paid + months_price > year_price:\n months_price = max(0, year_price - already_paid)\n return months_price\n\n @staticmethod\n def anniversary_from_ldap_items(ldap_cotisations):\n result = []\n months = []\n SORT_ORDER = {(9): 0, (10): 1, (11): 2, (12): 3, (1): 4, (2): 5, (3\n ): 6, (4): 7, (5): 8, (6): 9, (7): 10, (8): 11}\n for cotisation in ldap_cotisations:\n cotisation_months = []\n anniversary_data = cotisation.get('x-time').first()\n anniversary_datetime = datetime.datetime.strptime(anniversary_data,\n '%Y-%m-%d %H:%M:%S.%f')\n for month in cotisation.get('x-validMonth').all():\n months.append(int(month))\n cotisation_months.append(int(month))\n cotisation_months.sort(key=lambda val: SORT_ORDER[val])\n result.append((anniversary_datetime, cotisation_months))\n anniversary = 0\n result = sorted(result)\n previousMonth = -1\n months.sort(key=lambda val: SORT_ORDER[val])\n for resultat in result:\n if previousMonth != -1 and (resultat[1][0] == 1 and \n previousMonth == 12 or resultat[1][0] == previousMonth + 1):\n previousMonth = 
resultat[1][-1]\n continue\n else:\n previousMonth = resultat[1][-1]\n anniversary_day = resultat[0].day\n anniversary_month = months[-1] + 1\n if anniversary_month == 13:\n anniversary_month = 1\n if anniversary_month > 9 or resultat[0].month < 9:\n anniversary_year = resultat[0].year\n else:\n anniversary_year = resultat[0].year + 1\n anniversary = datetime.datetime.strptime(str(anniversary_year) +\n '-' + str(anniversary_month) + '-1 0:0', '%Y-%m-%d %H:%M'\n ) + datetime.timedelta(days=anniversary_day - 1)\n if anniversary == 0:\n anniversary = datetime.datetime(1999, 1, 31, 12, 0)\n return anniversary\n\n @staticmethod\n def generate_new_anniversary_from_ldap_items(ldap_cotisations):\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n ldap_cotisations)\n now = datetime.datetime.now()\n if anniversary == 0 or (now - anniversary).days > 30:\n return now\n else:\n return anniversary\n\n @staticmethod\n def is_old_member(member_dn, user_session, residence_dn, cotisations=None):\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n return Room.get_by_member_dn(user_session, residence_dn, member_dn\n ) == None or datetime.datetime.now(\n ).month != 9 and cotisations == []\n\n @staticmethod\n def is_member_to_delete(member, user_session, residence_dn):\n current_year = CotisationComputes.current_year()\n cotisations_this_year = Cotisation.cotisations_of_member(user_session,\n member.dn, current_year)\n cotisations_previous_year = Cotisation.cotisations_of_member(\n user_session, member.dn, current_year - 1)\n if cotisations_this_year == [] and cotisations_previous_year == []:\n return True\n now = datetime.datetime.now()\n if now.month < 9:\n last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0)\n else:\n last_year = datetime.datetime(now.year, 8, 31, 12, 0)\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations_previous_year)\n delta = last_year - anniversary\n return cotisations_this_year == [] and (delta.days > 7 or Room.\n get_by_member_dn(user_session, residence_dn, member.dn) == None)\n\n @staticmethod\n def is_cotisation_was_paid_last_year(member_dn, user_session,\n residence_dn, cotisations=None, anniversary=None):\n if cotisations is None:\n current_year = CotisationComputes.current_year() - 1\n now = datetime.datetime.now()\n if now.month < 9:\n last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0)\n else:\n last_year = datetime.datetime(now.year, 8, 31, 12, 0)\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = last_year - anniversary\n return delta.days <= 7\n\n @staticmethod\n def is_cotisation_paid(member_dn, user_session, residence_dn,\n cotisations=None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n if (delta.days > 7 and now.month == 9 and 
CotisationComputes.\n is_cotisation_was_paid_last_year(member_dn, user_session,\n residence_dn)):\n return True\n return delta.days <= 7\n\n @staticmethod\n def is_cotisation_late(member_dn, user_session, residence_dn,\n cotisations=None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n return delta.days <= 30 and delta.days > 7\n\n def cotisation_late_reminder(member_dn, user_session, residence_dn):\n member = Member.get_by_dn(user_session, member_dn)\n from_mail = 'noreply@fede-aurore.net'\n to_mail = [member.mail.first()]\n residence_name = Residences.get_name_by_dn(user_session, residence_dn\n ).decode('utf-8').encode('ascii', 'ignore')\n\n @staticmethod\n def is_no_cotisation(member_dn, user_session, residence_dn, cotisations\n =None, anniversary=None, verification_old_member=True):\n if verification_old_member and CotisationComputes.is_old_member(\n member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n now = datetime.datetime.now()\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session,\n member_dn, current_year)\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n delta = now - anniversary\n return delta.days > 30\n\n @staticmethod\n def members_status_from_residence(user_session, residence_dn):\n members = Member.get_all(user_session, residence_dn)\n old_members = []\n cotisation_paid_members = []\n cotisation_late_members = []\n no_cotisation_members = []\n for member in members:\n current_year = CotisationComputes.current_year()\n cotisations = Cotisation.cotisations_of_member(user_session,\n member.dn, current_year)\n if CotisationComputes.is_old_member(member.dn, user_session,\n residence_dn, cotisations):\n old_members.append(member)\n elif CotisationComputes.is_cotisation_paid(member.dn,\n user_session, residence_dn, cotisations):\n cotisation_paid_members.append(member)\n elif CotisationComputes.is_cotisation_late(member.dn,\n user_session, residence_dn, cotisations):\n cotisation_late_members.append(member)\n elif CotisationComputes.is_no_cotisation(member.dn,\n user_session, residence_dn, cotisations):\n no_cotisation_members.append(member)\n else:\n BrieLogging.get().warn(\n 'aurore_helper:336 member with weird status !')\n return dict(old_members=old_members, cotisation_paid_members=\n cotisation_paid_members, cotisation_late_members=\n cotisation_late_members, no_cotisation_members=\n no_cotisation_members)\n\n @staticmethod\n def members_status_from_list_cotisations(user_session, residence_dn,\n cotisations):\n members_dict = dict()\n for cotisation in cotisations:\n cotisation_dn = cotisation.dn.split(',')\n member_dn = ''\n for i in range(3, len(cotisation_dn)):\n if i != 3:\n member_dn += ','\n member_dn += cotisation_dn[i]\n if not member_dn in members_dict:\n members_dict[member_dn] = [cotisation]\n else:\n members_dict[member_dn].append(cotisation)\n 
BrieLogging.get().debug(str(datetime.datetime.now()) +\n 'members_status_from_list_cotisations:' + str(len(members_dict)))\n old_members = []\n cotisation_paid_members = []\n cotisation_late_members = []\n no_cotisation_members = []\n for member_dn, cotisations in members_dict.iteritems():\n old_members.append(member_dn)\n rooms = Room.get_rooms(user_session, residence_dn)\n for room in rooms:\n if room.has('x-memberIn') and room.get('x-memberIn').first(\n ) in members_dict and room.get('x-memberIn').first(\n ) in old_members:\n old_members.remove(room.get('x-memberIn').first())\n for member_dn, cotisations in members_dict.iteritems():\n anniversary = CotisationComputes.anniversary_from_ldap_items(\n cotisations)\n if CotisationComputes.is_cotisation_paid(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n cotisation_paid_members.append(member_dn)\n elif CotisationComputes.is_cotisation_late(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n cotisation_late_members.append(member_dn)\n elif CotisationComputes.is_no_cotisation(member_dn,\n user_session, residence_dn, cotisations, anniversary, False):\n no_cotisation_members.append(member_dn)\n else:\n BrieLogging.get().debug(\n 'aurore_helper:393 : member with weird status !')\n return dict(old_members=old_members, cotisation_paid_members=\n cotisation_paid_members, cotisation_late_members=\n cotisation_late_members, no_cotisation_members=\n no_cotisation_members)\n", "step-5": "from brie.config import ldap_config\nfrom brie.model.ldap import *\nfrom brie.lib.log_helper import BrieLogging\nimport datetime\nimport smtplib\n\nclass Residences:\n \n @staticmethod\n def get_dn_by_name(user_session, name):\n result = user_session.ldap_bind.search_first(ldap_config.liste_residence_dn, \"(cn=\" + name + \")\")\n\n if result is None:\n return None\n #end if\n\n return result.uniqueMember.first()\n #end def\n\n @staticmethod\n def get_name_by_dn(user_session, dn):\n result = user_session.ldap_bind.search_first(ldap_config.liste_residence_dn, \"(uniqueMember=\" + dn + \")\")\n\n if result is None:\n return None\n #end if\n \n return result.cn.first()\n #end def\n \n @staticmethod\n def get_residences(user_session):\n return user_session.ldap_bind.search(ldap_config.liste_residence_dn, \"(objectClass=groupOfUniqueNames)\")\n #end def\n#end class\n\nclass CotisationComputes:\n \n @staticmethod\n def current_year():\n now = datetime.datetime.now()\n if now.month > 8:\n return now.year + 1\n\n return now.year\n #end def\n\n @staticmethod\n def registration_current_year():\n now = datetime.datetime.now()\n if now.month > 8:\n return now.year\n\n return now.year - 1\n #end def\n\n @staticmethod\n def get_available_months(start, end, paid_months = []):\n next_months_available = []\n\n if start > 12 or end > 12:\n raise Exception(\"invalid start or end\")\n\n if start > 8 and end > 8:\n next_months_available = range(start, end + 1)\n elif start <= 8 and end < 9:\n next_months_available = range(start, end + 1)\n elif start > 8:\n next_months_available = range(start, 13) + range(1, end + 1 )\n else:\n raise Exception(\"invalid start and end\")\n #end if\n\n if paid_months == []:\n return next_months_available\n\n BrieLogging.get().debug(next_months_available)\n available_months = [\n month\n for month in next_months_available\n if month not in paid_months\n ]\n\n return available_months\n\n #end def\n\n @staticmethod\n def is_valid_month(month):\n now = datetime.datetime.now()\n if now.month > 8:\n return 
(month >= now.month and month < 13) or (month >= 1 and month < 9)\n else:\n return month >= now.month and month < 9\n #end if\n #end def\n\n @staticmethod\n def price_to_pay(year_price, month_price, already_paid, number_months_to_pay):\n \n months_price = number_months_to_pay * month_price\n BrieLogging.get().debug(\"already paid : \" + str(already_paid))\n BrieLogging.get().debug(\"months price : \" + str(months_price))\n if already_paid + months_price > year_price:\n months_price = max(0, year_price - already_paid)\n\n return months_price\n #end def\n\n @staticmethod\n def anniversary_from_ldap_items(ldap_cotisations):\n result = []\n months = []\n SORT_ORDER = {9: 0, 10: 1, 11: 2, 12: 3, 1: 4, 2: 5, 3: 6, 4: 7, 5: 8, 6: 9, 7: 10, 8: 11}\n for cotisation in ldap_cotisations:\n cotisation_months = []\n anniversary_data = cotisation.get(\"x-time\").first()\n anniversary_datetime = datetime.datetime.strptime(anniversary_data,\n \"%Y-%m-%d %H:%M:%S.%f\") \n for month in cotisation.get(\"x-validMonth\").all():\n months.append(int(month)) \n cotisation_months.append(int(month))\n #end for\n cotisation_months.sort(key=lambda val: SORT_ORDER[val])\n result.append((anniversary_datetime, cotisation_months))\n #end for\n\n anniversary = 0\n # tri par ordre d'inscription\n result = sorted(result)\n previousMonth = -1\n months.sort(key=lambda val: SORT_ORDER[val])\n #on scanne chaque cotisation\n for resultat in result:\n #si on n'est pas la premiere cotisation et que les cotisations sont sans interruptions (pas de mois manquants)\n #la date anniversaire reste la meme\n if previousMonth != -1 and ( (resultat[1][0] == 1 and previousMonth == 12) or (resultat[1][0] == previousMonth + 1) ):\n previousMonth = resultat[1][-1]\n continue;\n #sinon on recalcule la date anniversaire\n else : \n previousMonth = resultat[1][-1]\n #end if\n anniversary_day = resultat[0].day\n anniversary_month = months[-1] + 1\n if anniversary_month == 13:\n anniversary_month = 1\n if anniversary_month > 9 or resultat[0].month < 9:\n #on reste avec une annee d'anniversaire sur l'annee de la cotisation si l'anniversaire est entre septembre et decembre, mais on met aussi l'annee_anniversaire la meme annee que la cotisation si on a cotise entre janvier et aout\n anniversary_year = resultat[0].year\n else :\n #sinon, c'est qu'on a cotise entre septembre et decembre et que notre fin de cotisation est l'annee suivante, donc on fait un +1\n anniversary_year = resultat[0].year + 1\n anniversary = datetime.datetime.strptime(str(anniversary_year) + \"-\" + str(anniversary_month) + \"-1 0:0\", \"%Y-%m-%d %H:%M\") + datetime.timedelta(days=(anniversary_day - 1))\n #end for\n\n if anniversary == 0:\n anniversary = datetime.datetime(1999, 1, 31, 12, 0)\n\n return anniversary\n #end def\n\n @staticmethod\n # fonction de renvoi de la date anniversaire qui est la date actuelle au cas ou il n'a pas cotise depuis 30 jours, sinon date anniversaire actuelle\n def generate_new_anniversary_from_ldap_items(ldap_cotisations):\n anniversary = CotisationComputes.anniversary_from_ldap_items(ldap_cotisations)\n now = datetime.datetime.now()\n if anniversary == 0 or (now - anniversary).days > 30:\n return now\n else : \n return anniversary\n #end if\n #end def\n\n @staticmethod\n # old = SDF or no cotisation this year\n def is_old_member(member_dn, user_session, residence_dn, cotisations = None):\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n cotisations = Cotisation.cotisations_of_member(user_session, member_dn, 
current_year)\n #end if\n #en septembre, on ne met pas de membre en old afin que les anciens ne soient pas deconnectes\n return Room.get_by_member_dn(user_session, residence_dn, member_dn) == None or (datetime.datetime.now().month != 9 and cotisations == [])\n #end def\n\n @staticmethod\n # no cotisation for the new year and last year august didn't payed\n def is_member_to_delete(member, user_session, residence_dn):\n current_year = CotisationComputes.current_year()\n cotisations_this_year = Cotisation.cotisations_of_member(user_session, member.dn, current_year)\n cotisations_previous_year = Cotisation.cotisations_of_member(user_session, member.dn, current_year - 1)\n\n if cotisations_this_year == [] and cotisations_previous_year == []:\n return True\n\n now = datetime.datetime.now()\n if now.month < 9:\n last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0)\n else:\n last_year = datetime.datetime(now.year, 8, 31, 12, 0)\n #end if\n\n anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations_previous_year)\n #end if\n\n #si il a pas cotise cette annee et qu'il n'avait pas pris jusqua fin aout l'an dernier\n delta = (last_year - anniversary)\n return cotisations_this_year == [] and (delta.days > 7 or Room.get_by_member_dn(user_session, residence_dn, member.dn) == None)\n\n #end def\n\n @staticmethod\n # 7 days grace period\n def is_cotisation_was_paid_last_year(member_dn, user_session, residence_dn, cotisations = None, anniversary = None):\n if cotisations is None:\n current_year = CotisationComputes.current_year() - 1\n #end if\n\n now = datetime.datetime.now()\n if now.month < 9:\n last_year = datetime.datetime(now.year - 1, 8, 31, 12, 0)\n else:\n last_year = datetime.datetime(now.year, 8, 31, 12, 0)\n #end if\n\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year)\n #end if\n\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations)\n #end if\n\n delta = (last_year - anniversary)\n return delta.days <= 7\n #end def\n\n @staticmethod\n # 7 days grace period\n def is_cotisation_paid(member_dn, user_session, residence_dn, cotisations = None, anniversary = None, verification_old_member = True):\n if verification_old_member and CotisationComputes.is_old_member(member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n #end if\n now = datetime.datetime.now()\n\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year)\n #end if\n\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations)\n #end if\n\n delta = (now - anniversary)\n if delta.days > 7 and now.month == 9 and CotisationComputes.is_cotisation_was_paid_last_year(member_dn, user_session, residence_dn):\n #le membre etait a jour en aout, on lui autorise un delai de paiement en septembre - pas de deconnexion\n return True\n #end if\n\n return delta.days <= 7\n #end def\n\n @staticmethod\n # less than a month late but more than a week\n def is_cotisation_late(member_dn, user_session, residence_dn, cotisations = None, anniversary = None, verification_old_member = True):\n if verification_old_member and CotisationComputes.is_old_member(member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n #end if\n now = datetime.datetime.now()\n\n if 
cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year)\n #end if\n\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations)\n #end if\n\n delta = (now - anniversary)\n #print(\"[DEBUG] cotisation en retard pour l'utilisateur \"+ member.dn +\" now=\"+ str(now) +\" anniversary=\"+ str(anniversary) +\" delta=\"+ str(delta))\n return delta.days <= 30 and delta.days > 7\n #end def\n\n def cotisation_late_reminder(member_dn, user_session, residence_dn):\n member = Member.get_by_dn(user_session,member_dn)\n from_mail = \"noreply@fede-aurore.net\"\n to_mail = [member.mail.first()]\n residence_name = Residences.get_name_by_dn(user_session,residence_dn).decode(\"utf-8\").encode(\"ascii\", \"ignore\")\n #end def\n\n @staticmethod\n # more than a month late\n def is_no_cotisation(member_dn, user_session, residence_dn, cotisations = None, anniversary = None, verification_old_member = True):\n if verification_old_member and CotisationComputes.is_old_member(member_dn, user_session, residence_dn, cotisations):\n return False\n if cotisations is None:\n current_year = CotisationComputes.current_year()\n #end if\n now = datetime.datetime.now()\n\n if cotisations is None:\n cotisations = Cotisation.cotisations_of_member(user_session, member_dn, current_year)\n #end if\n\n if anniversary is None:\n anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations)\n #end if\n \n delta = (now - anniversary)\n return delta.days > 30\n #end def\n\n @staticmethod\n def members_status_from_residence(user_session, residence_dn):\n members = Member.get_all(user_session, residence_dn)\n\n old_members = []\n cotisation_paid_members = []\n cotisation_late_members = []\n no_cotisation_members = []\n for member in members:\n current_year = CotisationComputes.current_year()\n cotisations = Cotisation.cotisations_of_member(user_session, member.dn, current_year)\n if CotisationComputes.is_old_member(member.dn, user_session, residence_dn, cotisations):\n old_members.append(member)\n elif CotisationComputes.is_cotisation_paid(member.dn, user_session, residence_dn, cotisations):\n cotisation_paid_members.append(member)\n elif CotisationComputes.is_cotisation_late(member.dn, user_session, residence_dn, cotisations):\n cotisation_late_members.append(member)\n #print(\"[DEBUG] cotisation en retard pour l'utilisateur \"+ member.dn)\n elif CotisationComputes.is_no_cotisation(member.dn, user_session, residence_dn, cotisations):\n no_cotisation_members.append(member)\n else:\n BrieLogging.get().warn(\"aurore_helper:336 member with weird status !\")\n #end if\n\n #end for\n return dict(old_members=old_members, cotisation_paid_members=cotisation_paid_members, cotisation_late_members=cotisation_late_members, no_cotisation_members=no_cotisation_members)\n #end def\n\n @staticmethod\n def members_status_from_list_cotisations(user_session, residence_dn, cotisations):\n members_dict = dict()\n for cotisation in cotisations:\n cotisation_dn = cotisation.dn.split(\",\")\n member_dn = \"\"\n for i in range(3, len(cotisation_dn)):\n if i != 3:\n member_dn += \",\"\n #end if\n member_dn += cotisation_dn[i]\n #end for\n if not member_dn in members_dict:\n members_dict[member_dn] = [cotisation]\n else:\n members_dict[member_dn].append(cotisation)\n #end if\n #end for\n\n BrieLogging.get().debug(str(datetime.datetime.now()) + \"members_status_from_list_cotisations:\" + str(len(members_dict)))\n\n old_members = []\n cotisation_paid_members = 
[]\n cotisation_late_members = []\n no_cotisation_members = []\n\n #on va verifier en un block quels sont les old_members\n for member_dn, cotisations in members_dict.iteritems():\n old_members.append(member_dn)\n #end for\n rooms = Room.get_rooms(user_session, residence_dn)\n for room in rooms:\n #pour chaque chambre, on check si il a un proprietaire, et si ce dernier a des cotisations\n if room.has(\"x-memberIn\") and room.get(\"x-memberIn\").first() in members_dict and room.get(\"x-memberIn\").first() in old_members:\n #si oui, bah on le retire des old_members\n old_members.remove(room.get(\"x-memberIn\").first())\n #endif\n #endfor\n\n\n for member_dn, cotisations in members_dict.iteritems():\n anniversary = CotisationComputes.anniversary_from_ldap_items(cotisations)\n if CotisationComputes.is_cotisation_paid(member_dn, user_session, residence_dn, cotisations, anniversary, False):\n cotisation_paid_members.append(member_dn)\n elif CotisationComputes.is_cotisation_late(member_dn, user_session, residence_dn, cotisations, anniversary, False):\n cotisation_late_members.append(member_dn)\n #print(\"[DEBUG] cotisation en retard pour l'utilisateur \"+ member.dn)\n elif CotisationComputes.is_no_cotisation(member_dn, user_session, residence_dn, cotisations, anniversary, False):\n no_cotisation_members.append(member_dn)\n else:\n BrieLogging.get().debug(\"aurore_helper:393 : member with weird status !\")\n #end if\n\n #end for\n return dict(old_members=old_members, cotisation_paid_members=cotisation_paid_members, cotisation_late_members=cotisation_late_members, no_cotisation_members=no_cotisation_members)\n #end def\n\n#end class\n", "step-ids": [ 11, 12, 16, 17, 23 ] }
[ 11, 12, 16, 17, 23 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> class Solution: <|reserved_special_token_0|> <|reserved_special_token_1|> class Solution: def findKthNumber(self, m: int, n: int, k: int) ->int: return bisect_left(range(m * n), k, key=lambda x: x // n * n + sum( x // i for i in range(x // n + 1, m + 1))) <|reserved_special_token_1|> # -*- coding:utf-8 -*- # Author: washing # DateTime: 2022/5/18 10:28 # File: 0668.py # Desc: CV class Solution: def findKthNumber(self, m: int, n: int, k: int) -> int: return bisect_left(range(m * n), k, key=lambda x: x // n * n + sum(x // i for i in range(x // n + 1, m + 1)))
flexible
{ "blob_id": "ec9efeca7eef7b8ee25c1e089e675bdb1e53413b", "index": 417, "step-1": "<mask token>\n", "step-2": "class Solution:\n <mask token>\n", "step-3": "class Solution:\n\n def findKthNumber(self, m: int, n: int, k: int) ->int:\n return bisect_left(range(m * n), k, key=lambda x: x // n * n + sum(\n x // i for i in range(x // n + 1, m + 1)))\n", "step-4": "# -*- coding:utf-8 -*-\r\n# Author: washing\r\n# DateTime: 2022/5/18 10:28\r\n# File: 0668.py\r\n# Desc: CV\r\n\r\nclass Solution:\r\n def findKthNumber(self, m: int, n: int, k: int) -> int:\r\n return bisect_left(range(m * n), k, key=lambda x: x // n * n + sum(x // i for i in range(x // n + 1, m + 1)))\r\n\r\n\r\n\r\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> class DagRunnableReportingThread(StoppableThread, LoggingMixin): def __init__(self, async_mode: bool, dag_file_processor_agent, mailbox: Mailbox, *args, **kwargs): super(DagRunnableReportingThread, self).__init__(*args, **kwargs) self.setName('DagTrigger-DagRunnableReporter') self._async_mode = async_mode self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = ( dag_file_processor_agent) self._mailbox = mailbox <|reserved_special_token_0|> <|reserved_special_token_0|> class ParsingStatRetrieveThread(StoppableThread): def __init__(self, dag_file_processor_agent, *args, **kwargs): super(ParsingStatRetrieveThread, self).__init__(*args, **kwargs) self.setName('DagTrigger-ParsingStatRetriever') self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = ( dag_file_processor_agent) def run(self) ->None: while not self.stopped(): self._dag_file_processor_agent.wait_on_manager_message() self._dag_file_processor_agent.heartbeat() time.sleep(10) self.log.info('ParsingStatRetriever exiting') class DagTrigger(BackgroundService, MultiprocessingStartMethodMixin): def __init__(self, dag_directory: str, max_runs: int, dag_ids: Optional [List[str]], pickle_dags: bool, mailbox: Mailbox, refresh_dag_dir_interval=1, notification_service_uri=None): """ :param dag_directory: Directory where DAG definitions are kept. All files in file_paths should be under this directory :type dag_directory: unicode :param max_runs: The number of times to parse and schedule each file. -1 for unlimited. :type max_runs: int :param dag_ids: if specified, only schedule tasks with these DAG IDs :type dag_ids: list[str] :param pickle_dags: whether to pickle DAGs. :type pickle_dags: bool :param mailbox: the mailbox to send the DagExecutableEvent :type mailbox: Mailbox """ super().__init__() self._dag_directory = dag_directory self._max_runs = max_runs self._dag_ids = dag_ids self._pickle_dags = pickle_dags self._mailbox = mailbox self._async_mode = not conf.get('core', 'sql_alchemy_conn').lower( ).startswith('sqlite') self._dag_runnable_reporting_thread: Optional[StoppableThread] = None self._parsing_stat_process_thread: Optional[StoppableThread] = None self._dag_file_processor_agent: Optional[ DagTriggerDagFileProcessorAgent] = None self._refresh_dag_dir_interval = refresh_dag_dir_interval self._notification_service_uri = notification_service_uri def start(self): self._start_dag_file_processor_manager() self._dag_runnable_reporting_thread = DagRunnableReportingThread(self ._async_mode, self._dag_file_processor_agent, self._mailbox) self._dag_runnable_reporting_thread.start() self._parsing_stat_process_thread = ParsingStatRetrieveThread(self. 
_dag_file_processor_agent) self._parsing_stat_process_thread.start() def end(self) ->None: if self._dag_file_processor_agent is not None: self._dag_file_processor_agent.terminate() if self._dag_runnable_reporting_thread is not None: self._dag_runnable_reporting_thread.stop() if self._parsing_stat_process_thread is not None: self._parsing_stat_process_thread.stop() self._dag_runnable_reporting_thread.join() self._parsing_stat_process_thread.join() self._dag_file_processor_agent.end() def terminate(self): if self._dag_file_processor_agent is not None: self._dag_file_processor_agent.end() if self._dag_runnable_reporting_thread is not None: self._dag_runnable_reporting_thread.stop() if self._parsing_stat_process_thread is not None: self._parsing_stat_process_thread.stop() def _start_dag_file_processor_manager(self): processor_factory = (scheduler_job.SchedulerJob. _create_dag_file_processor) self._dag_file_processor_agent = DagTriggerDagFileProcessorAgent(self ._dag_directory, self._max_runs, processor_factory, self. _get_processor_timeout(), [], self._pickle_dags, self. _async_mode, self._refresh_dag_dir_interval, self. _notification_service_uri) self._dag_file_processor_agent.start() @staticmethod def _get_processor_timeout(): processor_timeout_seconds: int = conf.getint('core', 'dag_file_processor_timeout') return timedelta(seconds=processor_timeout_seconds) def is_alive(self) ->bool: return (self._dag_file_processor_agent is not None and self. _dag_runnable_reporting_thread is not None and self. _dag_runnable_reporting_thread.is_alive() and self. _parsing_stat_process_thread is not None and self. _parsing_stat_process_thread.is_alive()) <|reserved_special_token_1|> <|reserved_special_token_0|> class StoppableThread(threading.Thread, LoggingMixin): <|reserved_special_token_0|> def __init__(self, *args, **kwargs): super(StoppableThread, self).__init__(*args, **kwargs) self._ended = threading.Event() def stop(self): self.log.debug('stopping thread') self._ended.set() def stopped(self): return self._ended.is_set() class DagRunnableReportingThread(StoppableThread, LoggingMixin): def __init__(self, async_mode: bool, dag_file_processor_agent, mailbox: Mailbox, *args, **kwargs): super(DagRunnableReportingThread, self).__init__(*args, **kwargs) self.setName('DagTrigger-DagRunnableReporter') self._async_mode = async_mode self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = ( dag_file_processor_agent) self._mailbox = mailbox def run(self) ->None: while not self.stopped(): if not self._async_mode: self._dag_file_processor_agent.run_single_parsing_loop() with create_session() as session: dag_models = DagModel.dags_needing_dagruns(session).all() self.log.debug('dags needs dagruns: {}'.format(dag_models)) self._send_dag_executable(dag_models) time.sleep(5) self.log.info('DagRunnableReporter exiting') def _send_dag_executable(self, dag_models: Set[DagModel]): for dag_model in dag_models: self._mailbox.send_message(DagExecutableEvent(dag_model.dag_id) .to_event()) class ParsingStatRetrieveThread(StoppableThread): def __init__(self, dag_file_processor_agent, *args, **kwargs): super(ParsingStatRetrieveThread, self).__init__(*args, **kwargs) self.setName('DagTrigger-ParsingStatRetriever') self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = ( dag_file_processor_agent) def run(self) ->None: while not self.stopped(): self._dag_file_processor_agent.wait_on_manager_message() self._dag_file_processor_agent.heartbeat() time.sleep(10) self.log.info('ParsingStatRetriever exiting') class 
DagTrigger(BackgroundService, MultiprocessingStartMethodMixin): def __init__(self, dag_directory: str, max_runs: int, dag_ids: Optional [List[str]], pickle_dags: bool, mailbox: Mailbox, refresh_dag_dir_interval=1, notification_service_uri=None): """ :param dag_directory: Directory where DAG definitions are kept. All files in file_paths should be under this directory :type dag_directory: unicode :param max_runs: The number of times to parse and schedule each file. -1 for unlimited. :type max_runs: int :param dag_ids: if specified, only schedule tasks with these DAG IDs :type dag_ids: list[str] :param pickle_dags: whether to pickle DAGs. :type pickle_dags: bool :param mailbox: the mailbox to send the DagExecutableEvent :type mailbox: Mailbox """ super().__init__() self._dag_directory = dag_directory self._max_runs = max_runs self._dag_ids = dag_ids self._pickle_dags = pickle_dags self._mailbox = mailbox self._async_mode = not conf.get('core', 'sql_alchemy_conn').lower( ).startswith('sqlite') self._dag_runnable_reporting_thread: Optional[StoppableThread] = None self._parsing_stat_process_thread: Optional[StoppableThread] = None self._dag_file_processor_agent: Optional[ DagTriggerDagFileProcessorAgent] = None self._refresh_dag_dir_interval = refresh_dag_dir_interval self._notification_service_uri = notification_service_uri def start(self): self._start_dag_file_processor_manager() self._dag_runnable_reporting_thread = DagRunnableReportingThread(self ._async_mode, self._dag_file_processor_agent, self._mailbox) self._dag_runnable_reporting_thread.start() self._parsing_stat_process_thread = ParsingStatRetrieveThread(self. _dag_file_processor_agent) self._parsing_stat_process_thread.start() def end(self) ->None: if self._dag_file_processor_agent is not None: self._dag_file_processor_agent.terminate() if self._dag_runnable_reporting_thread is not None: self._dag_runnable_reporting_thread.stop() if self._parsing_stat_process_thread is not None: self._parsing_stat_process_thread.stop() self._dag_runnable_reporting_thread.join() self._parsing_stat_process_thread.join() self._dag_file_processor_agent.end() def terminate(self): if self._dag_file_processor_agent is not None: self._dag_file_processor_agent.end() if self._dag_runnable_reporting_thread is not None: self._dag_runnable_reporting_thread.stop() if self._parsing_stat_process_thread is not None: self._parsing_stat_process_thread.stop() def _start_dag_file_processor_manager(self): processor_factory = (scheduler_job.SchedulerJob. _create_dag_file_processor) self._dag_file_processor_agent = DagTriggerDagFileProcessorAgent(self ._dag_directory, self._max_runs, processor_factory, self. _get_processor_timeout(), [], self._pickle_dags, self. _async_mode, self._refresh_dag_dir_interval, self. _notification_service_uri) self._dag_file_processor_agent.start() @staticmethod def _get_processor_timeout(): processor_timeout_seconds: int = conf.getint('core', 'dag_file_processor_timeout') return timedelta(seconds=processor_timeout_seconds) def is_alive(self) ->bool: return (self._dag_file_processor_agent is not None and self. _dag_runnable_reporting_thread is not None and self. _dag_runnable_reporting_thread.is_alive() and self. _parsing_stat_process_thread is not None and self. _parsing_stat_process_thread.is_alive()) <|reserved_special_token_1|> <|reserved_special_token_0|> class StoppableThread(threading.Thread, LoggingMixin): """Thread class with a stop() method. 
The thread itself has to check regularly for the stopped() condition.""" def __init__(self, *args, **kwargs): super(StoppableThread, self).__init__(*args, **kwargs) self._ended = threading.Event() def stop(self): self.log.debug('stopping thread') self._ended.set() def stopped(self): return self._ended.is_set() class DagRunnableReportingThread(StoppableThread, LoggingMixin): def __init__(self, async_mode: bool, dag_file_processor_agent, mailbox: Mailbox, *args, **kwargs): super(DagRunnableReportingThread, self).__init__(*args, **kwargs) self.setName('DagTrigger-DagRunnableReporter') self._async_mode = async_mode self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = ( dag_file_processor_agent) self._mailbox = mailbox def run(self) ->None: while not self.stopped(): if not self._async_mode: self._dag_file_processor_agent.run_single_parsing_loop() with create_session() as session: dag_models = DagModel.dags_needing_dagruns(session).all() self.log.debug('dags needs dagruns: {}'.format(dag_models)) self._send_dag_executable(dag_models) time.sleep(5) self.log.info('DagRunnableReporter exiting') def _send_dag_executable(self, dag_models: Set[DagModel]): for dag_model in dag_models: self._mailbox.send_message(DagExecutableEvent(dag_model.dag_id) .to_event()) class ParsingStatRetrieveThread(StoppableThread): def __init__(self, dag_file_processor_agent, *args, **kwargs): super(ParsingStatRetrieveThread, self).__init__(*args, **kwargs) self.setName('DagTrigger-ParsingStatRetriever') self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = ( dag_file_processor_agent) def run(self) ->None: while not self.stopped(): self._dag_file_processor_agent.wait_on_manager_message() self._dag_file_processor_agent.heartbeat() time.sleep(10) self.log.info('ParsingStatRetriever exiting') class DagTrigger(BackgroundService, MultiprocessingStartMethodMixin): def __init__(self, dag_directory: str, max_runs: int, dag_ids: Optional [List[str]], pickle_dags: bool, mailbox: Mailbox, refresh_dag_dir_interval=1, notification_service_uri=None): """ :param dag_directory: Directory where DAG definitions are kept. All files in file_paths should be under this directory :type dag_directory: unicode :param max_runs: The number of times to parse and schedule each file. -1 for unlimited. :type max_runs: int :param dag_ids: if specified, only schedule tasks with these DAG IDs :type dag_ids: list[str] :param pickle_dags: whether to pickle DAGs. :type pickle_dags: bool :param mailbox: the mailbox to send the DagExecutableEvent :type mailbox: Mailbox """ super().__init__() self._dag_directory = dag_directory self._max_runs = max_runs self._dag_ids = dag_ids self._pickle_dags = pickle_dags self._mailbox = mailbox self._async_mode = not conf.get('core', 'sql_alchemy_conn').lower( ).startswith('sqlite') self._dag_runnable_reporting_thread: Optional[StoppableThread] = None self._parsing_stat_process_thread: Optional[StoppableThread] = None self._dag_file_processor_agent: Optional[ DagTriggerDagFileProcessorAgent] = None self._refresh_dag_dir_interval = refresh_dag_dir_interval self._notification_service_uri = notification_service_uri def start(self): self._start_dag_file_processor_manager() self._dag_runnable_reporting_thread = DagRunnableReportingThread(self ._async_mode, self._dag_file_processor_agent, self._mailbox) self._dag_runnable_reporting_thread.start() self._parsing_stat_process_thread = ParsingStatRetrieveThread(self. 
_dag_file_processor_agent) self._parsing_stat_process_thread.start() def end(self) ->None: if self._dag_file_processor_agent is not None: self._dag_file_processor_agent.terminate() if self._dag_runnable_reporting_thread is not None: self._dag_runnable_reporting_thread.stop() if self._parsing_stat_process_thread is not None: self._parsing_stat_process_thread.stop() self._dag_runnable_reporting_thread.join() self._parsing_stat_process_thread.join() self._dag_file_processor_agent.end() def terminate(self): if self._dag_file_processor_agent is not None: self._dag_file_processor_agent.end() if self._dag_runnable_reporting_thread is not None: self._dag_runnable_reporting_thread.stop() if self._parsing_stat_process_thread is not None: self._parsing_stat_process_thread.stop() def _start_dag_file_processor_manager(self): processor_factory = (scheduler_job.SchedulerJob. _create_dag_file_processor) self._dag_file_processor_agent = DagTriggerDagFileProcessorAgent(self ._dag_directory, self._max_runs, processor_factory, self. _get_processor_timeout(), [], self._pickle_dags, self. _async_mode, self._refresh_dag_dir_interval, self. _notification_service_uri) self._dag_file_processor_agent.start() @staticmethod def _get_processor_timeout(): processor_timeout_seconds: int = conf.getint('core', 'dag_file_processor_timeout') return timedelta(seconds=processor_timeout_seconds) def is_alive(self) ->bool: return (self._dag_file_processor_agent is not None and self. _dag_runnable_reporting_thread is not None and self. _dag_runnable_reporting_thread.is_alive() and self. _parsing_stat_process_thread is not None and self. _parsing_stat_process_thread.is_alive()) <|reserved_special_token_1|> <|reserved_special_token_0|> class DagTriggerDagFileProcessorAgent(dag_processing.DagFileProcessorAgent): def wait_on_manager_message(self, timeout=None): self._parent_signal_conn.poll(timeout) class StoppableThread(threading.Thread, LoggingMixin): """Thread class with a stop() method. 
The thread itself has to check regularly for the stopped() condition.""" def __init__(self, *args, **kwargs): super(StoppableThread, self).__init__(*args, **kwargs) self._ended = threading.Event() def stop(self): self.log.debug('stopping thread') self._ended.set() def stopped(self): return self._ended.is_set() class DagRunnableReportingThread(StoppableThread, LoggingMixin): def __init__(self, async_mode: bool, dag_file_processor_agent, mailbox: Mailbox, *args, **kwargs): super(DagRunnableReportingThread, self).__init__(*args, **kwargs) self.setName('DagTrigger-DagRunnableReporter') self._async_mode = async_mode self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = ( dag_file_processor_agent) self._mailbox = mailbox def run(self) ->None: while not self.stopped(): if not self._async_mode: self._dag_file_processor_agent.run_single_parsing_loop() with create_session() as session: dag_models = DagModel.dags_needing_dagruns(session).all() self.log.debug('dags needs dagruns: {}'.format(dag_models)) self._send_dag_executable(dag_models) time.sleep(5) self.log.info('DagRunnableReporter exiting') def _send_dag_executable(self, dag_models: Set[DagModel]): for dag_model in dag_models: self._mailbox.send_message(DagExecutableEvent(dag_model.dag_id) .to_event()) class ParsingStatRetrieveThread(StoppableThread): def __init__(self, dag_file_processor_agent, *args, **kwargs): super(ParsingStatRetrieveThread, self).__init__(*args, **kwargs) self.setName('DagTrigger-ParsingStatRetriever') self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = ( dag_file_processor_agent) def run(self) ->None: while not self.stopped(): self._dag_file_processor_agent.wait_on_manager_message() self._dag_file_processor_agent.heartbeat() time.sleep(10) self.log.info('ParsingStatRetriever exiting') class DagTrigger(BackgroundService, MultiprocessingStartMethodMixin): def __init__(self, dag_directory: str, max_runs: int, dag_ids: Optional [List[str]], pickle_dags: bool, mailbox: Mailbox, refresh_dag_dir_interval=1, notification_service_uri=None): """ :param dag_directory: Directory where DAG definitions are kept. All files in file_paths should be under this directory :type dag_directory: unicode :param max_runs: The number of times to parse and schedule each file. -1 for unlimited. :type max_runs: int :param dag_ids: if specified, only schedule tasks with these DAG IDs :type dag_ids: list[str] :param pickle_dags: whether to pickle DAGs. :type pickle_dags: bool :param mailbox: the mailbox to send the DagExecutableEvent :type mailbox: Mailbox """ super().__init__() self._dag_directory = dag_directory self._max_runs = max_runs self._dag_ids = dag_ids self._pickle_dags = pickle_dags self._mailbox = mailbox self._async_mode = not conf.get('core', 'sql_alchemy_conn').lower( ).startswith('sqlite') self._dag_runnable_reporting_thread: Optional[StoppableThread] = None self._parsing_stat_process_thread: Optional[StoppableThread] = None self._dag_file_processor_agent: Optional[ DagTriggerDagFileProcessorAgent] = None self._refresh_dag_dir_interval = refresh_dag_dir_interval self._notification_service_uri = notification_service_uri def start(self): self._start_dag_file_processor_manager() self._dag_runnable_reporting_thread = DagRunnableReportingThread(self ._async_mode, self._dag_file_processor_agent, self._mailbox) self._dag_runnable_reporting_thread.start() self._parsing_stat_process_thread = ParsingStatRetrieveThread(self. 
_dag_file_processor_agent) self._parsing_stat_process_thread.start() def end(self) ->None: if self._dag_file_processor_agent is not None: self._dag_file_processor_agent.terminate() if self._dag_runnable_reporting_thread is not None: self._dag_runnable_reporting_thread.stop() if self._parsing_stat_process_thread is not None: self._parsing_stat_process_thread.stop() self._dag_runnable_reporting_thread.join() self._parsing_stat_process_thread.join() self._dag_file_processor_agent.end() def terminate(self): if self._dag_file_processor_agent is not None: self._dag_file_processor_agent.end() if self._dag_runnable_reporting_thread is not None: self._dag_runnable_reporting_thread.stop() if self._parsing_stat_process_thread is not None: self._parsing_stat_process_thread.stop() def _start_dag_file_processor_manager(self): processor_factory = (scheduler_job.SchedulerJob. _create_dag_file_processor) self._dag_file_processor_agent = DagTriggerDagFileProcessorAgent(self ._dag_directory, self._max_runs, processor_factory, self. _get_processor_timeout(), [], self._pickle_dags, self. _async_mode, self._refresh_dag_dir_interval, self. _notification_service_uri) self._dag_file_processor_agent.start() @staticmethod def _get_processor_timeout(): processor_timeout_seconds: int = conf.getint('core', 'dag_file_processor_timeout') return timedelta(seconds=processor_timeout_seconds) def is_alive(self) ->bool: return (self._dag_file_processor_agent is not None and self. _dag_runnable_reporting_thread is not None and self. _dag_runnable_reporting_thread.is_alive() and self. _parsing_stat_process_thread is not None and self. _parsing_stat_process_thread.is_alive()) <|reserved_special_token_1|> # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import threading import time from datetime import timedelta from typing import List, Optional, Set import airflow.utils.dag_processing as dag_processing from airflow.configuration import conf from airflow.contrib.jobs.background_service import BackgroundService from airflow.events.scheduler_events import DagExecutableEvent from airflow.jobs import scheduler_job from airflow.models import DagModel from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.mailbox import Mailbox from airflow.utils.mixins import MultiprocessingStartMethodMixin from airflow.utils.session import create_session class DagTriggerDagFileProcessorAgent(dag_processing.DagFileProcessorAgent): def wait_on_manager_message(self, timeout=None): self._parent_signal_conn.poll(timeout) class StoppableThread(threading.Thread, LoggingMixin): """Thread class with a stop() method. 
The thread itself has to check regularly for the stopped() condition.""" def __init__(self, *args, **kwargs): super(StoppableThread, self).__init__(*args, **kwargs) self._ended = threading.Event() def stop(self): self.log.debug("stopping thread") self._ended.set() def stopped(self): return self._ended.is_set() class DagRunnableReportingThread(StoppableThread, LoggingMixin): def __init__(self, async_mode: bool, dag_file_processor_agent, mailbox: Mailbox, *args, **kwargs): super(DagRunnableReportingThread, self).__init__(*args, **kwargs) self.setName("DagTrigger-DagRunnableReporter") self._async_mode = async_mode self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = dag_file_processor_agent self._mailbox = mailbox def run(self) -> None: while not self.stopped(): # send AGENT_RUN_ONCE to DagFileProcessorManager to trigger dag parsing if not async mode if not self._async_mode: self._dag_file_processor_agent.run_single_parsing_loop() with create_session() as session: dag_models = DagModel.dags_needing_dagruns(session).all() self.log.debug("dags needs dagruns: {}".format(dag_models)) self._send_dag_executable(dag_models) time.sleep(5) self.log.info("DagRunnableReporter exiting") def _send_dag_executable(self, dag_models: Set[DagModel]): for dag_model in dag_models: self._mailbox.send_message(DagExecutableEvent(dag_model.dag_id).to_event()) class ParsingStatRetrieveThread(StoppableThread): def __init__(self, dag_file_processor_agent, *args, **kwargs): super(ParsingStatRetrieveThread, self).__init__(*args, **kwargs) self.setName("DagTrigger-ParsingStatRetriever") self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = dag_file_processor_agent def run(self) -> None: while not self.stopped(): self._dag_file_processor_agent.wait_on_manager_message() self._dag_file_processor_agent.heartbeat() time.sleep(10) self.log.info("ParsingStatRetriever exiting") class DagTrigger(BackgroundService, MultiprocessingStartMethodMixin): def __init__(self, dag_directory: str, max_runs: int, dag_ids: Optional[List[str]], pickle_dags: bool, mailbox: Mailbox, refresh_dag_dir_interval=1, notification_service_uri=None): """ :param dag_directory: Directory where DAG definitions are kept. All files in file_paths should be under this directory :type dag_directory: unicode :param max_runs: The number of times to parse and schedule each file. -1 for unlimited. :type max_runs: int :param dag_ids: if specified, only schedule tasks with these DAG IDs :type dag_ids: list[str] :param pickle_dags: whether to pickle DAGs. 
:type pickle_dags: bool :param mailbox: the mailbox to send the DagExecutableEvent :type mailbox: Mailbox """ super().__init__() self._dag_directory = dag_directory self._max_runs = max_runs self._dag_ids = dag_ids self._pickle_dags = pickle_dags self._mailbox = mailbox # use synchronize mode when using sqlite self._async_mode = not conf.get('core', 'sql_alchemy_conn').lower().startswith('sqlite') self._dag_runnable_reporting_thread: Optional[StoppableThread] = None self._parsing_stat_process_thread: Optional[StoppableThread] = None self._dag_file_processor_agent: Optional[DagTriggerDagFileProcessorAgent] = None self._refresh_dag_dir_interval = refresh_dag_dir_interval self._notification_service_uri = notification_service_uri def start(self): self._start_dag_file_processor_manager() self._dag_runnable_reporting_thread = DagRunnableReportingThread(self._async_mode, self._dag_file_processor_agent, self._mailbox) self._dag_runnable_reporting_thread.start() self._parsing_stat_process_thread = ParsingStatRetrieveThread(self._dag_file_processor_agent) self._parsing_stat_process_thread.start() def end(self) -> None: if self._dag_file_processor_agent is not None: self._dag_file_processor_agent.terminate() if self._dag_runnable_reporting_thread is not None: self._dag_runnable_reporting_thread.stop() if self._parsing_stat_process_thread is not None: self._parsing_stat_process_thread.stop() self._dag_runnable_reporting_thread.join() self._parsing_stat_process_thread.join() self._dag_file_processor_agent.end() def terminate(self): if self._dag_file_processor_agent is not None: self._dag_file_processor_agent.end() if self._dag_runnable_reporting_thread is not None: self._dag_runnable_reporting_thread.stop() if self._parsing_stat_process_thread is not None: self._parsing_stat_process_thread.stop() def _start_dag_file_processor_manager(self): processor_factory = scheduler_job.SchedulerJob._create_dag_file_processor self._dag_file_processor_agent = DagTriggerDagFileProcessorAgent(self._dag_directory, self._max_runs, processor_factory, self._get_processor_timeout(), [], self._pickle_dags, self._async_mode, self._refresh_dag_dir_interval, self._notification_service_uri) self._dag_file_processor_agent.start() @staticmethod def _get_processor_timeout(): processor_timeout_seconds: int = conf.getint('core', 'dag_file_processor_timeout') return timedelta(seconds=processor_timeout_seconds) def is_alive(self) -> bool: return self._dag_file_processor_agent is not None \ and self._dag_runnable_reporting_thread is not None and self._dag_runnable_reporting_thread.is_alive() \ and self._parsing_stat_process_thread is not None and self._parsing_stat_process_thread.is_alive()
flexible
{ "blob_id": "8e26a6b50539fa5f498aa2079a2625214e5b4d03", "index": 5919, "step-1": "<mask token>\n\n\nclass DagRunnableReportingThread(StoppableThread, LoggingMixin):\n\n def __init__(self, async_mode: bool, dag_file_processor_agent, mailbox:\n Mailbox, *args, **kwargs):\n super(DagRunnableReportingThread, self).__init__(*args, **kwargs)\n self.setName('DagTrigger-DagRunnableReporter')\n self._async_mode = async_mode\n self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = (\n dag_file_processor_agent)\n self._mailbox = mailbox\n <mask token>\n <mask token>\n\n\nclass ParsingStatRetrieveThread(StoppableThread):\n\n def __init__(self, dag_file_processor_agent, *args, **kwargs):\n super(ParsingStatRetrieveThread, self).__init__(*args, **kwargs)\n self.setName('DagTrigger-ParsingStatRetriever')\n self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = (\n dag_file_processor_agent)\n\n def run(self) ->None:\n while not self.stopped():\n self._dag_file_processor_agent.wait_on_manager_message()\n self._dag_file_processor_agent.heartbeat()\n time.sleep(10)\n self.log.info('ParsingStatRetriever exiting')\n\n\nclass DagTrigger(BackgroundService, MultiprocessingStartMethodMixin):\n\n def __init__(self, dag_directory: str, max_runs: int, dag_ids: Optional\n [List[str]], pickle_dags: bool, mailbox: Mailbox,\n refresh_dag_dir_interval=1, notification_service_uri=None):\n \"\"\"\n :param dag_directory: Directory where DAG definitions are kept. All\n files in file_paths should be under this directory\n :type dag_directory: unicode\n :param max_runs: The number of times to parse and schedule each file. -1\n for unlimited.\n :type max_runs: int\n :param dag_ids: if specified, only schedule tasks with these DAG IDs\n :type dag_ids: list[str]\n :param pickle_dags: whether to pickle DAGs.\n :type pickle_dags: bool\n :param mailbox: the mailbox to send the DagExecutableEvent\n :type mailbox: Mailbox\n \"\"\"\n super().__init__()\n self._dag_directory = dag_directory\n self._max_runs = max_runs\n self._dag_ids = dag_ids\n self._pickle_dags = pickle_dags\n self._mailbox = mailbox\n self._async_mode = not conf.get('core', 'sql_alchemy_conn').lower(\n ).startswith('sqlite')\n self._dag_runnable_reporting_thread: Optional[StoppableThread] = None\n self._parsing_stat_process_thread: Optional[StoppableThread] = None\n self._dag_file_processor_agent: Optional[\n DagTriggerDagFileProcessorAgent] = None\n self._refresh_dag_dir_interval = refresh_dag_dir_interval\n self._notification_service_uri = notification_service_uri\n\n def start(self):\n self._start_dag_file_processor_manager()\n self._dag_runnable_reporting_thread = DagRunnableReportingThread(self\n ._async_mode, self._dag_file_processor_agent, self._mailbox)\n self._dag_runnable_reporting_thread.start()\n self._parsing_stat_process_thread = ParsingStatRetrieveThread(self.\n _dag_file_processor_agent)\n self._parsing_stat_process_thread.start()\n\n def end(self) ->None:\n if self._dag_file_processor_agent is not None:\n self._dag_file_processor_agent.terminate()\n if self._dag_runnable_reporting_thread is not None:\n self._dag_runnable_reporting_thread.stop()\n if self._parsing_stat_process_thread is not None:\n self._parsing_stat_process_thread.stop()\n self._dag_runnable_reporting_thread.join()\n self._parsing_stat_process_thread.join()\n self._dag_file_processor_agent.end()\n\n def terminate(self):\n if self._dag_file_processor_agent is not None:\n self._dag_file_processor_agent.end()\n if self._dag_runnable_reporting_thread is not None:\n 
self._dag_runnable_reporting_thread.stop()\n if self._parsing_stat_process_thread is not None:\n self._parsing_stat_process_thread.stop()\n\n def _start_dag_file_processor_manager(self):\n processor_factory = (scheduler_job.SchedulerJob.\n _create_dag_file_processor)\n self._dag_file_processor_agent = DagTriggerDagFileProcessorAgent(self\n ._dag_directory, self._max_runs, processor_factory, self.\n _get_processor_timeout(), [], self._pickle_dags, self.\n _async_mode, self._refresh_dag_dir_interval, self.\n _notification_service_uri)\n self._dag_file_processor_agent.start()\n\n @staticmethod\n def _get_processor_timeout():\n processor_timeout_seconds: int = conf.getint('core',\n 'dag_file_processor_timeout')\n return timedelta(seconds=processor_timeout_seconds)\n\n def is_alive(self) ->bool:\n return (self._dag_file_processor_agent is not None and self.\n _dag_runnable_reporting_thread is not None and self.\n _dag_runnable_reporting_thread.is_alive() and self.\n _parsing_stat_process_thread is not None and self.\n _parsing_stat_process_thread.is_alive())\n", "step-2": "<mask token>\n\n\nclass StoppableThread(threading.Thread, LoggingMixin):\n <mask token>\n\n def __init__(self, *args, **kwargs):\n super(StoppableThread, self).__init__(*args, **kwargs)\n self._ended = threading.Event()\n\n def stop(self):\n self.log.debug('stopping thread')\n self._ended.set()\n\n def stopped(self):\n return self._ended.is_set()\n\n\nclass DagRunnableReportingThread(StoppableThread, LoggingMixin):\n\n def __init__(self, async_mode: bool, dag_file_processor_agent, mailbox:\n Mailbox, *args, **kwargs):\n super(DagRunnableReportingThread, self).__init__(*args, **kwargs)\n self.setName('DagTrigger-DagRunnableReporter')\n self._async_mode = async_mode\n self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = (\n dag_file_processor_agent)\n self._mailbox = mailbox\n\n def run(self) ->None:\n while not self.stopped():\n if not self._async_mode:\n self._dag_file_processor_agent.run_single_parsing_loop()\n with create_session() as session:\n dag_models = DagModel.dags_needing_dagruns(session).all()\n self.log.debug('dags needs dagruns: {}'.format(dag_models))\n self._send_dag_executable(dag_models)\n time.sleep(5)\n self.log.info('DagRunnableReporter exiting')\n\n def _send_dag_executable(self, dag_models: Set[DagModel]):\n for dag_model in dag_models:\n self._mailbox.send_message(DagExecutableEvent(dag_model.dag_id)\n .to_event())\n\n\nclass ParsingStatRetrieveThread(StoppableThread):\n\n def __init__(self, dag_file_processor_agent, *args, **kwargs):\n super(ParsingStatRetrieveThread, self).__init__(*args, **kwargs)\n self.setName('DagTrigger-ParsingStatRetriever')\n self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = (\n dag_file_processor_agent)\n\n def run(self) ->None:\n while not self.stopped():\n self._dag_file_processor_agent.wait_on_manager_message()\n self._dag_file_processor_agent.heartbeat()\n time.sleep(10)\n self.log.info('ParsingStatRetriever exiting')\n\n\nclass DagTrigger(BackgroundService, MultiprocessingStartMethodMixin):\n\n def __init__(self, dag_directory: str, max_runs: int, dag_ids: Optional\n [List[str]], pickle_dags: bool, mailbox: Mailbox,\n refresh_dag_dir_interval=1, notification_service_uri=None):\n \"\"\"\n :param dag_directory: Directory where DAG definitions are kept. All\n files in file_paths should be under this directory\n :type dag_directory: unicode\n :param max_runs: The number of times to parse and schedule each file. 
-1\n for unlimited.\n :type max_runs: int\n :param dag_ids: if specified, only schedule tasks with these DAG IDs\n :type dag_ids: list[str]\n :param pickle_dags: whether to pickle DAGs.\n :type pickle_dags: bool\n :param mailbox: the mailbox to send the DagExecutableEvent\n :type mailbox: Mailbox\n \"\"\"\n super().__init__()\n self._dag_directory = dag_directory\n self._max_runs = max_runs\n self._dag_ids = dag_ids\n self._pickle_dags = pickle_dags\n self._mailbox = mailbox\n self._async_mode = not conf.get('core', 'sql_alchemy_conn').lower(\n ).startswith('sqlite')\n self._dag_runnable_reporting_thread: Optional[StoppableThread] = None\n self._parsing_stat_process_thread: Optional[StoppableThread] = None\n self._dag_file_processor_agent: Optional[\n DagTriggerDagFileProcessorAgent] = None\n self._refresh_dag_dir_interval = refresh_dag_dir_interval\n self._notification_service_uri = notification_service_uri\n\n def start(self):\n self._start_dag_file_processor_manager()\n self._dag_runnable_reporting_thread = DagRunnableReportingThread(self\n ._async_mode, self._dag_file_processor_agent, self._mailbox)\n self._dag_runnable_reporting_thread.start()\n self._parsing_stat_process_thread = ParsingStatRetrieveThread(self.\n _dag_file_processor_agent)\n self._parsing_stat_process_thread.start()\n\n def end(self) ->None:\n if self._dag_file_processor_agent is not None:\n self._dag_file_processor_agent.terminate()\n if self._dag_runnable_reporting_thread is not None:\n self._dag_runnable_reporting_thread.stop()\n if self._parsing_stat_process_thread is not None:\n self._parsing_stat_process_thread.stop()\n self._dag_runnable_reporting_thread.join()\n self._parsing_stat_process_thread.join()\n self._dag_file_processor_agent.end()\n\n def terminate(self):\n if self._dag_file_processor_agent is not None:\n self._dag_file_processor_agent.end()\n if self._dag_runnable_reporting_thread is not None:\n self._dag_runnable_reporting_thread.stop()\n if self._parsing_stat_process_thread is not None:\n self._parsing_stat_process_thread.stop()\n\n def _start_dag_file_processor_manager(self):\n processor_factory = (scheduler_job.SchedulerJob.\n _create_dag_file_processor)\n self._dag_file_processor_agent = DagTriggerDagFileProcessorAgent(self\n ._dag_directory, self._max_runs, processor_factory, self.\n _get_processor_timeout(), [], self._pickle_dags, self.\n _async_mode, self._refresh_dag_dir_interval, self.\n _notification_service_uri)\n self._dag_file_processor_agent.start()\n\n @staticmethod\n def _get_processor_timeout():\n processor_timeout_seconds: int = conf.getint('core',\n 'dag_file_processor_timeout')\n return timedelta(seconds=processor_timeout_seconds)\n\n def is_alive(self) ->bool:\n return (self._dag_file_processor_agent is not None and self.\n _dag_runnable_reporting_thread is not None and self.\n _dag_runnable_reporting_thread.is_alive() and self.\n _parsing_stat_process_thread is not None and self.\n _parsing_stat_process_thread.is_alive())\n", "step-3": "<mask token>\n\n\nclass StoppableThread(threading.Thread, LoggingMixin):\n \"\"\"Thread class with a stop() method. 
The thread itself has to check\n regularly for the stopped() condition.\"\"\"\n\n def __init__(self, *args, **kwargs):\n super(StoppableThread, self).__init__(*args, **kwargs)\n self._ended = threading.Event()\n\n def stop(self):\n self.log.debug('stopping thread')\n self._ended.set()\n\n def stopped(self):\n return self._ended.is_set()\n\n\nclass DagRunnableReportingThread(StoppableThread, LoggingMixin):\n\n def __init__(self, async_mode: bool, dag_file_processor_agent, mailbox:\n Mailbox, *args, **kwargs):\n super(DagRunnableReportingThread, self).__init__(*args, **kwargs)\n self.setName('DagTrigger-DagRunnableReporter')\n self._async_mode = async_mode\n self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = (\n dag_file_processor_agent)\n self._mailbox = mailbox\n\n def run(self) ->None:\n while not self.stopped():\n if not self._async_mode:\n self._dag_file_processor_agent.run_single_parsing_loop()\n with create_session() as session:\n dag_models = DagModel.dags_needing_dagruns(session).all()\n self.log.debug('dags needs dagruns: {}'.format(dag_models))\n self._send_dag_executable(dag_models)\n time.sleep(5)\n self.log.info('DagRunnableReporter exiting')\n\n def _send_dag_executable(self, dag_models: Set[DagModel]):\n for dag_model in dag_models:\n self._mailbox.send_message(DagExecutableEvent(dag_model.dag_id)\n .to_event())\n\n\nclass ParsingStatRetrieveThread(StoppableThread):\n\n def __init__(self, dag_file_processor_agent, *args, **kwargs):\n super(ParsingStatRetrieveThread, self).__init__(*args, **kwargs)\n self.setName('DagTrigger-ParsingStatRetriever')\n self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = (\n dag_file_processor_agent)\n\n def run(self) ->None:\n while not self.stopped():\n self._dag_file_processor_agent.wait_on_manager_message()\n self._dag_file_processor_agent.heartbeat()\n time.sleep(10)\n self.log.info('ParsingStatRetriever exiting')\n\n\nclass DagTrigger(BackgroundService, MultiprocessingStartMethodMixin):\n\n def __init__(self, dag_directory: str, max_runs: int, dag_ids: Optional\n [List[str]], pickle_dags: bool, mailbox: Mailbox,\n refresh_dag_dir_interval=1, notification_service_uri=None):\n \"\"\"\n :param dag_directory: Directory where DAG definitions are kept. All\n files in file_paths should be under this directory\n :type dag_directory: unicode\n :param max_runs: The number of times to parse and schedule each file. 
-1\n for unlimited.\n :type max_runs: int\n :param dag_ids: if specified, only schedule tasks with these DAG IDs\n :type dag_ids: list[str]\n :param pickle_dags: whether to pickle DAGs.\n :type pickle_dags: bool\n :param mailbox: the mailbox to send the DagExecutableEvent\n :type mailbox: Mailbox\n \"\"\"\n super().__init__()\n self._dag_directory = dag_directory\n self._max_runs = max_runs\n self._dag_ids = dag_ids\n self._pickle_dags = pickle_dags\n self._mailbox = mailbox\n self._async_mode = not conf.get('core', 'sql_alchemy_conn').lower(\n ).startswith('sqlite')\n self._dag_runnable_reporting_thread: Optional[StoppableThread] = None\n self._parsing_stat_process_thread: Optional[StoppableThread] = None\n self._dag_file_processor_agent: Optional[\n DagTriggerDagFileProcessorAgent] = None\n self._refresh_dag_dir_interval = refresh_dag_dir_interval\n self._notification_service_uri = notification_service_uri\n\n def start(self):\n self._start_dag_file_processor_manager()\n self._dag_runnable_reporting_thread = DagRunnableReportingThread(self\n ._async_mode, self._dag_file_processor_agent, self._mailbox)\n self._dag_runnable_reporting_thread.start()\n self._parsing_stat_process_thread = ParsingStatRetrieveThread(self.\n _dag_file_processor_agent)\n self._parsing_stat_process_thread.start()\n\n def end(self) ->None:\n if self._dag_file_processor_agent is not None:\n self._dag_file_processor_agent.terminate()\n if self._dag_runnable_reporting_thread is not None:\n self._dag_runnable_reporting_thread.stop()\n if self._parsing_stat_process_thread is not None:\n self._parsing_stat_process_thread.stop()\n self._dag_runnable_reporting_thread.join()\n self._parsing_stat_process_thread.join()\n self._dag_file_processor_agent.end()\n\n def terminate(self):\n if self._dag_file_processor_agent is not None:\n self._dag_file_processor_agent.end()\n if self._dag_runnable_reporting_thread is not None:\n self._dag_runnable_reporting_thread.stop()\n if self._parsing_stat_process_thread is not None:\n self._parsing_stat_process_thread.stop()\n\n def _start_dag_file_processor_manager(self):\n processor_factory = (scheduler_job.SchedulerJob.\n _create_dag_file_processor)\n self._dag_file_processor_agent = DagTriggerDagFileProcessorAgent(self\n ._dag_directory, self._max_runs, processor_factory, self.\n _get_processor_timeout(), [], self._pickle_dags, self.\n _async_mode, self._refresh_dag_dir_interval, self.\n _notification_service_uri)\n self._dag_file_processor_agent.start()\n\n @staticmethod\n def _get_processor_timeout():\n processor_timeout_seconds: int = conf.getint('core',\n 'dag_file_processor_timeout')\n return timedelta(seconds=processor_timeout_seconds)\n\n def is_alive(self) ->bool:\n return (self._dag_file_processor_agent is not None and self.\n _dag_runnable_reporting_thread is not None and self.\n _dag_runnable_reporting_thread.is_alive() and self.\n _parsing_stat_process_thread is not None and self.\n _parsing_stat_process_thread.is_alive())\n", "step-4": "<mask token>\n\n\nclass DagTriggerDagFileProcessorAgent(dag_processing.DagFileProcessorAgent):\n\n def wait_on_manager_message(self, timeout=None):\n self._parent_signal_conn.poll(timeout)\n\n\nclass StoppableThread(threading.Thread, LoggingMixin):\n \"\"\"Thread class with a stop() method. 
The thread itself has to check\n regularly for the stopped() condition.\"\"\"\n\n def __init__(self, *args, **kwargs):\n super(StoppableThread, self).__init__(*args, **kwargs)\n self._ended = threading.Event()\n\n def stop(self):\n self.log.debug('stopping thread')\n self._ended.set()\n\n def stopped(self):\n return self._ended.is_set()\n\n\nclass DagRunnableReportingThread(StoppableThread, LoggingMixin):\n\n def __init__(self, async_mode: bool, dag_file_processor_agent, mailbox:\n Mailbox, *args, **kwargs):\n super(DagRunnableReportingThread, self).__init__(*args, **kwargs)\n self.setName('DagTrigger-DagRunnableReporter')\n self._async_mode = async_mode\n self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = (\n dag_file_processor_agent)\n self._mailbox = mailbox\n\n def run(self) ->None:\n while not self.stopped():\n if not self._async_mode:\n self._dag_file_processor_agent.run_single_parsing_loop()\n with create_session() as session:\n dag_models = DagModel.dags_needing_dagruns(session).all()\n self.log.debug('dags needs dagruns: {}'.format(dag_models))\n self._send_dag_executable(dag_models)\n time.sleep(5)\n self.log.info('DagRunnableReporter exiting')\n\n def _send_dag_executable(self, dag_models: Set[DagModel]):\n for dag_model in dag_models:\n self._mailbox.send_message(DagExecutableEvent(dag_model.dag_id)\n .to_event())\n\n\nclass ParsingStatRetrieveThread(StoppableThread):\n\n def __init__(self, dag_file_processor_agent, *args, **kwargs):\n super(ParsingStatRetrieveThread, self).__init__(*args, **kwargs)\n self.setName('DagTrigger-ParsingStatRetriever')\n self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = (\n dag_file_processor_agent)\n\n def run(self) ->None:\n while not self.stopped():\n self._dag_file_processor_agent.wait_on_manager_message()\n self._dag_file_processor_agent.heartbeat()\n time.sleep(10)\n self.log.info('ParsingStatRetriever exiting')\n\n\nclass DagTrigger(BackgroundService, MultiprocessingStartMethodMixin):\n\n def __init__(self, dag_directory: str, max_runs: int, dag_ids: Optional\n [List[str]], pickle_dags: bool, mailbox: Mailbox,\n refresh_dag_dir_interval=1, notification_service_uri=None):\n \"\"\"\n :param dag_directory: Directory where DAG definitions are kept. All\n files in file_paths should be under this directory\n :type dag_directory: unicode\n :param max_runs: The number of times to parse and schedule each file. 
-1\n for unlimited.\n :type max_runs: int\n :param dag_ids: if specified, only schedule tasks with these DAG IDs\n :type dag_ids: list[str]\n :param pickle_dags: whether to pickle DAGs.\n :type pickle_dags: bool\n :param mailbox: the mailbox to send the DagExecutableEvent\n :type mailbox: Mailbox\n \"\"\"\n super().__init__()\n self._dag_directory = dag_directory\n self._max_runs = max_runs\n self._dag_ids = dag_ids\n self._pickle_dags = pickle_dags\n self._mailbox = mailbox\n self._async_mode = not conf.get('core', 'sql_alchemy_conn').lower(\n ).startswith('sqlite')\n self._dag_runnable_reporting_thread: Optional[StoppableThread] = None\n self._parsing_stat_process_thread: Optional[StoppableThread] = None\n self._dag_file_processor_agent: Optional[\n DagTriggerDagFileProcessorAgent] = None\n self._refresh_dag_dir_interval = refresh_dag_dir_interval\n self._notification_service_uri = notification_service_uri\n\n def start(self):\n self._start_dag_file_processor_manager()\n self._dag_runnable_reporting_thread = DagRunnableReportingThread(self\n ._async_mode, self._dag_file_processor_agent, self._mailbox)\n self._dag_runnable_reporting_thread.start()\n self._parsing_stat_process_thread = ParsingStatRetrieveThread(self.\n _dag_file_processor_agent)\n self._parsing_stat_process_thread.start()\n\n def end(self) ->None:\n if self._dag_file_processor_agent is not None:\n self._dag_file_processor_agent.terminate()\n if self._dag_runnable_reporting_thread is not None:\n self._dag_runnable_reporting_thread.stop()\n if self._parsing_stat_process_thread is not None:\n self._parsing_stat_process_thread.stop()\n self._dag_runnable_reporting_thread.join()\n self._parsing_stat_process_thread.join()\n self._dag_file_processor_agent.end()\n\n def terminate(self):\n if self._dag_file_processor_agent is not None:\n self._dag_file_processor_agent.end()\n if self._dag_runnable_reporting_thread is not None:\n self._dag_runnable_reporting_thread.stop()\n if self._parsing_stat_process_thread is not None:\n self._parsing_stat_process_thread.stop()\n\n def _start_dag_file_processor_manager(self):\n processor_factory = (scheduler_job.SchedulerJob.\n _create_dag_file_processor)\n self._dag_file_processor_agent = DagTriggerDagFileProcessorAgent(self\n ._dag_directory, self._max_runs, processor_factory, self.\n _get_processor_timeout(), [], self._pickle_dags, self.\n _async_mode, self._refresh_dag_dir_interval, self.\n _notification_service_uri)\n self._dag_file_processor_agent.start()\n\n @staticmethod\n def _get_processor_timeout():\n processor_timeout_seconds: int = conf.getint('core',\n 'dag_file_processor_timeout')\n return timedelta(seconds=processor_timeout_seconds)\n\n def is_alive(self) ->bool:\n return (self._dag_file_processor_agent is not None and self.\n _dag_runnable_reporting_thread is not None and self.\n _dag_runnable_reporting_thread.is_alive() and self.\n _parsing_stat_process_thread is not None and self.\n _parsing_stat_process_thread.is_alive())\n", "step-5": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\nimport threading\nimport time\nfrom datetime import timedelta\nfrom typing import List, Optional, Set\n\nimport airflow.utils.dag_processing as dag_processing\nfrom airflow.configuration import conf\nfrom airflow.contrib.jobs.background_service import BackgroundService\nfrom airflow.events.scheduler_events import DagExecutableEvent\nfrom airflow.jobs import scheduler_job\nfrom airflow.models import DagModel\nfrom airflow.utils.log.logging_mixin import LoggingMixin\nfrom airflow.utils.mailbox import Mailbox\nfrom airflow.utils.mixins import MultiprocessingStartMethodMixin\nfrom airflow.utils.session import create_session\n\n\nclass DagTriggerDagFileProcessorAgent(dag_processing.DagFileProcessorAgent):\n def wait_on_manager_message(self, timeout=None):\n self._parent_signal_conn.poll(timeout)\n\n\nclass StoppableThread(threading.Thread, LoggingMixin):\n \"\"\"Thread class with a stop() method. The thread itself has to check\n regularly for the stopped() condition.\"\"\"\n\n def __init__(self, *args, **kwargs):\n super(StoppableThread, self).__init__(*args, **kwargs)\n self._ended = threading.Event()\n\n def stop(self):\n self.log.debug(\"stopping thread\")\n self._ended.set()\n\n def stopped(self):\n return self._ended.is_set()\n\n\nclass DagRunnableReportingThread(StoppableThread, LoggingMixin):\n\n def __init__(self, async_mode: bool, dag_file_processor_agent, mailbox: Mailbox, *args, **kwargs):\n super(DagRunnableReportingThread, self).__init__(*args, **kwargs)\n self.setName(\"DagTrigger-DagRunnableReporter\")\n self._async_mode = async_mode\n self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = dag_file_processor_agent\n self._mailbox = mailbox\n\n def run(self) -> None:\n while not self.stopped():\n # send AGENT_RUN_ONCE to DagFileProcessorManager to trigger dag parsing if not async mode\n if not self._async_mode:\n self._dag_file_processor_agent.run_single_parsing_loop()\n with create_session() as session:\n dag_models = DagModel.dags_needing_dagruns(session).all()\n self.log.debug(\"dags needs dagruns: {}\".format(dag_models))\n self._send_dag_executable(dag_models)\n time.sleep(5)\n self.log.info(\"DagRunnableReporter exiting\")\n\n def _send_dag_executable(self, dag_models: Set[DagModel]):\n for dag_model in dag_models:\n self._mailbox.send_message(DagExecutableEvent(dag_model.dag_id).to_event())\n\n\nclass ParsingStatRetrieveThread(StoppableThread):\n def __init__(self, dag_file_processor_agent, *args, **kwargs):\n super(ParsingStatRetrieveThread, self).__init__(*args, **kwargs)\n self.setName(\"DagTrigger-ParsingStatRetriever\")\n self._dag_file_processor_agent: DagTriggerDagFileProcessorAgent = dag_file_processor_agent\n\n def run(self) -> None:\n while not self.stopped():\n self._dag_file_processor_agent.wait_on_manager_message()\n self._dag_file_processor_agent.heartbeat()\n time.sleep(10)\n self.log.info(\"ParsingStatRetriever exiting\")\n\n\nclass DagTrigger(BackgroundService, MultiprocessingStartMethodMixin):\n\n def __init__(self,\n dag_directory: str,\n max_runs: int,\n dag_ids: Optional[List[str]],\n pickle_dags: bool,\n mailbox: Mailbox,\n 
refresh_dag_dir_interval=1,\n notification_service_uri=None):\n \"\"\"\n :param dag_directory: Directory where DAG definitions are kept. All\n files in file_paths should be under this directory\n :type dag_directory: unicode\n :param max_runs: The number of times to parse and schedule each file. -1\n for unlimited.\n :type max_runs: int\n :param dag_ids: if specified, only schedule tasks with these DAG IDs\n :type dag_ids: list[str]\n :param pickle_dags: whether to pickle DAGs.\n :type pickle_dags: bool\n :param mailbox: the mailbox to send the DagExecutableEvent\n :type mailbox: Mailbox\n \"\"\"\n\n super().__init__()\n\n self._dag_directory = dag_directory\n self._max_runs = max_runs\n self._dag_ids = dag_ids\n self._pickle_dags = pickle_dags\n self._mailbox = mailbox\n # use synchronize mode when using sqlite\n self._async_mode = not conf.get('core', 'sql_alchemy_conn').lower().startswith('sqlite')\n\n self._dag_runnable_reporting_thread: Optional[StoppableThread] = None\n self._parsing_stat_process_thread: Optional[StoppableThread] = None\n self._dag_file_processor_agent: Optional[DagTriggerDagFileProcessorAgent] = None\n self._refresh_dag_dir_interval = refresh_dag_dir_interval\n self._notification_service_uri = notification_service_uri\n\n def start(self):\n self._start_dag_file_processor_manager()\n self._dag_runnable_reporting_thread = DagRunnableReportingThread(self._async_mode,\n self._dag_file_processor_agent,\n self._mailbox)\n self._dag_runnable_reporting_thread.start()\n\n self._parsing_stat_process_thread = ParsingStatRetrieveThread(self._dag_file_processor_agent)\n self._parsing_stat_process_thread.start()\n\n def end(self) -> None:\n if self._dag_file_processor_agent is not None:\n self._dag_file_processor_agent.terminate()\n if self._dag_runnable_reporting_thread is not None:\n self._dag_runnable_reporting_thread.stop()\n if self._parsing_stat_process_thread is not None:\n self._parsing_stat_process_thread.stop()\n self._dag_runnable_reporting_thread.join()\n self._parsing_stat_process_thread.join()\n self._dag_file_processor_agent.end()\n\n def terminate(self):\n if self._dag_file_processor_agent is not None:\n self._dag_file_processor_agent.end()\n if self._dag_runnable_reporting_thread is not None:\n self._dag_runnable_reporting_thread.stop()\n if self._parsing_stat_process_thread is not None:\n self._parsing_stat_process_thread.stop()\n\n def _start_dag_file_processor_manager(self):\n processor_factory = scheduler_job.SchedulerJob._create_dag_file_processor\n\n self._dag_file_processor_agent = DagTriggerDagFileProcessorAgent(self._dag_directory,\n self._max_runs,\n processor_factory,\n self._get_processor_timeout(),\n [],\n self._pickle_dags,\n self._async_mode,\n self._refresh_dag_dir_interval,\n self._notification_service_uri)\n self._dag_file_processor_agent.start()\n\n @staticmethod\n def _get_processor_timeout():\n processor_timeout_seconds: int = conf.getint('core', 'dag_file_processor_timeout')\n return timedelta(seconds=processor_timeout_seconds)\n\n def is_alive(self) -> bool:\n return self._dag_file_processor_agent is not None \\\n and self._dag_runnable_reporting_thread is not None and self._dag_runnable_reporting_thread.is_alive() \\\n and self._parsing_stat_process_thread is not None and self._parsing_stat_process_thread.is_alive()\n", "step-ids": [ 13, 19, 20, 22, 24 ] }
[ 13, 19, 20, 22, 24 ]
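A minimal, self-contained sketch of the stop-flag pattern that StoppableThread in the record above relies on: a threading.Event acts as the cooperative shutdown signal, and the worker loop must poll it between units of work. All names below are illustrative, not taken from the Airflow code.

import threading
import time


class Worker(threading.Thread):
    def __init__(self):
        super().__init__()
        self._ended = threading.Event()  # the stop flag

    def stop(self):
        self._ended.set()  # request shutdown; does not interrupt run()

    def stopped(self):
        return self._ended.is_set()

    def run(self):
        while not self.stopped():
            time.sleep(0.1)  # one unit of work, then re-check the flag


w = Worker()
w.start()
w.stop()  # run() exits on its next poll of stopped()
w.join()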
<|reserved_special_token_0|> def aggSumFn(path, grpByCol): allFiles = glob.glob(path + '/*.csv') for file_ in allFiles: df = pd.read_csv(file_, index_col=None, header=0) list_.append(df) frame = pd.concat(list_) frame[grpByCol] = pd.to_datetime(frame['day'], format='%Y-%m-%d') frame = frame.resample('W-Mon', on=grpByCol)['energy_sum'].sum( ).reset_index().sort_values(by=grpByCol) frame.columns = ['week', 'total_consumption'] frame.to_csv( 'C:\\Users\\Tony\\Downloads\\daily_dataset\\summary\\weekly_dataset_summary.csv' ) print('completed') <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def aggSumFn(path, grpByCol): allFiles = glob.glob(path + '/*.csv') for file_ in allFiles: df = pd.read_csv(file_, index_col=None, header=0) list_.append(df) frame = pd.concat(list_) frame[grpByCol] = pd.to_datetime(frame['day'], format='%Y-%m-%d') frame = frame.resample('W-Mon', on=grpByCol)['energy_sum'].sum( ).reset_index().sort_values(by=grpByCol) frame.columns = ['week', 'total_consumption'] frame.to_csv( 'C:\\Users\\Tony\\Downloads\\daily_dataset\\summary\\weekly_dataset_summary.csv' ) print('completed') aggSumFn(path, 'day') <|reserved_special_token_1|> <|reserved_special_token_0|> path = 'C:\\Users\\Tony\\Downloads\\daily_dataset\\daily_dataset' frame = pd.DataFrame() list_ = [] def aggSumFn(path, grpByCol): allFiles = glob.glob(path + '/*.csv') for file_ in allFiles: df = pd.read_csv(file_, index_col=None, header=0) list_.append(df) frame = pd.concat(list_) frame[grpByCol] = pd.to_datetime(frame['day'], format='%Y-%m-%d') frame = frame.resample('W-Mon', on=grpByCol)['energy_sum'].sum( ).reset_index().sort_values(by=grpByCol) frame.columns = ['week', 'total_consumption'] frame.to_csv( 'C:\\Users\\Tony\\Downloads\\daily_dataset\\summary\\weekly_dataset_summary.csv' ) print('completed') aggSumFn(path, 'day') <|reserved_special_token_1|> <|reserved_special_token_0|> import glob import pandas as pd path = 'C:\\Users\\Tony\\Downloads\\daily_dataset\\daily_dataset' frame = pd.DataFrame() list_ = [] def aggSumFn(path, grpByCol): allFiles = glob.glob(path + '/*.csv') for file_ in allFiles: df = pd.read_csv(file_, index_col=None, header=0) list_.append(df) frame = pd.concat(list_) frame[grpByCol] = pd.to_datetime(frame['day'], format='%Y-%m-%d') frame = frame.resample('W-Mon', on=grpByCol)['energy_sum'].sum( ).reset_index().sort_values(by=grpByCol) frame.columns = ['week', 'total_consumption'] frame.to_csv( 'C:\\Users\\Tony\\Downloads\\daily_dataset\\summary\\weekly_dataset_summary.csv' ) print('completed') aggSumFn(path, 'day') <|reserved_special_token_1|> # -*- coding: utf-8 -*- """ Created on Thu Nov 15 06:50:48 2018 @author: Tony """ import glob import pandas as pd path =r'C:\Users\Tony\Downloads\daily_dataset\daily_dataset' # use your path frame = pd.DataFrame() list_ = [] def aggSumFn(path,grpByCol): allFiles = glob.glob(path + "/*.csv") for file_ in allFiles: df = pd.read_csv(file_,index_col=None, header=0) list_.append(df) frame = pd.concat(list_) frame[grpByCol] = pd.to_datetime(frame['day'], format='%Y-%m-%d') frame=frame.resample('W-Mon', on=grpByCol)['energy_sum'].sum().reset_index().sort_values(by=grpByCol) frame.columns=['week','total_consumption'] frame.to_csv(r'C:\Users\Tony\Downloads\daily_dataset\summary\weekly_dataset_summary.csv') print('completed') aggSumFn(path,'day') #
flexible
{ "blob_id": "252d6b381af09dbafb1d10c188eb154e53213033", "index": 8845, "step-1": "<mask token>\n\n\ndef aggSumFn(path, grpByCol):\n allFiles = glob.glob(path + '/*.csv')\n for file_ in allFiles:\n df = pd.read_csv(file_, index_col=None, header=0)\n list_.append(df)\n frame = pd.concat(list_)\n frame[grpByCol] = pd.to_datetime(frame['day'], format='%Y-%m-%d')\n frame = frame.resample('W-Mon', on=grpByCol)['energy_sum'].sum(\n ).reset_index().sort_values(by=grpByCol)\n frame.columns = ['week', 'total_consumption']\n frame.to_csv(\n 'C:\\\\Users\\\\Tony\\\\Downloads\\\\daily_dataset\\\\summary\\\\weekly_dataset_summary.csv'\n )\n print('completed')\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef aggSumFn(path, grpByCol):\n allFiles = glob.glob(path + '/*.csv')\n for file_ in allFiles:\n df = pd.read_csv(file_, index_col=None, header=0)\n list_.append(df)\n frame = pd.concat(list_)\n frame[grpByCol] = pd.to_datetime(frame['day'], format='%Y-%m-%d')\n frame = frame.resample('W-Mon', on=grpByCol)['energy_sum'].sum(\n ).reset_index().sort_values(by=grpByCol)\n frame.columns = ['week', 'total_consumption']\n frame.to_csv(\n 'C:\\\\Users\\\\Tony\\\\Downloads\\\\daily_dataset\\\\summary\\\\weekly_dataset_summary.csv'\n )\n print('completed')\n\n\naggSumFn(path, 'day')\n", "step-3": "<mask token>\npath = 'C:\\\\Users\\\\Tony\\\\Downloads\\\\daily_dataset\\\\daily_dataset'\nframe = pd.DataFrame()\nlist_ = []\n\n\ndef aggSumFn(path, grpByCol):\n allFiles = glob.glob(path + '/*.csv')\n for file_ in allFiles:\n df = pd.read_csv(file_, index_col=None, header=0)\n list_.append(df)\n frame = pd.concat(list_)\n frame[grpByCol] = pd.to_datetime(frame['day'], format='%Y-%m-%d')\n frame = frame.resample('W-Mon', on=grpByCol)['energy_sum'].sum(\n ).reset_index().sort_values(by=grpByCol)\n frame.columns = ['week', 'total_consumption']\n frame.to_csv(\n 'C:\\\\Users\\\\Tony\\\\Downloads\\\\daily_dataset\\\\summary\\\\weekly_dataset_summary.csv'\n )\n print('completed')\n\n\naggSumFn(path, 'day')\n", "step-4": "<mask token>\nimport glob\nimport pandas as pd\npath = 'C:\\\\Users\\\\Tony\\\\Downloads\\\\daily_dataset\\\\daily_dataset'\nframe = pd.DataFrame()\nlist_ = []\n\n\ndef aggSumFn(path, grpByCol):\n allFiles = glob.glob(path + '/*.csv')\n for file_ in allFiles:\n df = pd.read_csv(file_, index_col=None, header=0)\n list_.append(df)\n frame = pd.concat(list_)\n frame[grpByCol] = pd.to_datetime(frame['day'], format='%Y-%m-%d')\n frame = frame.resample('W-Mon', on=grpByCol)['energy_sum'].sum(\n ).reset_index().sort_values(by=grpByCol)\n frame.columns = ['week', 'total_consumption']\n frame.to_csv(\n 'C:\\\\Users\\\\Tony\\\\Downloads\\\\daily_dataset\\\\summary\\\\weekly_dataset_summary.csv'\n )\n print('completed')\n\n\naggSumFn(path, 'day')\n", "step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Thu Nov 15 06:50:48 2018\r\n\r\n@author: Tony\r\n\"\"\"\r\n\r\nimport glob\r\nimport pandas as pd\r\n\r\npath =r'C:\\Users\\Tony\\Downloads\\daily_dataset\\daily_dataset' # use your path\r\n\r\nframe = pd.DataFrame()\r\nlist_ = []\r\ndef aggSumFn(path,grpByCol):\r\n allFiles = glob.glob(path + \"/*.csv\")\r\n for file_ in allFiles:\r\n df = pd.read_csv(file_,index_col=None, header=0)\r\n \r\n list_.append(df)\r\n \r\n frame = pd.concat(list_)\r\n frame[grpByCol] = pd.to_datetime(frame['day'], format='%Y-%m-%d')\r\n frame=frame.resample('W-Mon', on=grpByCol)['energy_sum'].sum().reset_index().sort_values(by=grpByCol)\r\n frame.columns=['week','total_consumption']\r\n 
frame.to_csv(r'C:\\Users\\Tony\\Downloads\\daily_dataset\\summary\\weekly_dataset_summary.csv')\r\n print('completed')\r\n\r\naggSumFn(path,'day')\r\n#\r\n\r\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> def build_response(session_attributes, speechlet_response): """ Build the full response JSON from the speechlet response """ return {'version': '1.0', 'sessionAttributes': session_attributes, 'response': speechlet_response} def get_welcome_response(): welcome_response = ( 'Welcome to the L.A. Board of Supervisors Skill. You can say, give me recent motions or give me the latest agenda.' ) print(welcome_response) session_attributes = {} card_title = 'Hello' speech_output = welcome_response reprompt_text = ( "I'm sorry - I didn't understand. You should say give me latest motions." ) should_end_session = True return build_response(session_attributes, build_speechlet_response( card_title, speech_output, reprompt_text, should_end_session)) <|reserved_special_token_0|> def get_next_motions_response(session): print('Initial session attributes are ' + str(session['attributes'])) if 'result_number' not in session['attributes']: print('Second session attributes are ' + str(session['attributes'])) session['attributes']['result_number'] = 1 print('Value is ' + str(session['attributes']['result_number'])) print('Final session attributes are ' + str(session['attributes'])) result_number = session['attributes']['result_number'] host = 'http://api.lacounty.gov' url = (host + '/searchAPIWeb/searchapi?type=bcsearch&database=OMD&SearchTerm=1&title=1&content=1&PStart=' + str(result_number) + '&PEnd=' + str(result_number) + '&_=1509121047612') response = requests.get(url) data = json.loads(response.text) alexaResponse = '' if result_number == 1: alexaResponse = ( 'Here is the latest correspondence before the L.A. board (both upcoming and past): ' ) alexaResponse += str(result_number ) + ': From the ' + replace_with_longform_name(data['results'][0][ 'department']) + ', ' alexaResponse += 'on ' + data['results'][0]['date'] + ', ' alexaResponse += data['results'][0]['title'] + '... ' alexaResponse += 'You can say text me link or next item' session['attributes']['result_number'] = result_number + 1 session['attributes']['result_url'] = data['results'][0]['url'] reprompt_text = ( "I'm sorry - I didn't understand. You should say text me link or next item" ) card_title = 'LA Board Latest Motions Message' greeting_string = alexaResponse return build_response(session['attributes'], build_speechlet_response( card_title, greeting_string, reprompt_text, False)) <|reserved_special_token_0|> def text_url_to_number(session, intent): if 'phone_number' not in session['attributes'] and 'value' not in intent[ 'slots']['phoneNumber']: greeting_string = ( 'Say your nine digit phone number, including the area code') card_title = "What's your phone number?" reprompt_text = ( "I didn't understand. Please say your nine digit mobile phone number." ) return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, False)) else: number = intent['slots']['phoneNumber']['value'] if 'result_url' not in session['attributes']: session['attributes']['result_url' ] = 'http://portal.lacounty.gov/wps/portal/omd' url = session['attributes']['result_url'] session['attributes']['phone_number'] = number sns_client = boto3.client('sns') response = sns_client.publish(PhoneNumber='1' + str(number), Message= "Thank you for using the LA Board of Supervisors Skill. Here's your URL: " + url) greeting_string = 'Sent text message to ' + ' '.join(number) card_title = 'Sent motion URL via text message' reprompt_text = ( "I didn't understand. 
Please say your nine digit mobile phone number." ) return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, True)) <|reserved_special_token_0|> def handle_session_end_request(): card_title = 'County of LA Board of Supervisors Skill- Thanks' speech_output = ( 'Thank you for using the County of LA Board of Supervisors Skill. See you next time!' ) should_end_session = True return build_response({}, build_speechlet_response(card_title, speech_output, None, should_end_session)) def on_launch(launch_request, session): """ Called when the user launches the skill without specifying what they want """ print('on_launch requestId=' + launch_request['requestId'] + ', sessionId=' + session['sessionId']) return get_welcome_response() def on_intent(intent_request, session): """ Called when the user specifies an intent for this skill """ print('on_intent requestId=' + intent_request['requestId'] + ', sessionId=' + session['sessionId']) intent = intent_request['intent'] intent_name = intent_request['intent']['name'] if intent_name == 'GetLatestAgendaIntent': return get_next_agenda_response(session) elif intent_name == 'GetLatestMotionsIntent': return get_next_motions_response(session) elif intent_name == 'GetNextMotionIntent': return get_next_motions_response(session) elif intent_name == 'SetPhoneNumberIntent': return text_url_to_number(session, intent) elif intent_name == 'AMAZON.HelpIntent': return get_welcome_response() elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent': return handle_session_end_request() else: raise ValueError('Invalid intent') def lambda_handler(event, context): print('Test!') print('event.session.application.applicationId=' + event['session'][ 'application']['applicationId']) if event['session']['new']: on_session_started({'requestId': event['request']['requestId']}, event['session']) if event['request']['type'] == 'LaunchRequest': return on_launch(event['request'], event['session']) elif event['request']['type'] == 'IntentRequest': return on_intent(event['request'], event['session']) elif event['request']['type'] == 'SessionEndedRequest': return handle_session_end_request() <|reserved_special_token_1|> <|reserved_special_token_0|> def build_response(session_attributes, speechlet_response): """ Build the full response JSON from the speechlet response """ return {'version': '1.0', 'sessionAttributes': session_attributes, 'response': speechlet_response} def get_welcome_response(): welcome_response = ( 'Welcome to the L.A. Board of Supervisors Skill. You can say, give me recent motions or give me the latest agenda.' ) print(welcome_response) session_attributes = {} card_title = 'Hello' speech_output = welcome_response reprompt_text = ( "I'm sorry - I didn't understand. You should say give me latest motions." 
) should_end_session = True return build_response(session_attributes, build_speechlet_response( card_title, speech_output, reprompt_text, should_end_session)) def replace_with_longform_name(name): if name == 'LASD': longformName = "Los Angeles County Sheriff's Department" elif name == 'DMH': longformName = 'Department of Mental Health' else: longformName = name return longformName def get_next_motions_response(session): print('Initial session attributes are ' + str(session['attributes'])) if 'result_number' not in session['attributes']: print('Second session attributes are ' + str(session['attributes'])) session['attributes']['result_number'] = 1 print('Value is ' + str(session['attributes']['result_number'])) print('Final session attributes are ' + str(session['attributes'])) result_number = session['attributes']['result_number'] host = 'http://api.lacounty.gov' url = (host + '/searchAPIWeb/searchapi?type=bcsearch&database=OMD&SearchTerm=1&title=1&content=1&PStart=' + str(result_number) + '&PEnd=' + str(result_number) + '&_=1509121047612') response = requests.get(url) data = json.loads(response.text) alexaResponse = '' if result_number == 1: alexaResponse = ( 'Here is the latest correspondence before the L.A. board (both upcoming and past): ' ) alexaResponse += str(result_number ) + ': From the ' + replace_with_longform_name(data['results'][0][ 'department']) + ', ' alexaResponse += 'on ' + data['results'][0]['date'] + ', ' alexaResponse += data['results'][0]['title'] + '... ' alexaResponse += 'You can say text me link or next item' session['attributes']['result_number'] = result_number + 1 session['attributes']['result_url'] = data['results'][0]['url'] reprompt_text = ( "I'm sorry - I didn't understand. You should say text me link or next item" ) card_title = 'LA Board Latest Motions Message' greeting_string = alexaResponse return build_response(session['attributes'], build_speechlet_response( card_title, greeting_string, reprompt_text, False)) def get_next_agenda_response(session): print('Initial session attributes are ' + str(session['attributes'])) host = 'http://bos.lacounty.gov/Board-Meeting/Board-Agendas' url = host page = parse(url) nodes = page.xpath("//div[a[text()='View Agenda']]") latest_agenda_node = nodes[0] headline = latest_agenda_node.find('ul').xpath('string()').strip() print(headline) agenda_url = latest_agenda_node.find('a[@href]').attrib['href'] print('http://bos.lacounty.gov' + agenda_url) agenda_heading = headline session['attributes']['result_url' ] = 'http://bos.lacounty.gov' + agenda_url card_title = 'Agenda' greeting_string = ('I have a link for the ' + agenda_heading + ". Say text me and I'll send it to you.") reprompt = 'Say text me to receive a link to the agenda.' return build_response(session['attributes'], build_speechlet_response( card_title, greeting_string, reprompt, False)) def text_url_to_number(session, intent): if 'phone_number' not in session['attributes'] and 'value' not in intent[ 'slots']['phoneNumber']: greeting_string = ( 'Say your nine digit phone number, including the area code') card_title = "What's your phone number?" reprompt_text = ( "I didn't understand. Please say your nine digit mobile phone number." 
) return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, False)) else: number = intent['slots']['phoneNumber']['value'] if 'result_url' not in session['attributes']: session['attributes']['result_url' ] = 'http://portal.lacounty.gov/wps/portal/omd' url = session['attributes']['result_url'] session['attributes']['phone_number'] = number sns_client = boto3.client('sns') response = sns_client.publish(PhoneNumber='1' + str(number), Message= "Thank you for using the LA Board of Supervisors Skill. Here's your URL: " + url) greeting_string = 'Sent text message to ' + ' '.join(number) card_title = 'Sent motion URL via text message' reprompt_text = ( "I didn't understand. Please say your nine digit mobile phone number." ) return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, True)) def on_session_started(session_started_request, session): """ Called when the session starts """ session['attributes'] = {} print('on_session_started requestId=' + session_started_request[ 'requestId'] + ', sessionId=' + session['sessionId']) def handle_session_end_request(): card_title = 'County of LA Board of Supervisors Skill- Thanks' speech_output = ( 'Thank you for using the County of LA Board of Supervisors Skill. See you next time!' ) should_end_session = True return build_response({}, build_speechlet_response(card_title, speech_output, None, should_end_session)) def on_launch(launch_request, session): """ Called when the user launches the skill without specifying what they want """ print('on_launch requestId=' + launch_request['requestId'] + ', sessionId=' + session['sessionId']) return get_welcome_response() def on_intent(intent_request, session): """ Called when the user specifies an intent for this skill """ print('on_intent requestId=' + intent_request['requestId'] + ', sessionId=' + session['sessionId']) intent = intent_request['intent'] intent_name = intent_request['intent']['name'] if intent_name == 'GetLatestAgendaIntent': return get_next_agenda_response(session) elif intent_name == 'GetLatestMotionsIntent': return get_next_motions_response(session) elif intent_name == 'GetNextMotionIntent': return get_next_motions_response(session) elif intent_name == 'SetPhoneNumberIntent': return text_url_to_number(session, intent) elif intent_name == 'AMAZON.HelpIntent': return get_welcome_response() elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent': return handle_session_end_request() else: raise ValueError('Invalid intent') def lambda_handler(event, context): print('Test!') print('event.session.application.applicationId=' + event['session'][ 'application']['applicationId']) if event['session']['new']: on_session_started({'requestId': event['request']['requestId']}, event['session']) if event['request']['type'] == 'LaunchRequest': return on_launch(event['request'], event['session']) elif event['request']['type'] == 'IntentRequest': return on_intent(event['request'], event['session']) elif event['request']['type'] == 'SessionEndedRequest': return handle_session_end_request() <|reserved_special_token_1|> <|reserved_special_token_0|> CardTitlePrefix = 'Greeting' def build_speechlet_response(title, output, reprompt_text, should_end_session): """ Build a speechlet JSON representation of the title, output text, reprompt text & end of session """ return {'outputSpeech': {'type': 'PlainText', 'text': output}, 'card': {'type': 'Simple', 'title': CardTitlePrefix + ' - ' + title, 
'content': output}, 'reprompt': {'outputSpeech': {'type': 'PlainText', 'text': reprompt_text}}, 'shouldEndSession': should_end_session} def build_response(session_attributes, speechlet_response): """ Build the full response JSON from the speechlet response """ return {'version': '1.0', 'sessionAttributes': session_attributes, 'response': speechlet_response} def get_welcome_response(): welcome_response = ( 'Welcome to the L.A. Board of Supervisors Skill. You can say, give me recent motions or give me the latest agenda.' ) print(welcome_response) session_attributes = {} card_title = 'Hello' speech_output = welcome_response reprompt_text = ( "I'm sorry - I didn't understand. You should say give me latest motions." ) should_end_session = True return build_response(session_attributes, build_speechlet_response( card_title, speech_output, reprompt_text, should_end_session)) def replace_with_longform_name(name): if name == 'LASD': longformName = "Los Angeles County Sheriff's Department" elif name == 'DMH': longformName = 'Department of Mental Health' else: longformName = name return longformName def get_next_motions_response(session): print('Initial session attributes are ' + str(session['attributes'])) if 'result_number' not in session['attributes']: print('Second session attributes are ' + str(session['attributes'])) session['attributes']['result_number'] = 1 print('Value is ' + str(session['attributes']['result_number'])) print('Final session attributes are ' + str(session['attributes'])) result_number = session['attributes']['result_number'] host = 'http://api.lacounty.gov' url = (host + '/searchAPIWeb/searchapi?type=bcsearch&database=OMD&SearchTerm=1&title=1&content=1&PStart=' + str(result_number) + '&PEnd=' + str(result_number) + '&_=1509121047612') response = requests.get(url) data = json.loads(response.text) alexaResponse = '' if result_number == 1: alexaResponse = ( 'Here is the latest correspondence before the L.A. board (both upcoming and past): ' ) alexaResponse += str(result_number ) + ': From the ' + replace_with_longform_name(data['results'][0][ 'department']) + ', ' alexaResponse += 'on ' + data['results'][0]['date'] + ', ' alexaResponse += data['results'][0]['title'] + '... ' alexaResponse += 'You can say text me link or next item' session['attributes']['result_number'] = result_number + 1 session['attributes']['result_url'] = data['results'][0]['url'] reprompt_text = ( "I'm sorry - I didn't understand. You should say text me link or next item" ) card_title = 'LA Board Latest Motions Message' greeting_string = alexaResponse return build_response(session['attributes'], build_speechlet_response( card_title, greeting_string, reprompt_text, False)) def get_next_agenda_response(session): print('Initial session attributes are ' + str(session['attributes'])) host = 'http://bos.lacounty.gov/Board-Meeting/Board-Agendas' url = host page = parse(url) nodes = page.xpath("//div[a[text()='View Agenda']]") latest_agenda_node = nodes[0] headline = latest_agenda_node.find('ul').xpath('string()').strip() print(headline) agenda_url = latest_agenda_node.find('a[@href]').attrib['href'] print('http://bos.lacounty.gov' + agenda_url) agenda_heading = headline session['attributes']['result_url' ] = 'http://bos.lacounty.gov' + agenda_url card_title = 'Agenda' greeting_string = ('I have a link for the ' + agenda_heading + ". Say text me and I'll send it to you.") reprompt = 'Say text me to receive a link to the agenda.' 
return build_response(session['attributes'], build_speechlet_response( card_title, greeting_string, reprompt, False)) def text_url_to_number(session, intent): if 'phone_number' not in session['attributes'] and 'value' not in intent[ 'slots']['phoneNumber']: greeting_string = ( 'Say your nine digit phone number, including the area code') card_title = "What's your phone number?" reprompt_text = ( "I didn't understand. Please say your nine digit mobile phone number." ) return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, False)) else: number = intent['slots']['phoneNumber']['value'] if 'result_url' not in session['attributes']: session['attributes']['result_url' ] = 'http://portal.lacounty.gov/wps/portal/omd' url = session['attributes']['result_url'] session['attributes']['phone_number'] = number sns_client = boto3.client('sns') response = sns_client.publish(PhoneNumber='1' + str(number), Message= "Thank you for using the LA Board of Supervisors Skill. Here's your URL: " + url) greeting_string = 'Sent text message to ' + ' '.join(number) card_title = 'Sent motion URL via text message' reprompt_text = ( "I didn't understand. Please say your nine digit mobile phone number." ) return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, True)) def on_session_started(session_started_request, session): """ Called when the session starts """ session['attributes'] = {} print('on_session_started requestId=' + session_started_request[ 'requestId'] + ', sessionId=' + session['sessionId']) def handle_session_end_request(): card_title = 'County of LA Board of Supervisors Skill- Thanks' speech_output = ( 'Thank you for using the County of LA Board of Supervisors Skill. See you next time!' 
) should_end_session = True return build_response({}, build_speechlet_response(card_title, speech_output, None, should_end_session)) def on_launch(launch_request, session): """ Called when the user launches the skill without specifying what they want """ print('on_launch requestId=' + launch_request['requestId'] + ', sessionId=' + session['sessionId']) return get_welcome_response() def on_intent(intent_request, session): """ Called when the user specifies an intent for this skill """ print('on_intent requestId=' + intent_request['requestId'] + ', sessionId=' + session['sessionId']) intent = intent_request['intent'] intent_name = intent_request['intent']['name'] if intent_name == 'GetLatestAgendaIntent': return get_next_agenda_response(session) elif intent_name == 'GetLatestMotionsIntent': return get_next_motions_response(session) elif intent_name == 'GetNextMotionIntent': return get_next_motions_response(session) elif intent_name == 'SetPhoneNumberIntent': return text_url_to_number(session, intent) elif intent_name == 'AMAZON.HelpIntent': return get_welcome_response() elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent': return handle_session_end_request() else: raise ValueError('Invalid intent') def lambda_handler(event, context): print('Test!') print('event.session.application.applicationId=' + event['session'][ 'application']['applicationId']) if event['session']['new']: on_session_started({'requestId': event['request']['requestId']}, event['session']) if event['request']['type'] == 'LaunchRequest': return on_launch(event['request'], event['session']) elif event['request']['type'] == 'IntentRequest': return on_intent(event['request'], event['session']) elif event['request']['type'] == 'SessionEndedRequest': return handle_session_end_request() <|reserved_special_token_1|> import requests import json import boto3 from lxml.html import parse CardTitlePrefix = 'Greeting' def build_speechlet_response(title, output, reprompt_text, should_end_session): """ Build a speechlet JSON representation of the title, output text, reprompt text & end of session """ return {'outputSpeech': {'type': 'PlainText', 'text': output}, 'card': {'type': 'Simple', 'title': CardTitlePrefix + ' - ' + title, 'content': output}, 'reprompt': {'outputSpeech': {'type': 'PlainText', 'text': reprompt_text}}, 'shouldEndSession': should_end_session} def build_response(session_attributes, speechlet_response): """ Build the full response JSON from the speechlet response """ return {'version': '1.0', 'sessionAttributes': session_attributes, 'response': speechlet_response} def get_welcome_response(): welcome_response = ( 'Welcome to the L.A. Board of Supervisors Skill. You can say, give me recent motions or give me the latest agenda.' ) print(welcome_response) session_attributes = {} card_title = 'Hello' speech_output = welcome_response reprompt_text = ( "I'm sorry - I didn't understand. You should say give me latest motions." 
) should_end_session = True return build_response(session_attributes, build_speechlet_response( card_title, speech_output, reprompt_text, should_end_session)) def replace_with_longform_name(name): if name == 'LASD': longformName = "Los Angeles County Sheriff's Department" elif name == 'DMH': longformName = 'Department of Mental Health' else: longformName = name return longformName def get_next_motions_response(session): print('Initial session attributes are ' + str(session['attributes'])) if 'result_number' not in session['attributes']: print('Second session attributes are ' + str(session['attributes'])) session['attributes']['result_number'] = 1 print('Value is ' + str(session['attributes']['result_number'])) print('Final session attributes are ' + str(session['attributes'])) result_number = session['attributes']['result_number'] host = 'http://api.lacounty.gov' url = (host + '/searchAPIWeb/searchapi?type=bcsearch&database=OMD&SearchTerm=1&title=1&content=1&PStart=' + str(result_number) + '&PEnd=' + str(result_number) + '&_=1509121047612') response = requests.get(url) data = json.loads(response.text) alexaResponse = '' if result_number == 1: alexaResponse = ( 'Here is the latest correspondence before the L.A. board (both upcoming and past): ' ) alexaResponse += str(result_number ) + ': From the ' + replace_with_longform_name(data['results'][0][ 'department']) + ', ' alexaResponse += 'on ' + data['results'][0]['date'] + ', ' alexaResponse += data['results'][0]['title'] + '... ' alexaResponse += 'You can say text me link or next item' session['attributes']['result_number'] = result_number + 1 session['attributes']['result_url'] = data['results'][0]['url'] reprompt_text = ( "I'm sorry - I didn't understand. You should say text me link or next item" ) card_title = 'LA Board Latest Motions Message' greeting_string = alexaResponse return build_response(session['attributes'], build_speechlet_response( card_title, greeting_string, reprompt_text, False)) def get_next_agenda_response(session): print('Initial session attributes are ' + str(session['attributes'])) host = 'http://bos.lacounty.gov/Board-Meeting/Board-Agendas' url = host page = parse(url) nodes = page.xpath("//div[a[text()='View Agenda']]") latest_agenda_node = nodes[0] headline = latest_agenda_node.find('ul').xpath('string()').strip() print(headline) agenda_url = latest_agenda_node.find('a[@href]').attrib['href'] print('http://bos.lacounty.gov' + agenda_url) agenda_heading = headline session['attributes']['result_url' ] = 'http://bos.lacounty.gov' + agenda_url card_title = 'Agenda' greeting_string = ('I have a link for the ' + agenda_heading + ". Say text me and I'll send it to you.") reprompt = 'Say text me to receive a link to the agenda.' return build_response(session['attributes'], build_speechlet_response( card_title, greeting_string, reprompt, False)) def text_url_to_number(session, intent): if 'phone_number' not in session['attributes'] and 'value' not in intent[ 'slots']['phoneNumber']: greeting_string = ( 'Say your nine digit phone number, including the area code') card_title = "What's your phone number?" reprompt_text = ( "I didn't understand. Please say your nine digit mobile phone number." 
) return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, False)) else: number = intent['slots']['phoneNumber']['value'] if 'result_url' not in session['attributes']: session['attributes']['result_url' ] = 'http://portal.lacounty.gov/wps/portal/omd' url = session['attributes']['result_url'] session['attributes']['phone_number'] = number sns_client = boto3.client('sns') response = sns_client.publish(PhoneNumber='1' + str(number), Message= "Thank you for using the LA Board of Supervisors Skill. Here's your URL: " + url) greeting_string = 'Sent text message to ' + ' '.join(number) card_title = 'Sent motion URL via text message' reprompt_text = ( "I didn't understand. Please say your nine digit mobile phone number." ) return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, True)) def on_session_started(session_started_request, session): """ Called when the session starts """ session['attributes'] = {} print('on_session_started requestId=' + session_started_request[ 'requestId'] + ', sessionId=' + session['sessionId']) def handle_session_end_request(): card_title = 'County of LA Board of Supervisors Skill- Thanks' speech_output = ( 'Thank you for using the County of LA Board of Supervisors Skill. See you next time!' ) should_end_session = True return build_response({}, build_speechlet_response(card_title, speech_output, None, should_end_session)) def on_launch(launch_request, session): """ Called when the user launches the skill without specifying what they want """ print('on_launch requestId=' + launch_request['requestId'] + ', sessionId=' + session['sessionId']) return get_welcome_response() def on_intent(intent_request, session): """ Called when the user specifies an intent for this skill """ print('on_intent requestId=' + intent_request['requestId'] + ', sessionId=' + session['sessionId']) intent = intent_request['intent'] intent_name = intent_request['intent']['name'] if intent_name == 'GetLatestAgendaIntent': return get_next_agenda_response(session) elif intent_name == 'GetLatestMotionsIntent': return get_next_motions_response(session) elif intent_name == 'GetNextMotionIntent': return get_next_motions_response(session) elif intent_name == 'SetPhoneNumberIntent': return text_url_to_number(session, intent) elif intent_name == 'AMAZON.HelpIntent': return get_welcome_response() elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent': return handle_session_end_request() else: raise ValueError('Invalid intent') def lambda_handler(event, context): print('Test!') print('event.session.application.applicationId=' + event['session'][ 'application']['applicationId']) if event['session']['new']: on_session_started({'requestId': event['request']['requestId']}, event['session']) if event['request']['type'] == 'LaunchRequest': return on_launch(event['request'], event['session']) elif event['request']['type'] == 'IntentRequest': return on_intent(event['request'], event['session']) elif event['request']['type'] == 'SessionEndedRequest': return handle_session_end_request() <|reserved_special_token_1|> # -*- coding: utf-8 -*- import requests import json import boto3 from lxml.html import parse CardTitlePrefix = "Greeting" def build_speechlet_response(title, output, reprompt_text, should_end_session): """ Build a speechlet JSON representation of the title, output text, reprompt text & end of session """ return { 'outputSpeech': { 'type': 'PlainText', 'text': output }, 
'card': { 'type': 'Simple', 'title': CardTitlePrefix + " - " + title, 'content': output }, 'reprompt': { 'outputSpeech': { 'type': 'PlainText', 'text': reprompt_text } }, 'shouldEndSession': should_end_session } def build_response(session_attributes, speechlet_response): """ Build the full response JSON from the speechlet response """ return { 'version': '1.0', 'sessionAttributes': session_attributes, 'response': speechlet_response } def get_welcome_response(): welcome_response= "Welcome to the L.A. Board of Supervisors Skill. You can say, give me recent motions or give me the latest agenda." print(welcome_response); session_attributes = {} card_title = "Hello" speech_output = welcome_response; # If the user either does not reply to the welcome message or says something # that is not understood, they will be prompted again with this text. reprompt_text = "I'm sorry - I didn't understand. You should say give me latest motions." should_end_session = True return build_response(session_attributes, build_speechlet_response(card_title, speech_output, reprompt_text, should_end_session)) def replace_with_longform_name(name): if name == "LASD": longformName = "Los Angeles County Sheriff's Department" elif name == "DMH": longformName = "Department of Mental Health" else: longformName = name; return longformName; def get_next_motions_response(session): print("Initial session attributes are "+str(session['attributes'])); if "result_number" not in session['attributes']: print("Second session attributes are "+str(session['attributes'])); session['attributes']['result_number'] = 1; print("Value is "+str(session['attributes']['result_number'])); print("Final session attributes are "+str(session['attributes'])) result_number = session['attributes']['result_number']; host = "http://api.lacounty.gov"; url = host + "/searchAPIWeb/searchapi?type=bcsearch&database=OMD&" \ "SearchTerm=1&title=1&content=1&PStart=" + str(result_number) +"&PEnd=" + str(result_number) +"&_=1509121047612" response = requests.get(url); #print(response.text); data = json.loads(response.text) alexaResponse = ""; if(result_number == 1): alexaResponse = "Here is the latest correspondence before the L.A. board (both upcoming and past): " alexaResponse += str(result_number)+": From the "+replace_with_longform_name(data["results"][0]["department"])+ ", " alexaResponse += "on "+data["results"][0]["date"]+", " alexaResponse += data["results"][0]["title"]+"... " alexaResponse += "You can say text me link or next item" session['attributes']['result_number'] = result_number + 1; session['attributes']['result_url'] = data["results"][0]["url"]; #text_url_to_number(session); reprompt_text = "I'm sorry - I didn't understand. 
You should say text me link or next item" card_title = "LA Board Latest Motions Message"; greeting_string = alexaResponse; return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, False)) def get_next_agenda_response(session): print("Initial session attributes are "+str(session['attributes'])); host = "http://bos.lacounty.gov/Board-Meeting/Board-Agendas"; url = host; page = parse(url) nodes = page.xpath("//div[a[text()='View Agenda']]"); latest_agenda_node = nodes[0]; headline = latest_agenda_node.find("ul").xpath("string()").strip(); print(headline); agenda_url = latest_agenda_node.find("a[@href]").attrib['href']; print("http://bos.lacounty.gov"+agenda_url) agenda_heading = headline; #session['attributes']['result_url'] session['attributes']['result_url'] = "http://bos.lacounty.gov"+agenda_url; card_title = "Agenda"; greeting_string = "I have a link for the "+agenda_heading+". Say text me and I'll send it to you."; reprompt = "Say text me to receive a link to the agenda." return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt, False)) def text_url_to_number(session, intent): if "phone_number" not in session['attributes'] and "value" not in intent['slots']['phoneNumber']: greeting_string = "Say your nine digit phone number, including the area code"; card_title = "What's your phone number?"; reprompt_text = "I didn't understand. Please say your nine digit mobile phone number." return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, False)) else: number = intent['slots']['phoneNumber']['value']; if "result_url" not in session['attributes']: session['attributes']['result_url'] = 'http://portal.lacounty.gov/wps/portal/omd'; url = session['attributes']['result_url']; session['attributes']['phone_number'] = number; sns_client = boto3.client('sns') response = sns_client.publish( PhoneNumber='1'+str(number), Message="Thank you for using the LA Board of Supervisors Skill. Here's your URL: "+url ) greeting_string = "Sent text message to "+ " ".join(number); card_title = "Sent motion URL via text message"; reprompt_text = "I didn't understand. Please say your nine digit mobile phone number." return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, True)) def on_session_started(session_started_request, session): """ Called when the session starts """ #session.attributes['result_number'] = 1 session['attributes'] = {} print("on_session_started requestId=" + session_started_request['requestId'] + ", sessionId=" + session['sessionId']) def handle_session_end_request(): card_title = "County of LA Board of Supervisors Skill- Thanks" speech_output = "Thank you for using the County of LA Board of Supervisors Skill. See you next time!" 
should_end_session = True return build_response({}, build_speechlet_response(card_title, speech_output, None, should_end_session)); def on_launch(launch_request, session): """ Called when the user launches the skill without specifying what they want """ print("on_launch requestId=" + launch_request['requestId'] + ", sessionId=" + session['sessionId']) # Dispatch to your skill's launch return get_welcome_response() def on_intent(intent_request, session): """ Called when the user specifies an intent for this skill """ print("on_intent requestId=" + intent_request['requestId'] + ", sessionId=" + session['sessionId']) intent = intent_request['intent'] intent_name = intent_request['intent']['name'] # Dispatch to your skill's intent handlers if intent_name == "GetLatestAgendaIntent": return get_next_agenda_response(session) elif intent_name == "GetLatestMotionsIntent": return get_next_motions_response(session) elif intent_name == "GetNextMotionIntent": return get_next_motions_response(session) elif intent_name == "SetPhoneNumberIntent": return text_url_to_number(session, intent); elif intent_name == "AMAZON.HelpIntent": return get_welcome_response() elif intent_name == "AMAZON.CancelIntent" or intent_name == "AMAZON.StopIntent": return handle_session_end_request() else: raise ValueError("Invalid intent") def lambda_handler(event, context): print("Test!") print("event.session.application.applicationId=" + event['session']['application']['applicationId']) if event['session']['new']: on_session_started({'requestId': event['request']['requestId']}, event['session']) if event['request']['type'] == "LaunchRequest": return on_launch(event['request'], event['session']) elif event['request']['type'] == "IntentRequest": return on_intent(event['request'], event['session']) elif event['request']['type'] == "SessionEndedRequest": return handle_session_end_request()
flexible
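A minimal sketch of the SMS pattern used by text_url_to_number in the code above: Amazon SNS can send a text directly to an E.164 phone number via publish(), with no topic required. The number and message below are placeholders, and valid AWS credentials are assumed.

import boto3

sns = boto3.client('sns')
sns.publish(
    PhoneNumber='+15551234567',  # placeholder; the skill itself prepends '1' to the spoken digits
    Message='Thanks for using the skill. Here is your URL: http://example.com',
)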
{ "blob_id": "237277e132c8223c6048be9b754516635ab720e2", "index": 8964, "step-1": "<mask token>\n\n\ndef build_response(session_attributes, speechlet_response):\n \"\"\"\n Build the full response JSON from the speechlet response\n \"\"\"\n return {'version': '1.0', 'sessionAttributes': session_attributes,\n 'response': speechlet_response}\n\n\ndef get_welcome_response():\n welcome_response = (\n 'Welcome to the L.A. Board of Supervisors Skill. You can say, give me recent motions or give me the latest agenda.'\n )\n print(welcome_response)\n session_attributes = {}\n card_title = 'Hello'\n speech_output = welcome_response\n reprompt_text = (\n \"I'm sorry - I didn't understand. You should say give me latest motions.\"\n )\n should_end_session = True\n return build_response(session_attributes, build_speechlet_response(\n card_title, speech_output, reprompt_text, should_end_session))\n\n\n<mask token>\n\n\ndef get_next_motions_response(session):\n print('Initial session attributes are ' + str(session['attributes']))\n if 'result_number' not in session['attributes']:\n print('Second session attributes are ' + str(session['attributes']))\n session['attributes']['result_number'] = 1\n print('Value is ' + str(session['attributes']['result_number']))\n print('Final session attributes are ' + str(session['attributes']))\n result_number = session['attributes']['result_number']\n host = 'http://api.lacounty.gov'\n url = (host +\n '/searchAPIWeb/searchapi?type=bcsearch&database=OMD&SearchTerm=1&title=1&content=1&PStart='\n + str(result_number) + '&PEnd=' + str(result_number) +\n '&_=1509121047612')\n response = requests.get(url)\n data = json.loads(response.text)\n alexaResponse = ''\n if result_number == 1:\n alexaResponse = (\n 'Here is the latest correspondence before the L.A. board (both upcoming and past): '\n )\n alexaResponse += str(result_number\n ) + ': From the ' + replace_with_longform_name(data['results'][0][\n 'department']) + ', '\n alexaResponse += 'on ' + data['results'][0]['date'] + ', '\n alexaResponse += data['results'][0]['title'] + '... '\n alexaResponse += 'You can say text me link or next item'\n session['attributes']['result_number'] = result_number + 1\n session['attributes']['result_url'] = data['results'][0]['url']\n reprompt_text = (\n \"I'm sorry - I didn't understand. You should say text me link or next item\"\n )\n card_title = 'LA Board Latest Motions Message'\n greeting_string = alexaResponse\n return build_response(session['attributes'], build_speechlet_response(\n card_title, greeting_string, reprompt_text, False))\n\n\n<mask token>\n\n\ndef text_url_to_number(session, intent):\n if 'phone_number' not in session['attributes'] and 'value' not in intent[\n 'slots']['phoneNumber']:\n greeting_string = (\n 'Say your nine digit phone number, including the area code')\n card_title = \"What's your phone number?\"\n reprompt_text = (\n \"I didn't understand. 
Please say your nine digit mobile phone number.\"\n )\n return build_response(session['attributes'],\n build_speechlet_response(card_title, greeting_string,\n reprompt_text, False))\n else:\n number = intent['slots']['phoneNumber']['value']\n if 'result_url' not in session['attributes']:\n session['attributes']['result_url'\n ] = 'http://portal.lacounty.gov/wps/portal/omd'\n url = session['attributes']['result_url']\n session['attributes']['phone_number'] = number\n sns_client = boto3.client('sns')\n response = sns_client.publish(PhoneNumber='1' + str(number),\n Message=\n \"Thank you for using the LA Board of Supervisors Skill. Here's your URL: \"\n + url)\n greeting_string = 'Sent text message to ' + ' '.join(number)\n card_title = 'Sent motion URL via text message'\n reprompt_text = (\n \"I didn't understand. Please say your nine digit mobile phone number.\"\n )\n return build_response(session['attributes'],\n build_speechlet_response(card_title, greeting_string,\n reprompt_text, True))\n\n\n<mask token>\n\n\ndef handle_session_end_request():\n card_title = 'County of LA Board of Supervisors Skill- Thanks'\n speech_output = (\n 'Thank you for using the County of LA Board of Supervisors Skill. See you next time!'\n )\n should_end_session = True\n return build_response({}, build_speechlet_response(card_title,\n speech_output, None, should_end_session))\n\n\ndef on_launch(launch_request, session):\n \"\"\" Called when the user launches the skill without specifying what they want \"\"\"\n print('on_launch requestId=' + launch_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n return get_welcome_response()\n\n\ndef on_intent(intent_request, session):\n \"\"\" Called when the user specifies an intent for this skill \"\"\"\n print('on_intent requestId=' + intent_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n intent = intent_request['intent']\n intent_name = intent_request['intent']['name']\n if intent_name == 'GetLatestAgendaIntent':\n return get_next_agenda_response(session)\n elif intent_name == 'GetLatestMotionsIntent':\n return get_next_motions_response(session)\n elif intent_name == 'GetNextMotionIntent':\n return get_next_motions_response(session)\n elif intent_name == 'SetPhoneNumberIntent':\n return text_url_to_number(session, intent)\n elif intent_name == 'AMAZON.HelpIntent':\n return get_welcome_response()\n elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent':\n return handle_session_end_request()\n else:\n raise ValueError('Invalid intent')\n\n\ndef lambda_handler(event, context):\n print('Test!')\n print('event.session.application.applicationId=' + event['session'][\n 'application']['applicationId'])\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n if event['request']['type'] == 'LaunchRequest':\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == 'IntentRequest':\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == 'SessionEndedRequest':\n return handle_session_end_request()\n", "step-2": "<mask token>\n\n\ndef build_response(session_attributes, speechlet_response):\n \"\"\"\n Build the full response JSON from the speechlet response\n \"\"\"\n return {'version': '1.0', 'sessionAttributes': session_attributes,\n 'response': speechlet_response}\n\n\ndef get_welcome_response():\n welcome_response = (\n 'Welcome to the L.A. Board of Supervisors Skill. 
You can say, give me recent motions or give me the latest agenda.'\n )\n print(welcome_response)\n session_attributes = {}\n card_title = 'Hello'\n speech_output = welcome_response\n reprompt_text = (\n \"I'm sorry - I didn't understand. You should say give me latest motions.\"\n )\n should_end_session = True\n return build_response(session_attributes, build_speechlet_response(\n card_title, speech_output, reprompt_text, should_end_session))\n\n\ndef replace_with_longform_name(name):\n if name == 'LASD':\n longformName = \"Los Angeles County Sheriff's Department\"\n elif name == 'DMH':\n longformName = 'Department of Mental Health'\n else:\n longformName = name\n return longformName\n\n\ndef get_next_motions_response(session):\n print('Initial session attributes are ' + str(session['attributes']))\n if 'result_number' not in session['attributes']:\n print('Second session attributes are ' + str(session['attributes']))\n session['attributes']['result_number'] = 1\n print('Value is ' + str(session['attributes']['result_number']))\n print('Final session attributes are ' + str(session['attributes']))\n result_number = session['attributes']['result_number']\n host = 'http://api.lacounty.gov'\n url = (host +\n '/searchAPIWeb/searchapi?type=bcsearch&database=OMD&SearchTerm=1&title=1&content=1&PStart='\n + str(result_number) + '&PEnd=' + str(result_number) +\n '&_=1509121047612')\n response = requests.get(url)\n data = json.loads(response.text)\n alexaResponse = ''\n if result_number == 1:\n alexaResponse = (\n 'Here is the latest correspondence before the L.A. board (both upcoming and past): '\n )\n alexaResponse += str(result_number\n ) + ': From the ' + replace_with_longform_name(data['results'][0][\n 'department']) + ', '\n alexaResponse += 'on ' + data['results'][0]['date'] + ', '\n alexaResponse += data['results'][0]['title'] + '... '\n alexaResponse += 'You can say text me link or next item'\n session['attributes']['result_number'] = result_number + 1\n session['attributes']['result_url'] = data['results'][0]['url']\n reprompt_text = (\n \"I'm sorry - I didn't understand. You should say text me link or next item\"\n )\n card_title = 'LA Board Latest Motions Message'\n greeting_string = alexaResponse\n return build_response(session['attributes'], build_speechlet_response(\n card_title, greeting_string, reprompt_text, False))\n\n\ndef get_next_agenda_response(session):\n print('Initial session attributes are ' + str(session['attributes']))\n host = 'http://bos.lacounty.gov/Board-Meeting/Board-Agendas'\n url = host\n page = parse(url)\n nodes = page.xpath(\"//div[a[text()='View Agenda']]\")\n latest_agenda_node = nodes[0]\n headline = latest_agenda_node.find('ul').xpath('string()').strip()\n print(headline)\n agenda_url = latest_agenda_node.find('a[@href]').attrib['href']\n print('http://bos.lacounty.gov' + agenda_url)\n agenda_heading = headline\n session['attributes']['result_url'\n ] = 'http://bos.lacounty.gov' + agenda_url\n card_title = 'Agenda'\n greeting_string = ('I have a link for the ' + agenda_heading +\n \". 
Say text me and I'll send it to you.\")\n reprompt = 'Say text me to receive a link to the agenda.'\n return build_response(session['attributes'], build_speechlet_response(\n card_title, greeting_string, reprompt, False))\n\n\ndef text_url_to_number(session, intent):\n if 'phone_number' not in session['attributes'] and 'value' not in intent[\n 'slots']['phoneNumber']:\n greeting_string = (\n 'Say your nine digit phone number, including the area code')\n card_title = \"What's your phone number?\"\n reprompt_text = (\n \"I didn't understand. Please say your nine digit mobile phone number.\"\n )\n return build_response(session['attributes'],\n build_speechlet_response(card_title, greeting_string,\n reprompt_text, False))\n else:\n number = intent['slots']['phoneNumber']['value']\n if 'result_url' not in session['attributes']:\n session['attributes']['result_url'\n ] = 'http://portal.lacounty.gov/wps/portal/omd'\n url = session['attributes']['result_url']\n session['attributes']['phone_number'] = number\n sns_client = boto3.client('sns')\n response = sns_client.publish(PhoneNumber='1' + str(number),\n Message=\n \"Thank you for using the LA Board of Supervisors Skill. Here's your URL: \"\n + url)\n greeting_string = 'Sent text message to ' + ' '.join(number)\n card_title = 'Sent motion URL via text message'\n reprompt_text = (\n \"I didn't understand. Please say your nine digit mobile phone number.\"\n )\n return build_response(session['attributes'],\n build_speechlet_response(card_title, greeting_string,\n reprompt_text, True))\n\n\ndef on_session_started(session_started_request, session):\n \"\"\" Called when the session starts \"\"\"\n session['attributes'] = {}\n print('on_session_started requestId=' + session_started_request[\n 'requestId'] + ', sessionId=' + session['sessionId'])\n\n\ndef handle_session_end_request():\n card_title = 'County of LA Board of Supervisors Skill- Thanks'\n speech_output = (\n 'Thank you for using the County of LA Board of Supervisors Skill. 
See you next time!'\n )\n should_end_session = True\n return build_response({}, build_speechlet_response(card_title,\n speech_output, None, should_end_session))\n\n\ndef on_launch(launch_request, session):\n \"\"\" Called when the user launches the skill without specifying what they want \"\"\"\n print('on_launch requestId=' + launch_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n return get_welcome_response()\n\n\ndef on_intent(intent_request, session):\n \"\"\" Called when the user specifies an intent for this skill \"\"\"\n print('on_intent requestId=' + intent_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n intent = intent_request['intent']\n intent_name = intent_request['intent']['name']\n if intent_name == 'GetLatestAgendaIntent':\n return get_next_agenda_response(session)\n elif intent_name == 'GetLatestMotionsIntent':\n return get_next_motions_response(session)\n elif intent_name == 'GetNextMotionIntent':\n return get_next_motions_response(session)\n elif intent_name == 'SetPhoneNumberIntent':\n return text_url_to_number(session, intent)\n elif intent_name == 'AMAZON.HelpIntent':\n return get_welcome_response()\n elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent':\n return handle_session_end_request()\n else:\n raise ValueError('Invalid intent')\n\n\ndef lambda_handler(event, context):\n print('Test!')\n print('event.session.application.applicationId=' + event['session'][\n 'application']['applicationId'])\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n if event['request']['type'] == 'LaunchRequest':\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == 'IntentRequest':\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == 'SessionEndedRequest':\n return handle_session_end_request()\n", "step-3": "<mask token>\nCardTitlePrefix = 'Greeting'\n\n\ndef build_speechlet_response(title, output, reprompt_text, should_end_session):\n \"\"\"\n Build a speechlet JSON representation of the title, output text, \n reprompt text & end of session\n \"\"\"\n return {'outputSpeech': {'type': 'PlainText', 'text': output}, 'card':\n {'type': 'Simple', 'title': CardTitlePrefix + ' - ' + title,\n 'content': output}, 'reprompt': {'outputSpeech': {'type':\n 'PlainText', 'text': reprompt_text}}, 'shouldEndSession':\n should_end_session}\n\n\ndef build_response(session_attributes, speechlet_response):\n \"\"\"\n Build the full response JSON from the speechlet response\n \"\"\"\n return {'version': '1.0', 'sessionAttributes': session_attributes,\n 'response': speechlet_response}\n\n\ndef get_welcome_response():\n welcome_response = (\n 'Welcome to the L.A. Board of Supervisors Skill. You can say, give me recent motions or give me the latest agenda.'\n )\n print(welcome_response)\n session_attributes = {}\n card_title = 'Hello'\n speech_output = welcome_response\n reprompt_text = (\n \"I'm sorry - I didn't understand. 
You should say give me latest motions.\"\n )\n should_end_session = True\n return build_response(session_attributes, build_speechlet_response(\n card_title, speech_output, reprompt_text, should_end_session))\n\n\ndef replace_with_longform_name(name):\n if name == 'LASD':\n longformName = \"Los Angeles County Sheriff's Department\"\n elif name == 'DMH':\n longformName = 'Department of Mental Health'\n else:\n longformName = name\n return longformName\n\n\ndef get_next_motions_response(session):\n print('Initial session attributes are ' + str(session['attributes']))\n if 'result_number' not in session['attributes']:\n print('Second session attributes are ' + str(session['attributes']))\n session['attributes']['result_number'] = 1\n print('Value is ' + str(session['attributes']['result_number']))\n print('Final session attributes are ' + str(session['attributes']))\n result_number = session['attributes']['result_number']\n host = 'http://api.lacounty.gov'\n url = (host +\n '/searchAPIWeb/searchapi?type=bcsearch&database=OMD&SearchTerm=1&title=1&content=1&PStart='\n + str(result_number) + '&PEnd=' + str(result_number) +\n '&_=1509121047612')\n response = requests.get(url)\n data = json.loads(response.text)\n alexaResponse = ''\n if result_number == 1:\n alexaResponse = (\n 'Here is the latest correspondence before the L.A. board (both upcoming and past): '\n )\n alexaResponse += str(result_number\n ) + ': From the ' + replace_with_longform_name(data['results'][0][\n 'department']) + ', '\n alexaResponse += 'on ' + data['results'][0]['date'] + ', '\n alexaResponse += data['results'][0]['title'] + '... '\n alexaResponse += 'You can say text me link or next item'\n session['attributes']['result_number'] = result_number + 1\n session['attributes']['result_url'] = data['results'][0]['url']\n reprompt_text = (\n \"I'm sorry - I didn't understand. You should say text me link or next item\"\n )\n card_title = 'LA Board Latest Motions Message'\n greeting_string = alexaResponse\n return build_response(session['attributes'], build_speechlet_response(\n card_title, greeting_string, reprompt_text, False))\n\n\ndef get_next_agenda_response(session):\n print('Initial session attributes are ' + str(session['attributes']))\n host = 'http://bos.lacounty.gov/Board-Meeting/Board-Agendas'\n url = host\n page = parse(url)\n nodes = page.xpath(\"//div[a[text()='View Agenda']]\")\n latest_agenda_node = nodes[0]\n headline = latest_agenda_node.find('ul').xpath('string()').strip()\n print(headline)\n agenda_url = latest_agenda_node.find('a[@href]').attrib['href']\n print('http://bos.lacounty.gov' + agenda_url)\n agenda_heading = headline\n session['attributes']['result_url'\n ] = 'http://bos.lacounty.gov' + agenda_url\n card_title = 'Agenda'\n greeting_string = ('I have a link for the ' + agenda_heading +\n \". Say text me and I'll send it to you.\")\n reprompt = 'Say text me to receive a link to the agenda.'\n return build_response(session['attributes'], build_speechlet_response(\n card_title, greeting_string, reprompt, False))\n\n\ndef text_url_to_number(session, intent):\n if 'phone_number' not in session['attributes'] and 'value' not in intent[\n 'slots']['phoneNumber']:\n greeting_string = (\n 'Say your nine digit phone number, including the area code')\n card_title = \"What's your phone number?\"\n reprompt_text = (\n \"I didn't understand. 
Please say your nine digit mobile phone number.\"\n )\n return build_response(session['attributes'],\n build_speechlet_response(card_title, greeting_string,\n reprompt_text, False))\n else:\n number = intent['slots']['phoneNumber']['value']\n if 'result_url' not in session['attributes']:\n session['attributes']['result_url'\n ] = 'http://portal.lacounty.gov/wps/portal/omd'\n url = session['attributes']['result_url']\n session['attributes']['phone_number'] = number\n sns_client = boto3.client('sns')\n response = sns_client.publish(PhoneNumber='1' + str(number),\n Message=\n \"Thank you for using the LA Board of Supervisors Skill. Here's your URL: \"\n + url)\n greeting_string = 'Sent text message to ' + ' '.join(number)\n card_title = 'Sent motion URL via text message'\n reprompt_text = (\n \"I didn't understand. Please say your nine digit mobile phone number.\"\n )\n return build_response(session['attributes'],\n build_speechlet_response(card_title, greeting_string,\n reprompt_text, True))\n\n\ndef on_session_started(session_started_request, session):\n \"\"\" Called when the session starts \"\"\"\n session['attributes'] = {}\n print('on_session_started requestId=' + session_started_request[\n 'requestId'] + ', sessionId=' + session['sessionId'])\n\n\ndef handle_session_end_request():\n card_title = 'County of LA Board of Supervisors Skill- Thanks'\n speech_output = (\n 'Thank you for using the County of LA Board of Supervisors Skill. See you next time!'\n )\n should_end_session = True\n return build_response({}, build_speechlet_response(card_title,\n speech_output, None, should_end_session))\n\n\ndef on_launch(launch_request, session):\n \"\"\" Called when the user launches the skill without specifying what they want \"\"\"\n print('on_launch requestId=' + launch_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n return get_welcome_response()\n\n\ndef on_intent(intent_request, session):\n \"\"\" Called when the user specifies an intent for this skill \"\"\"\n print('on_intent requestId=' + intent_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n intent = intent_request['intent']\n intent_name = intent_request['intent']['name']\n if intent_name == 'GetLatestAgendaIntent':\n return get_next_agenda_response(session)\n elif intent_name == 'GetLatestMotionsIntent':\n return get_next_motions_response(session)\n elif intent_name == 'GetNextMotionIntent':\n return get_next_motions_response(session)\n elif intent_name == 'SetPhoneNumberIntent':\n return text_url_to_number(session, intent)\n elif intent_name == 'AMAZON.HelpIntent':\n return get_welcome_response()\n elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent':\n return handle_session_end_request()\n else:\n raise ValueError('Invalid intent')\n\n\ndef lambda_handler(event, context):\n print('Test!')\n print('event.session.application.applicationId=' + event['session'][\n 'application']['applicationId'])\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n if event['request']['type'] == 'LaunchRequest':\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == 'IntentRequest':\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == 'SessionEndedRequest':\n return handle_session_end_request()\n", "step-4": "import requests\nimport json\nimport boto3\nfrom lxml.html import parse\nCardTitlePrefix = 'Greeting'\n\n\ndef build_speechlet_response(title, output, 
reprompt_text, should_end_session):\n \"\"\"\n Build a speechlet JSON representation of the title, output text, \n reprompt text & end of session\n \"\"\"\n return {'outputSpeech': {'type': 'PlainText', 'text': output}, 'card':\n {'type': 'Simple', 'title': CardTitlePrefix + ' - ' + title,\n 'content': output}, 'reprompt': {'outputSpeech': {'type':\n 'PlainText', 'text': reprompt_text}}, 'shouldEndSession':\n should_end_session}\n\n\ndef build_response(session_attributes, speechlet_response):\n \"\"\"\n Build the full response JSON from the speechlet response\n \"\"\"\n return {'version': '1.0', 'sessionAttributes': session_attributes,\n 'response': speechlet_response}\n\n\ndef get_welcome_response():\n welcome_response = (\n 'Welcome to the L.A. Board of Supervisors Skill. You can say, give me recent motions or give me the latest agenda.'\n )\n print(welcome_response)\n session_attributes = {}\n card_title = 'Hello'\n speech_output = welcome_response\n reprompt_text = (\n \"I'm sorry - I didn't understand. You should say give me latest motions.\"\n )\n should_end_session = True\n return build_response(session_attributes, build_speechlet_response(\n card_title, speech_output, reprompt_text, should_end_session))\n\n\ndef replace_with_longform_name(name):\n if name == 'LASD':\n longformName = \"Los Angeles County Sheriff's Department\"\n elif name == 'DMH':\n longformName = 'Department of Mental Health'\n else:\n longformName = name\n return longformName\n\n\ndef get_next_motions_response(session):\n print('Initial session attributes are ' + str(session['attributes']))\n if 'result_number' not in session['attributes']:\n print('Second session attributes are ' + str(session['attributes']))\n session['attributes']['result_number'] = 1\n print('Value is ' + str(session['attributes']['result_number']))\n print('Final session attributes are ' + str(session['attributes']))\n result_number = session['attributes']['result_number']\n host = 'http://api.lacounty.gov'\n url = (host +\n '/searchAPIWeb/searchapi?type=bcsearch&database=OMD&SearchTerm=1&title=1&content=1&PStart='\n + str(result_number) + '&PEnd=' + str(result_number) +\n '&_=1509121047612')\n response = requests.get(url)\n data = json.loads(response.text)\n alexaResponse = ''\n if result_number == 1:\n alexaResponse = (\n 'Here is the latest correspondence before the L.A. board (both upcoming and past): '\n )\n alexaResponse += str(result_number\n ) + ': From the ' + replace_with_longform_name(data['results'][0][\n 'department']) + ', '\n alexaResponse += 'on ' + data['results'][0]['date'] + ', '\n alexaResponse += data['results'][0]['title'] + '... '\n alexaResponse += 'You can say text me link or next item'\n session['attributes']['result_number'] = result_number + 1\n session['attributes']['result_url'] = data['results'][0]['url']\n reprompt_text = (\n \"I'm sorry - I didn't understand. 
You should say text me link or next item\"\n )\n card_title = 'LA Board Latest Motions Message'\n greeting_string = alexaResponse\n return build_response(session['attributes'], build_speechlet_response(\n card_title, greeting_string, reprompt_text, False))\n\n\ndef get_next_agenda_response(session):\n print('Initial session attributes are ' + str(session['attributes']))\n host = 'http://bos.lacounty.gov/Board-Meeting/Board-Agendas'\n url = host\n page = parse(url)\n nodes = page.xpath(\"//div[a[text()='View Agenda']]\")\n latest_agenda_node = nodes[0]\n headline = latest_agenda_node.find('ul').xpath('string()').strip()\n print(headline)\n agenda_url = latest_agenda_node.find('a[@href]').attrib['href']\n print('http://bos.lacounty.gov' + agenda_url)\n agenda_heading = headline\n session['attributes']['result_url'\n ] = 'http://bos.lacounty.gov' + agenda_url\n card_title = 'Agenda'\n greeting_string = ('I have a link for the ' + agenda_heading +\n \". Say text me and I'll send it to you.\")\n reprompt = 'Say text me to receive a link to the agenda.'\n return build_response(session['attributes'], build_speechlet_response(\n card_title, greeting_string, reprompt, False))\n\n\ndef text_url_to_number(session, intent):\n if 'phone_number' not in session['attributes'] and 'value' not in intent[\n 'slots']['phoneNumber']:\n greeting_string = (\n 'Say your nine digit phone number, including the area code')\n card_title = \"What's your phone number?\"\n reprompt_text = (\n \"I didn't understand. Please say your nine digit mobile phone number.\"\n )\n return build_response(session['attributes'],\n build_speechlet_response(card_title, greeting_string,\n reprompt_text, False))\n else:\n number = intent['slots']['phoneNumber']['value']\n if 'result_url' not in session['attributes']:\n session['attributes']['result_url'\n ] = 'http://portal.lacounty.gov/wps/portal/omd'\n url = session['attributes']['result_url']\n session['attributes']['phone_number'] = number\n sns_client = boto3.client('sns')\n response = sns_client.publish(PhoneNumber='1' + str(number),\n Message=\n \"Thank you for using the LA Board of Supervisors Skill. Here's your URL: \"\n + url)\n greeting_string = 'Sent text message to ' + ' '.join(number)\n card_title = 'Sent motion URL via text message'\n reprompt_text = (\n \"I didn't understand. Please say your nine digit mobile phone number.\"\n )\n return build_response(session['attributes'],\n build_speechlet_response(card_title, greeting_string,\n reprompt_text, True))\n\n\ndef on_session_started(session_started_request, session):\n \"\"\" Called when the session starts \"\"\"\n session['attributes'] = {}\n print('on_session_started requestId=' + session_started_request[\n 'requestId'] + ', sessionId=' + session['sessionId'])\n\n\ndef handle_session_end_request():\n card_title = 'County of LA Board of Supervisors Skill- Thanks'\n speech_output = (\n 'Thank you for using the County of LA Board of Supervisors Skill. 
See you next time!'\n )\n should_end_session = True\n return build_response({}, build_speechlet_response(card_title,\n speech_output, None, should_end_session))\n\n\ndef on_launch(launch_request, session):\n \"\"\" Called when the user launches the skill without specifying what they want \"\"\"\n print('on_launch requestId=' + launch_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n return get_welcome_response()\n\n\ndef on_intent(intent_request, session):\n \"\"\" Called when the user specifies an intent for this skill \"\"\"\n print('on_intent requestId=' + intent_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n intent = intent_request['intent']\n intent_name = intent_request['intent']['name']\n if intent_name == 'GetLatestAgendaIntent':\n return get_next_agenda_response(session)\n elif intent_name == 'GetLatestMotionsIntent':\n return get_next_motions_response(session)\n elif intent_name == 'GetNextMotionIntent':\n return get_next_motions_response(session)\n elif intent_name == 'SetPhoneNumberIntent':\n return text_url_to_number(session, intent)\n elif intent_name == 'AMAZON.HelpIntent':\n return get_welcome_response()\n elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent':\n return handle_session_end_request()\n else:\n raise ValueError('Invalid intent')\n\n\ndef lambda_handler(event, context):\n print('Test!')\n print('event.session.application.applicationId=' + event['session'][\n 'application']['applicationId'])\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n if event['request']['type'] == 'LaunchRequest':\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == 'IntentRequest':\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == 'SessionEndedRequest':\n return handle_session_end_request()\n", "step-5": "# -*- coding: utf-8 -*-\nimport requests\nimport json\nimport boto3\nfrom lxml.html import parse\n\nCardTitlePrefix = \"Greeting\"\n\ndef build_speechlet_response(title, output, reprompt_text, should_end_session):\n \"\"\"\n Build a speechlet JSON representation of the title, output text, \n reprompt text & end of session\n \"\"\"\n return {\n 'outputSpeech': {\n 'type': 'PlainText',\n 'text': output\n },\n 'card': {\n 'type': 'Simple',\n 'title': CardTitlePrefix + \" - \" + title,\n 'content': output\n },\n 'reprompt': {\n 'outputSpeech': {\n 'type': 'PlainText',\n 'text': reprompt_text\n }\n },\n 'shouldEndSession': should_end_session\n }\n \ndef build_response(session_attributes, speechlet_response):\n \"\"\"\n Build the full response JSON from the speechlet response\n \"\"\"\n return {\n 'version': '1.0',\n 'sessionAttributes': session_attributes,\n 'response': speechlet_response\n }\n\ndef get_welcome_response():\n welcome_response= \"Welcome to the L.A. Board of Supervisors Skill. You can say, give me recent motions or give me the latest agenda.\"\n print(welcome_response);\n\n session_attributes = {}\n card_title = \"Hello\"\n speech_output = welcome_response;\n # If the user either does not reply to the welcome message or says something\n # that is not understood, they will be prompted again with this text.\n reprompt_text = \"I'm sorry - I didn't understand. 
You should say give me latest motions.\"\n should_end_session = True\n return build_response(session_attributes, build_speechlet_response(card_title, speech_output, reprompt_text, should_end_session))\n\ndef replace_with_longform_name(name):\n\n if name == \"LASD\":\n longformName = \"Los Angeles County Sheriff's Department\"\n elif name == \"DMH\":\n longformName = \"Department of Mental Health\"\n else:\n longformName = name;\n\n return longformName;\n\n\ndef get_next_motions_response(session):\n \n print(\"Initial session attributes are \"+str(session['attributes']));\n\n if \"result_number\" not in session['attributes']:\n print(\"Second session attributes are \"+str(session['attributes']));\n session['attributes']['result_number'] = 1;\n print(\"Value is \"+str(session['attributes']['result_number']));\n print(\"Final session attributes are \"+str(session['attributes']))\n\n result_number = session['attributes']['result_number'];\n host = \"http://api.lacounty.gov\";\n\n url = host + \"/searchAPIWeb/searchapi?type=bcsearch&database=OMD&\" \\\n \"SearchTerm=1&title=1&content=1&PStart=\" + str(result_number) +\"&PEnd=\" + str(result_number) +\"&_=1509121047612\"\n\n response = requests.get(url);\n #print(response.text);\n data = json.loads(response.text)\n\n alexaResponse = \"\";\n if(result_number == 1):\n alexaResponse = \"Here is the latest correspondence before the L.A. board (both upcoming and past): \"\n\n alexaResponse += str(result_number)+\": From the \"+replace_with_longform_name(data[\"results\"][0][\"department\"])+ \", \"\n alexaResponse += \"on \"+data[\"results\"][0][\"date\"]+\", \"\n alexaResponse += data[\"results\"][0][\"title\"]+\"... \"\n \n alexaResponse += \"You can say text me link or next item\"\n \n session['attributes']['result_number'] = result_number + 1;\n session['attributes']['result_url'] = data[\"results\"][0][\"url\"];\n \n #text_url_to_number(session);\n reprompt_text = \"I'm sorry - I didn't understand. You should say text me link or next item\"\n \n card_title = \"LA Board Latest Motions Message\";\n greeting_string = alexaResponse;\n return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, False))\n \ndef get_next_agenda_response(session):\n \n print(\"Initial session attributes are \"+str(session['attributes']));\n \n host = \"http://bos.lacounty.gov/Board-Meeting/Board-Agendas\";\n url = host;\n page = parse(url)\n nodes = page.xpath(\"//div[a[text()='View Agenda']]\");\n latest_agenda_node = nodes[0];\n headline = latest_agenda_node.find(\"ul\").xpath(\"string()\").strip();\n \n print(headline);\n agenda_url = latest_agenda_node.find(\"a[@href]\").attrib['href'];\n print(\"http://bos.lacounty.gov\"+agenda_url)\n \n agenda_heading = headline;\n #session['attributes']['result_url']\n session['attributes']['result_url'] = \"http://bos.lacounty.gov\"+agenda_url;\n card_title = \"Agenda\";\n greeting_string = \"I have a link for the \"+agenda_heading+\". Say text me and I'll send it to you.\";\n reprompt = \"Say text me to receive a link to the agenda.\"\n\n return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt, False))\n \n \ndef text_url_to_number(session, intent):\n \n if \"phone_number\" not in session['attributes'] and \"value\" not in intent['slots']['phoneNumber']:\n greeting_string = \"Say your nine digit phone number, including the area code\";\n card_title = \"What's your phone number?\";\n reprompt_text = \"I didn't understand. 
Please say your nine digit mobile phone number.\"\n return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, False))\n else:\n number = intent['slots']['phoneNumber']['value'];\n if \"result_url\" not in session['attributes']:\n session['attributes']['result_url'] = 'http://portal.lacounty.gov/wps/portal/omd';\n \n url = session['attributes']['result_url'];\n session['attributes']['phone_number'] = number;\n \n sns_client = boto3.client('sns')\n response = sns_client.publish(\n PhoneNumber='1'+str(number), \n Message=\"Thank you for using the LA Board of Supervisors Skill. Here's your URL: \"+url\n )\n greeting_string = \"Sent text message to \"+ \" \".join(number);\n card_title = \"Sent motion URL via text message\";\n reprompt_text = \"I didn't understand. Please say your nine digit mobile phone number.\"\n return build_response(session['attributes'], build_speechlet_response(card_title, greeting_string, reprompt_text, True))\n\ndef on_session_started(session_started_request, session):\n \"\"\" Called when the session starts \"\"\"\n \n #session.attributes['result_number'] = 1\n session['attributes'] = {}\n print(\"on_session_started requestId=\" + session_started_request['requestId']\n + \", sessionId=\" + session['sessionId'])\n\ndef handle_session_end_request():\n card_title = \"County of LA Board of Supervisors Skill- Thanks\"\n speech_output = \"Thank you for using the County of LA Board of Supervisors Skill. See you next time!\"\n should_end_session = True\n return build_response({}, build_speechlet_response(card_title, speech_output, None, should_end_session));\n \ndef on_launch(launch_request, session):\n \"\"\" Called when the user launches the skill without specifying what they want \"\"\"\n print(\"on_launch requestId=\" + launch_request['requestId'] +\n \", sessionId=\" + session['sessionId'])\n # Dispatch to your skill's launch\n return get_welcome_response()\n \ndef on_intent(intent_request, session):\n \"\"\" Called when the user specifies an intent for this skill \"\"\"\n print(\"on_intent requestId=\" + intent_request['requestId'] +\n \", sessionId=\" + session['sessionId'])\n \n intent = intent_request['intent']\n intent_name = intent_request['intent']['name']\n # Dispatch to your skill's intent handlers\n if intent_name == \"GetLatestAgendaIntent\":\n return get_next_agenda_response(session)\n elif intent_name == \"GetLatestMotionsIntent\":\n return get_next_motions_response(session)\n elif intent_name == \"GetNextMotionIntent\":\n return get_next_motions_response(session)\n elif intent_name == \"SetPhoneNumberIntent\":\n return text_url_to_number(session, intent);\n elif intent_name == \"AMAZON.HelpIntent\":\n return get_welcome_response()\n elif intent_name == \"AMAZON.CancelIntent\" or intent_name == \"AMAZON.StopIntent\":\n return handle_session_end_request()\n else:\n raise ValueError(\"Invalid intent\")\n\ndef lambda_handler(event, context):\n print(\"Test!\")\n \n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n \n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return handle_session_end_request()\n", 
"step-ids": [ 8, 11, 13, 14, 15 ] }
[ 8, 11, 13, 14, 15 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> parser.add_argument('domain_name', metavar='D', type=str, nargs='+', help= 'domain name to give to virtual host. multiple domains can be specified at once' ) <|reserved_special_token_0|> print( 'The following virtual host(s) will be created under their respective names.' ) <|reserved_special_token_0|> for arg in vars(args): print(getattr(args, arg)) <|reserved_special_token_0|> print('Note: port defaults to 80') for vh in sys.argv: if vh == 'create_apache_vhost.py': continue port = input('Which port should be used for ' + vh + '?: ') if port: port_list.append(port) else: port_list.append('80') while True: ans = input('Proceed? [Y/n] ') if ans == 'n' or ans == 'N': print('Exiting') quit() elif ans == 'Y' or ans == 'y': print('Proceeding') break else: print('Invald input') <|reserved_special_token_0|> if install_sts != 0: print('Installing Apache') subprocess.call(['sudo', 'apt', 'install', 'apache2']) subprocess.call(['ufw', 'allow', "'Apache'"]) <|reserved_special_token_0|> for vh in sys.argv: if vh == 'create_apache_vhost.py': continue print('Creating virtual host: ' + vh) src_path = '/var/www/html/' + vh subprocess.call(['sudo', 'mkdir', src_path]) subprocess.call(['sudo', 'chown', '-R', username + ':' + username, src_path]) subprocess.call(['sudo', 'chmod', '755', src_path]) subprocess.call(['sudo', 'touch', src_path + 'index.html']) with open(src_path + '/index.html', 'a') as out: out.write('<html>\n <head>\n <title>Welcome to ' + vh + """</title> </head> <body> <h1>""" + vh + ' virtual host is working!</h1>\n </body>\n</html>') conf_path = '/etc/apache2/sites-available/' + vh + '.conf' subprocess.call(['sudo', 'touch', conf_path]) with open(conf_path, 'w') as out: out.write('<VirtualHost *:' + port_list[index] + """> ServerAdmin webmaster@localhost ServerName """ + vh + """ ServerAlias www.""" + vh + """.com DocumentRoot /var/www/html/""" + vh + """ ErrorLog ${APACHE_LOG_DIR}/error.log CustomLog ${APACHE_LOG_DIR}/access.log combined </VirtualHost>""" ) subprocess.call(['sudo', 'a2ensite', vh]) print('\n [' + vh + '] virtual host was successfully created!') print(' - Source is located at ' + src_path) print(' - Config file is located at ' + conf_path + '\n') index += 1 subprocess.call(['systemctl', 'restart', 'apache2']) <|reserved_special_token_1|> <|reserved_special_token_0|> parser = argparse.ArgumentParser(description= 'Setup a new apache virtual host on an Ubuntu system. Only tested on versions 18.04 and 20.04' ) parser.add_argument('domain_name', metavar='D', type=str, nargs='+', help= 'domain name to give to virtual host. multiple domains can be specified at once' ) args = parser.parse_args() print( 'The following virtual host(s) will be created under their respective names.' ) fa_flag = False for arg in vars(args): print(getattr(args, arg)) port_list = [] print('Note: port defaults to 80') for vh in sys.argv: if vh == 'create_apache_vhost.py': continue port = input('Which port should be used for ' + vh + '?: ') if port: port_list.append(port) else: port_list.append('80') while True: ans = input('Proceed? 
[Y/n] ') if ans == 'n' or ans == 'N': print('Exiting') quit() elif ans == 'Y' or ans == 'y': print('Proceeding') break else: print('Invald input') install_sts = subprocess.call(['test', '-e', '/etc/apache2']) if install_sts != 0: print('Installing Apache') subprocess.call(['sudo', 'apt', 'install', 'apache2']) subprocess.call(['ufw', 'allow', "'Apache'"]) username = getpass.getuser() index = 0 for vh in sys.argv: if vh == 'create_apache_vhost.py': continue print('Creating virtual host: ' + vh) src_path = '/var/www/html/' + vh subprocess.call(['sudo', 'mkdir', src_path]) subprocess.call(['sudo', 'chown', '-R', username + ':' + username, src_path]) subprocess.call(['sudo', 'chmod', '755', src_path]) subprocess.call(['sudo', 'touch', src_path + 'index.html']) with open(src_path + '/index.html', 'a') as out: out.write('<html>\n <head>\n <title>Welcome to ' + vh + """</title> </head> <body> <h1>""" + vh + ' virtual host is working!</h1>\n </body>\n</html>') conf_path = '/etc/apache2/sites-available/' + vh + '.conf' subprocess.call(['sudo', 'touch', conf_path]) with open(conf_path, 'w') as out: out.write('<VirtualHost *:' + port_list[index] + """> ServerAdmin webmaster@localhost ServerName """ + vh + """ ServerAlias www.""" + vh + """.com DocumentRoot /var/www/html/""" + vh + """ ErrorLog ${APACHE_LOG_DIR}/error.log CustomLog ${APACHE_LOG_DIR}/access.log combined </VirtualHost>""" ) subprocess.call(['sudo', 'a2ensite', vh]) print('\n [' + vh + '] virtual host was successfully created!') print(' - Source is located at ' + src_path) print(' - Config file is located at ' + conf_path + '\n') index += 1 subprocess.call(['systemctl', 'restart', 'apache2']) <|reserved_special_token_1|> import argparse import sys import subprocess import getpass parser = argparse.ArgumentParser(description= 'Setup a new apache virtual host on an Ubuntu system. Only tested on versions 18.04 and 20.04' ) parser.add_argument('domain_name', metavar='D', type=str, nargs='+', help= 'domain name to give to virtual host. multiple domains can be specified at once' ) args = parser.parse_args() print( 'The following virtual host(s) will be created under their respective names.' ) fa_flag = False for arg in vars(args): print(getattr(args, arg)) port_list = [] print('Note: port defaults to 80') for vh in sys.argv: if vh == 'create_apache_vhost.py': continue port = input('Which port should be used for ' + vh + '?: ') if port: port_list.append(port) else: port_list.append('80') while True: ans = input('Proceed? 
[Y/n] ') if ans == 'n' or ans == 'N': print('Exiting') quit() elif ans == 'Y' or ans == 'y': print('Proceeding') break else: print('Invald input') install_sts = subprocess.call(['test', '-e', '/etc/apache2']) if install_sts != 0: print('Installing Apache') subprocess.call(['sudo', 'apt', 'install', 'apache2']) subprocess.call(['ufw', 'allow', "'Apache'"]) username = getpass.getuser() index = 0 for vh in sys.argv: if vh == 'create_apache_vhost.py': continue print('Creating virtual host: ' + vh) src_path = '/var/www/html/' + vh subprocess.call(['sudo', 'mkdir', src_path]) subprocess.call(['sudo', 'chown', '-R', username + ':' + username, src_path]) subprocess.call(['sudo', 'chmod', '755', src_path]) subprocess.call(['sudo', 'touch', src_path + 'index.html']) with open(src_path + '/index.html', 'a') as out: out.write('<html>\n <head>\n <title>Welcome to ' + vh + """</title> </head> <body> <h1>""" + vh + ' virtual host is working!</h1>\n </body>\n</html>') conf_path = '/etc/apache2/sites-available/' + vh + '.conf' subprocess.call(['sudo', 'touch', conf_path]) with open(conf_path, 'w') as out: out.write('<VirtualHost *:' + port_list[index] + """> ServerAdmin webmaster@localhost ServerName """ + vh + """ ServerAlias www.""" + vh + """.com DocumentRoot /var/www/html/""" + vh + """ ErrorLog ${APACHE_LOG_DIR}/error.log CustomLog ${APACHE_LOG_DIR}/access.log combined </VirtualHost>""" ) subprocess.call(['sudo', 'a2ensite', vh]) print('\n [' + vh + '] virtual host was successfully created!') print(' - Source is located at ' + src_path) print(' - Config file is located at ' + conf_path + '\n') index += 1 subprocess.call(['systemctl', 'restart', 'apache2']) <|reserved_special_token_1|> import argparse import sys import subprocess import getpass # Process arguments parser = argparse.ArgumentParser(description='Setup a new apache virtual host on an Ubuntu system. Only tested on versions 18.04 and 20.04') parser.add_argument('domain_name', metavar='D', type=str, nargs='+', help='domain name to give to virtual host. multiple domains can be specified at once') args = parser.parse_args() # Confirm action with user print("The following virtual host(s) will be created under their respective names.") fa_flag = False for arg in vars(args): print(getattr(args, arg)) # List of port numbers port_list = [] # Ask for ports for the each domain print("Note: port defaults to 80") for vh in sys.argv: if vh == 'create_apache_vhost.py': continue port = input("Which port should be used for " + vh + "?: ") if port: port_list.append(port) else: port_list.append("80") while True: ans = input("Proceed? 
[Y/n] ") if ans == 'n' or ans == 'N': print("Exiting") quit() elif ans == 'Y' or ans == 'y': print("Proceeding") break else: print("Invald input") # Install apache2 if not yet installed install_sts = subprocess.call(['test', '-e', '/etc/apache2']) if install_sts != 0: print("Installing Apache") subprocess.call(['sudo', 'apt', 'install', 'apache2']) subprocess.call(['ufw', 'allow', "'Apache'"]) # Get username username = getpass.getuser() # Iterate though each virtual host to be created index = 0 for vh in sys.argv: if vh == 'create_apache_vhost.py': continue print("Creating virtual host: " + vh) src_path = '/var/www/html/' + vh subprocess.call(['sudo', 'mkdir', src_path]) subprocess.call(['sudo', 'chown', '-R', username + ':' + username, src_path]) subprocess.call(['sudo', 'chmod', '755', src_path]) subprocess.call(['sudo', 'touch', src_path + 'index.html']) with open(src_path + '/index.html', 'a') as out: out.write("""<html> <head> <title>Welcome to """ + vh + """</title> </head> <body> <h1>""" + vh + """ virtual host is working!</h1> </body> </html>""") conf_path = '/etc/apache2/sites-available/' + vh + '.conf' subprocess.call(['sudo', 'touch', conf_path]) with open(conf_path, 'w') as out: out.write("""<VirtualHost *:""" + port_list[index] + """> ServerAdmin webmaster@localhost ServerName """ + vh + """ ServerAlias www.""" + vh + """.com DocumentRoot /var/www/html/""" + vh + """ ErrorLog ${APACHE_LOG_DIR}/error.log CustomLog ${APACHE_LOG_DIR}/access.log combined </VirtualHost>""") subprocess.call(['sudo', 'a2ensite', vh]) print("\n [" + vh + "] virtual host was successfully created!") print(" - Source is located at " + src_path) print(" - Config file is located at " + conf_path + "\n") index += 1 subprocess.call(['systemctl', 'restart', 'apache2'])
flexible
{ "blob_id": "a8e67ddbb741af6a9ff7540fef8c21468321ede0", "index": 7996, "step-1": "<mask token>\n", "step-2": "<mask token>\nparser.add_argument('domain_name', metavar='D', type=str, nargs='+', help=\n 'domain name to give to virtual host. multiple domains can be specified at once'\n )\n<mask token>\nprint(\n 'The following virtual host(s) will be created under their respective names.'\n )\n<mask token>\nfor arg in vars(args):\n print(getattr(args, arg))\n<mask token>\nprint('Note: port defaults to 80')\nfor vh in sys.argv:\n if vh == 'create_apache_vhost.py':\n continue\n port = input('Which port should be used for ' + vh + '?: ')\n if port:\n port_list.append(port)\n else:\n port_list.append('80')\nwhile True:\n ans = input('Proceed? [Y/n] ')\n if ans == 'n' or ans == 'N':\n print('Exiting')\n quit()\n elif ans == 'Y' or ans == 'y':\n print('Proceeding')\n break\n else:\n print('Invald input')\n<mask token>\nif install_sts != 0:\n print('Installing Apache')\n subprocess.call(['sudo', 'apt', 'install', 'apache2'])\n subprocess.call(['ufw', 'allow', \"'Apache'\"])\n<mask token>\nfor vh in sys.argv:\n if vh == 'create_apache_vhost.py':\n continue\n print('Creating virtual host: ' + vh)\n src_path = '/var/www/html/' + vh\n subprocess.call(['sudo', 'mkdir', src_path])\n subprocess.call(['sudo', 'chown', '-R', username + ':' + username,\n src_path])\n subprocess.call(['sudo', 'chmod', '755', src_path])\n subprocess.call(['sudo', 'touch', src_path + 'index.html'])\n with open(src_path + '/index.html', 'a') as out:\n out.write('<html>\\n <head>\\n <title>Welcome to ' + vh +\n \"\"\"</title>\n </head>\n <body>\n <h1>\"\"\" + vh +\n ' virtual host is working!</h1>\\n </body>\\n</html>')\n conf_path = '/etc/apache2/sites-available/' + vh + '.conf'\n subprocess.call(['sudo', 'touch', conf_path])\n with open(conf_path, 'w') as out:\n out.write('<VirtualHost *:' + port_list[index] +\n \"\"\">\n ServerAdmin webmaster@localhost\n ServerName \"\"\" +\n vh + \"\"\"\n ServerAlias www.\"\"\" + vh +\n \"\"\".com\n DocumentRoot /var/www/html/\"\"\" + vh +\n \"\"\"\n ErrorLog ${APACHE_LOG_DIR}/error.log\n CustomLog ${APACHE_LOG_DIR}/access.log combined\n</VirtualHost>\"\"\"\n )\n subprocess.call(['sudo', 'a2ensite', vh])\n print('\\n [' + vh + '] virtual host was successfully created!')\n print(' - Source is located at ' + src_path)\n print(' - Config file is located at ' + conf_path + '\\n')\n index += 1\nsubprocess.call(['systemctl', 'restart', 'apache2'])\n", "step-3": "<mask token>\nparser = argparse.ArgumentParser(description=\n 'Setup a new apache virtual host on an Ubuntu system. Only tested on versions 18.04 and 20.04'\n )\nparser.add_argument('domain_name', metavar='D', type=str, nargs='+', help=\n 'domain name to give to virtual host. multiple domains can be specified at once'\n )\nargs = parser.parse_args()\nprint(\n 'The following virtual host(s) will be created under their respective names.'\n )\nfa_flag = False\nfor arg in vars(args):\n print(getattr(args, arg))\nport_list = []\nprint('Note: port defaults to 80')\nfor vh in sys.argv:\n if vh == 'create_apache_vhost.py':\n continue\n port = input('Which port should be used for ' + vh + '?: ')\n if port:\n port_list.append(port)\n else:\n port_list.append('80')\nwhile True:\n ans = input('Proceed? 
[Y/n] ')\n if ans == 'n' or ans == 'N':\n print('Exiting')\n quit()\n elif ans == 'Y' or ans == 'y':\n print('Proceeding')\n break\n else:\n print('Invald input')\ninstall_sts = subprocess.call(['test', '-e', '/etc/apache2'])\nif install_sts != 0:\n print('Installing Apache')\n subprocess.call(['sudo', 'apt', 'install', 'apache2'])\n subprocess.call(['ufw', 'allow', \"'Apache'\"])\nusername = getpass.getuser()\nindex = 0\nfor vh in sys.argv:\n if vh == 'create_apache_vhost.py':\n continue\n print('Creating virtual host: ' + vh)\n src_path = '/var/www/html/' + vh\n subprocess.call(['sudo', 'mkdir', src_path])\n subprocess.call(['sudo', 'chown', '-R', username + ':' + username,\n src_path])\n subprocess.call(['sudo', 'chmod', '755', src_path])\n subprocess.call(['sudo', 'touch', src_path + 'index.html'])\n with open(src_path + '/index.html', 'a') as out:\n out.write('<html>\\n <head>\\n <title>Welcome to ' + vh +\n \"\"\"</title>\n </head>\n <body>\n <h1>\"\"\" + vh +\n ' virtual host is working!</h1>\\n </body>\\n</html>')\n conf_path = '/etc/apache2/sites-available/' + vh + '.conf'\n subprocess.call(['sudo', 'touch', conf_path])\n with open(conf_path, 'w') as out:\n out.write('<VirtualHost *:' + port_list[index] +\n \"\"\">\n ServerAdmin webmaster@localhost\n ServerName \"\"\" +\n vh + \"\"\"\n ServerAlias www.\"\"\" + vh +\n \"\"\".com\n DocumentRoot /var/www/html/\"\"\" + vh +\n \"\"\"\n ErrorLog ${APACHE_LOG_DIR}/error.log\n CustomLog ${APACHE_LOG_DIR}/access.log combined\n</VirtualHost>\"\"\"\n )\n subprocess.call(['sudo', 'a2ensite', vh])\n print('\\n [' + vh + '] virtual host was successfully created!')\n print(' - Source is located at ' + src_path)\n print(' - Config file is located at ' + conf_path + '\\n')\n index += 1\nsubprocess.call(['systemctl', 'restart', 'apache2'])\n", "step-4": "import argparse\nimport sys\nimport subprocess\nimport getpass\nparser = argparse.ArgumentParser(description=\n 'Setup a new apache virtual host on an Ubuntu system. Only tested on versions 18.04 and 20.04'\n )\nparser.add_argument('domain_name', metavar='D', type=str, nargs='+', help=\n 'domain name to give to virtual host. multiple domains can be specified at once'\n )\nargs = parser.parse_args()\nprint(\n 'The following virtual host(s) will be created under their respective names.'\n )\nfa_flag = False\nfor arg in vars(args):\n print(getattr(args, arg))\nport_list = []\nprint('Note: port defaults to 80')\nfor vh in sys.argv:\n if vh == 'create_apache_vhost.py':\n continue\n port = input('Which port should be used for ' + vh + '?: ')\n if port:\n port_list.append(port)\n else:\n port_list.append('80')\nwhile True:\n ans = input('Proceed? 
[Y/n] ')\n if ans == 'n' or ans == 'N':\n print('Exiting')\n quit()\n elif ans == 'Y' or ans == 'y':\n print('Proceeding')\n break\n else:\n print('Invald input')\ninstall_sts = subprocess.call(['test', '-e', '/etc/apache2'])\nif install_sts != 0:\n print('Installing Apache')\n subprocess.call(['sudo', 'apt', 'install', 'apache2'])\n subprocess.call(['ufw', 'allow', \"'Apache'\"])\nusername = getpass.getuser()\nindex = 0\nfor vh in sys.argv:\n if vh == 'create_apache_vhost.py':\n continue\n print('Creating virtual host: ' + vh)\n src_path = '/var/www/html/' + vh\n subprocess.call(['sudo', 'mkdir', src_path])\n subprocess.call(['sudo', 'chown', '-R', username + ':' + username,\n src_path])\n subprocess.call(['sudo', 'chmod', '755', src_path])\n subprocess.call(['sudo', 'touch', src_path + 'index.html'])\n with open(src_path + '/index.html', 'a') as out:\n out.write('<html>\\n <head>\\n <title>Welcome to ' + vh +\n \"\"\"</title>\n </head>\n <body>\n <h1>\"\"\" + vh +\n ' virtual host is working!</h1>\\n </body>\\n</html>')\n conf_path = '/etc/apache2/sites-available/' + vh + '.conf'\n subprocess.call(['sudo', 'touch', conf_path])\n with open(conf_path, 'w') as out:\n out.write('<VirtualHost *:' + port_list[index] +\n \"\"\">\n ServerAdmin webmaster@localhost\n ServerName \"\"\" +\n vh + \"\"\"\n ServerAlias www.\"\"\" + vh +\n \"\"\".com\n DocumentRoot /var/www/html/\"\"\" + vh +\n \"\"\"\n ErrorLog ${APACHE_LOG_DIR}/error.log\n CustomLog ${APACHE_LOG_DIR}/access.log combined\n</VirtualHost>\"\"\"\n )\n subprocess.call(['sudo', 'a2ensite', vh])\n print('\\n [' + vh + '] virtual host was successfully created!')\n print(' - Source is located at ' + src_path)\n print(' - Config file is located at ' + conf_path + '\\n')\n index += 1\nsubprocess.call(['systemctl', 'restart', 'apache2'])\n", "step-5": "import argparse\nimport sys\nimport subprocess\nimport getpass\n\n# Process arguments\nparser = argparse.ArgumentParser(description='Setup a new apache virtual host on an Ubuntu system. Only tested on versions 18.04 and 20.04')\nparser.add_argument('domain_name', metavar='D', type=str, nargs='+', help='domain name to give to virtual host. multiple domains can be specified at once')\nargs = parser.parse_args()\n\n# Confirm action with user\nprint(\"The following virtual host(s) will be created under their respective names.\")\nfa_flag = False\nfor arg in vars(args):\n print(getattr(args, arg))\n\n# List of port numbers\nport_list = []\n\n# Ask for ports for the each domain\nprint(\"Note: port defaults to 80\")\nfor vh in sys.argv:\n if vh == 'create_apache_vhost.py':\n continue\n\n port = input(\"Which port should be used for \" + vh + \"?: \")\n if port:\n port_list.append(port)\n else:\n port_list.append(\"80\")\n\nwhile True:\n ans = input(\"Proceed? 
[Y/n] \")\n if ans == 'n' or ans == 'N':\n print(\"Exiting\")\n quit()\n elif ans == 'Y' or ans == 'y':\n print(\"Proceeding\")\n break\n else:\n print(\"Invald input\")\n\n# Install apache2 if not yet installed\ninstall_sts = subprocess.call(['test', '-e', '/etc/apache2'])\nif install_sts != 0:\n print(\"Installing Apache\")\n subprocess.call(['sudo', 'apt', 'install', 'apache2'])\n subprocess.call(['ufw', 'allow', \"'Apache'\"])\n\n# Get username\nusername = getpass.getuser()\n\n# Iterate though each virtual host to be created\nindex = 0\nfor vh in sys.argv:\n if vh == 'create_apache_vhost.py':\n continue\n \n print(\"Creating virtual host: \" + vh)\n\n src_path = '/var/www/html/' + vh\n subprocess.call(['sudo', 'mkdir', src_path])\n subprocess.call(['sudo', 'chown', '-R', username + ':' + username, src_path])\n subprocess.call(['sudo', 'chmod', '755', src_path])\n subprocess.call(['sudo', 'touch', src_path + 'index.html'])\n\n with open(src_path + '/index.html', 'a') as out:\n out.write(\"\"\"<html>\n <head>\n <title>Welcome to \"\"\" + vh + \"\"\"</title>\n </head>\n <body>\n <h1>\"\"\" + vh + \"\"\" virtual host is working!</h1>\n </body>\n</html>\"\"\")\n \n conf_path = '/etc/apache2/sites-available/' + vh + '.conf'\n subprocess.call(['sudo', 'touch', conf_path])\n\n with open(conf_path, 'w') as out:\n out.write(\"\"\"<VirtualHost *:\"\"\" + port_list[index] + \"\"\">\n ServerAdmin webmaster@localhost\n ServerName \"\"\" + vh + \"\"\"\n ServerAlias www.\"\"\" + vh + \"\"\".com\n DocumentRoot /var/www/html/\"\"\" + vh + \"\"\"\n ErrorLog ${APACHE_LOG_DIR}/error.log\n CustomLog ${APACHE_LOG_DIR}/access.log combined\n</VirtualHost>\"\"\")\n\n subprocess.call(['sudo', 'a2ensite', vh])\n\n print(\"\\n [\" + vh + \"] virtual host was successfully created!\")\n print(\" - Source is located at \" + src_path)\n print(\" - Config file is located at \" + conf_path + \"\\n\")\n\n index += 1\n\nsubprocess.call(['systemctl', 'restart', 'apache2'])\n\n \n\n\n \n\n\n \n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> for name in ('Manny', 'Moe', 'Jack'): print('Hi ya', name + '!') <|reserved_special_token_1|> #!/usr/bin/env python3 """ Greets the Pep Boys. """ for name in "Manny", "Moe", "Jack": print("Hi ya", name + '!')
flexible
{ "blob_id": "81ff77064a299b4fcd456f341ecb40ba5afe3295", "index": 1714, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor name in ('Manny', 'Moe', 'Jack'):\n print('Hi ya', name + '!')\n", "step-3": "#!/usr/bin/env python3\n\"\"\" Greets the Pep Boys.\n\"\"\"\n\nfor name in \"Manny\", \"Moe\", \"Jack\":\n print(\"Hi ya\", name + '!')\n\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> def create_oauth_flow(): """Prepare Google OAuth workflow from config file.""" app.flow = flow_from_clientsecrets(str(Path(app.config['ROOT_DIR'], 'configs/client_secrets.json')), scope=['email', 'profile'], redirect_uri=url_for('auth.oauth2callback', _external=True)) def create_jwt(user, name=None, renewable=False): """Create a JWT.""" session_user = sessionize_user(user, name) session_customer = sessionize_customer(Customer.get_by_name(user. customers[0])) return format_jwt(session_user, session_customer, renewable) def sessionize_user(user, name): document = user.to_dict(include_meta=True) sessionized = {} sessionized.update(document['_source']) sessionized['_id'] = document['_id'] sessionized['google_name'] = name return sessionized def sessionize_customer(customer): document = customer.to_dict(include_meta=True) sessionized = {} sessionized.update(document['_source']) sessionized['_id'] = document['_id'] return sessionized <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def create_oauth_flow(): """Prepare Google OAuth workflow from config file.""" app.flow = flow_from_clientsecrets(str(Path(app.config['ROOT_DIR'], 'configs/client_secrets.json')), scope=['email', 'profile'], redirect_uri=url_for('auth.oauth2callback', _external=True)) def create_jwt(user, name=None, renewable=False): """Create a JWT.""" session_user = sessionize_user(user, name) session_customer = sessionize_customer(Customer.get_by_name(user. customers[0])) return format_jwt(session_user, session_customer, renewable) def sessionize_user(user, name): document = user.to_dict(include_meta=True) sessionized = {} sessionized.update(document['_source']) sessionized['_id'] = document['_id'] sessionized['google_name'] = name return sessionized def sessionize_customer(customer): document = customer.to_dict(include_meta=True) sessionized = {} sessionized.update(document['_source']) sessionized['_id'] = document['_id'] return sessionized <|reserved_special_token_0|> def set_params(url, params): """Set GET parameters on a URL.""" components = urlparse(url) query = parse_qs(components.query) query.update(params) components = components._replace(query=urlencode(query, doseq=True)) return urlunparse(components) <|reserved_special_token_1|> <|reserved_special_token_0|> def create_oauth_flow(): """Prepare Google OAuth workflow from config file.""" app.flow = flow_from_clientsecrets(str(Path(app.config['ROOT_DIR'], 'configs/client_secrets.json')), scope=['email', 'profile'], redirect_uri=url_for('auth.oauth2callback', _external=True)) def create_jwt(user, name=None, renewable=False): """Create a JWT.""" session_user = sessionize_user(user, name) session_customer = sessionize_customer(Customer.get_by_name(user. 
customers[0])) return format_jwt(session_user, session_customer, renewable) def sessionize_user(user, name): document = user.to_dict(include_meta=True) sessionized = {} sessionized.update(document['_source']) sessionized['_id'] = document['_id'] sessionized['google_name'] = name return sessionized def sessionize_customer(customer): document = customer.to_dict(include_meta=True) sessionized = {} sessionized.update(document['_source']) sessionized['_id'] = document['_id'] return sessionized def format_jwt(user, active_customer, renewable): """Format a JWT and MAC it.""" now = int(time.time()) claims = {'exp': now + app.config['AUTH_TOKEN_LIFETIME'], 'nbf': now, 'iss': app.config['AUTH_TOKEN_ISSUER'], 'iat': now, 'user': user, 'active_customer': active_customer, 'renewable': renewable} return jwt.encode(claims, key=app.config['AUTH_JWT_SECRET'], algorithm= app.config['AUTH_JWT_ALGORITHM']) def set_params(url, params): """Set GET parameters on a URL.""" components = urlparse(url) query = parse_qs(components.query) query.update(params) components = components._replace(query=urlencode(query, doseq=True)) return urlunparse(components) <|reserved_special_token_1|> from __future__ import absolute_import, division, print_function, unicode_literals import time from urllib import urlencode from urlparse import parse_qs, urlparse, urlunparse from flask import current_app as app from flask import url_for from jose import jwt from oauth2client.client import flow_from_clientsecrets from pathlib2 import Path from .models import Customer def create_oauth_flow(): """Prepare Google OAuth workflow from config file.""" app.flow = flow_from_clientsecrets(str(Path(app.config['ROOT_DIR'], 'configs/client_secrets.json')), scope=['email', 'profile'], redirect_uri=url_for('auth.oauth2callback', _external=True)) def create_jwt(user, name=None, renewable=False): """Create a JWT.""" session_user = sessionize_user(user, name) session_customer = sessionize_customer(Customer.get_by_name(user. 
customers[0])) return format_jwt(session_user, session_customer, renewable) def sessionize_user(user, name): document = user.to_dict(include_meta=True) sessionized = {} sessionized.update(document['_source']) sessionized['_id'] = document['_id'] sessionized['google_name'] = name return sessionized def sessionize_customer(customer): document = customer.to_dict(include_meta=True) sessionized = {} sessionized.update(document['_source']) sessionized['_id'] = document['_id'] return sessionized def format_jwt(user, active_customer, renewable): """Format a JWT and MAC it.""" now = int(time.time()) claims = {'exp': now + app.config['AUTH_TOKEN_LIFETIME'], 'nbf': now, 'iss': app.config['AUTH_TOKEN_ISSUER'], 'iat': now, 'user': user, 'active_customer': active_customer, 'renewable': renewable} return jwt.encode(claims, key=app.config['AUTH_JWT_SECRET'], algorithm= app.config['AUTH_JWT_ALGORITHM']) def set_params(url, params): """Set GET parameters on a URL.""" components = urlparse(url) query = parse_qs(components.query) query.update(params) components = components._replace(query=urlencode(query, doseq=True)) return urlunparse(components) <|reserved_special_token_1|> # coding: utf-8 from __future__ import ( absolute_import, division, print_function, unicode_literals, ) import time from urllib import urlencode from urlparse import parse_qs, urlparse, urlunparse from flask import current_app as app from flask import url_for from jose import jwt from oauth2client.client import flow_from_clientsecrets from pathlib2 import Path from .models import Customer def create_oauth_flow(): """Prepare Google OAuth workflow from config file.""" app.flow = flow_from_clientsecrets( str(Path(app.config['ROOT_DIR'], 'configs/client_secrets.json')), scope=['email', 'profile'], redirect_uri=url_for('auth.oauth2callback', _external=True), ) def create_jwt(user, name=None, renewable=False): """Create a JWT.""" session_user = sessionize_user(user, name) session_customer = sessionize_customer( Customer.get_by_name(user.customers[0]) ) return format_jwt(session_user, session_customer, renewable) def sessionize_user(user, name): document = user.to_dict(include_meta=True) sessionized = {} sessionized.update(document['_source']) sessionized['_id'] = document['_id'] sessionized['google_name'] = name return sessionized def sessionize_customer(customer): document = customer.to_dict(include_meta=True) sessionized = {} sessionized.update(document['_source']) sessionized['_id'] = document['_id'] return sessionized def format_jwt(user, active_customer, renewable): """Format a JWT and MAC it.""" now = int(time.time()) claims = { # reserved: https://tools.ietf.org/html/rfc7519#section-4.1 'exp': now + app.config['AUTH_TOKEN_LIFETIME'], 'nbf': now, # not before 'iss': app.config['AUTH_TOKEN_ISSUER'], 'iat': now, # issue date # private: https://tools.ietf.org/html/rfc7519#section-4.3 'user': user, 'active_customer': active_customer, 'renewable': renewable, } return jwt.encode( claims, key=app.config['AUTH_JWT_SECRET'], algorithm=app.config['AUTH_JWT_ALGORITHM'], ) def set_params(url, params): """Set GET parameters on a URL.""" components = urlparse(url) query = parse_qs(components.query) query.update(params) components = components._replace(query=urlencode(query, doseq=True)) return urlunparse(components)
flexible
{ "blob_id": "fe73a80b15cad025a33930ddd9abb31524cd0244", "index": 9404, "step-1": "<mask token>\n\n\ndef create_oauth_flow():\n \"\"\"Prepare Google OAuth workflow from config file.\"\"\"\n app.flow = flow_from_clientsecrets(str(Path(app.config['ROOT_DIR'],\n 'configs/client_secrets.json')), scope=['email', 'profile'],\n redirect_uri=url_for('auth.oauth2callback', _external=True))\n\n\ndef create_jwt(user, name=None, renewable=False):\n \"\"\"Create a JWT.\"\"\"\n session_user = sessionize_user(user, name)\n session_customer = sessionize_customer(Customer.get_by_name(user.\n customers[0]))\n return format_jwt(session_user, session_customer, renewable)\n\n\ndef sessionize_user(user, name):\n document = user.to_dict(include_meta=True)\n sessionized = {}\n sessionized.update(document['_source'])\n sessionized['_id'] = document['_id']\n sessionized['google_name'] = name\n return sessionized\n\n\ndef sessionize_customer(customer):\n document = customer.to_dict(include_meta=True)\n sessionized = {}\n sessionized.update(document['_source'])\n sessionized['_id'] = document['_id']\n return sessionized\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef create_oauth_flow():\n \"\"\"Prepare Google OAuth workflow from config file.\"\"\"\n app.flow = flow_from_clientsecrets(str(Path(app.config['ROOT_DIR'],\n 'configs/client_secrets.json')), scope=['email', 'profile'],\n redirect_uri=url_for('auth.oauth2callback', _external=True))\n\n\ndef create_jwt(user, name=None, renewable=False):\n \"\"\"Create a JWT.\"\"\"\n session_user = sessionize_user(user, name)\n session_customer = sessionize_customer(Customer.get_by_name(user.\n customers[0]))\n return format_jwt(session_user, session_customer, renewable)\n\n\ndef sessionize_user(user, name):\n document = user.to_dict(include_meta=True)\n sessionized = {}\n sessionized.update(document['_source'])\n sessionized['_id'] = document['_id']\n sessionized['google_name'] = name\n return sessionized\n\n\ndef sessionize_customer(customer):\n document = customer.to_dict(include_meta=True)\n sessionized = {}\n sessionized.update(document['_source'])\n sessionized['_id'] = document['_id']\n return sessionized\n\n\n<mask token>\n\n\ndef set_params(url, params):\n \"\"\"Set GET parameters on a URL.\"\"\"\n components = urlparse(url)\n query = parse_qs(components.query)\n query.update(params)\n components = components._replace(query=urlencode(query, doseq=True))\n return urlunparse(components)\n", "step-3": "<mask token>\n\n\ndef create_oauth_flow():\n \"\"\"Prepare Google OAuth workflow from config file.\"\"\"\n app.flow = flow_from_clientsecrets(str(Path(app.config['ROOT_DIR'],\n 'configs/client_secrets.json')), scope=['email', 'profile'],\n redirect_uri=url_for('auth.oauth2callback', _external=True))\n\n\ndef create_jwt(user, name=None, renewable=False):\n \"\"\"Create a JWT.\"\"\"\n session_user = sessionize_user(user, name)\n session_customer = sessionize_customer(Customer.get_by_name(user.\n customers[0]))\n return format_jwt(session_user, session_customer, renewable)\n\n\ndef sessionize_user(user, name):\n document = user.to_dict(include_meta=True)\n sessionized = {}\n sessionized.update(document['_source'])\n sessionized['_id'] = document['_id']\n sessionized['google_name'] = name\n return sessionized\n\n\ndef sessionize_customer(customer):\n document = customer.to_dict(include_meta=True)\n sessionized = {}\n sessionized.update(document['_source'])\n sessionized['_id'] = document['_id']\n return sessionized\n\n\ndef format_jwt(user, active_customer, 
renewable):\n \"\"\"Format a JWT and MAC it.\"\"\"\n now = int(time.time())\n claims = {'exp': now + app.config['AUTH_TOKEN_LIFETIME'], 'nbf': now,\n 'iss': app.config['AUTH_TOKEN_ISSUER'], 'iat': now, 'user': user,\n 'active_customer': active_customer, 'renewable': renewable}\n return jwt.encode(claims, key=app.config['AUTH_JWT_SECRET'], algorithm=\n app.config['AUTH_JWT_ALGORITHM'])\n\n\ndef set_params(url, params):\n \"\"\"Set GET parameters on a URL.\"\"\"\n components = urlparse(url)\n query = parse_qs(components.query)\n query.update(params)\n components = components._replace(query=urlencode(query, doseq=True))\n return urlunparse(components)\n", "step-4": "from __future__ import absolute_import, division, print_function, unicode_literals\nimport time\nfrom urllib import urlencode\nfrom urlparse import parse_qs, urlparse, urlunparse\nfrom flask import current_app as app\nfrom flask import url_for\nfrom jose import jwt\nfrom oauth2client.client import flow_from_clientsecrets\nfrom pathlib2 import Path\nfrom .models import Customer\n\n\ndef create_oauth_flow():\n \"\"\"Prepare Google OAuth workflow from config file.\"\"\"\n app.flow = flow_from_clientsecrets(str(Path(app.config['ROOT_DIR'],\n 'configs/client_secrets.json')), scope=['email', 'profile'],\n redirect_uri=url_for('auth.oauth2callback', _external=True))\n\n\ndef create_jwt(user, name=None, renewable=False):\n \"\"\"Create a JWT.\"\"\"\n session_user = sessionize_user(user, name)\n session_customer = sessionize_customer(Customer.get_by_name(user.\n customers[0]))\n return format_jwt(session_user, session_customer, renewable)\n\n\ndef sessionize_user(user, name):\n document = user.to_dict(include_meta=True)\n sessionized = {}\n sessionized.update(document['_source'])\n sessionized['_id'] = document['_id']\n sessionized['google_name'] = name\n return sessionized\n\n\ndef sessionize_customer(customer):\n document = customer.to_dict(include_meta=True)\n sessionized = {}\n sessionized.update(document['_source'])\n sessionized['_id'] = document['_id']\n return sessionized\n\n\ndef format_jwt(user, active_customer, renewable):\n \"\"\"Format a JWT and MAC it.\"\"\"\n now = int(time.time())\n claims = {'exp': now + app.config['AUTH_TOKEN_LIFETIME'], 'nbf': now,\n 'iss': app.config['AUTH_TOKEN_ISSUER'], 'iat': now, 'user': user,\n 'active_customer': active_customer, 'renewable': renewable}\n return jwt.encode(claims, key=app.config['AUTH_JWT_SECRET'], algorithm=\n app.config['AUTH_JWT_ALGORITHM'])\n\n\ndef set_params(url, params):\n \"\"\"Set GET parameters on a URL.\"\"\"\n components = urlparse(url)\n query = parse_qs(components.query)\n query.update(params)\n components = components._replace(query=urlencode(query, doseq=True))\n return urlunparse(components)\n", "step-5": "# coding: utf-8\n\nfrom __future__ import (\n absolute_import,\n division,\n print_function,\n unicode_literals,\n)\n\nimport time\nfrom urllib import urlencode\nfrom urlparse import parse_qs, urlparse, urlunparse\n\nfrom flask import current_app as app\nfrom flask import url_for\nfrom jose import jwt\nfrom oauth2client.client import flow_from_clientsecrets\nfrom pathlib2 import Path\n\nfrom .models import Customer\n\n\ndef create_oauth_flow():\n \"\"\"Prepare Google OAuth workflow from config file.\"\"\"\n app.flow = flow_from_clientsecrets(\n str(Path(app.config['ROOT_DIR'], 'configs/client_secrets.json')),\n scope=['email', 'profile'],\n redirect_uri=url_for('auth.oauth2callback', _external=True),\n )\n\n\ndef create_jwt(user, name=None, renewable=False):\n 
\"\"\"Create a JWT.\"\"\"\n session_user = sessionize_user(user, name)\n session_customer = sessionize_customer(\n Customer.get_by_name(user.customers[0])\n )\n\n return format_jwt(session_user, session_customer, renewable)\n\n\ndef sessionize_user(user, name):\n document = user.to_dict(include_meta=True)\n\n sessionized = {}\n sessionized.update(document['_source'])\n sessionized['_id'] = document['_id']\n sessionized['google_name'] = name\n\n return sessionized\n\n\ndef sessionize_customer(customer):\n document = customer.to_dict(include_meta=True)\n\n sessionized = {}\n sessionized.update(document['_source'])\n sessionized['_id'] = document['_id']\n\n return sessionized\n\n\ndef format_jwt(user, active_customer, renewable):\n \"\"\"Format a JWT and MAC it.\"\"\"\n now = int(time.time())\n\n claims = {\n # reserved: https://tools.ietf.org/html/rfc7519#section-4.1\n 'exp': now + app.config['AUTH_TOKEN_LIFETIME'],\n 'nbf': now, # not before\n 'iss': app.config['AUTH_TOKEN_ISSUER'],\n 'iat': now, # issue date\n # private: https://tools.ietf.org/html/rfc7519#section-4.3\n 'user': user,\n 'active_customer': active_customer,\n 'renewable': renewable,\n }\n\n return jwt.encode(\n claims,\n key=app.config['AUTH_JWT_SECRET'],\n algorithm=app.config['AUTH_JWT_ALGORITHM'],\n )\n\n\ndef set_params(url, params):\n \"\"\"Set GET parameters on a URL.\"\"\"\n components = urlparse(url)\n\n query = parse_qs(components.query)\n query.update(params)\n\n components = components._replace(query=urlencode(query, doseq=True))\n return urlunparse(components)\n", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
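The set_params helper in the record above is the one piece that runs standalone; a quick behavior check with an illustrative URL, under Python 2 to match the record's urlparse/urllib imports:

# Illustrative only -- duplicates set_params from the record so the
# snippet is self-contained under Python 2.
from urllib import urlencode
from urlparse import parse_qs, urlparse, urlunparse

def set_params(url, params):
    components = urlparse(url)
    query = parse_qs(components.query)          # e.g. {'state': ['abc']}
    query.update(params)
    components = components._replace(query=urlencode(query, doseq=True))
    return urlunparse(components)

print(set_params('https://example.com/cb?state=abc', {'next': '/home'}))
# e.g. https://example.com/cb?state=abc&next=%2Fhome (parameter order may vary)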
<|reserved_special_token_0|> <|reserved_special_token_1|> class Solution: <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> class Solution: <|reserved_special_token_0|> def detectCycle(self, head): cycle_len = -1 one_node, two_node = head, head while two_node: for i in xrange(2): if two_node: two_node = two_node.next if two_node == one_node: cycle_len = 1 two_node = one_node.next while two_node != one_node: cycle_len += 1 two_node = two_node.next break else: break one_node = one_node.next if not two_node or cycle_len != -1: break if cycle_len == -1: return None one_node, two_node = head, head i = 0 while i < cycle_len: two_node = two_node.next i += 1 while one_node != two_node: one_node = one_node.next two_node = two_node.next return one_node <|reserved_special_token_1|> class Solution: """ @param head: The first node of the linked list. @return: The node where the cycle begins. if there is no cycle, return null """ def detectCycle(self, head): cycle_len = -1 one_node, two_node = head, head while two_node: for i in xrange(2): if two_node: two_node = two_node.next if two_node == one_node: cycle_len = 1 two_node = one_node.next while two_node != one_node: cycle_len += 1 two_node = two_node.next break else: break one_node = one_node.next if not two_node or cycle_len != -1: break if cycle_len == -1: return None one_node, two_node = head, head i = 0 while i < cycle_len: two_node = two_node.next i += 1 while one_node != two_node: one_node = one_node.next two_node = two_node.next return one_node <|reserved_special_token_1|> # -*- coding: utf-8 -*- class Solution: """ @param head: The first node of the linked list. @return: The node where the cycle begins. if there is no cycle, return null """ def detectCycle(self, head): # write your code here # 先确定是否有环,然后确定环的大小,再遍历确定位置。 cycle_len = -1 one_node, two_node = head, head while two_node: for i in xrange(2): if two_node: two_node = two_node.next if two_node == one_node: cycle_len = 1 two_node = one_node.next while two_node != one_node: # 算出环的长度 cycle_len += 1 two_node = two_node.next break else: break one_node = one_node.next if (not two_node) or (cycle_len != -1): break if cycle_len == -1: return None one_node, two_node = head, head # two_node先前进的距离等于环的长度 i = 0 while i < cycle_len: two_node = two_node.next i += 1 while one_node != two_node: one_node = one_node.next two_node = two_node.next return one_node
flexible
{ "blob_id": "3319614d154b16190f3cd8f4f65c3b0e0da277e9", "index": 9751, "step-1": "<mask token>\n", "step-2": "class Solution:\n <mask token>\n <mask token>\n", "step-3": "class Solution:\n <mask token>\n\n def detectCycle(self, head):\n cycle_len = -1\n one_node, two_node = head, head\n while two_node:\n for i in xrange(2):\n if two_node:\n two_node = two_node.next\n if two_node == one_node:\n cycle_len = 1\n two_node = one_node.next\n while two_node != one_node:\n cycle_len += 1\n two_node = two_node.next\n break\n else:\n break\n one_node = one_node.next\n if not two_node or cycle_len != -1:\n break\n if cycle_len == -1:\n return None\n one_node, two_node = head, head\n i = 0\n while i < cycle_len:\n two_node = two_node.next\n i += 1\n while one_node != two_node:\n one_node = one_node.next\n two_node = two_node.next\n return one_node\n", "step-4": "class Solution:\n \"\"\"\n @param head: The first node of the linked list.\n @return: The node where the cycle begins. \n if there is no cycle, return null\n \"\"\"\n\n def detectCycle(self, head):\n cycle_len = -1\n one_node, two_node = head, head\n while two_node:\n for i in xrange(2):\n if two_node:\n two_node = two_node.next\n if two_node == one_node:\n cycle_len = 1\n two_node = one_node.next\n while two_node != one_node:\n cycle_len += 1\n two_node = two_node.next\n break\n else:\n break\n one_node = one_node.next\n if not two_node or cycle_len != -1:\n break\n if cycle_len == -1:\n return None\n one_node, two_node = head, head\n i = 0\n while i < cycle_len:\n two_node = two_node.next\n i += 1\n while one_node != two_node:\n one_node = one_node.next\n two_node = two_node.next\n return one_node\n", "step-5": "# -*- coding: utf-8 -*-\n\nclass Solution:\n \"\"\"\n @param head: The first node of the linked list.\n @return: The node where the cycle begins. \n if there is no cycle, return null\n \"\"\"\n def detectCycle(self, head):\n # write your code here\n # 先确定是否有环,然后确定环的大小,再遍历确定位置。\n cycle_len = -1\n one_node, two_node = head, head\n while two_node:\n for i in xrange(2):\n if two_node:\n two_node = two_node.next\n if two_node == one_node:\n cycle_len = 1\n two_node = one_node.next\n while two_node != one_node: # 算出环的长度\n cycle_len += 1\n two_node = two_node.next\n break\n else:\n break\n one_node = one_node.next\n if (not two_node) or (cycle_len != -1):\n break\n if cycle_len == -1:\n return None\n one_node, two_node = head, head # two_node先前进的距离等于环的长度\n i = 0\n while i < cycle_len:\n two_node = two_node.next\n i += 1\n while one_node != two_node:\n one_node = one_node.next\n two_node = two_node.next\n return one_node", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
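For contrast with the length-counting approach in the record above, here is the textbook Floyd two-pointer variant of the same task as a self-contained Python 3 sketch; ListNode is a made-up node class:

class ListNode(object):
    def __init__(self, val):
        self.val, self.next = val, None

def detect_cycle(head):
    slow = fast = head
    while fast and fast.next:
        slow, fast = slow.next, fast.next.next
        if slow is fast:             # the pointers met somewhere in the cycle
            slow = head              # restart one pointer from the head;
            while slow is not fast:  # they now meet exactly at the cycle entry
                slow, fast = slow.next, fast.next
            return slow
    return None

# a -> b -> c -> d -> b ...  (cycle begins at b)
a, b, c, d = ListNode('a'), ListNode('b'), ListNode('c'), ListNode('d')
a.next, b.next, c.next, d.next = b, c, d, b
assert detect_cycle(a) is b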
<|reserved_special_token_0|> def _set_webhook(): status = bot.set_webhook(WEBHOOK_URL) if not status: print('Webhook setup failed') sys.exit(1) else: print('Your webhook URL has been set to "{}"'.format(WEBHOOK_URL)) @app.route('/hook', methods=['POST']) def webhook_handler(): update = telegram.Update.de_json(request.get_json(force=True), bot) machine.advance(update) return 'ok' @app.route('/show-fsm', methods=['GET']) def show_fsm(): byte_io = BytesIO() machine.graph.draw(byte_io, prog='dot', format='png') byte_io.seek(0) return send_file(byte_io, attachment_filename='fsm.png', mimetype= 'image/png') <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def _set_webhook(): status = bot.set_webhook(WEBHOOK_URL) if not status: print('Webhook setup failed') sys.exit(1) else: print('Your webhook URL has been set to "{}"'.format(WEBHOOK_URL)) @app.route('/hook', methods=['POST']) def webhook_handler(): update = telegram.Update.de_json(request.get_json(force=True), bot) machine.advance(update) return 'ok' @app.route('/show-fsm', methods=['GET']) def show_fsm(): byte_io = BytesIO() machine.graph.draw(byte_io, prog='dot', format='png') byte_io.seek(0) return send_file(byte_io, attachment_filename='fsm.png', mimetype= 'image/png') if __name__ == '__main__': _set_webhook() app.run() <|reserved_special_token_1|> <|reserved_special_token_0|> API_TOKEN = '375541027:AAFvLkySNkMSGgOl7PtsPIsJgnxophQpllQ' WEBHOOK_URL = 'https://a140f4ad.ngrok.io/show-fsm' app = Flask(__name__) bot = telegram.Bot(token=API_TOKEN) machine = TocMachine(states=['user', 'state3', 'state4', 'state5', 'state6', 'state7', 'state8', 'state9', 'state10', 'state11', 'state12', 'state13', 'state14', 'state15'], transitions=[{'trigger': 'advance', 'source': 'user', 'dest': 'state3', 'conditions': 'is_going_from_state0_to_state3'}, {'trigger': 'advance', 'source': 'state3', 'dest': 'state4', 'conditions': 'is_going_from_state3_to_state4'}, {'trigger': 'advance', 'source': 'state4', 'dest': 'state5', 'conditions': 'is_going_from_state4_to_state5'}, {'trigger': 'advance', 'source': 'state5', 'dest': 'state6', 'conditions': 'is_going_from_state5_to_state6'}, {'trigger': 'advance', 'source': 'state5', 'dest': 'state7', 'conditions': 'is_going_from_state5_to_state7'}, {'trigger': 'advance', 'source': 'state4', 'dest': 'state8', 'conditions': 'is_going_from_state4_to_state8'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state9', 'conditions': 'is_going_from_state8_to_state9'}, {'trigger': 'advance', 'source': 'state6', 'dest': 'state8', 'conditions': 'is_going_from_state6_to_state8'}, {'trigger': 'advance', 'source': 'state7', 'dest': 'state8', 'conditions': 'is_going_from_state7_to_state8'}, {'trigger': 'advance', 'source': 'state9', 'dest': 'state5', 'conditions': 'is_going_from_state9_to_state5'}, {'trigger': 'advance', 'source': 'state9', 'dest': 'state10', 'conditions': 'is_going_from_state9_to_state10'}, {'trigger': 'advance', 'source': 'state6', 'dest': 'state10', 'conditions': 'is_going_from_state6_to_state10'}, {'trigger': 'advance', 'source': 'state7', 'dest': 'state10', 'conditions': 'is_going_from_state7_to_state10'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state11', 'conditions': 'is_going_from_state8_to_state11'}, {'trigger': 'advance', 'source': 'state11', 'dest': 'state10', 'conditions': 'is_going_from_state11_to_state10'}, {'trigger': 'advance', 'source': 'state11', 'dest': 'state5', 'conditions': 'is_going_from_state11_to_state5'}, {'trigger': 'advance', 'source': 
'state8', 'dest': 'state12', 'conditions': 'is_going_from_state8_to_state12'}, {'trigger': 'advance', 'source': 'state12', 'dest': 'state10', 'conditions': 'is_going_from_state12_to_state10'}, {'trigger': 'advance', 'source': 'state12', 'dest': 'state5', 'conditions': 'is_going_from_state12_to_state5'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state13', 'conditions': 'is_going_from_state8_to_state13'}, {'trigger': 'advance', 'source': 'state13', 'dest': 'state10', 'conditions': 'is_going_from_state13_to_state10'}, {'trigger': 'advance', 'source': 'state13', 'dest': 'state5', 'conditions': 'is_going_from_state13_to_state5'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state14', 'conditions': 'is_going_from_state8_to_state14'}, {'trigger': 'advance', 'source': 'state14', 'dest': 'state10', 'conditions': 'is_going_from_state14_to_state10'}, {'trigger': 'advance', 'source': 'state14', 'dest': 'state5', 'conditions': 'is_going_from_state14_to_state5'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state15', 'conditions': 'is_going_from_state8_to_state15'}, {'trigger': 'advance', 'source': 'state15', 'dest': 'state10', 'conditions': 'is_going_from_state15_to_state10'}, {'trigger': 'advance', 'source': 'state15', 'dest': 'state5', 'conditions': 'is_going_from_state15_to_state5'}, {'trigger': 'go_back', 'source': [ 'state10'], 'dest': 'user'}], initial='user', auto_transitions=False, show_conditions=True) def _set_webhook(): status = bot.set_webhook(WEBHOOK_URL) if not status: print('Webhook setup failed') sys.exit(1) else: print('Your webhook URL has been set to "{}"'.format(WEBHOOK_URL)) @app.route('/hook', methods=['POST']) def webhook_handler(): update = telegram.Update.de_json(request.get_json(force=True), bot) machine.advance(update) return 'ok' @app.route('/show-fsm', methods=['GET']) def show_fsm(): byte_io = BytesIO() machine.graph.draw(byte_io, prog='dot', format='png') byte_io.seek(0) return send_file(byte_io, attachment_filename='fsm.png', mimetype= 'image/png') if __name__ == '__main__': _set_webhook() app.run() <|reserved_special_token_1|> import sys from io import BytesIO import telegram from flask import Flask, request, send_file from fsm import TocMachine API_TOKEN = '375541027:AAFvLkySNkMSGgOl7PtsPIsJgnxophQpllQ' WEBHOOK_URL = 'https://a140f4ad.ngrok.io/show-fsm' app = Flask(__name__) bot = telegram.Bot(token=API_TOKEN) machine = TocMachine(states=['user', 'state3', 'state4', 'state5', 'state6', 'state7', 'state8', 'state9', 'state10', 'state11', 'state12', 'state13', 'state14', 'state15'], transitions=[{'trigger': 'advance', 'source': 'user', 'dest': 'state3', 'conditions': 'is_going_from_state0_to_state3'}, {'trigger': 'advance', 'source': 'state3', 'dest': 'state4', 'conditions': 'is_going_from_state3_to_state4'}, {'trigger': 'advance', 'source': 'state4', 'dest': 'state5', 'conditions': 'is_going_from_state4_to_state5'}, {'trigger': 'advance', 'source': 'state5', 'dest': 'state6', 'conditions': 'is_going_from_state5_to_state6'}, {'trigger': 'advance', 'source': 'state5', 'dest': 'state7', 'conditions': 'is_going_from_state5_to_state7'}, {'trigger': 'advance', 'source': 'state4', 'dest': 'state8', 'conditions': 'is_going_from_state4_to_state8'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state9', 'conditions': 'is_going_from_state8_to_state9'}, {'trigger': 'advance', 'source': 'state6', 'dest': 'state8', 'conditions': 'is_going_from_state6_to_state8'}, {'trigger': 'advance', 'source': 'state7', 'dest': 'state8', 'conditions': 
'is_going_from_state7_to_state8'}, {'trigger': 'advance', 'source': 'state9', 'dest': 'state5', 'conditions': 'is_going_from_state9_to_state5'}, {'trigger': 'advance', 'source': 'state9', 'dest': 'state10', 'conditions': 'is_going_from_state9_to_state10'}, {'trigger': 'advance', 'source': 'state6', 'dest': 'state10', 'conditions': 'is_going_from_state6_to_state10'}, {'trigger': 'advance', 'source': 'state7', 'dest': 'state10', 'conditions': 'is_going_from_state7_to_state10'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state11', 'conditions': 'is_going_from_state8_to_state11'}, {'trigger': 'advance', 'source': 'state11', 'dest': 'state10', 'conditions': 'is_going_from_state11_to_state10'}, {'trigger': 'advance', 'source': 'state11', 'dest': 'state5', 'conditions': 'is_going_from_state11_to_state5'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state12', 'conditions': 'is_going_from_state8_to_state12'}, {'trigger': 'advance', 'source': 'state12', 'dest': 'state10', 'conditions': 'is_going_from_state12_to_state10'}, {'trigger': 'advance', 'source': 'state12', 'dest': 'state5', 'conditions': 'is_going_from_state12_to_state5'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state13', 'conditions': 'is_going_from_state8_to_state13'}, {'trigger': 'advance', 'source': 'state13', 'dest': 'state10', 'conditions': 'is_going_from_state13_to_state10'}, {'trigger': 'advance', 'source': 'state13', 'dest': 'state5', 'conditions': 'is_going_from_state13_to_state5'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state14', 'conditions': 'is_going_from_state8_to_state14'}, {'trigger': 'advance', 'source': 'state14', 'dest': 'state10', 'conditions': 'is_going_from_state14_to_state10'}, {'trigger': 'advance', 'source': 'state14', 'dest': 'state5', 'conditions': 'is_going_from_state14_to_state5'}, {'trigger': 'advance', 'source': 'state8', 'dest': 'state15', 'conditions': 'is_going_from_state8_to_state15'}, {'trigger': 'advance', 'source': 'state15', 'dest': 'state10', 'conditions': 'is_going_from_state15_to_state10'}, {'trigger': 'advance', 'source': 'state15', 'dest': 'state5', 'conditions': 'is_going_from_state15_to_state5'}, {'trigger': 'go_back', 'source': [ 'state10'], 'dest': 'user'}], initial='user', auto_transitions=False, show_conditions=True) def _set_webhook(): status = bot.set_webhook(WEBHOOK_URL) if not status: print('Webhook setup failed') sys.exit(1) else: print('Your webhook URL has been set to "{}"'.format(WEBHOOK_URL)) @app.route('/hook', methods=['POST']) def webhook_handler(): update = telegram.Update.de_json(request.get_json(force=True), bot) machine.advance(update) return 'ok' @app.route('/show-fsm', methods=['GET']) def show_fsm(): byte_io = BytesIO() machine.graph.draw(byte_io, prog='dot', format='png') byte_io.seek(0) return send_file(byte_io, attachment_filename='fsm.png', mimetype= 'image/png') if __name__ == '__main__': _set_webhook() app.run() <|reserved_special_token_1|> import sys from io import BytesIO import telegram from flask import Flask, request, send_file from fsm import TocMachine API_TOKEN = '375541027:AAFvLkySNkMSGgOl7PtsPIsJgnxophQpllQ' WEBHOOK_URL = 'https://a140f4ad.ngrok.io/show-fsm' app = Flask(__name__) bot = telegram.Bot(token=API_TOKEN) machine = TocMachine( states=[ 'user', 'state3', 'state4', 'state5', 'state6', 'state7', 'state8', 'state9', 'state10', 'state11', 'state12', 'state13', 'state14', 'state15' ], transitions=[ { 'trigger': 'advance', 'source': 'user', 'dest': 'state3', 'conditions': 'is_going_from_state0_to_state3' }, { 
'trigger': 'advance', 'source': 'state3', 'dest': 'state4', 'conditions': 'is_going_from_state3_to_state4' }, { 'trigger': 'advance', 'source': 'state4', 'dest': 'state5', 'conditions': 'is_going_from_state4_to_state5' }, { 'trigger': 'advance', 'source': 'state5', 'dest': 'state6', 'conditions': 'is_going_from_state5_to_state6' }, { 'trigger': 'advance', 'source': 'state5', 'dest': 'state7', 'conditions': 'is_going_from_state5_to_state7' }, { 'trigger': 'advance', 'source': 'state4', 'dest': 'state8', 'conditions': 'is_going_from_state4_to_state8' }, { 'trigger': 'advance', 'source': 'state8', 'dest': 'state9', 'conditions': 'is_going_from_state8_to_state9' }, { 'trigger': 'advance', 'source': 'state6', 'dest': 'state8', 'conditions': 'is_going_from_state6_to_state8' }, { 'trigger': 'advance', 'source': 'state7', 'dest': 'state8', 'conditions': 'is_going_from_state7_to_state8' }, { 'trigger': 'advance', 'source': 'state9', 'dest': 'state5', 'conditions': 'is_going_from_state9_to_state5' }, { 'trigger': 'advance', 'source': 'state9', 'dest': 'state10', 'conditions': 'is_going_from_state9_to_state10' }, { 'trigger': 'advance', 'source': 'state6', 'dest': 'state10', 'conditions': 'is_going_from_state6_to_state10' }, { 'trigger': 'advance', 'source': 'state7', 'dest': 'state10', 'conditions': 'is_going_from_state7_to_state10' }, { 'trigger': 'advance', 'source': 'state8', 'dest': 'state11', 'conditions': 'is_going_from_state8_to_state11' }, { 'trigger': 'advance', 'source': 'state11', 'dest': 'state10', 'conditions': 'is_going_from_state11_to_state10' }, { 'trigger': 'advance', 'source': 'state11', 'dest': 'state5', 'conditions': 'is_going_from_state11_to_state5' }, { 'trigger': 'advance', 'source': 'state8', 'dest': 'state12', 'conditions': 'is_going_from_state8_to_state12' }, { 'trigger': 'advance', 'source': 'state12', 'dest': 'state10', 'conditions': 'is_going_from_state12_to_state10' }, { 'trigger': 'advance', 'source': 'state12', 'dest': 'state5', 'conditions': 'is_going_from_state12_to_state5' }, { 'trigger': 'advance', 'source': 'state8', 'dest': 'state13', 'conditions': 'is_going_from_state8_to_state13' }, { 'trigger': 'advance', 'source': 'state13', 'dest': 'state10', 'conditions': 'is_going_from_state13_to_state10' }, { 'trigger': 'advance', 'source': 'state13', 'dest': 'state5', 'conditions': 'is_going_from_state13_to_state5' }, { 'trigger': 'advance', 'source': 'state8', 'dest': 'state14', 'conditions': 'is_going_from_state8_to_state14' }, { 'trigger': 'advance', 'source': 'state14', 'dest': 'state10', 'conditions': 'is_going_from_state14_to_state10' }, { 'trigger': 'advance', 'source': 'state14', 'dest': 'state5', 'conditions': 'is_going_from_state14_to_state5' }, { 'trigger': 'advance', 'source': 'state8', 'dest': 'state15', 'conditions': 'is_going_from_state8_to_state15' }, { 'trigger': 'advance', 'source': 'state15', 'dest': 'state10', 'conditions': 'is_going_from_state15_to_state10' }, { 'trigger': 'advance', 'source': 'state15', 'dest': 'state5', 'conditions': 'is_going_from_state15_to_state5' }, { 'trigger': 'go_back', 'source': [ 'state10' ], 'dest': 'user' } ], initial='user', auto_transitions=False, show_conditions=True, ) def _set_webhook(): status = bot.set_webhook(WEBHOOK_URL) if not status: print('Webhook setup failed') sys.exit(1) else: print('Your webhook URL has been set to "{}"'.format(WEBHOOK_URL)) @app.route('/hook', methods=['POST']) def webhook_handler(): update = telegram.Update.de_json(request.get_json(force=True), bot) machine.advance(update) return 'ok' 
@app.route('/show-fsm', methods=['GET']) def show_fsm(): byte_io = BytesIO() machine.graph.draw(byte_io, prog='dot', format='png') byte_io.seek(0) return send_file(byte_io, attachment_filename='fsm.png', mimetype='image/png') if __name__ == "__main__": _set_webhook() app.run()
flexible
{ "blob_id": "984efa858e782777472d84aab85471616a05b0e0", "index": 2886, "step-1": "<mask token>\n\n\ndef _set_webhook():\n status = bot.set_webhook(WEBHOOK_URL)\n if not status:\n print('Webhook setup failed')\n sys.exit(1)\n else:\n print('Your webhook URL has been set to \"{}\"'.format(WEBHOOK_URL))\n\n\n@app.route('/hook', methods=['POST'])\ndef webhook_handler():\n update = telegram.Update.de_json(request.get_json(force=True), bot)\n machine.advance(update)\n return 'ok'\n\n\n@app.route('/show-fsm', methods=['GET'])\ndef show_fsm():\n byte_io = BytesIO()\n machine.graph.draw(byte_io, prog='dot', format='png')\n byte_io.seek(0)\n return send_file(byte_io, attachment_filename='fsm.png', mimetype=\n 'image/png')\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef _set_webhook():\n status = bot.set_webhook(WEBHOOK_URL)\n if not status:\n print('Webhook setup failed')\n sys.exit(1)\n else:\n print('Your webhook URL has been set to \"{}\"'.format(WEBHOOK_URL))\n\n\n@app.route('/hook', methods=['POST'])\ndef webhook_handler():\n update = telegram.Update.de_json(request.get_json(force=True), bot)\n machine.advance(update)\n return 'ok'\n\n\n@app.route('/show-fsm', methods=['GET'])\ndef show_fsm():\n byte_io = BytesIO()\n machine.graph.draw(byte_io, prog='dot', format='png')\n byte_io.seek(0)\n return send_file(byte_io, attachment_filename='fsm.png', mimetype=\n 'image/png')\n\n\nif __name__ == '__main__':\n _set_webhook()\n app.run()\n", "step-3": "<mask token>\nAPI_TOKEN = '375541027:AAFvLkySNkMSGgOl7PtsPIsJgnxophQpllQ'\nWEBHOOK_URL = 'https://a140f4ad.ngrok.io/show-fsm'\napp = Flask(__name__)\nbot = telegram.Bot(token=API_TOKEN)\nmachine = TocMachine(states=['user', 'state3', 'state4', 'state5', 'state6',\n 'state7', 'state8', 'state9', 'state10', 'state11', 'state12',\n 'state13', 'state14', 'state15'], transitions=[{'trigger': 'advance',\n 'source': 'user', 'dest': 'state3', 'conditions':\n 'is_going_from_state0_to_state3'}, {'trigger': 'advance', 'source':\n 'state3', 'dest': 'state4', 'conditions':\n 'is_going_from_state3_to_state4'}, {'trigger': 'advance', 'source':\n 'state4', 'dest': 'state5', 'conditions':\n 'is_going_from_state4_to_state5'}, {'trigger': 'advance', 'source':\n 'state5', 'dest': 'state6', 'conditions':\n 'is_going_from_state5_to_state6'}, {'trigger': 'advance', 'source':\n 'state5', 'dest': 'state7', 'conditions':\n 'is_going_from_state5_to_state7'}, {'trigger': 'advance', 'source':\n 'state4', 'dest': 'state8', 'conditions':\n 'is_going_from_state4_to_state8'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state9', 'conditions':\n 'is_going_from_state8_to_state9'}, {'trigger': 'advance', 'source':\n 'state6', 'dest': 'state8', 'conditions':\n 'is_going_from_state6_to_state8'}, {'trigger': 'advance', 'source':\n 'state7', 'dest': 'state8', 'conditions':\n 'is_going_from_state7_to_state8'}, {'trigger': 'advance', 'source':\n 'state9', 'dest': 'state5', 'conditions':\n 'is_going_from_state9_to_state5'}, {'trigger': 'advance', 'source':\n 'state9', 'dest': 'state10', 'conditions':\n 'is_going_from_state9_to_state10'}, {'trigger': 'advance', 'source':\n 'state6', 'dest': 'state10', 'conditions':\n 'is_going_from_state6_to_state10'}, {'trigger': 'advance', 'source':\n 'state7', 'dest': 'state10', 'conditions':\n 'is_going_from_state7_to_state10'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state11', 'conditions':\n 'is_going_from_state8_to_state11'}, {'trigger': 'advance', 'source':\n 'state11', 'dest': 'state10', 'conditions':\n 
'is_going_from_state11_to_state10'}, {'trigger': 'advance', 'source':\n 'state11', 'dest': 'state5', 'conditions':\n 'is_going_from_state11_to_state5'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state12', 'conditions':\n 'is_going_from_state8_to_state12'}, {'trigger': 'advance', 'source':\n 'state12', 'dest': 'state10', 'conditions':\n 'is_going_from_state12_to_state10'}, {'trigger': 'advance', 'source':\n 'state12', 'dest': 'state5', 'conditions':\n 'is_going_from_state12_to_state5'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state13', 'conditions':\n 'is_going_from_state8_to_state13'}, {'trigger': 'advance', 'source':\n 'state13', 'dest': 'state10', 'conditions':\n 'is_going_from_state13_to_state10'}, {'trigger': 'advance', 'source':\n 'state13', 'dest': 'state5', 'conditions':\n 'is_going_from_state13_to_state5'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state14', 'conditions':\n 'is_going_from_state8_to_state14'}, {'trigger': 'advance', 'source':\n 'state14', 'dest': 'state10', 'conditions':\n 'is_going_from_state14_to_state10'}, {'trigger': 'advance', 'source':\n 'state14', 'dest': 'state5', 'conditions':\n 'is_going_from_state14_to_state5'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state15', 'conditions':\n 'is_going_from_state8_to_state15'}, {'trigger': 'advance', 'source':\n 'state15', 'dest': 'state10', 'conditions':\n 'is_going_from_state15_to_state10'}, {'trigger': 'advance', 'source':\n 'state15', 'dest': 'state5', 'conditions':\n 'is_going_from_state15_to_state5'}, {'trigger': 'go_back', 'source': [\n 'state10'], 'dest': 'user'}], initial='user', auto_transitions=False,\n show_conditions=True)\n\n\ndef _set_webhook():\n status = bot.set_webhook(WEBHOOK_URL)\n if not status:\n print('Webhook setup failed')\n sys.exit(1)\n else:\n print('Your webhook URL has been set to \"{}\"'.format(WEBHOOK_URL))\n\n\n@app.route('/hook', methods=['POST'])\ndef webhook_handler():\n update = telegram.Update.de_json(request.get_json(force=True), bot)\n machine.advance(update)\n return 'ok'\n\n\n@app.route('/show-fsm', methods=['GET'])\ndef show_fsm():\n byte_io = BytesIO()\n machine.graph.draw(byte_io, prog='dot', format='png')\n byte_io.seek(0)\n return send_file(byte_io, attachment_filename='fsm.png', mimetype=\n 'image/png')\n\n\nif __name__ == '__main__':\n _set_webhook()\n app.run()\n", "step-4": "import sys\nfrom io import BytesIO\nimport telegram\nfrom flask import Flask, request, send_file\nfrom fsm import TocMachine\nAPI_TOKEN = '375541027:AAFvLkySNkMSGgOl7PtsPIsJgnxophQpllQ'\nWEBHOOK_URL = 'https://a140f4ad.ngrok.io/show-fsm'\napp = Flask(__name__)\nbot = telegram.Bot(token=API_TOKEN)\nmachine = TocMachine(states=['user', 'state3', 'state4', 'state5', 'state6',\n 'state7', 'state8', 'state9', 'state10', 'state11', 'state12',\n 'state13', 'state14', 'state15'], transitions=[{'trigger': 'advance',\n 'source': 'user', 'dest': 'state3', 'conditions':\n 'is_going_from_state0_to_state3'}, {'trigger': 'advance', 'source':\n 'state3', 'dest': 'state4', 'conditions':\n 'is_going_from_state3_to_state4'}, {'trigger': 'advance', 'source':\n 'state4', 'dest': 'state5', 'conditions':\n 'is_going_from_state4_to_state5'}, {'trigger': 'advance', 'source':\n 'state5', 'dest': 'state6', 'conditions':\n 'is_going_from_state5_to_state6'}, {'trigger': 'advance', 'source':\n 'state5', 'dest': 'state7', 'conditions':\n 'is_going_from_state5_to_state7'}, {'trigger': 'advance', 'source':\n 'state4', 'dest': 'state8', 'conditions':\n 
'is_going_from_state4_to_state8'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state9', 'conditions':\n 'is_going_from_state8_to_state9'}, {'trigger': 'advance', 'source':\n 'state6', 'dest': 'state8', 'conditions':\n 'is_going_from_state6_to_state8'}, {'trigger': 'advance', 'source':\n 'state7', 'dest': 'state8', 'conditions':\n 'is_going_from_state7_to_state8'}, {'trigger': 'advance', 'source':\n 'state9', 'dest': 'state5', 'conditions':\n 'is_going_from_state9_to_state5'}, {'trigger': 'advance', 'source':\n 'state9', 'dest': 'state10', 'conditions':\n 'is_going_from_state9_to_state10'}, {'trigger': 'advance', 'source':\n 'state6', 'dest': 'state10', 'conditions':\n 'is_going_from_state6_to_state10'}, {'trigger': 'advance', 'source':\n 'state7', 'dest': 'state10', 'conditions':\n 'is_going_from_state7_to_state10'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state11', 'conditions':\n 'is_going_from_state8_to_state11'}, {'trigger': 'advance', 'source':\n 'state11', 'dest': 'state10', 'conditions':\n 'is_going_from_state11_to_state10'}, {'trigger': 'advance', 'source':\n 'state11', 'dest': 'state5', 'conditions':\n 'is_going_from_state11_to_state5'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state12', 'conditions':\n 'is_going_from_state8_to_state12'}, {'trigger': 'advance', 'source':\n 'state12', 'dest': 'state10', 'conditions':\n 'is_going_from_state12_to_state10'}, {'trigger': 'advance', 'source':\n 'state12', 'dest': 'state5', 'conditions':\n 'is_going_from_state12_to_state5'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state13', 'conditions':\n 'is_going_from_state8_to_state13'}, {'trigger': 'advance', 'source':\n 'state13', 'dest': 'state10', 'conditions':\n 'is_going_from_state13_to_state10'}, {'trigger': 'advance', 'source':\n 'state13', 'dest': 'state5', 'conditions':\n 'is_going_from_state13_to_state5'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state14', 'conditions':\n 'is_going_from_state8_to_state14'}, {'trigger': 'advance', 'source':\n 'state14', 'dest': 'state10', 'conditions':\n 'is_going_from_state14_to_state10'}, {'trigger': 'advance', 'source':\n 'state14', 'dest': 'state5', 'conditions':\n 'is_going_from_state14_to_state5'}, {'trigger': 'advance', 'source':\n 'state8', 'dest': 'state15', 'conditions':\n 'is_going_from_state8_to_state15'}, {'trigger': 'advance', 'source':\n 'state15', 'dest': 'state10', 'conditions':\n 'is_going_from_state15_to_state10'}, {'trigger': 'advance', 'source':\n 'state15', 'dest': 'state5', 'conditions':\n 'is_going_from_state15_to_state5'}, {'trigger': 'go_back', 'source': [\n 'state10'], 'dest': 'user'}], initial='user', auto_transitions=False,\n show_conditions=True)\n\n\ndef _set_webhook():\n status = bot.set_webhook(WEBHOOK_URL)\n if not status:\n print('Webhook setup failed')\n sys.exit(1)\n else:\n print('Your webhook URL has been set to \"{}\"'.format(WEBHOOK_URL))\n\n\n@app.route('/hook', methods=['POST'])\ndef webhook_handler():\n update = telegram.Update.de_json(request.get_json(force=True), bot)\n machine.advance(update)\n return 'ok'\n\n\n@app.route('/show-fsm', methods=['GET'])\ndef show_fsm():\n byte_io = BytesIO()\n machine.graph.draw(byte_io, prog='dot', format='png')\n byte_io.seek(0)\n return send_file(byte_io, attachment_filename='fsm.png', mimetype=\n 'image/png')\n\n\nif __name__ == '__main__':\n _set_webhook()\n app.run()\n", "step-5": "import sys\nfrom io import BytesIO\n\nimport telegram\nfrom flask import Flask, request, send_file\n\nfrom fsm import 
TocMachine\n\n\nAPI_TOKEN = '375541027:AAFvLkySNkMSGgOl7PtsPIsJgnxophQpllQ'\nWEBHOOK_URL = 'https://a140f4ad.ngrok.io/show-fsm'\n\napp = Flask(__name__)\nbot = telegram.Bot(token=API_TOKEN)\nmachine = TocMachine(\n states=[\n 'user',\n 'state3',\n 'state4',\n 'state5',\n 'state6',\n 'state7',\n 'state8',\n 'state9',\n 'state10',\n 'state11',\n 'state12',\n 'state13',\n 'state14',\n 'state15'\n ],\n transitions=[\n {\n 'trigger': 'advance',\n 'source': 'user',\n 'dest': 'state3',\n 'conditions': 'is_going_from_state0_to_state3'\n },\n {\n 'trigger': 'advance',\n 'source': 'state3',\n 'dest': 'state4',\n 'conditions': 'is_going_from_state3_to_state4'\n },\n {\n 'trigger': 'advance',\n 'source': 'state4',\n 'dest': 'state5',\n 'conditions': 'is_going_from_state4_to_state5'\n },\n {\n 'trigger': 'advance',\n 'source': 'state5',\n 'dest': 'state6',\n 'conditions': 'is_going_from_state5_to_state6'\n },\n {\n 'trigger': 'advance',\n 'source': 'state5',\n 'dest': 'state7',\n 'conditions': 'is_going_from_state5_to_state7'\n },\n {\n 'trigger': 'advance',\n 'source': 'state4',\n 'dest': 'state8',\n 'conditions': 'is_going_from_state4_to_state8'\n },\n {\n 'trigger': 'advance',\n 'source': 'state8',\n 'dest': 'state9',\n 'conditions': 'is_going_from_state8_to_state9'\n },\n {\n 'trigger': 'advance',\n 'source': 'state6',\n 'dest': 'state8',\n 'conditions': 'is_going_from_state6_to_state8'\n },\n {\n 'trigger': 'advance',\n 'source': 'state7',\n 'dest': 'state8',\n 'conditions': 'is_going_from_state7_to_state8'\n },\n {\n 'trigger': 'advance',\n 'source': 'state9',\n 'dest': 'state5',\n 'conditions': 'is_going_from_state9_to_state5'\n },\n {\n 'trigger': 'advance',\n 'source': 'state9',\n 'dest': 'state10',\n 'conditions': 'is_going_from_state9_to_state10'\n },\n {\n 'trigger': 'advance',\n 'source': 'state6',\n 'dest': 'state10',\n 'conditions': 'is_going_from_state6_to_state10'\n },\n {\n 'trigger': 'advance',\n 'source': 'state7',\n 'dest': 'state10',\n 'conditions': 'is_going_from_state7_to_state10'\n },\n {\n 'trigger': 'advance',\n 'source': 'state8',\n 'dest': 'state11',\n 'conditions': 'is_going_from_state8_to_state11'\n },\n {\n 'trigger': 'advance',\n 'source': 'state11',\n 'dest': 'state10',\n 'conditions': 'is_going_from_state11_to_state10'\n },\n {\n 'trigger': 'advance',\n 'source': 'state11',\n 'dest': 'state5',\n 'conditions': 'is_going_from_state11_to_state5'\n },\n {\n 'trigger': 'advance',\n 'source': 'state8',\n 'dest': 'state12',\n 'conditions': 'is_going_from_state8_to_state12'\n },\n {\n 'trigger': 'advance',\n 'source': 'state12',\n 'dest': 'state10',\n 'conditions': 'is_going_from_state12_to_state10'\n },\n {\n 'trigger': 'advance',\n 'source': 'state12',\n 'dest': 'state5',\n 'conditions': 'is_going_from_state12_to_state5'\n },\n {\n 'trigger': 'advance',\n 'source': 'state8',\n 'dest': 'state13',\n 'conditions': 'is_going_from_state8_to_state13'\n },\n {\n 'trigger': 'advance',\n 'source': 'state13',\n 'dest': 'state10',\n 'conditions': 'is_going_from_state13_to_state10'\n },\n {\n 'trigger': 'advance',\n 'source': 'state13',\n 'dest': 'state5',\n 'conditions': 'is_going_from_state13_to_state5'\n },\n {\n 'trigger': 'advance',\n 'source': 'state8',\n 'dest': 'state14',\n 'conditions': 'is_going_from_state8_to_state14'\n },\n {\n 'trigger': 'advance',\n 'source': 'state14',\n 'dest': 'state10',\n 'conditions': 'is_going_from_state14_to_state10'\n },\n {\n 'trigger': 'advance',\n 'source': 'state14',\n 'dest': 'state5',\n 'conditions': 'is_going_from_state14_to_state5'\n },\n 
{\n 'trigger': 'advance',\n 'source': 'state8',\n 'dest': 'state15',\n 'conditions': 'is_going_from_state8_to_state15'\n },\n {\n 'trigger': 'advance',\n 'source': 'state15',\n 'dest': 'state10',\n 'conditions': 'is_going_from_state15_to_state10'\n },\n {\n 'trigger': 'advance',\n 'source': 'state15',\n 'dest': 'state5',\n 'conditions': 'is_going_from_state15_to_state5'\n },\n {\n 'trigger': 'go_back',\n 'source': [\n 'state10'\n ],\n 'dest': 'user'\n }\n ],\n initial='user',\n auto_transitions=False,\n show_conditions=True,\n)\n\n\ndef _set_webhook():\n status = bot.set_webhook(WEBHOOK_URL)\n if not status:\n print('Webhook setup failed')\n sys.exit(1)\n else:\n print('Your webhook URL has been set to \"{}\"'.format(WEBHOOK_URL))\n\n\n@app.route('/hook', methods=['POST'])\ndef webhook_handler():\n update = telegram.Update.de_json(request.get_json(force=True), bot)\n machine.advance(update)\n return 'ok'\n\n\n@app.route('/show-fsm', methods=['GET'])\ndef show_fsm():\n byte_io = BytesIO()\n machine.graph.draw(byte_io, prog='dot', format='png')\n byte_io.seek(0)\n return send_file(byte_io, attachment_filename='fsm.png', mimetype='image/png')\n\n\nif __name__ == \"__main__\":\n _set_webhook()\n app.run()\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
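TocMachine in the record above comes from an fsm module that is not shown; assuming it wraps the transitions package's Machine, the same trigger/source/dest/conditions table shape works as below (toy model, made-up names):

from transitions import Machine

class Light(object):
    # condition methods receive the trigger's arguments by default
    def is_switch_pressed(self, pressed):
        return pressed

light = Light()
machine = Machine(
    model=light,
    states=['off', 'on'],
    transitions=[{'trigger': 'advance', 'source': 'off', 'dest': 'on',
                  'conditions': 'is_switch_pressed'}],
    initial='off',
    auto_transitions=False,
)

light.advance(False)   # condition fails: stays in 'off'
assert light.state == 'off'
light.advance(True)    # condition passes: moves to 'on'
assert light.state == 'on'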
import random from PyQt4.QtGui import ( QWidget, QHBoxLayout, QPushButton, QMainWindow, QIcon, QAction, QShortcut, QKeySequence, QFileDialog, QMessageBox) from PyQt4 import QtCore class Controls(QWidget): def __init__(self, parent): super(Controls, self).__init__(parent) self.layout = QHBoxLayout(self) self.openButton = QPushButton('Open', self) self.layout.addWidget(self.openButton) self.playPauseButton = QPushButton('Play', self) # TODO implement pausing self.layout.addWidget(self.playPauseButton) self.nextButton = QPushButton('Next', self) self.layout.addWidget(self.nextButton) self.__nextShortcut = QShortcut(QKeySequence.MoveToNextChar, self) self.__nextShortcut.activated.connect(self.nextButton.click) self.__playPauseShortcut = QShortcut(QKeySequence.fromString(' '), self) self.__playPauseShortcut.activated.connect(self.playPauseButton.click) class MainWindow(QMainWindow): playSong = QtCore.pyqtSignal(str) # arg is path to file def __init__(self, music_dir): super(MainWindow, self).__init__() self.__music_dir = music_dir self.resize(400, 70) self.move(0, 0) self.setWindowTitle('Drink') self.setWindowIcon(QIcon('icon.png')) self.controls = Controls(self) self.setCentralWidget(self.controls) self.controls.openButton.clicked.connect(self.open) self.show() def open(self): try: fileName = QFileDialog.getOpenFileName( self, "Open", self.__music_dir, "Mp3 Files (*.mp3)") self.playSong.emit(fileName) except Exception as error: QMessageBox.critical(self, "Open error", error.message)
normal
{ "blob_id": "4e86dd74374297c3b0ce8fea93910003dac7d5d7", "index": 8742, "step-1": "<mask token>\n\n\nclass MainWindow(QMainWindow):\n playSong = QtCore.pyqtSignal(str)\n\n def __init__(self, music_dir):\n super(MainWindow, self).__init__()\n self.__music_dir = music_dir\n self.resize(400, 70)\n self.move(0, 0)\n self.setWindowTitle('Drink')\n self.setWindowIcon(QIcon('icon.png'))\n self.controls = Controls(self)\n self.setCentralWidget(self.controls)\n self.controls.openButton.clicked.connect(self.open)\n self.show()\n\n def open(self):\n try:\n fileName = QFileDialog.getOpenFileName(self, 'Open', self.\n __music_dir, 'Mp3 Files (*.mp3)')\n self.playSong.emit(fileName)\n except Exception as error:\n QMessageBox.critical(self, 'Open error', error.message)\n", "step-2": "<mask token>\n\n\nclass Controls(QWidget):\n <mask token>\n\n\nclass MainWindow(QMainWindow):\n playSong = QtCore.pyqtSignal(str)\n\n def __init__(self, music_dir):\n super(MainWindow, self).__init__()\n self.__music_dir = music_dir\n self.resize(400, 70)\n self.move(0, 0)\n self.setWindowTitle('Drink')\n self.setWindowIcon(QIcon('icon.png'))\n self.controls = Controls(self)\n self.setCentralWidget(self.controls)\n self.controls.openButton.clicked.connect(self.open)\n self.show()\n\n def open(self):\n try:\n fileName = QFileDialog.getOpenFileName(self, 'Open', self.\n __music_dir, 'Mp3 Files (*.mp3)')\n self.playSong.emit(fileName)\n except Exception as error:\n QMessageBox.critical(self, 'Open error', error.message)\n", "step-3": "<mask token>\n\n\nclass Controls(QWidget):\n\n def __init__(self, parent):\n super(Controls, self).__init__(parent)\n self.layout = QHBoxLayout(self)\n self.openButton = QPushButton('Open', self)\n self.layout.addWidget(self.openButton)\n self.playPauseButton = QPushButton('Play', self)\n self.layout.addWidget(self.playPauseButton)\n self.nextButton = QPushButton('Next', self)\n self.layout.addWidget(self.nextButton)\n self.__nextShortcut = QShortcut(QKeySequence.MoveToNextChar, self)\n self.__nextShortcut.activated.connect(self.nextButton.click)\n self.__playPauseShortcut = QShortcut(QKeySequence.fromString(' '), self\n )\n self.__playPauseShortcut.activated.connect(self.playPauseButton.click)\n\n\nclass MainWindow(QMainWindow):\n playSong = QtCore.pyqtSignal(str)\n\n def __init__(self, music_dir):\n super(MainWindow, self).__init__()\n self.__music_dir = music_dir\n self.resize(400, 70)\n self.move(0, 0)\n self.setWindowTitle('Drink')\n self.setWindowIcon(QIcon('icon.png'))\n self.controls = Controls(self)\n self.setCentralWidget(self.controls)\n self.controls.openButton.clicked.connect(self.open)\n self.show()\n\n def open(self):\n try:\n fileName = QFileDialog.getOpenFileName(self, 'Open', self.\n __music_dir, 'Mp3 Files (*.mp3)')\n self.playSong.emit(fileName)\n except Exception as error:\n QMessageBox.critical(self, 'Open error', error.message)\n", "step-4": "import random\nfrom PyQt4.QtGui import QWidget, QHBoxLayout, QPushButton, QMainWindow, QIcon, QAction, QShortcut, QKeySequence, QFileDialog, QMessageBox\nfrom PyQt4 import QtCore\n\n\nclass Controls(QWidget):\n\n def __init__(self, parent):\n super(Controls, self).__init__(parent)\n self.layout = QHBoxLayout(self)\n self.openButton = QPushButton('Open', self)\n self.layout.addWidget(self.openButton)\n self.playPauseButton = QPushButton('Play', self)\n self.layout.addWidget(self.playPauseButton)\n self.nextButton = QPushButton('Next', self)\n self.layout.addWidget(self.nextButton)\n self.__nextShortcut = 
QShortcut(QKeySequence.MoveToNextChar, self)\n self.__nextShortcut.activated.connect(self.nextButton.click)\n self.__playPauseShortcut = QShortcut(QKeySequence.fromString(' '), self\n )\n self.__playPauseShortcut.activated.connect(self.playPauseButton.click)\n\n\nclass MainWindow(QMainWindow):\n playSong = QtCore.pyqtSignal(str)\n\n def __init__(self, music_dir):\n super(MainWindow, self).__init__()\n self.__music_dir = music_dir\n self.resize(400, 70)\n self.move(0, 0)\n self.setWindowTitle('Drink')\n self.setWindowIcon(QIcon('icon.png'))\n self.controls = Controls(self)\n self.setCentralWidget(self.controls)\n self.controls.openButton.clicked.connect(self.open)\n self.show()\n\n def open(self):\n try:\n fileName = QFileDialog.getOpenFileName(self, 'Open', self.\n __music_dir, 'Mp3 Files (*.mp3)')\n self.playSong.emit(fileName)\n except Exception as error:\n QMessageBox.critical(self, 'Open error', error.message)\n", "step-5": "import random\r\n\r\nfrom PyQt4.QtGui import (\r\n QWidget, QHBoxLayout, QPushButton, QMainWindow, QIcon, QAction, QShortcut,\r\n QKeySequence, QFileDialog, QMessageBox)\r\nfrom PyQt4 import QtCore\r\n\r\nclass Controls(QWidget):\r\n def __init__(self, parent): \r\n super(Controls, self).__init__(parent)\r\n self.layout = QHBoxLayout(self)\r\n\r\n self.openButton = QPushButton('Open', self)\r\n self.layout.addWidget(self.openButton)\r\n\r\n self.playPauseButton = QPushButton('Play', self) # TODO implement pausing\r\n self.layout.addWidget(self.playPauseButton)\r\n\r\n self.nextButton = QPushButton('Next', self)\r\n self.layout.addWidget(self.nextButton)\r\n \r\n self.__nextShortcut = QShortcut(QKeySequence.MoveToNextChar, self)\r\n self.__nextShortcut.activated.connect(self.nextButton.click)\r\n\r\n self.__playPauseShortcut = QShortcut(QKeySequence.fromString(' '), self)\r\n self.__playPauseShortcut.activated.connect(self.playPauseButton.click)\r\n\r\n\r\nclass MainWindow(QMainWindow):\r\n playSong = QtCore.pyqtSignal(str) # arg is path to file\r\n\r\n def __init__(self, music_dir):\r\n super(MainWindow, self).__init__()\r\n\r\n self.__music_dir = music_dir\r\n\r\n self.resize(400, 70)\r\n self.move(0, 0)\r\n self.setWindowTitle('Drink')\r\n self.setWindowIcon(QIcon('icon.png'))\r\n \r\n self.controls = Controls(self)\r\n self.setCentralWidget(self.controls)\r\n\r\n self.controls.openButton.clicked.connect(self.open)\r\n\r\n self.show()\r\n\r\n def open(self):\r\n try:\r\n fileName = QFileDialog.getOpenFileName(\r\n self, \"Open\", self.__music_dir, \"Mp3 Files (*.mp3)\")\r\n self.playSong.emit(fileName)\r\n except Exception as error:\r\n QMessageBox.critical(self, \"Open error\", error.message)\r\n", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
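One way the playSong signal from the record's MainWindow would be consumed; the play callback and the music path are placeholders, and the import assumes the record's code lives in a hypothetical main_window module:

# Hypothetical wiring for MainWindow.playSong; illustrative only.
import sys
from PyQt4.QtGui import QApplication
from main_window import MainWindow   # assumed module name for the record's code

def play(path):
    # stand-in for a real audio backend
    print('now playing: {}'.format(path))

app = QApplication(sys.argv)
window = MainWindow('/home/user/music')
window.playSong.connect(play)        # the str argument arrives in the slot
sys.exit(app.exec_())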
#!/usr/bin/python # -*- coding: utf-8 -*- # Python version 3.8.5 # # Author Maria Catharina van Veen # # Purpose To provide users with a tool to create # or edit an html file. # # Tested OS This code was written and tested to # work with Windows 10. import os from tkinter import * import tkinter as tk from tkinter.scrolledtext import ScrolledText import WebPageGeneratorGui as gui import WebPageGeneratorFunc as func class MainWindow(Frame): def __init__(self, root): Frame.__init__(self, root) self.root = root self.root.title("Web Page Generator") self.root.bgcolor = "#AA0000" self.root.config(bg = self.root.bgcolor) gui.loadGUI(self) if __name__ == "__main__": root = Tk() app = MainWindow(root) root.mainloop()
from .most_serializers import *
"""
Marks notes for grace notes.
"""

import dataclasses

from melody_types import *


# Mark a grace note on the peak note of every up-down segment
def _peaks(song):
    for phrase in song.phrases:
        for pe in phrase.phrase_elements:
            if type(pe) == Segment:
                if pe.direction != SegmentDirection.UPDOWN:
                    continue

                # The peak is the last note before the pitch first descends
                for i in range(1, len(pe.notes)):
                    if pe.notes[i].pitch < pe.notes[i - 1].pitch:
                        pe.notes[i - 1].grace = True
                        break


# Adds a grace note to consonant notes in every segment (not yet implemented)
def _consonant(song):
    pass


def _insert_grace_notes(song):
    for phrase in song.phrases:
        for pe in phrase.phrase_elements:
            if type(pe) != Segment:
                continue

            segment = pe
            initial_len = len(segment.notes)

            new_notes = []
            flag = False
            for i in range(len(pe.notes)):
                # Insert at most one grace note per segment: one scale step
                # above the marked note, taking a quarter of its duration.
                if segment.notes[i].grace and not flag:
                    new_note = Note(pitch=phrase.scale.skip_up(segment.notes[i].pitch, 1),
                                    new=True, duration=1/4)
                    new_notes += [new_note]
                    segment.notes[i].duration -= 1/4
                    flag = True
                new_notes += [dataclasses.replace(segment.notes[i])]

            assert len(new_notes) - initial_len <= 1
            pe.notes = list(new_notes)


def add_grace_notes(song):
    _peaks(song)
    _insert_grace_notes(song)
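# The melody_types module is not shown; the stand-ins below are assumptions
# inferred from the attributes used above (song.phrases, phrase.scale.skip_up,
# segment.direction, note.grace). Saved as melody_types.py they make the module
# importable; the driver at the bottom then shows the effect of add_grace_notes.
from dataclasses import dataclass, field
from enum import Enum
from typing import List


class SegmentDirection(Enum):
    UPDOWN = 'updown'


@dataclass
class Note:
    pitch: int
    duration: float = 1.0
    grace: bool = False
    new: bool = False


class Scale:
    def skip_up(self, pitch, steps):
        return pitch + 2 * steps  # toy whole-tone step, purely illustrative


@dataclass
class Segment:
    notes: List[Note]
    direction: SegmentDirection = SegmentDirection.UPDOWN


@dataclass
class Phrase:
    phrase_elements: list
    scale: Scale = field(default_factory=Scale)


@dataclass
class Song:
    phrases: List[Phrase]


# Driver (runs from the grace-notes module itself):
seg = Segment([Note(60), Note(64), Note(67), Note(64)])  # peak at pitch 67
song = Song([Phrase([seg])])
add_grace_notes(song)
print([(n.pitch, n.duration, n.new) for n in seg.notes])
# expected: grace Note(69, 0.25) inserted before the shortened peak (67, 0.75)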
#!/usr/bin/python
# -*- coding: utf-8 -*-
import smtplib
import datetime
from email.mime.text import MIMEText
from email.header import Header


def sendEmail(sender, passwd, host, port, receivers, date, mail):
    message = MIMEText(mail, 'html', 'utf-8')
    message['From'] = Header("Alert sender <" + sender + ">", 'utf-8')
    subject = str(date) + ' server alert notification'
    message['Subject'] = Header(subject, 'utf-8')
    try:
        smtpObj = smtplib.SMTP_SSL(host, port)
        smtpObj.ehlo()
        smtpObj.login(sender, passwd)
        smtpObj.sendmail(sender, receivers, message.as_string())
        smtpObj.quit()
        print "Mail sent successfully"
    except smtplib.SMTPException:
        print "Error: unable to send mail"


if __name__ == '__main__':
    sender = 'liucl@helianhealth.com'
    passwd = '@Chuck20110923'
    host = 'smtp.exmail.qq.com'
    port = 465
    receivers = ['547000225@qq.com', 'longof@126.com']
    daytime = (datetime.date.today() - datetime.timedelta(days=1)).strftime('%Y%m%d')
    mail = 'Server problem alert!!!'
    sendEmail(sender, passwd, host, port, receivers, daytime, mail)
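# Hardcoding a mailbox password as above is fragile; a common alternative,
# sketched here as an assumption (the variable names are made up), is to read
# the credentials from the environment instead:
import os

sender = os.environ.get('ALERT_MAIL_SENDER')
passwd = os.environ.get('ALERT_MAIL_PASSWD')
if not sender or not passwd:
    raise RuntimeError('ALERT_MAIL_SENDER / ALERT_MAIL_PASSWD must be set')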
from collections import OrderedDict
import re
from copy import copy
from datetime import datetime
import json

from bson import ObjectId
from bson.errors import InvalidId
from wtforms import Field


class StringField(Field):

    def __init__(self, label=None, validators=None, empty_to_default=True,
                 strip=True, words_filter=None, no_to_default=True, **kwargs):
        super().__init__(label, validators, **kwargs)

        self.empty_to_default = empty_to_default
        self.strip = strip
        self.words_filter = words_filter
        self.no_to_default = no_to_default

    def process_formdata(self, values):
        if values:
            value = values[0]

            if self.strip:
                value = value.strip()

            if self.words_filter:
                value = self.words_filter.filter(value)

            if value == "":
                self.data = self.default if self.empty_to_default else ""
            else:
                self.data = value
        else:
            if self.no_to_default:
                self.data = self.default
            else:
                self.data = None


class IntegerField(Field):

    def __init__(self, label=None, validators=None, empty_to_default=True,
                 no_to_default=True, **kwargs):
        super().__init__(label, validators, **kwargs)

        self.empty_to_default = empty_to_default
        self.no_to_default = no_to_default

    def process_formdata(self, values):
        if values:
            value = values[0].strip()
            if value == "":
                self.data = self.default if self.empty_to_default else ""
            else:
                try:
                    self.data = int(value)
                except ValueError:
                    self.data = None
                    raise ValueError("invalid int: '{}'".format(values[0]))
        else:
            if self.no_to_default:
                self.data = self.default
            else:
                self.data = None


class FloatField(Field):

    def __init__(self, label=None, validators=None, empty_to_default=True,
                 no_to_default=True, **kwargs):
        super().__init__(label, validators, **kwargs)

        self.empty_to_default = empty_to_default
        self.no_to_default = no_to_default

    def process_formdata(self, values):
        if values:
            value = values[0].strip()
            if value == "":
                self.data = self.default if self.empty_to_default else ""
            else:
                try:
                    self.data = float(value)
                except ValueError:
                    self.data = None
                    raise ValueError("invalid float: '{}'".format(values[0]))
        else:
            if self.no_to_default:
                self.data = self.default
            else:
                self.data = None


class BooleanField(Field):

    def __init__(self, label=None, validators=None, false_values=('false', '0'),
                 empty_to_default=True, no_to_default=True, **kwargs):
        super().__init__(label, validators, **kwargs)

        self.false_values = false_values
        self.empty_to_default = empty_to_default
        self.no_to_default = no_to_default

    def process_formdata(self, values):
        if values:
            value = values[0].strip()
            if value == "":
                self.data = self.default if self.empty_to_default else ""
            else:
                self.data = False if value in self.false_values else True
        else:
            if self.no_to_default:
                self.data = self.default
            else:
                self.data = None


class DateTimeField(Field):

    def __init__(self, label=None, validators=None, format='%Y-%m-%d %H:%M:%S',
                 empty_to_default=True, tzinfo=None, no_to_default=True, **kwargs):
        super().__init__(label, validators, **kwargs)

        self.format = format
        self.empty_to_default = empty_to_default
        self.tzinfo = tzinfo
        self.no_to_default = no_to_default

    def process_formdata(self, values):
        if values:
            value = values[0].strip()
            if value == "":
                self.data = self.default if self.empty_to_default else ""
            else:
                try:
                    self.data = datetime.strptime(value, self.format)
                    if self.tzinfo:
                        self.data = self.data.replace(tzinfo=self.tzinfo)
                except ValueError:
                    self.data = None
                    raise ValueError("invalid datetime: '{}'".format(values[0]))
        else:
            if self.no_to_default:
                self.data = self.default
            else:
                self.data = None


class DateField(Field):

    def __init__(self, label=None, validators=None, format='%Y-%m-%d',
                 empty_to_default=True, no_to_default=True, **kwargs):
        super().__init__(label, validators, **kwargs)

        self.format = format
        self.empty_to_default = empty_to_default
        self.no_to_default = no_to_default

    def process_formdata(self, values):
        if values:
            value = values[0].strip()
            if value == "":
                self.data = self.default if self.empty_to_default else ""
            else:
                try:
                    self.data = datetime.strptime(value, self.format).date()
                except ValueError:
                    self.data = None
                    raise ValueError("invalid datetime: '{}'".format(values[0]))
        else:
            if self.no_to_default:
                self.data = self.default
            else:
                self.data = None


class ObjectIdField(Field):

    def __init__(self, label=None, validators=None, empty_to_default=True,
                 no_to_default=True, **kwargs):
        super().__init__(label, validators, **kwargs)

        self.empty_to_default = empty_to_default
        self.no_to_default = no_to_default

    def process_formdata(self, values):
        if values:
            value = values[0].strip()
            if value == "":
                self.data = self.default if self.empty_to_default else ""
            else:
                try:
                    self.data = ObjectId(value)
                except InvalidId:
                    self.data = None
                    raise ValueError("invalid ObjectId: '{}'".format(values[0]))
        else:
            if self.no_to_default:
                self.data = self.default
            else:
                self.data = None


class TagListField(Field):

    def __init__(self, label='', validators=None, sep=",,、",
                 empty_to_default=True, element_field=None, unique=True,
                 no_to_default=True, **kwargs):
        super().__init__(label, validators, **kwargs)

        self.sep = sep
        self.empty_to_default = empty_to_default
        self.element_field = element_field
        self.unique = unique
        self.no_to_default = no_to_default

    def process_formdata(self, values):
        if values:
            value = values[0].strip()
            if value == "":
                self.data = self.default if self.empty_to_default else ""
            else:
                value = [v.strip() for v in re.split(r"[{}]".format(self.sep), value)]
                value = [v for v in value if v != ""]
                if self.unique:
                    value = list(OrderedDict.fromkeys(value))
                self.data = value
        else:
            if self.no_to_default:
                self.data = self.default
            else:
                self.data = None

    def post_validate(self, form, stop_validation):
        if stop_validation:
            return

        if self.data is not None and self.element_field is not None:
            fields = []
            for v in self.data:
                field = copy(self.element_field).bind(form, '')
                field.process_formdata([v])
                if not field.validate(form):
                    self.errors.extend(field.errors)
                else:
                    fields.append(field)
            if len(self.errors) == 0:
                self.data = [v.data for v in fields]


class JsonField(Field):

    def __init__(self, label=None, validators=None, empty_to_default=True,
                 no_to_default=True, **kwargs):
        super().__init__(label, validators, **kwargs)

        self.empty_to_default = empty_to_default
        self.no_to_default = no_to_default

    def process_formdata(self, values):
        if values:
            value = values[0].strip()
            if value == "":
                self.data = self.default if self.empty_to_default else ""
            else:
                try:
                    self.data = json.loads(value)
                except ValueError:
                    self.data = None
                    raise ValueError("invalid json: '{}'".format(values[0]))
        else:
            if self.no_to_default:
                self.data = self.default
            else:
                self.data = None


class CompoundField(Field):

    def __init__(self, label='', validators=None, empty_to_default=True,
                 fields=None, no_to_default=True, **kwargs):
        super().__init__(label, validators, **kwargs)

        self.empty_to_default = empty_to_default
        self.fields = fields or []
        self.no_to_default = no_to_default

    def process_formdata(self, values):
        if values:
            value = values[0].strip()
            if value == "":
                self.data = self.default if self.empty_to_default else ""
            else:
                self.data = value
        else:
            if self.no_to_default:
                self.data = self.default
            else:
                self.data = None

    def post_validate(self, form, stop_validation):
        if stop_validation:
            return

        if self.data is not None:
            for field in self.fields:
                field = field.bind(form, '')
                try:
                    field.process_formdata([self.data])
                except ValueError as e:
                    self.errors.append(str(e))
                    continue
                if not field.validate(form):
                    self.errors.extend(field.errors)
                else:
                    self.data = field.data
                    self.errors = []
                    break
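# A minimal usage sketch, assuming wtforms is installed. ArticleForm,
# FakeMultiDict and the sample data are illustrative assumptions, not part of
# the module above.
from wtforms import Form


class FakeMultiDict(dict):
    # wtforms only requires a getlist() on the formdata object.
    def getlist(self, key):
        return [self[key]] if key in self else []


class ArticleForm(Form):
    title = StringField(default='untitled')
    views = IntegerField(default=0)
    tags = TagListField(default=[])
    published = BooleanField(default=False)


form = ArticleForm(FakeMultiDict(
    title='  Hello  ', views='42', tags='a, b,, a', published='0'))
if form.validate():
    print(form.title.data, form.views.data, form.tags.data, form.published.data)
    # -> Hello 42 ['a', 'b'] False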
super().__init__(label, validators, **kwargs)\n\n self.empty_to_default = empty_to_default\n self.no_to_default = no_to_default\n\n def process_formdata(self, values):\n if values:\n value = values[0].strip()\n if value == \"\":\n self.data = self.default if self.empty_to_default else \"\"\n else:\n try:\n self.data = float(value)\n except ValueError:\n self.data = None\n raise ValueError(\"invalid float: '{}'\"\n .format(values[0]))\n else:\n if self.no_to_default:\n self.data = self.default\n else:\n self.data = None\n\n\nclass BooleanField(Field):\n\n def __init__(self, label=None, validators=None, false_values=('false', '0'),\n empty_to_default=True, no_to_default=True, **kwargs):\n super().__init__(label, validators, **kwargs)\n\n self.false_values = false_values\n self.empty_to_default = empty_to_default\n self.no_to_default = no_to_default\n\n def process_formdata(self, values):\n if values:\n value = values[0].strip()\n if value == \"\":\n self.data = self.default if self.empty_to_default else \"\"\n else:\n self.data = False if value in self.false_values else True\n else:\n if self.no_to_default:\n self.data = self.default\n else:\n self.data = None\n\n\nclass DateTimeField(Field):\n\n def __init__(self, label=None, validators=None,\n format='%Y-%m-%d %H:%M:%S', empty_to_default=True,\n tzinfo=None, no_to_default=True, **kwargs):\n super().__init__(label, validators, **kwargs)\n\n self.format = format\n self.empty_to_default = empty_to_default\n self.tzinfo = tzinfo\n self.no_to_default = no_to_default\n\n def process_formdata(self, values):\n if values:\n value = values[0].strip()\n if value == \"\":\n self.data = self.default if self.empty_to_default else \"\"\n else:\n try:\n self.data = datetime.strptime(value, self.format)\n if self.tzinfo:\n self.data = self.data.replace(tzinfo=self.tzinfo)\n except ValueError:\n self.data = None\n raise ValueError(\"invalid datetime: '{}'\"\n .format(values[0]))\n else:\n if self.no_to_default:\n self.data = self.default\n else:\n self.data = None\n\n\nclass DateField(Field):\n\n def __init__(self, label=None, validators=None, format='%Y-%m-%d',\n empty_to_default=True, no_to_default=True, **kwargs):\n super().__init__(label, validators, **kwargs)\n\n self.format = format\n self.empty_to_default = empty_to_default\n self.no_to_default = no_to_default\n\n def process_formdata(self, values):\n if values:\n value = values[0].strip()\n if value == \"\":\n self.data = self.default if self.empty_to_default else \"\"\n else:\n try:\n self.data = datetime.strptime(value, self.format).date()\n except ValueError:\n self.data = None\n raise ValueError(\"invalid datetime: '{}'\"\n .format(values[0]))\n else:\n if self.no_to_default:\n self.data = self.default\n else:\n self.data = None\n\n\nclass ObjectIdField(Field):\n\n def __init__(self, label=None, validators=None, empty_to_default=True,\n no_to_default=True, **kwargs):\n super().__init__(label, validators, **kwargs)\n\n self.empty_to_default = empty_to_default\n self.no_to_default = no_to_default\n\n def process_formdata(self, values):\n if values:\n value = values[0].strip()\n if value == \"\":\n self.data = self.default if self.empty_to_default else \"\"\n else:\n try:\n self.data = ObjectId(value)\n except InvalidId:\n self.data = None\n raise ValueError(\"invalid ObjectId: '{}'\"\n .format(values[0]))\n else:\n if self.no_to_default:\n self.data = self.default\n else:\n self.data = None\n\n\nclass TagListField(Field):\n\n def __init__(self, label='', validators=None, sep=\",,、\",\n 
empty_to_default=True, element_field=None, unique=True,\n no_to_default=True, **kwargs):\n super().__init__(label, validators, **kwargs)\n\n self.sep = sep\n self.empty_to_default = empty_to_default\n self.element_field = element_field\n self.unique = unique\n self.no_to_default = no_to_default\n\n def process_formdata(self, values):\n if values:\n value = values[0].strip()\n if value == \"\":\n self.data = self.default if self.empty_to_default else \"\"\n else:\n value = [v.strip() for v in re.split(\n r\"[{}]\".format(self.sep), value)]\n\n value = [v for v in value if v != \"\"]\n\n if self.unique:\n value = list(OrderedDict.fromkeys(value))\n\n self.data = value\n else:\n if self.no_to_default:\n self.data = self.default\n else:\n self.data = None\n\n def post_validate(self, form, stop_validation):\n if stop_validation:\n return\n\n if self.data is not None and self.element_field is not None:\n fields = []\n for v in self.data:\n field = copy(self.element_field).bind(form, '')\n field.process_formdata([v])\n if not field.validate(form):\n self.errors.extend(field.errors)\n else:\n fields.append(field)\n\n if len(self.errors) == 0:\n self.data = [v.data for v in fields]\n\n\nclass JsonField(Field):\n\n def __init__(self, label=None, validators=None, empty_to_default=True,\n no_to_default=True, **kwargs):\n super().__init__(label, validators, **kwargs)\n\n self.empty_to_default = empty_to_default\n self.no_to_default = no_to_default\n\n def process_formdata(self, values):\n if values:\n value = values[0].strip()\n if value == \"\":\n self.data = self.default if self.empty_to_default else \"\"\n else:\n try:\n self.data = json.loads(value)\n except ValueError:\n self.data = None\n raise ValueError(\"invalid json: '{}'\"\n .format(values[0]))\n else:\n if self.no_to_default:\n self.data = self.default\n else:\n self.data = None\n\n\nclass CompoundField(Field):\n\n def __init__(self, label='', validators=None, empty_to_default=True,\n fields=None, no_to_default=True, **kwargs):\n super().__init__(label, validators, **kwargs)\n\n self.empty_to_default = empty_to_default\n self.fields = fields or []\n self.no_to_default = no_to_default\n\n def process_formdata(self, values):\n if values:\n value = values[0].strip()\n if value == \"\":\n self.data = self.default if self.empty_to_default else \"\"\n else:\n self.data = value\n else:\n if self.no_to_default:\n self.data = self.default\n else:\n self.data = None\n\n def post_validate(self, form, stop_validation):\n if stop_validation:\n return\n\n if self.data is not None:\n for field in self.fields:\n field = field.bind(form, '')\n\n try:\n field.process_formdata([self.data])\n except ValueError as e:\n self.errors.append(str(e))\n continue\n\n if not field.validate(form):\n self.errors.extend(field.errors)\n else:\n self.data = field.data\n self.errors = []\n break\n", "step-ids": [ 19, 21, 23, 30, 34 ] }
[ 19, 21, 23, 30, 34 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> with open(MODEL_LABELS_FILENAME, 'rb') as f: lb = pickle.load(f) <|reserved_special_token_0|> for root, dirs, files in os.walk(CAPTCHA_IMAGE_FOLDER): for name in tqdm(files, desc='Solving captchas'): kernel = 5, 5 image = cv2.imread(os.path.join(root, name)) image = cv2.cvtColor(image, cv2.COLOR_RGBA2GRAY) image = cv2.copyMakeBorder(image, 8, 8, 8, 8, cv2.BORDER_CONSTANT, None, 255) k = np.ones((5, 5), np.float32) / 25 image = cv2.filter2D(image, -1, k) ret, image = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV) clear_chunks(image, 0, 50) image = cv2.erode(image, kernel, iterations=1) contours = cv2.findContours(image, cv2.RETR_EXTERNAL, cv2. CHAIN_APPROX_SIMPLE)[-2] letter_image_regions = [] contours = sorted(contours, key=lambda x: cv2.contourArea(x), reverse=True) contours = contours[:5] for contour in contours: if cv2.contourArea(contour) < 60: continue x, y, w, h = cv2.boundingRect(contour) if w / h > 1.5: half_width = int(w / 2) letter_image_regions.append((x, y, half_width, h)) letter_image_regions.append((x + half_width, y, half_width, h)) else: letter_image_regions.append((x, y, w, h)) if len(letter_image_regions) != 5: incorrectly_segmented += 1 continue print( f'Found {len(letter_image_regions)} letter regions instead of 5 , the guess will likely be incorrect' ) letter_image_regions = sorted(letter_image_regions, key=lambda x: x[0]) chars = [] i = 0 for x, y, w, h in letter_image_regions: letter = image[y - 2:y + h + 2, x - 2:x + w + 2] chars.append(letter) i += 1 predictions = [] for letter in chars: letter = resize_to_fit(letter, 20, 20) letter = np.expand_dims(letter, axis=2) letter = np.expand_dims(letter, axis=0) prediction = model.predict(letter) letter_text = lb.inverse_transform(prediction)[0] predictions.append(letter_text) gc1, gc2, gc3, gc4, gc5 = predictions c1, c2, c3, c4, c5, e1, e2, e3, e4 = name correct_guesses = 0 if c1 == gc1: c1_correct += 1 correct_guesses += 1 if c2 == gc2: c2_correct += 1 correct_guesses += 1 if c3 == gc3: c3_correct += 1 correct_guesses += 1 if c4 == gc4: c4_correct += 1 correct_guesses += 1 if c5 == gc5: c5_correct += 1 correct_guesses += 1 if ''.join(predictions) == ''.join([c1, c2, c3, c4, c5]): total_correct += 1 n = correct_guesses_dict.get(correct_guesses, 0) + 1 correct_guesses_dict[correct_guesses] = n print(f"Prediction for {name}: {''.join(predictions)}") print(f'correct c1: {c1_correct}') print(f'correct c2: {c2_correct}') print(f'correct c3: {c3_correct}') print(f'correct c4: {c4_correct}') print(f'correct c5: {c5_correct}') print(f'correct total: {total_correct}') print(f'correctly segmented: {10000 - incorrectly_segmented}') print(correct_guesses_dict) <|reserved_special_token_1|> <|reserved_special_token_0|> os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' <|reserved_special_token_0|> c1_correct = 0 c2_correct = 0 c3_correct = 0 c4_correct = 0 c5_correct = 0 total_correct = 0 incorrectly_segmented = 0 correct_guesses_dict = {} MODEL_FILENAME = 'captcha_model.hdf5' MODEL_LABELS_FILENAME = 'model_labels.dat' CAPTCHA_IMAGE_FOLDER = 'test captchas' with open(MODEL_LABELS_FILENAME, 'rb') as f: lb = pickle.load(f) model = load_model(MODEL_FILENAME) for root, dirs, files in os.walk(CAPTCHA_IMAGE_FOLDER): for name in tqdm(files, desc='Solving captchas'): kernel = 5, 5 image = cv2.imread(os.path.join(root, name)) image = cv2.cvtColor(image, cv2.COLOR_RGBA2GRAY) image = cv2.copyMakeBorder(image, 8, 8, 8, 8, cv2.BORDER_CONSTANT, None, 255) k = 
np.ones((5, 5), np.float32) / 25 image = cv2.filter2D(image, -1, k) ret, image = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV) clear_chunks(image, 0, 50) image = cv2.erode(image, kernel, iterations=1) contours = cv2.findContours(image, cv2.RETR_EXTERNAL, cv2. CHAIN_APPROX_SIMPLE)[-2] letter_image_regions = [] contours = sorted(contours, key=lambda x: cv2.contourArea(x), reverse=True) contours = contours[:5] for contour in contours: if cv2.contourArea(contour) < 60: continue x, y, w, h = cv2.boundingRect(contour) if w / h > 1.5: half_width = int(w / 2) letter_image_regions.append((x, y, half_width, h)) letter_image_regions.append((x + half_width, y, half_width, h)) else: letter_image_regions.append((x, y, w, h)) if len(letter_image_regions) != 5: incorrectly_segmented += 1 continue print( f'Found {len(letter_image_regions)} letter regions instead of 5 , the guess will likely be incorrect' ) letter_image_regions = sorted(letter_image_regions, key=lambda x: x[0]) chars = [] i = 0 for x, y, w, h in letter_image_regions: letter = image[y - 2:y + h + 2, x - 2:x + w + 2] chars.append(letter) i += 1 predictions = [] for letter in chars: letter = resize_to_fit(letter, 20, 20) letter = np.expand_dims(letter, axis=2) letter = np.expand_dims(letter, axis=0) prediction = model.predict(letter) letter_text = lb.inverse_transform(prediction)[0] predictions.append(letter_text) gc1, gc2, gc3, gc4, gc5 = predictions c1, c2, c3, c4, c5, e1, e2, e3, e4 = name correct_guesses = 0 if c1 == gc1: c1_correct += 1 correct_guesses += 1 if c2 == gc2: c2_correct += 1 correct_guesses += 1 if c3 == gc3: c3_correct += 1 correct_guesses += 1 if c4 == gc4: c4_correct += 1 correct_guesses += 1 if c5 == gc5: c5_correct += 1 correct_guesses += 1 if ''.join(predictions) == ''.join([c1, c2, c3, c4, c5]): total_correct += 1 n = correct_guesses_dict.get(correct_guesses, 0) + 1 correct_guesses_dict[correct_guesses] = n print(f"Prediction for {name}: {''.join(predictions)}") print(f'correct c1: {c1_correct}') print(f'correct c2: {c2_correct}') print(f'correct c3: {c3_correct}') print(f'correct c4: {c4_correct}') print(f'correct c5: {c5_correct}') print(f'correct total: {total_correct}') print(f'correctly segmented: {10000 - incorrectly_segmented}') print(correct_guesses_dict) <|reserved_special_token_1|> import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' from keras.models import load_model from utils import resize_to_fit, clear_chunks, stack_windows from imutils import paths import numpy as np import imutils import cv2 as cv2 import pickle from tqdm import tqdm c1_correct = 0 c2_correct = 0 c3_correct = 0 c4_correct = 0 c5_correct = 0 total_correct = 0 incorrectly_segmented = 0 correct_guesses_dict = {} MODEL_FILENAME = 'captcha_model.hdf5' MODEL_LABELS_FILENAME = 'model_labels.dat' CAPTCHA_IMAGE_FOLDER = 'test captchas' with open(MODEL_LABELS_FILENAME, 'rb') as f: lb = pickle.load(f) model = load_model(MODEL_FILENAME) for root, dirs, files in os.walk(CAPTCHA_IMAGE_FOLDER): for name in tqdm(files, desc='Solving captchas'): kernel = 5, 5 image = cv2.imread(os.path.join(root, name)) image = cv2.cvtColor(image, cv2.COLOR_RGBA2GRAY) image = cv2.copyMakeBorder(image, 8, 8, 8, 8, cv2.BORDER_CONSTANT, None, 255) k = np.ones((5, 5), np.float32) / 25 image = cv2.filter2D(image, -1, k) ret, image = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV) clear_chunks(image, 0, 50) image = cv2.erode(image, kernel, iterations=1) contours = cv2.findContours(image, cv2.RETR_EXTERNAL, cv2. 
CHAIN_APPROX_SIMPLE)[-2] letter_image_regions = [] contours = sorted(contours, key=lambda x: cv2.contourArea(x), reverse=True) contours = contours[:5] for contour in contours: if cv2.contourArea(contour) < 60: continue x, y, w, h = cv2.boundingRect(contour) if w / h > 1.5: half_width = int(w / 2) letter_image_regions.append((x, y, half_width, h)) letter_image_regions.append((x + half_width, y, half_width, h)) else: letter_image_regions.append((x, y, w, h)) if len(letter_image_regions) != 5: incorrectly_segmented += 1 continue print( f'Found {len(letter_image_regions)} letter regions instead of 5 , the guess will likely be incorrect' ) letter_image_regions = sorted(letter_image_regions, key=lambda x: x[0]) chars = [] i = 0 for x, y, w, h in letter_image_regions: letter = image[y - 2:y + h + 2, x - 2:x + w + 2] chars.append(letter) i += 1 predictions = [] for letter in chars: letter = resize_to_fit(letter, 20, 20) letter = np.expand_dims(letter, axis=2) letter = np.expand_dims(letter, axis=0) prediction = model.predict(letter) letter_text = lb.inverse_transform(prediction)[0] predictions.append(letter_text) gc1, gc2, gc3, gc4, gc5 = predictions c1, c2, c3, c4, c5, e1, e2, e3, e4 = name correct_guesses = 0 if c1 == gc1: c1_correct += 1 correct_guesses += 1 if c2 == gc2: c2_correct += 1 correct_guesses += 1 if c3 == gc3: c3_correct += 1 correct_guesses += 1 if c4 == gc4: c4_correct += 1 correct_guesses += 1 if c5 == gc5: c5_correct += 1 correct_guesses += 1 if ''.join(predictions) == ''.join([c1, c2, c3, c4, c5]): total_correct += 1 n = correct_guesses_dict.get(correct_guesses, 0) + 1 correct_guesses_dict[correct_guesses] = n print(f"Prediction for {name}: {''.join(predictions)}") print(f'correct c1: {c1_correct}') print(f'correct c2: {c2_correct}') print(f'correct c3: {c3_correct}') print(f'correct c4: {c4_correct}') print(f'correct c5: {c5_correct}') print(f'correct total: {total_correct}') print(f'correctly segmented: {10000 - incorrectly_segmented}') print(correct_guesses_dict) <|reserved_special_token_1|> import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' from keras.models import load_model from utils import resize_to_fit, clear_chunks, stack_windows from imutils import paths import numpy as np import imutils import cv2 as cv2 import pickle from tqdm import tqdm c1_correct = 0 c2_correct = 0 c3_correct = 0 c4_correct = 0 c5_correct = 0 total_correct = 0 incorrectly_segmented = 0 correct_guesses_dict = {} MODEL_FILENAME = "captcha_model.hdf5" MODEL_LABELS_FILENAME = "model_labels.dat" CAPTCHA_IMAGE_FOLDER = "test captchas" # Load up the model labels (so we can translate model predictions to actual letters) with open(MODEL_LABELS_FILENAME, "rb") as f: lb = pickle.load(f) # Load the trained neural network model = load_model(MODEL_FILENAME) for root, dirs, files in os.walk(CAPTCHA_IMAGE_FOLDER): for name in tqdm(files, desc='Solving captchas'): kernel = (5,5) #load image image = cv2.imread(os.path.join(root, name)) image = cv2.cvtColor(image, cv2.COLOR_RGBA2GRAY) #add padding image = cv2.copyMakeBorder(image, 8, 8, 8, 8, cv2.BORDER_CONSTANT, None, 255) #blur k = np.ones((5,5),np.float32)/25 image = cv2.filter2D(image,-1,k) # threshhold image ret, image = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV) # clear white dots clear_chunks(image,0,50) # erosion image = cv2.erode(image, kernel, iterations=1) # get contours contours = cv2.findContours(image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2] #segment letters letter_image_regions = [] #(x, y, w ,h) contours = sorted(contours, 
key=lambda x: cv2.contourArea(x), reverse=True) contours = contours[:5] for contour in contours: if cv2.contourArea(contour) < 60: continue (x, y, w, h) = cv2.boundingRect(contour) if w / h > 1.5: half_width = int(w / 2) letter_image_regions.append((x, y, half_width, h)) letter_image_regions.append((x + half_width, y, half_width, h)) else: letter_image_regions.append((x, y, w, h)) if len(letter_image_regions) != 5: incorrectly_segmented += 1 continue print(f"Found {len(letter_image_regions)} letter regions instead of 5 , the guess will likely be incorrect") letter_image_regions = sorted(letter_image_regions, key=lambda x: x[0]) chars = [] i=0 for (x,y,w,h) in letter_image_regions: letter = image[y-2:y+h+2, x-2:x+w+2] chars.append(letter) i+=1 predictions = [] for letter in chars: # Re-size the letter image to 20x20 pixels to match training data letter = resize_to_fit(letter, 20, 20) # Turn the single image into a 4d list of images to make Keras happy letter = np.expand_dims(letter, axis=2) letter = np.expand_dims(letter, axis=0) # Ask the neural network to make a prediction prediction = model.predict(letter) # Convert the one-hot-encoded prediction back to a normal letter letter_text = lb.inverse_transform(prediction)[0] predictions.append(letter_text) gc1, gc2, gc3, gc4, gc5 = predictions c1, c2, c3, c4, c5, e1, e2, e3, e4 = name correct_guesses = 0 if c1 == gc1: c1_correct += 1 correct_guesses += 1 if c2 == gc2: c2_correct += 1 correct_guesses += 1 if c3 == gc3: c3_correct += 1 correct_guesses += 1 if c4 == gc4: c4_correct += 1 correct_guesses += 1 if c5 == gc5: c5_correct += 1 correct_guesses += 1 if ''.join(predictions) == ''.join([c1,c2,c3,c4,c5]): total_correct += 1 n = correct_guesses_dict.get(correct_guesses, 0) + 1 correct_guesses_dict[correct_guesses] = n print(f"Prediction for {name}: {''.join(predictions)}") print(f"correct c1: {c1_correct}") print(f"correct c2: {c2_correct}") print(f"correct c3: {c3_correct}") print(f"correct c4: {c4_correct}") print(f"correct c5: {c5_correct}") print(f"correct total: {total_correct}") print(f"correctly segmented: {10000 - incorrectly_segmented}") print(correct_guesses_dict)
flexible
{ "blob_id": "c2ddf31bce4a5f3ae2b0d5455bbc9942f92bff40", "index": 275, "step-1": "<mask token>\n", "step-2": "<mask token>\nwith open(MODEL_LABELS_FILENAME, 'rb') as f:\n lb = pickle.load(f)\n<mask token>\nfor root, dirs, files in os.walk(CAPTCHA_IMAGE_FOLDER):\n for name in tqdm(files, desc='Solving captchas'):\n kernel = 5, 5\n image = cv2.imread(os.path.join(root, name))\n image = cv2.cvtColor(image, cv2.COLOR_RGBA2GRAY)\n image = cv2.copyMakeBorder(image, 8, 8, 8, 8, cv2.BORDER_CONSTANT,\n None, 255)\n k = np.ones((5, 5), np.float32) / 25\n image = cv2.filter2D(image, -1, k)\n ret, image = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV)\n clear_chunks(image, 0, 50)\n image = cv2.erode(image, kernel, iterations=1)\n contours = cv2.findContours(image, cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)[-2]\n letter_image_regions = []\n contours = sorted(contours, key=lambda x: cv2.contourArea(x),\n reverse=True)\n contours = contours[:5]\n for contour in contours:\n if cv2.contourArea(contour) < 60:\n continue\n x, y, w, h = cv2.boundingRect(contour)\n if w / h > 1.5:\n half_width = int(w / 2)\n letter_image_regions.append((x, y, half_width, h))\n letter_image_regions.append((x + half_width, y, half_width, h))\n else:\n letter_image_regions.append((x, y, w, h))\n if len(letter_image_regions) != 5:\n incorrectly_segmented += 1\n continue\n print(\n f'Found {len(letter_image_regions)} letter regions instead of 5 , the guess will likely be incorrect'\n )\n letter_image_regions = sorted(letter_image_regions, key=lambda x: x[0])\n chars = []\n i = 0\n for x, y, w, h in letter_image_regions:\n letter = image[y - 2:y + h + 2, x - 2:x + w + 2]\n chars.append(letter)\n i += 1\n predictions = []\n for letter in chars:\n letter = resize_to_fit(letter, 20, 20)\n letter = np.expand_dims(letter, axis=2)\n letter = np.expand_dims(letter, axis=0)\n prediction = model.predict(letter)\n letter_text = lb.inverse_transform(prediction)[0]\n predictions.append(letter_text)\n gc1, gc2, gc3, gc4, gc5 = predictions\n c1, c2, c3, c4, c5, e1, e2, e3, e4 = name\n correct_guesses = 0\n if c1 == gc1:\n c1_correct += 1\n correct_guesses += 1\n if c2 == gc2:\n c2_correct += 1\n correct_guesses += 1\n if c3 == gc3:\n c3_correct += 1\n correct_guesses += 1\n if c4 == gc4:\n c4_correct += 1\n correct_guesses += 1\n if c5 == gc5:\n c5_correct += 1\n correct_guesses += 1\n if ''.join(predictions) == ''.join([c1, c2, c3, c4, c5]):\n total_correct += 1\n n = correct_guesses_dict.get(correct_guesses, 0) + 1\n correct_guesses_dict[correct_guesses] = n\n print(f\"Prediction for {name}: {''.join(predictions)}\")\nprint(f'correct c1: {c1_correct}')\nprint(f'correct c2: {c2_correct}')\nprint(f'correct c3: {c3_correct}')\nprint(f'correct c4: {c4_correct}')\nprint(f'correct c5: {c5_correct}')\nprint(f'correct total: {total_correct}')\nprint(f'correctly segmented: {10000 - incorrectly_segmented}')\nprint(correct_guesses_dict)\n", "step-3": "<mask token>\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\n<mask token>\nc1_correct = 0\nc2_correct = 0\nc3_correct = 0\nc4_correct = 0\nc5_correct = 0\ntotal_correct = 0\nincorrectly_segmented = 0\ncorrect_guesses_dict = {}\nMODEL_FILENAME = 'captcha_model.hdf5'\nMODEL_LABELS_FILENAME = 'model_labels.dat'\nCAPTCHA_IMAGE_FOLDER = 'test captchas'\nwith open(MODEL_LABELS_FILENAME, 'rb') as f:\n lb = pickle.load(f)\nmodel = load_model(MODEL_FILENAME)\nfor root, dirs, files in os.walk(CAPTCHA_IMAGE_FOLDER):\n for name in tqdm(files, desc='Solving captchas'):\n kernel = 5, 5\n image = 
cv2.imread(os.path.join(root, name))\n image = cv2.cvtColor(image, cv2.COLOR_RGBA2GRAY)\n image = cv2.copyMakeBorder(image, 8, 8, 8, 8, cv2.BORDER_CONSTANT,\n None, 255)\n k = np.ones((5, 5), np.float32) / 25\n image = cv2.filter2D(image, -1, k)\n ret, image = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV)\n clear_chunks(image, 0, 50)\n image = cv2.erode(image, kernel, iterations=1)\n contours = cv2.findContours(image, cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)[-2]\n letter_image_regions = []\n contours = sorted(contours, key=lambda x: cv2.contourArea(x),\n reverse=True)\n contours = contours[:5]\n for contour in contours:\n if cv2.contourArea(contour) < 60:\n continue\n x, y, w, h = cv2.boundingRect(contour)\n if w / h > 1.5:\n half_width = int(w / 2)\n letter_image_regions.append((x, y, half_width, h))\n letter_image_regions.append((x + half_width, y, half_width, h))\n else:\n letter_image_regions.append((x, y, w, h))\n if len(letter_image_regions) != 5:\n incorrectly_segmented += 1\n continue\n print(\n f'Found {len(letter_image_regions)} letter regions instead of 5 , the guess will likely be incorrect'\n )\n letter_image_regions = sorted(letter_image_regions, key=lambda x: x[0])\n chars = []\n i = 0\n for x, y, w, h in letter_image_regions:\n letter = image[y - 2:y + h + 2, x - 2:x + w + 2]\n chars.append(letter)\n i += 1\n predictions = []\n for letter in chars:\n letter = resize_to_fit(letter, 20, 20)\n letter = np.expand_dims(letter, axis=2)\n letter = np.expand_dims(letter, axis=0)\n prediction = model.predict(letter)\n letter_text = lb.inverse_transform(prediction)[0]\n predictions.append(letter_text)\n gc1, gc2, gc3, gc4, gc5 = predictions\n c1, c2, c3, c4, c5, e1, e2, e3, e4 = name\n correct_guesses = 0\n if c1 == gc1:\n c1_correct += 1\n correct_guesses += 1\n if c2 == gc2:\n c2_correct += 1\n correct_guesses += 1\n if c3 == gc3:\n c3_correct += 1\n correct_guesses += 1\n if c4 == gc4:\n c4_correct += 1\n correct_guesses += 1\n if c5 == gc5:\n c5_correct += 1\n correct_guesses += 1\n if ''.join(predictions) == ''.join([c1, c2, c3, c4, c5]):\n total_correct += 1\n n = correct_guesses_dict.get(correct_guesses, 0) + 1\n correct_guesses_dict[correct_guesses] = n\n print(f\"Prediction for {name}: {''.join(predictions)}\")\nprint(f'correct c1: {c1_correct}')\nprint(f'correct c2: {c2_correct}')\nprint(f'correct c3: {c3_correct}')\nprint(f'correct c4: {c4_correct}')\nprint(f'correct c5: {c5_correct}')\nprint(f'correct total: {total_correct}')\nprint(f'correctly segmented: {10000 - incorrectly_segmented}')\nprint(correct_guesses_dict)\n", "step-4": "import os\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\nfrom keras.models import load_model\nfrom utils import resize_to_fit, clear_chunks, stack_windows\nfrom imutils import paths\nimport numpy as np\nimport imutils\nimport cv2 as cv2\nimport pickle\nfrom tqdm import tqdm\nc1_correct = 0\nc2_correct = 0\nc3_correct = 0\nc4_correct = 0\nc5_correct = 0\ntotal_correct = 0\nincorrectly_segmented = 0\ncorrect_guesses_dict = {}\nMODEL_FILENAME = 'captcha_model.hdf5'\nMODEL_LABELS_FILENAME = 'model_labels.dat'\nCAPTCHA_IMAGE_FOLDER = 'test captchas'\nwith open(MODEL_LABELS_FILENAME, 'rb') as f:\n lb = pickle.load(f)\nmodel = load_model(MODEL_FILENAME)\nfor root, dirs, files in os.walk(CAPTCHA_IMAGE_FOLDER):\n for name in tqdm(files, desc='Solving captchas'):\n kernel = 5, 5\n image = cv2.imread(os.path.join(root, name))\n image = cv2.cvtColor(image, cv2.COLOR_RGBA2GRAY)\n image = cv2.copyMakeBorder(image, 8, 8, 8, 8, cv2.BORDER_CONSTANT,\n 
None, 255)\n k = np.ones((5, 5), np.float32) / 25\n image = cv2.filter2D(image, -1, k)\n ret, image = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV)\n clear_chunks(image, 0, 50)\n image = cv2.erode(image, kernel, iterations=1)\n contours = cv2.findContours(image, cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)[-2]\n letter_image_regions = []\n contours = sorted(contours, key=lambda x: cv2.contourArea(x),\n reverse=True)\n contours = contours[:5]\n for contour in contours:\n if cv2.contourArea(contour) < 60:\n continue\n x, y, w, h = cv2.boundingRect(contour)\n if w / h > 1.5:\n half_width = int(w / 2)\n letter_image_regions.append((x, y, half_width, h))\n letter_image_regions.append((x + half_width, y, half_width, h))\n else:\n letter_image_regions.append((x, y, w, h))\n if len(letter_image_regions) != 5:\n incorrectly_segmented += 1\n continue\n print(\n f'Found {len(letter_image_regions)} letter regions instead of 5 , the guess will likely be incorrect'\n )\n letter_image_regions = sorted(letter_image_regions, key=lambda x: x[0])\n chars = []\n i = 0\n for x, y, w, h in letter_image_regions:\n letter = image[y - 2:y + h + 2, x - 2:x + w + 2]\n chars.append(letter)\n i += 1\n predictions = []\n for letter in chars:\n letter = resize_to_fit(letter, 20, 20)\n letter = np.expand_dims(letter, axis=2)\n letter = np.expand_dims(letter, axis=0)\n prediction = model.predict(letter)\n letter_text = lb.inverse_transform(prediction)[0]\n predictions.append(letter_text)\n gc1, gc2, gc3, gc4, gc5 = predictions\n c1, c2, c3, c4, c5, e1, e2, e3, e4 = name\n correct_guesses = 0\n if c1 == gc1:\n c1_correct += 1\n correct_guesses += 1\n if c2 == gc2:\n c2_correct += 1\n correct_guesses += 1\n if c3 == gc3:\n c3_correct += 1\n correct_guesses += 1\n if c4 == gc4:\n c4_correct += 1\n correct_guesses += 1\n if c5 == gc5:\n c5_correct += 1\n correct_guesses += 1\n if ''.join(predictions) == ''.join([c1, c2, c3, c4, c5]):\n total_correct += 1\n n = correct_guesses_dict.get(correct_guesses, 0) + 1\n correct_guesses_dict[correct_guesses] = n\n print(f\"Prediction for {name}: {''.join(predictions)}\")\nprint(f'correct c1: {c1_correct}')\nprint(f'correct c2: {c2_correct}')\nprint(f'correct c3: {c3_correct}')\nprint(f'correct c4: {c4_correct}')\nprint(f'correct c5: {c5_correct}')\nprint(f'correct total: {total_correct}')\nprint(f'correctly segmented: {10000 - incorrectly_segmented}')\nprint(correct_guesses_dict)\n", "step-5": "import os\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\n\nfrom keras.models import load_model\nfrom utils import resize_to_fit, clear_chunks, stack_windows\nfrom imutils import paths\nimport numpy as np\nimport imutils\nimport cv2 as cv2\nimport pickle\nfrom tqdm import tqdm\n\nc1_correct = 0\nc2_correct = 0\nc3_correct = 0\nc4_correct = 0\nc5_correct = 0\n\ntotal_correct = 0\nincorrectly_segmented = 0\n\ncorrect_guesses_dict = {}\n\nMODEL_FILENAME = \"captcha_model.hdf5\"\nMODEL_LABELS_FILENAME = \"model_labels.dat\"\nCAPTCHA_IMAGE_FOLDER = \"test captchas\"\n\n\n# Load up the model labels (so we can translate model predictions to actual letters)\nwith open(MODEL_LABELS_FILENAME, \"rb\") as f:\n lb = pickle.load(f)\n\n# Load the trained neural network\nmodel = load_model(MODEL_FILENAME)\n\n\nfor root, dirs, files in os.walk(CAPTCHA_IMAGE_FOLDER):\n for name in tqdm(files, desc='Solving captchas'):\n \n kernel = (5,5)\n\n #load image\n image = cv2.imread(os.path.join(root, name))\n image = cv2.cvtColor(image, cv2.COLOR_RGBA2GRAY)\n \n #add padding\n image = cv2.copyMakeBorder(image, 8, 8, 
8, 8, cv2.BORDER_CONSTANT, None, 255)\n\n #blur\n k = np.ones((5,5),np.float32)/25\n image = cv2.filter2D(image,-1,k)\n\n # threshhold image\n ret, image = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV)\n\n # clear white dots\n clear_chunks(image,0,50)\n\n # erosion\n image = cv2.erode(image, kernel, iterations=1)\n\n # get contours\n contours = cv2.findContours(image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2]\n\n #segment letters\n letter_image_regions = [] #(x, y, w ,h)\n \n \n contours = sorted(contours, key=lambda x: cv2.contourArea(x), reverse=True)\n contours = contours[:5]\n \n for contour in contours:\n \n if cv2.contourArea(contour) < 60:\n continue\n\n \n (x, y, w, h) = cv2.boundingRect(contour)\n\n if w / h > 1.5:\n half_width = int(w / 2)\n letter_image_regions.append((x, y, half_width, h))\n letter_image_regions.append((x + half_width, y, half_width, h))\n else:\n letter_image_regions.append((x, y, w, h))\n\n if len(letter_image_regions) != 5:\n incorrectly_segmented += 1\n continue\n print(f\"Found {len(letter_image_regions)} letter regions instead of 5 , the guess will likely be incorrect\")\n \n \n letter_image_regions = sorted(letter_image_regions, key=lambda x: x[0])\n\n chars = []\n i=0\n for (x,y,w,h) in letter_image_regions:\n letter = image[y-2:y+h+2, x-2:x+w+2]\n chars.append(letter)\n i+=1\n\n predictions = []\n\n for letter in chars:\n # Re-size the letter image to 20x20 pixels to match training data\n letter = resize_to_fit(letter, 20, 20)\n\n # Turn the single image into a 4d list of images to make Keras happy\n letter = np.expand_dims(letter, axis=2)\n letter = np.expand_dims(letter, axis=0)\n\n # Ask the neural network to make a prediction\n prediction = model.predict(letter)\n\n # Convert the one-hot-encoded prediction back to a normal letter\n letter_text = lb.inverse_transform(prediction)[0]\n predictions.append(letter_text)\n\n gc1, gc2, gc3, gc4, gc5 = predictions\n c1, c2, c3, c4, c5, e1, e2, e3, e4 = name \n\n correct_guesses = 0\n\n if c1 == gc1:\n c1_correct += 1\n correct_guesses += 1\n if c2 == gc2:\n c2_correct += 1\n correct_guesses += 1\n if c3 == gc3:\n c3_correct += 1\n correct_guesses += 1\n if c4 == gc4:\n c4_correct += 1\n correct_guesses += 1\n if c5 == gc5:\n c5_correct += 1\n correct_guesses += 1\n\n if ''.join(predictions) == ''.join([c1,c2,c3,c4,c5]):\n total_correct += 1\n\n n = correct_guesses_dict.get(correct_guesses, 0) + 1\n correct_guesses_dict[correct_guesses] = n\n\n print(f\"Prediction for {name}: {''.join(predictions)}\")\n \nprint(f\"correct c1: {c1_correct}\")\nprint(f\"correct c2: {c2_correct}\")\nprint(f\"correct c3: {c3_correct}\")\nprint(f\"correct c4: {c4_correct}\")\nprint(f\"correct c5: {c5_correct}\")\n\nprint(f\"correct total: {total_correct}\")\n\nprint(f\"correctly segmented: {10000 - incorrectly_segmented}\")\n\nprint(correct_guesses_dict)\n \n \n ", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> def final_frequency(): frequency = 0 with open('input') as f: for line in f: frequency += int(line) return frequency <|reserved_special_token_0|> <|reserved_special_token_1|> def final_frequency(): frequency = 0 with open('input') as f: for line in f: frequency += int(line) return frequency print(final_frequency()) <|reserved_special_token_1|> # Should print 516 def final_frequency(): frequency = 0 with open('input') as f: for line in f: frequency += int(line) return frequency print(final_frequency())
flexible
{ "blob_id": "4d68b663933070cb287689b70d6ded07958cef22", "index": 3047, "step-1": "<mask token>\n", "step-2": "def final_frequency():\n frequency = 0\n with open('input') as f:\n for line in f:\n frequency += int(line)\n return frequency\n\n\n<mask token>\n", "step-3": "def final_frequency():\n frequency = 0\n with open('input') as f:\n for line in f:\n frequency += int(line)\n return frequency\n\n\nprint(final_frequency())\n", "step-4": "# Should print 516\ndef final_frequency():\n frequency = 0\n\n with open('input') as f:\n for line in f:\n frequency += int(line)\n\n return frequency\n\n\nprint(final_frequency())\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> logging.basicConfig(level=logging.INFO) <|reserved_special_token_0|> @dp.message_handler(commands=['start', 'help']) async def send_welcome(message: types.Message): await message.reply( """Портал Государственных услуг округа Кустоже Используете команду /id + номер_паспорта для получения информации о владельце. Так же можно использовать команду /fullname + имя + фамилия, для получения информации о гражданине, регистр и порядок не важен Для получения информации о гражданах по фамилии воспользуйтесь командой /lastname + Фамилия Для получения данных о гражданах по национальности используйте /get_scan_nat+ брасогорец\\отовичанин Для добавления гражданина воспользуйтесь командой /add_person ИМЯ+ФАМИЛИЯ+НОМЕР_ПАСПОРТА+НАЦИОНАЛЬНОСТЬ+НОМЕР_ЛИЦЕНЗИИ_ОРУЖЕЙНОЙ+Преступление ( если нет лицензии и преступления пишем НЕТ Для удаления гражданина используйте /delete_person + номер паспорта Для добавления лицензии на оружение воспользуйтесь командой /add_gun_lic+id+номер_лицензии Для удалениея /delete_gun_lic + id Для добавления преступления гражданину используйте команду /add_crime +id + преступление Для удаления преступления воспользуйтесь командой /delete_crime + id""" ) @dp.message_handler(commands=['id']) async def echo(message: types.Message): print('попросили данные по ID') arguments = message.get_args() print(arguments) cur.execute('select * from barsa where id=:id', {'id': arguments}) res = cur.fetchone() cur.execute('select count(*) from barsa where lastname=:lastname', { 'lastname': res[2]}) res2 = cur.fetchone() print(res2) result = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Родвественников:' + ' ' + str(res2[0] ) + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, result) os.chdir(idfolder) for file in glob.glob(res[3] + '.jpg'): img = open(file, 'rb') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['fullname']) async def echo(message: types.Message): print('попросили данные по имени') arguments = message.get_args() print(arguments) s = arguments.lower() s = s.split() cur.execute( 'select * from barsa where name=:name and lastname=:lastname or name=:lastname and lastname=:name' , {'name': s[0], 'lastname': s[1]}) res = cur.fetchone() cur.execute('select count(*) from barsa where lastname=:lastname', { 'lastname': s[1]}) res2 = cur.fetchone() print(res2[0]) result = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Родвественников:' + ' ' + str(res2[0] ) + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, result) os.chdir(idfolder) for file in glob.glob(res[3] + '.jpg'): img = open(file, 'rb') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['lastname']) async def echo(message: types.Message): arguments = message.get_args() print('попросили данные по Фамилии:', arguments) s = arguments.lower() cur.execute('select * from barsa where lastname=:lastname ', { 'lastname': s}) res = cur.fetchall() os.chdir(idfolder) for f in res: print(f) cur.execute('select * from barsa where id=:id', {'id': f[3]}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + 
res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) for file in glob.glob(res[3] + '.jpg'): img = open(file, 'rb') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['get_scan_nat']) async def echo(message: types.Message): print('попросили ВСЕ СКАНЫ по по национальности') arguments = message.get_args() print(arguments) s = arguments s = s.capitalize() cur.execute('select id from barsa where nat_=:nat_', {'nat_': s}) res = cur.fetchall() out = [item for t in res for item in t] out = [s.replace(' ', '') for s in out] os.chdir(idfolder) for f in out: print(f) if (f == '472-641218' or f == '757-067985' or f == '642-741978' or f == '696-082959' or f == '442-446766' or f == '702-973965'): cur.execute('select * from barsa where id=:id', {'id': f}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1 ] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) else: img = open(f + '.jpg', 'rb') print('ok') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['add_person']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s) sqlite_insert_query = """INSERT INTO barsa (name, lastname, id, nat_,gunlic,crime) VALUES (?,?,?,?,?,?)""" data_tuple = s[0], s[1], s[2], s[3], s[4], s[5] cur.execute(sqlite_insert_query, data_tuple) conn.commit() print('Запись о гражданине успешно добавлена ') cur.execute('select * from barsa where id=:id', {'id': s[2]}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_person']) async def echo(message: types.Message): arguments = message.get_args() print('попросили удалить гражаднина с ID:', arguments) s = arguments cur.execute('delete from barsa where id=:id', {'id': s}) conn.commit() print('Гражданин с ID :', arguments, 'удален') res = 'Гражданин с ID :', arguments, 'удален' await bot.send_message(message.from_user.id, res) @dp.message_handler(commands=['add_gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s[0], s[1]) cur.execute(' update barsa set gunlic=:gunlic where id=:id', {'gunlic': s[1], 'id': s[0]}) conn.commit() print('Record Updated successfully ') cur.execute('select * from barsa where id=:id', {'id': s[0]}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) cur.execute('select * from barsa where gunlic=:gunlic', {'gunlic': s}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + 
'\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) no = 'нет' cur.execute('update barsa set gunlic=:gunlic1 where id=:id', {'gunlic1': no, 'id': s}) res1 = cur.fetchone() conn.commit() ans = 'Оружейная лицения гражданина:', s, ' удалена' await bot.send_message(message.from_user.id, ans) @dp.message_handler(commands=['add_crime']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s[0], s[1]) cur.execute(' update barsa set crime=:crime where id=:id', {'crime': s[ 1], 'id': s[0]}) conn.commit() print('Record Updated successfully ') cur.execute('select * from barsa where id=:id', {'id': s[0]}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_crime']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) no = 'нет' cur.execute('update barsa set crime=:crime where id=:id', {'crime': no, 'id': s}) res1 = cur.fetchone() conn.commit() ans = 'Преступление гражданина:', s, ' удалено' await bot.send_message(message.from_user.id, ans) if __name__ == '__main__': executor.start_polling(dp, skip_updates=False) <|reserved_special_token_1|> <|reserved_special_token_0|> TOKEN = '1772334389:AAE5wv8gssOFOgxQjQwKk7rUSKQHr6NTjus' logging.basicConfig(level=logging.INFO) bot = Bot(token=TOKEN) dp = Dispatcher(bot) path1 = 'C:\\Users\\const\\PycharmProjects\\t' conn = sqlite3.connect('kustoge.db') cur = conn.cursor() chunksize = 10 idfolder = 'C:\\Users\\const\\PycharmProjects\\goskustoge\\data\\id' @dp.message_handler(commands=['start', 'help']) async def send_welcome(message: types.Message): await message.reply( """Портал Государственных услуг округа Кустоже Используете команду /id + номер_паспорта для получения информации о владельце. 
Так же можно использовать команду /fullname + имя + фамилия, для получения информации о гражданине, регистр и порядок не важен Для получения информации о гражданах по фамилии воспользуйтесь командой /lastname + Фамилия Для получения данных о гражданах по национальности используйте /get_scan_nat+ брасогорец\\отовичанин Для добавления гражданина воспользуйтесь командой /add_person ИМЯ+ФАМИЛИЯ+НОМЕР_ПАСПОРТА+НАЦИОНАЛЬНОСТЬ+НОМЕР_ЛИЦЕНЗИИ_ОРУЖЕЙНОЙ+Преступление ( если нет лицензии и преступления пишем НЕТ Для удаления гражданина используйте /delete_person + номер паспорта Для добавления лицензии на оружение воспользуйтесь командой /add_gun_lic+id+номер_лицензии Для удалениея /delete_gun_lic + id Для добавления преступления гражданину используйте команду /add_crime +id + преступление Для удаления преступления воспользуйтесь командой /delete_crime + id""" ) @dp.message_handler(commands=['id']) async def echo(message: types.Message): print('попросили данные по ID') arguments = message.get_args() print(arguments) cur.execute('select * from barsa where id=:id', {'id': arguments}) res = cur.fetchone() cur.execute('select count(*) from barsa where lastname=:lastname', { 'lastname': res[2]}) res2 = cur.fetchone() print(res2) result = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Родвественников:' + ' ' + str(res2[0] ) + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, result) os.chdir(idfolder) for file in glob.glob(res[3] + '.jpg'): img = open(file, 'rb') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['fullname']) async def echo(message: types.Message): print('попросили данные по имени') arguments = message.get_args() print(arguments) s = arguments.lower() s = s.split() cur.execute( 'select * from barsa where name=:name and lastname=:lastname or name=:lastname and lastname=:name' , {'name': s[0], 'lastname': s[1]}) res = cur.fetchone() cur.execute('select count(*) from barsa where lastname=:lastname', { 'lastname': s[1]}) res2 = cur.fetchone() print(res2[0]) result = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Родвественников:' + ' ' + str(res2[0] ) + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, result) os.chdir(idfolder) for file in glob.glob(res[3] + '.jpg'): img = open(file, 'rb') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['lastname']) async def echo(message: types.Message): arguments = message.get_args() print('попросили данные по Фамилии:', arguments) s = arguments.lower() cur.execute('select * from barsa where lastname=:lastname ', { 'lastname': s}) res = cur.fetchall() os.chdir(idfolder) for f in res: print(f) cur.execute('select * from barsa where id=:id', {'id': f[3]}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) for file in glob.glob(res[3] + '.jpg'): img = open(file, 'rb') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['get_scan_nat']) async 
def echo(message: types.Message): print('попросили ВСЕ СКАНЫ по по национальности') arguments = message.get_args() print(arguments) s = arguments s = s.capitalize() cur.execute('select id from barsa where nat_=:nat_', {'nat_': s}) res = cur.fetchall() out = [item for t in res for item in t] out = [s.replace(' ', '') for s in out] os.chdir(idfolder) for f in out: print(f) if (f == '472-641218' or f == '757-067985' or f == '642-741978' or f == '696-082959' or f == '442-446766' or f == '702-973965'): cur.execute('select * from barsa where id=:id', {'id': f}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1 ] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) else: img = open(f + '.jpg', 'rb') print('ok') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['add_person']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s) sqlite_insert_query = """INSERT INTO barsa (name, lastname, id, nat_,gunlic,crime) VALUES (?,?,?,?,?,?)""" data_tuple = s[0], s[1], s[2], s[3], s[4], s[5] cur.execute(sqlite_insert_query, data_tuple) conn.commit() print('Запись о гражданине успешно добавлена ') cur.execute('select * from barsa where id=:id', {'id': s[2]}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_person']) async def echo(message: types.Message): arguments = message.get_args() print('попросили удалить гражаднина с ID:', arguments) s = arguments cur.execute('delete from barsa where id=:id', {'id': s}) conn.commit() print('Гражданин с ID :', arguments, 'удален') res = 'Гражданин с ID :', arguments, 'удален' await bot.send_message(message.from_user.id, res) @dp.message_handler(commands=['add_gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s[0], s[1]) cur.execute(' update barsa set gunlic=:gunlic where id=:id', {'gunlic': s[1], 'id': s[0]}) conn.commit() print('Record Updated successfully ') cur.execute('select * from barsa where id=:id', {'id': s[0]}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) cur.execute('select * from barsa where gunlic=:gunlic', {'gunlic': s}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) no = 'нет' cur.execute('update barsa set gunlic=:gunlic1 where id=:id', 
{'gunlic1': no, 'id': s}) res1 = cur.fetchone() conn.commit() ans = 'Оружейная лицения гражданина:', s, ' удалена' await bot.send_message(message.from_user.id, ans) @dp.message_handler(commands=['add_crime']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s[0], s[1]) cur.execute(' update barsa set crime=:crime where id=:id', {'crime': s[ 1], 'id': s[0]}) conn.commit() print('Record Updated successfully ') cur.execute('select * from barsa where id=:id', {'id': s[0]}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_crime']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) no = 'нет' cur.execute('update barsa set crime=:crime where id=:id', {'crime': no, 'id': s}) res1 = cur.fetchone() conn.commit() ans = 'Преступление гражданина:', s, ' удалено' await bot.send_message(message.from_user.id, ans) if __name__ == '__main__': executor.start_polling(dp, skip_updates=False) <|reserved_special_token_1|> import glob import logging import os import sqlite3 from aiogram import Bot, Dispatcher, executor, types TOKEN = '1772334389:AAE5wv8gssOFOgxQjQwKk7rUSKQHr6NTjus' logging.basicConfig(level=logging.INFO) bot = Bot(token=TOKEN) dp = Dispatcher(bot) path1 = 'C:\\Users\\const\\PycharmProjects\\t' conn = sqlite3.connect('kustoge.db') cur = conn.cursor() chunksize = 10 idfolder = 'C:\\Users\\const\\PycharmProjects\\goskustoge\\data\\id' @dp.message_handler(commands=['start', 'help']) async def send_welcome(message: types.Message): await message.reply( """Портал Государственных услуг округа Кустоже Используете команду /id + номер_паспорта для получения информации о владельце. 
Так же можно использовать команду /fullname + имя + фамилия, для получения информации о гражданине, регистр и порядок не важен Для получения информации о гражданах по фамилии воспользуйтесь командой /lastname + Фамилия Для получения данных о гражданах по национальности используйте /get_scan_nat+ брасогорец\\отовичанин Для добавления гражданина воспользуйтесь командой /add_person ИМЯ+ФАМИЛИЯ+НОМЕР_ПАСПОРТА+НАЦИОНАЛЬНОСТЬ+НОМЕР_ЛИЦЕНЗИИ_ОРУЖЕЙНОЙ+Преступление ( если нет лицензии и преступления пишем НЕТ Для удаления гражданина используйте /delete_person + номер паспорта Для добавления лицензии на оружение воспользуйтесь командой /add_gun_lic+id+номер_лицензии Для удалениея /delete_gun_lic + id Для добавления преступления гражданину используйте команду /add_crime +id + преступление Для удаления преступления воспользуйтесь командой /delete_crime + id""" ) @dp.message_handler(commands=['id']) async def echo(message: types.Message): print('попросили данные по ID') arguments = message.get_args() print(arguments) cur.execute('select * from barsa where id=:id', {'id': arguments}) res = cur.fetchone() cur.execute('select count(*) from barsa where lastname=:lastname', { 'lastname': res[2]}) res2 = cur.fetchone() print(res2) result = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Родвественников:' + ' ' + str(res2[0] ) + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, result) os.chdir(idfolder) for file in glob.glob(res[3] + '.jpg'): img = open(file, 'rb') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['fullname']) async def echo(message: types.Message): print('попросили данные по имени') arguments = message.get_args() print(arguments) s = arguments.lower() s = s.split() cur.execute( 'select * from barsa where name=:name and lastname=:lastname or name=:lastname and lastname=:name' , {'name': s[0], 'lastname': s[1]}) res = cur.fetchone() cur.execute('select count(*) from barsa where lastname=:lastname', { 'lastname': s[1]}) res2 = cur.fetchone() print(res2[0]) result = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Родвественников:' + ' ' + str(res2[0] ) + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, result) os.chdir(idfolder) for file in glob.glob(res[3] + '.jpg'): img = open(file, 'rb') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['lastname']) async def echo(message: types.Message): arguments = message.get_args() print('попросили данные по Фамилии:', arguments) s = arguments.lower() cur.execute('select * from barsa where lastname=:lastname ', { 'lastname': s}) res = cur.fetchall() os.chdir(idfolder) for f in res: print(f) cur.execute('select * from barsa where id=:id', {'id': f[3]}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) for file in glob.glob(res[3] + '.jpg'): img = open(file, 'rb') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['get_scan_nat']) async 
def echo(message: types.Message): print('попросили ВСЕ СКАНЫ по по национальности') arguments = message.get_args() print(arguments) s = arguments s = s.capitalize() cur.execute('select id from barsa where nat_=:nat_', {'nat_': s}) res = cur.fetchall() out = [item for t in res for item in t] out = [s.replace(' ', '') for s in out] os.chdir(idfolder) for f in out: print(f) if (f == '472-641218' or f == '757-067985' or f == '642-741978' or f == '696-082959' or f == '442-446766' or f == '702-973965'): cur.execute('select * from barsa where id=:id', {'id': f}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1 ] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) else: img = open(f + '.jpg', 'rb') print('ok') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['add_person']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s) sqlite_insert_query = """INSERT INTO barsa (name, lastname, id, nat_,gunlic,crime) VALUES (?,?,?,?,?,?)""" data_tuple = s[0], s[1], s[2], s[3], s[4], s[5] cur.execute(sqlite_insert_query, data_tuple) conn.commit() print('Запись о гражданине успешно добавлена ') cur.execute('select * from barsa where id=:id', {'id': s[2]}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_person']) async def echo(message: types.Message): arguments = message.get_args() print('попросили удалить гражаднина с ID:', arguments) s = arguments cur.execute('delete from barsa where id=:id', {'id': s}) conn.commit() print('Гражданин с ID :', arguments, 'удален') res = 'Гражданин с ID :', arguments, 'удален' await bot.send_message(message.from_user.id, res) @dp.message_handler(commands=['add_gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s[0], s[1]) cur.execute(' update barsa set gunlic=:gunlic where id=:id', {'gunlic': s[1], 'id': s[0]}) conn.commit() print('Record Updated successfully ') cur.execute('select * from barsa where id=:id', {'id': s[0]}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) cur.execute('select * from barsa where gunlic=:gunlic', {'gunlic': s}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) no = 'нет' cur.execute('update barsa set gunlic=:gunlic1 where id=:id', 
{'gunlic1': no, 'id': s}) res1 = cur.fetchone() conn.commit() ans = 'Оружейная лицения гражданина:', s, ' удалена' await bot.send_message(message.from_user.id, ans) @dp.message_handler(commands=['add_crime']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s[0], s[1]) cur.execute(' update barsa set crime=:crime where id=:id', {'crime': s[ 1], 'id': s[0]}) conn.commit() print('Record Updated successfully ') cur.execute('select * from barsa where id=:id', {'id': s[0]}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2 ] + '\n' + 'Номер Паспорта:' + ' ' + res[3 ] + '\n' + 'Национальность:' + ' ' + res[4 ] + '\n' + 'Номер лицензии на оружие:' + ' ' + res[5 ] + '\n' + 'Преступление:' + ' ' + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_crime']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) no = 'нет' cur.execute('update barsa set crime=:crime where id=:id', {'crime': no, 'id': s}) res1 = cur.fetchone() conn.commit() ans = 'Преступление гражданина:', s, ' удалено' await bot.send_message(message.from_user.id, ans) if __name__ == '__main__': executor.start_polling(dp, skip_updates=False) <|reserved_special_token_1|> import glob import logging import os import sqlite3 from aiogram import Bot, Dispatcher, executor, types TOKEN = '1772334389:AAE5wv8gssOFOgxQjQwKk7rUSKQHr6NTjus' logging.basicConfig(level=logging.INFO) bot = Bot(token=TOKEN) dp = Dispatcher(bot) path1 = 'C:\\Users\\const\\PycharmProjects\\t' conn = sqlite3.connect('kustoge.db') cur = conn.cursor() chunksize = 10 idfolder = "C:\\Users\\const\\PycharmProjects\\goskustoge\\data\\id" @dp.message_handler(commands=['start', 'help']) async def send_welcome(message: types.Message): await message.reply( "Портал Государственных услуг округа Кустоже\n\n Используете команду /id + номер_паспорта \n для " "получения информации о владельце.\n\n Так же можно использовать команду /fullname + имя + фамилия, " "для получения информации о гражданине, регистр и порядок не важен\n\n Для получения информации о гражданах по " "фамилии воспользуйтесь командой /lastname + Фамилия \n\n Для получения данных о гражданах по национальности " "используйте /get_scan_nat+ брасогорец\отовичанин \n\n Для добавления гражданина " "воспользуйтесь командой /add_person ИМЯ+ФАМИЛИЯ+НОМЕР_ПАСПОРТА+НАЦИОНАЛЬНОСТЬ+НОМЕР_ЛИЦЕНЗИИ_" "ОРУЖЕЙНОЙ+Преступление ( если нет лицензии и преступления пишем НЕТ \n\n Для удаления гражданина " "используйте /delete_person + номер паспорта \n\n Для добавления лицензии на оружение воспользуйтесь командой " "/add_gun_lic+id+номер_лицензии \n\n " "Для удалениея /delete_gun_lic + id \n\n Для добавления преступления гражданину используйте команду " "/add_crime +id + преступление \n Для удаления преступления воспользуйтесь командой /delete_crime + id") @dp.message_handler(commands=['id']) async def echo(message: types.Message): print("попросили данные по ID") arguments = message.get_args() print(arguments) cur.execute("select * from barsa where id=:id", {"id": arguments}) res = cur.fetchone() cur.execute("select count(*) from barsa where lastname=:lastname", {"lastname": res[2]}) res2 = cur.fetchone() print(res2) result = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2] + '\n' + 'Номер Паспорта:' + ' ' + \ res[3] + '\n' + 'Национальность:' + ' ' + res[4] + "\n" + "Родвественников:" + ' ' + str( res2[0]) + '\n' + "Номер " \ "лицензии на " \ "оружие:" + " " 
+ \ res[5] + '\n' + "Преступление:" + " " + res[6] await bot.send_message(message.from_user.id, result) os.chdir(idfolder) for file in glob.glob(res[3] + ".jpg"): img = open(file, "rb") await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['fullname']) async def echo(message: types.Message): print("попросили данные по имени") arguments = message.get_args() print(arguments) s = arguments.lower() s = s.split() cur.execute("select * from barsa where name=:name and lastname=:lastname or name=:lastname and lastname=:name", {"name": s[0], "lastname": s[1]}) res = cur.fetchone() cur.execute("select count(*) from barsa where lastname=:lastname", {"lastname": s[1]}) res2 = cur.fetchone() print(res2[0]) result = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[2] + '\n' + 'Номер Паспорта:' + ' ' + \ res[3] + '\n' + 'Национальность:' + ' ' + res[4] + "\n" + "Родвественников:" + ' ' + str( res2[0]) + '\n' + "Номер " \ "лицензии на " \ "оружие:" + " " + \ res[5] + '\n' + \ "Преступление:" + " " + res[6] await bot.send_message(message.from_user.id, result) os.chdir(idfolder) for file in glob.glob(res[3] + ".jpg"): img = open(file, "rb") await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['lastname']) async def echo(message: types.Message): arguments = message.get_args() print("попросили данные по Фамилии:", arguments) s = arguments.lower() cur.execute("select * from barsa where lastname=:lastname ", {"lastname": s}) res = cur.fetchall() os.chdir(idfolder) for f in res: print(f) cur.execute("select * from barsa where id=:id", {"id": f[3]}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[ 2] + '\n' + 'Номер Паспорта:' + ' ' + res[3] + '\n' + 'Национальность:' + ' ' + res[ 4] + '\n' + "Номер " \ "лицензии на " \ "оружие:" + " " + \ res[5] + '\n' + \ "Преступление:" + " " + res[6] await bot.send_message(message.from_user.id, req) for file in glob.glob(res[3] + ".jpg"): img = open(file, "rb") await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['get_scan_nat']) async def echo(message: types.Message): print("попросили ВСЕ СКАНЫ по по национальности") arguments = message.get_args() print(arguments) s = arguments s = s.capitalize() cur.execute("select id from barsa where nat_=:nat_", {"nat_": s}) res = cur.fetchall() out = [item for t in res for item in t] out = [s.replace(" ", "") for s in out] os.chdir(idfolder) for f in out: print(f) if f == '472-641218' or f == '757-067985' or f == '642-741978' or f == '696-082959' or f == '442-446766' or f == '702-973965': cur.execute("select * from barsa where id=:id", {"id": f}) res = cur.fetchone() req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[ 2] + '\n' + 'Номер Паспорта:' + ' ' + res[3] + '\n' + 'Национальность:' + ' ' + res[ 4] + '\n' + "Номер " \ "лицензии на " \ "оружие:" + " " + \ res[5] + '\n' + \ "Преступление:" + " " + res[6] await bot.send_message(message.from_user.id, req) else: img = open(f + '.jpg', "rb") print('ok') await bot.send_photo(message.from_user.id, img) @dp.message_handler(commands=['add_person']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s) sqlite_insert_query = """INSERT INTO barsa (name, lastname, id, nat_,gunlic,crime) VALUES (?,?,?,?,?,?)""" data_tuple = (s[0], s[1], s[2], s[3], s[4], s[5]) cur.execute(sqlite_insert_query, data_tuple) conn.commit() print("Запись о гражданине успешно добавлена ") cur.execute("select * from barsa where id=:id", {"id": s[2]}) res 
= cur.fetchone() req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[ 2] + '\n' + 'Номер Паспорта:' + ' ' + res[3] + '\n' + 'Национальность:' + ' ' + res[4] + '\n' + "Номер " \ "лицензии на " \ "оружие:" + " " + \ res[5] + '\n' + \ "Преступление:" + " " + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_person']) async def echo(message: types.Message): arguments = message.get_args() print("попросили удалить гражаднина с ID:", arguments) s = arguments cur.execute("delete from barsa where id=:id", {"id": s}) conn.commit() print("Гражданин с ID :", arguments, 'удален') res = "Гражданин с ID :", arguments, 'удален' await bot.send_message(message.from_user.id, res) @dp.message_handler(commands=['add_gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s[0], s[1]) cur.execute(" update barsa set gunlic=:gunlic where id=:id", {"gunlic": s[1], "id": s[0]}) conn.commit() print("Record Updated successfully ") cur.execute("select * from barsa where id=:id", {"id": s[0]}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[ 2] + '\n' + 'Номер Паспорта:' + ' ' + res[3] + '\n' + 'Национальность:' + ' ' + res[4] + '\n' + "Номер " \ "лицензии на " \ "оружие:" + " " + \ res[5] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) cur.execute("select * from barsa where gunlic=:gunlic", {"gunlic": s}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[ 2] + '\n' + 'Номер Паспорта:' + ' ' + res[3] + '\n' + 'Национальность:' + ' ' + res[4] + '\n' + "Номер " \ "лицензии на " \ "оружие:" + " " + \ res[5] + '\n' + \ "Преступление:" + " " + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_gun_lic']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) no = 'нет' cur.execute("update barsa set gunlic=:gunlic1 where id=:id", {"gunlic1": no, "id": s}) res1 = cur.fetchone() conn.commit() ans = "Оружейная лицения гражданина:",s, " удалена" await bot.send_message(message.from_user.id, ans) @dp.message_handler(commands=['add_crime']) async def echo(message: types.Message): arguments = message.get_args() s = arguments.split() print(s[0], s[1]) cur.execute(" update barsa set crime=:crime where id=:id", {"crime": s[1], "id": s[0]}) conn.commit() print("Record Updated successfully ") cur.execute("select * from barsa where id=:id", {"id": s[0]}) res = cur.fetchone() print(res) req = 'Имя:' + ' ' + res[1] + '\n' + 'Фамилия:' + ' ' + res[ 2] + '\n' + 'Номер Паспорта:' + ' ' + res[3] + '\n' + 'Национальность:' + ' ' + res[4] + '\n' + "Номер " \ "лицензии на " \ "оружие:" + " " + \ res[5] + '\n' + \ "Преступление:" + " " + res[6] await bot.send_message(message.from_user.id, req) @dp.message_handler(commands=['delete_crime']) async def echo(message: types.Message): arguments = message.get_args() s = arguments print(s) no = 'нет' cur.execute("update barsa set crime=:crime where id=:id", {"crime": no, "id": s}) res1 = cur.fetchone() conn.commit() ans = "Преступление гражданина:",s, " удалено" await bot.send_message(message.from_user.id, ans) if __name__ == '__main__': executor.start_polling(dp, skip_updates=False)
flexible
{ "blob_id": "4193fa992d06890afb660c072842cf1b85a43774", "index": 3207, "step-1": "<mask token>\n", "step-2": "<mask token>\nlogging.basicConfig(level=logging.INFO)\n<mask token>\n\n\n@dp.message_handler(commands=['start', 'help'])\nasync def send_welcome(message: types.Message):\n await message.reply(\n \"\"\"Портал Государственных услуг округа Кустоже\n\n Используете команду /id + номер_паспорта \n для получения информации о владельце.\n\n Так же можно использовать команду /fullname + имя + фамилия, для получения информации о гражданине, регистр и порядок не важен\n\n Для получения информации о гражданах по фамилии воспользуйтесь командой /lastname + Фамилия \n\n Для получения данных о гражданах по национальности используйте /get_scan_nat+ брасогорец\\\\отовичанин \n\n Для добавления гражданина воспользуйтесь командой /add_person ИМЯ+ФАМИЛИЯ+НОМЕР_ПАСПОРТА+НАЦИОНАЛЬНОСТЬ+НОМЕР_ЛИЦЕНЗИИ_ОРУЖЕЙНОЙ+Преступление ( если нет лицензии и преступления пишем НЕТ \n\n Для удаления гражданина используйте /delete_person + номер паспорта \n\n Для добавления лицензии на оружение воспользуйтесь командой /add_gun_lic+id+номер_лицензии \n\n Для удалениея /delete_gun_lic + id \n\n Для добавления преступления гражданину используйте команду /add_crime +id + преступление \n Для удаления преступления воспользуйтесь командой /delete_crime + id\"\"\"\n )\n\n\n@dp.message_handler(commands=['id'])\nasync def echo(message: types.Message):\n print('попросили данные по ID')\n arguments = message.get_args()\n print(arguments)\n cur.execute('select * from barsa where id=:id', {'id': arguments})\n res = cur.fetchone()\n cur.execute('select count(*) from barsa where lastname=:lastname', {\n 'lastname': res[2]})\n res2 = cur.fetchone()\n print(res2)\n result = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Родвественников:' + ' ' + str(res2[0]\n ) + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, result)\n os.chdir(idfolder)\n for file in glob.glob(res[3] + '.jpg'):\n img = open(file, 'rb')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['fullname'])\nasync def echo(message: types.Message):\n print('попросили данные по имени')\n arguments = message.get_args()\n print(arguments)\n s = arguments.lower()\n s = s.split()\n cur.execute(\n 'select * from barsa where name=:name and lastname=:lastname or name=:lastname and lastname=:name'\n , {'name': s[0], 'lastname': s[1]})\n res = cur.fetchone()\n cur.execute('select count(*) from barsa where lastname=:lastname', {\n 'lastname': s[1]})\n res2 = cur.fetchone()\n print(res2[0])\n result = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Родвественников:' + ' ' + str(res2[0]\n ) + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, result)\n os.chdir(idfolder)\n for file in glob.glob(res[3] + '.jpg'):\n img = open(file, 'rb')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['lastname'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n print('попросили данные по Фамилии:', arguments)\n s = arguments.lower()\n cur.execute('select * from barsa where lastname=:lastname ', {\n 
'lastname': s})\n res = cur.fetchall()\n os.chdir(idfolder)\n for f in res:\n print(f)\n cur.execute('select * from barsa where id=:id', {'id': f[3]})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n for file in glob.glob(res[3] + '.jpg'):\n img = open(file, 'rb')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['get_scan_nat'])\nasync def echo(message: types.Message):\n print('попросили ВСЕ СКАНЫ по по национальности')\n arguments = message.get_args()\n print(arguments)\n s = arguments\n s = s.capitalize()\n cur.execute('select id from barsa where nat_=:nat_', {'nat_': s})\n res = cur.fetchall()\n out = [item for t in res for item in t]\n out = [s.replace(' ', '') for s in out]\n os.chdir(idfolder)\n for f in out:\n print(f)\n if (f == '472-641218' or f == '757-067985' or f == '642-741978' or \n f == '696-082959' or f == '442-446766' or f == '702-973965'):\n cur.execute('select * from barsa where id=:id', {'id': f})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1\n ] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n else:\n img = open(f + '.jpg', 'rb')\n print('ok')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['add_person'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s)\n sqlite_insert_query = \"\"\"INSERT INTO barsa\n (name, lastname, id, nat_,gunlic,crime)\n VALUES\n (?,?,?,?,?,?)\"\"\"\n data_tuple = s[0], s[1], s[2], s[3], s[4], s[5]\n cur.execute(sqlite_insert_query, data_tuple)\n conn.commit()\n print('Запись о гражданине успешно добавлена ')\n cur.execute('select * from barsa where id=:id', {'id': s[2]})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_person'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n print('попросили удалить гражаднина с ID:', arguments)\n s = arguments\n cur.execute('delete from barsa where id=:id', {'id': s})\n conn.commit()\n print('Гражданин с ID :', arguments, 'удален')\n res = 'Гражданин с ID :', arguments, 'удален'\n await bot.send_message(message.from_user.id, res)\n\n\n@dp.message_handler(commands=['add_gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s[0], s[1])\n cur.execute(' update barsa set gunlic=:gunlic where id=:id', {'gunlic':\n s[1], 'id': s[0]})\n conn.commit()\n print('Record Updated successfully ')\n cur.execute('select * from barsa where id=:id', {'id': s[0]})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер 
лицензии на оружие:' + ' ' + res[5]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n cur.execute('select * from barsa where gunlic=:gunlic', {'gunlic': s})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n no = 'нет'\n cur.execute('update barsa set gunlic=:gunlic1 where id=:id', {'gunlic1':\n no, 'id': s})\n res1 = cur.fetchone()\n conn.commit()\n ans = 'Оружейная лицения гражданина:', s, ' удалена'\n await bot.send_message(message.from_user.id, ans)\n\n\n@dp.message_handler(commands=['add_crime'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s[0], s[1])\n cur.execute(' update barsa set crime=:crime where id=:id', {'crime': s[\n 1], 'id': s[0]})\n conn.commit()\n print('Record Updated successfully ')\n cur.execute('select * from barsa where id=:id', {'id': s[0]})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_crime'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n no = 'нет'\n cur.execute('update barsa set crime=:crime where id=:id', {'crime': no,\n 'id': s})\n res1 = cur.fetchone()\n conn.commit()\n ans = 'Преступление гражданина:', s, ' удалено'\n await bot.send_message(message.from_user.id, ans)\n\n\nif __name__ == '__main__':\n executor.start_polling(dp, skip_updates=False)\n", "step-3": "<mask token>\nTOKEN = '1772334389:AAE5wv8gssOFOgxQjQwKk7rUSKQHr6NTjus'\nlogging.basicConfig(level=logging.INFO)\nbot = Bot(token=TOKEN)\ndp = Dispatcher(bot)\npath1 = 'C:\\\\Users\\\\const\\\\PycharmProjects\\\\t'\nconn = sqlite3.connect('kustoge.db')\ncur = conn.cursor()\nchunksize = 10\nidfolder = 'C:\\\\Users\\\\const\\\\PycharmProjects\\\\goskustoge\\\\data\\\\id'\n\n\n@dp.message_handler(commands=['start', 'help'])\nasync def send_welcome(message: types.Message):\n await message.reply(\n \"\"\"Портал Государственных услуг округа Кустоже\n\n Используете команду /id + номер_паспорта \n для получения информации о владельце.\n\n Так же можно использовать команду /fullname + имя + фамилия, для получения информации о гражданине, регистр и порядок не важен\n\n Для получения информации о гражданах по фамилии воспользуйтесь командой /lastname + Фамилия \n\n Для получения данных о гражданах по национальности используйте /get_scan_nat+ брасогорец\\\\отовичанин \n\n Для добавления гражданина воспользуйтесь командой /add_person ИМЯ+ФАМИЛИЯ+НОМЕР_ПАСПОРТА+НАЦИОНАЛЬНОСТЬ+НОМЕР_ЛИЦЕНЗИИ_ОРУЖЕЙНОЙ+Преступление ( если нет лицензии и преступления пишем НЕТ \n\n Для удаления гражданина используйте /delete_person + номер паспорта \n\n Для добавления лицензии на оружение 
воспользуйтесь командой /add_gun_lic+id+номер_лицензии \n\n Для удалениея /delete_gun_lic + id \n\n Для добавления преступления гражданину используйте команду /add_crime +id + преступление \n Для удаления преступления воспользуйтесь командой /delete_crime + id\"\"\"\n )\n\n\n@dp.message_handler(commands=['id'])\nasync def echo(message: types.Message):\n print('попросили данные по ID')\n arguments = message.get_args()\n print(arguments)\n cur.execute('select * from barsa where id=:id', {'id': arguments})\n res = cur.fetchone()\n cur.execute('select count(*) from barsa where lastname=:lastname', {\n 'lastname': res[2]})\n res2 = cur.fetchone()\n print(res2)\n result = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Родвественников:' + ' ' + str(res2[0]\n ) + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, result)\n os.chdir(idfolder)\n for file in glob.glob(res[3] + '.jpg'):\n img = open(file, 'rb')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['fullname'])\nasync def echo(message: types.Message):\n print('попросили данные по имени')\n arguments = message.get_args()\n print(arguments)\n s = arguments.lower()\n s = s.split()\n cur.execute(\n 'select * from barsa where name=:name and lastname=:lastname or name=:lastname and lastname=:name'\n , {'name': s[0], 'lastname': s[1]})\n res = cur.fetchone()\n cur.execute('select count(*) from barsa where lastname=:lastname', {\n 'lastname': s[1]})\n res2 = cur.fetchone()\n print(res2[0])\n result = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Родвественников:' + ' ' + str(res2[0]\n ) + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, result)\n os.chdir(idfolder)\n for file in glob.glob(res[3] + '.jpg'):\n img = open(file, 'rb')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['lastname'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n print('попросили данные по Фамилии:', arguments)\n s = arguments.lower()\n cur.execute('select * from barsa where lastname=:lastname ', {\n 'lastname': s})\n res = cur.fetchall()\n os.chdir(idfolder)\n for f in res:\n print(f)\n cur.execute('select * from barsa where id=:id', {'id': f[3]})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n for file in glob.glob(res[3] + '.jpg'):\n img = open(file, 'rb')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['get_scan_nat'])\nasync def echo(message: types.Message):\n print('попросили ВСЕ СКАНЫ по по национальности')\n arguments = message.get_args()\n print(arguments)\n s = arguments\n s = s.capitalize()\n cur.execute('select id from barsa where nat_=:nat_', {'nat_': s})\n res = cur.fetchall()\n out = [item for t in res for item in t]\n out = [s.replace(' ', '') for s in out]\n os.chdir(idfolder)\n for f in out:\n print(f)\n if (f == '472-641218' or 
f == '757-067985' or f == '642-741978' or \n f == '696-082959' or f == '442-446766' or f == '702-973965'):\n cur.execute('select * from barsa where id=:id', {'id': f})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1\n ] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n else:\n img = open(f + '.jpg', 'rb')\n print('ok')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['add_person'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s)\n sqlite_insert_query = \"\"\"INSERT INTO barsa\n (name, lastname, id, nat_,gunlic,crime)\n VALUES\n (?,?,?,?,?,?)\"\"\"\n data_tuple = s[0], s[1], s[2], s[3], s[4], s[5]\n cur.execute(sqlite_insert_query, data_tuple)\n conn.commit()\n print('Запись о гражданине успешно добавлена ')\n cur.execute('select * from barsa where id=:id', {'id': s[2]})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_person'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n print('попросили удалить гражаднина с ID:', arguments)\n s = arguments\n cur.execute('delete from barsa where id=:id', {'id': s})\n conn.commit()\n print('Гражданин с ID :', arguments, 'удален')\n res = 'Гражданин с ID :', arguments, 'удален'\n await bot.send_message(message.from_user.id, res)\n\n\n@dp.message_handler(commands=['add_gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s[0], s[1])\n cur.execute(' update barsa set gunlic=:gunlic where id=:id', {'gunlic':\n s[1], 'id': s[0]})\n conn.commit()\n print('Record Updated successfully ')\n cur.execute('select * from barsa where id=:id', {'id': s[0]})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n cur.execute('select * from barsa where gunlic=:gunlic', {'gunlic': s})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n no = 'нет'\n cur.execute('update barsa set gunlic=:gunlic1 where id=:id', {'gunlic1':\n no, 'id': s})\n res1 = cur.fetchone()\n conn.commit()\n ans = 'Оружейная лицения гражданина:', s, ' удалена'\n await bot.send_message(message.from_user.id, 
ans)\n\n\n@dp.message_handler(commands=['add_crime'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s[0], s[1])\n cur.execute(' update barsa set crime=:crime where id=:id', {'crime': s[\n 1], 'id': s[0]})\n conn.commit()\n print('Record Updated successfully ')\n cur.execute('select * from barsa where id=:id', {'id': s[0]})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_crime'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n no = 'нет'\n cur.execute('update barsa set crime=:crime where id=:id', {'crime': no,\n 'id': s})\n res1 = cur.fetchone()\n conn.commit()\n ans = 'Преступление гражданина:', s, ' удалено'\n await bot.send_message(message.from_user.id, ans)\n\n\nif __name__ == '__main__':\n executor.start_polling(dp, skip_updates=False)\n", "step-4": "import glob\nimport logging\nimport os\nimport sqlite3\nfrom aiogram import Bot, Dispatcher, executor, types\nTOKEN = '1772334389:AAE5wv8gssOFOgxQjQwKk7rUSKQHr6NTjus'\nlogging.basicConfig(level=logging.INFO)\nbot = Bot(token=TOKEN)\ndp = Dispatcher(bot)\npath1 = 'C:\\\\Users\\\\const\\\\PycharmProjects\\\\t'\nconn = sqlite3.connect('kustoge.db')\ncur = conn.cursor()\nchunksize = 10\nidfolder = 'C:\\\\Users\\\\const\\\\PycharmProjects\\\\goskustoge\\\\data\\\\id'\n\n\n@dp.message_handler(commands=['start', 'help'])\nasync def send_welcome(message: types.Message):\n await message.reply(\n \"\"\"Портал Государственных услуг округа Кустоже\n\n Используете команду /id + номер_паспорта \n для получения информации о владельце.\n\n Так же можно использовать команду /fullname + имя + фамилия, для получения информации о гражданине, регистр и порядок не важен\n\n Для получения информации о гражданах по фамилии воспользуйтесь командой /lastname + Фамилия \n\n Для получения данных о гражданах по национальности используйте /get_scan_nat+ брасогорец\\\\отовичанин \n\n Для добавления гражданина воспользуйтесь командой /add_person ИМЯ+ФАМИЛИЯ+НОМЕР_ПАСПОРТА+НАЦИОНАЛЬНОСТЬ+НОМЕР_ЛИЦЕНЗИИ_ОРУЖЕЙНОЙ+Преступление ( если нет лицензии и преступления пишем НЕТ \n\n Для удаления гражданина используйте /delete_person + номер паспорта \n\n Для добавления лицензии на оружение воспользуйтесь командой /add_gun_lic+id+номер_лицензии \n\n Для удалениея /delete_gun_lic + id \n\n Для добавления преступления гражданину используйте команду /add_crime +id + преступление \n Для удаления преступления воспользуйтесь командой /delete_crime + id\"\"\"\n )\n\n\n@dp.message_handler(commands=['id'])\nasync def echo(message: types.Message):\n print('попросили данные по ID')\n arguments = message.get_args()\n print(arguments)\n cur.execute('select * from barsa where id=:id', {'id': arguments})\n res = cur.fetchone()\n cur.execute('select count(*) from barsa where lastname=:lastname', {\n 'lastname': res[2]})\n res2 = cur.fetchone()\n print(res2)\n result = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Родвественников:' + ' ' + str(res2[0]\n ) + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' 
+ ' ' + res[6]\n await bot.send_message(message.from_user.id, result)\n os.chdir(idfolder)\n for file in glob.glob(res[3] + '.jpg'):\n img = open(file, 'rb')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['fullname'])\nasync def echo(message: types.Message):\n print('попросили данные по имени')\n arguments = message.get_args()\n print(arguments)\n s = arguments.lower()\n s = s.split()\n cur.execute(\n 'select * from barsa where name=:name and lastname=:lastname or name=:lastname and lastname=:name'\n , {'name': s[0], 'lastname': s[1]})\n res = cur.fetchone()\n cur.execute('select count(*) from barsa where lastname=:lastname', {\n 'lastname': s[1]})\n res2 = cur.fetchone()\n print(res2[0])\n result = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Родвественников:' + ' ' + str(res2[0]\n ) + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, result)\n os.chdir(idfolder)\n for file in glob.glob(res[3] + '.jpg'):\n img = open(file, 'rb')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['lastname'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n print('попросили данные по Фамилии:', arguments)\n s = arguments.lower()\n cur.execute('select * from barsa where lastname=:lastname ', {\n 'lastname': s})\n res = cur.fetchall()\n os.chdir(idfolder)\n for f in res:\n print(f)\n cur.execute('select * from barsa where id=:id', {'id': f[3]})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n for file in glob.glob(res[3] + '.jpg'):\n img = open(file, 'rb')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['get_scan_nat'])\nasync def echo(message: types.Message):\n print('попросили ВСЕ СКАНЫ по по национальности')\n arguments = message.get_args()\n print(arguments)\n s = arguments\n s = s.capitalize()\n cur.execute('select id from barsa where nat_=:nat_', {'nat_': s})\n res = cur.fetchall()\n out = [item for t in res for item in t]\n out = [s.replace(' ', '') for s in out]\n os.chdir(idfolder)\n for f in out:\n print(f)\n if (f == '472-641218' or f == '757-067985' or f == '642-741978' or \n f == '696-082959' or f == '442-446766' or f == '702-973965'):\n cur.execute('select * from barsa where id=:id', {'id': f})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1\n ] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n else:\n img = open(f + '.jpg', 'rb')\n print('ok')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['add_person'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s)\n sqlite_insert_query = \"\"\"INSERT INTO barsa\n (name, lastname, id, nat_,gunlic,crime)\n VALUES\n (?,?,?,?,?,?)\"\"\"\n data_tuple = s[0], s[1], s[2], s[3], s[4], s[5]\n 
cur.execute(sqlite_insert_query, data_tuple)\n conn.commit()\n print('Запись о гражданине успешно добавлена ')\n cur.execute('select * from barsa where id=:id', {'id': s[2]})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_person'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n print('попросили удалить гражаднина с ID:', arguments)\n s = arguments\n cur.execute('delete from barsa where id=:id', {'id': s})\n conn.commit()\n print('Гражданин с ID :', arguments, 'удален')\n res = 'Гражданин с ID :', arguments, 'удален'\n await bot.send_message(message.from_user.id, res)\n\n\n@dp.message_handler(commands=['add_gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s[0], s[1])\n cur.execute(' update barsa set gunlic=:gunlic where id=:id', {'gunlic':\n s[1], 'id': s[0]})\n conn.commit()\n print('Record Updated successfully ')\n cur.execute('select * from barsa where id=:id', {'id': s[0]})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n cur.execute('select * from barsa where gunlic=:gunlic', {'gunlic': s})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n no = 'нет'\n cur.execute('update barsa set gunlic=:gunlic1 where id=:id', {'gunlic1':\n no, 'id': s})\n res1 = cur.fetchone()\n conn.commit()\n ans = 'Оружейная лицения гражданина:', s, ' удалена'\n await bot.send_message(message.from_user.id, ans)\n\n\n@dp.message_handler(commands=['add_crime'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s[0], s[1])\n cur.execute(' update barsa set crime=:crime where id=:id', {'crime': s[\n 1], 'id': s[0]})\n conn.commit()\n print('Record Updated successfully ')\n cur.execute('select * from barsa where id=:id', {'id': s[0]})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2\n ] + '\\n' + 'Номер Паспорта:' + ' ' + res[3\n ] + '\\n' + 'Национальность:' + ' ' + res[4\n ] + '\\n' + 'Номер лицензии на оружие:' + ' ' + res[5\n ] + '\\n' + 'Преступление:' + ' ' + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_crime'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n no = 'нет'\n cur.execute('update barsa set crime=:crime where id=:id', {'crime': 
no,\n 'id': s})\n res1 = cur.fetchone()\n conn.commit()\n ans = 'Преступление гражданина:', s, ' удалено'\n await bot.send_message(message.from_user.id, ans)\n\n\nif __name__ == '__main__':\n executor.start_polling(dp, skip_updates=False)\n", "step-5": "import glob\nimport logging\nimport os\nimport sqlite3\n\nfrom aiogram import Bot, Dispatcher, executor, types\n\nTOKEN = '1772334389:AAE5wv8gssOFOgxQjQwKk7rUSKQHr6NTjus'\nlogging.basicConfig(level=logging.INFO)\nbot = Bot(token=TOKEN)\ndp = Dispatcher(bot)\npath1 = 'C:\\\\Users\\\\const\\\\PycharmProjects\\\\t'\n\nconn = sqlite3.connect('kustoge.db')\ncur = conn.cursor()\nchunksize = 10\n\nidfolder = \"C:\\\\Users\\\\const\\\\PycharmProjects\\\\goskustoge\\\\data\\\\id\"\n\n\n@dp.message_handler(commands=['start', 'help'])\nasync def send_welcome(message: types.Message):\n await message.reply(\n \"Портал Государственных услуг округа Кустоже\\n\\n Используете команду /id + номер_паспорта \\n для \"\n \"получения информации о владельце.\\n\\n Так же можно использовать команду /fullname + имя + фамилия, \"\n \"для получения информации о гражданине, регистр и порядок не важен\\n\\n Для получения информации о гражданах по \"\n \"фамилии воспользуйтесь командой /lastname + Фамилия \\n\\n Для получения данных о гражданах по национальности \"\n \"используйте /get_scan_nat+ брасогорец\\отовичанин \\n\\n Для добавления гражданина \"\n \"воспользуйтесь командой /add_person ИМЯ+ФАМИЛИЯ+НОМЕР_ПАСПОРТА+НАЦИОНАЛЬНОСТЬ+НОМЕР_ЛИЦЕНЗИИ_\"\n \"ОРУЖЕЙНОЙ+Преступление ( если нет лицензии и преступления пишем НЕТ \\n\\n Для удаления гражданина \"\n \"используйте /delete_person + номер паспорта \\n\\n Для добавления лицензии на оружение воспользуйтесь командой \"\n \"/add_gun_lic+id+номер_лицензии \\n\\n \"\n \"Для удалениея /delete_gun_lic + id \\n\\n Для добавления преступления гражданину используйте команду \"\n \"/add_crime +id + преступление \\n Для удаления преступления воспользуйтесь командой /delete_crime + id\")\n\n\n@dp.message_handler(commands=['id'])\nasync def echo(message: types.Message):\n print(\"попросили данные по ID\")\n arguments = message.get_args()\n print(arguments)\n cur.execute(\"select * from barsa where id=:id\", {\"id\": arguments})\n res = cur.fetchone()\n cur.execute(\"select count(*) from barsa where lastname=:lastname\",\n {\"lastname\": res[2]})\n res2 = cur.fetchone()\n print(res2)\n result = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2] + '\\n' + 'Номер Паспорта:' + ' ' + \\\n res[3] + '\\n' + 'Национальность:' + ' ' + res[4] + \"\\n\" + \"Родвественников:\" + ' ' + str(\n res2[0]) + '\\n' + \"Номер \" \\\n \"лицензии на \" \\\n \"оружие:\" + \" \" + \\\n res[5] + '\\n' + \"Преступление:\" + \" \" + res[6]\n await bot.send_message(message.from_user.id, result)\n os.chdir(idfolder)\n for file in glob.glob(res[3] + \".jpg\"):\n img = open(file, \"rb\")\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['fullname'])\nasync def echo(message: types.Message):\n print(\"попросили данные по имени\")\n arguments = message.get_args()\n print(arguments)\n s = arguments.lower()\n s = s.split()\n cur.execute(\"select * from barsa where name=:name and lastname=:lastname or name=:lastname and lastname=:name\",\n {\"name\": s[0], \"lastname\": s[1]})\n res = cur.fetchone()\n cur.execute(\"select count(*) from barsa where lastname=:lastname\",\n {\"lastname\": s[1]})\n res2 = cur.fetchone()\n print(res2[0])\n result = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[2] + '\\n' + 'Номер 
Паспорта:' + ' ' + \\\n res[3] + '\\n' + 'Национальность:' + ' ' + res[4] + \"\\n\" + \"Родвественников:\" + ' ' + str(\n res2[0]) + '\\n' + \"Номер \" \\\n \"лицензии на \" \\\n \"оружие:\" + \" \" + \\\n res[5] + '\\n' + \\\n \"Преступление:\" + \" \" + res[6]\n await bot.send_message(message.from_user.id, result)\n os.chdir(idfolder)\n for file in glob.glob(res[3] + \".jpg\"):\n img = open(file, \"rb\")\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['lastname'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n print(\"попросили данные по Фамилии:\", arguments)\n s = arguments.lower()\n cur.execute(\"select * from barsa where lastname=:lastname \",\n {\"lastname\": s})\n res = cur.fetchall()\n os.chdir(idfolder)\n for f in res:\n print(f)\n cur.execute(\"select * from barsa where id=:id\", {\"id\": f[3]})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[\n 2] + '\\n' + 'Номер Паспорта:' + ' ' + res[3] + '\\n' + 'Национальность:' + ' ' + res[\n 4] + '\\n' + \"Номер \" \\\n \"лицензии на \" \\\n \"оружие:\" + \" \" + \\\n res[5] + '\\n' + \\\n \"Преступление:\" + \" \" + res[6]\n await bot.send_message(message.from_user.id, req)\n\n for file in glob.glob(res[3] + \".jpg\"):\n img = open(file, \"rb\")\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['get_scan_nat'])\nasync def echo(message: types.Message):\n print(\"попросили ВСЕ СКАНЫ по по национальности\")\n arguments = message.get_args()\n print(arguments)\n s = arguments\n s = s.capitalize()\n cur.execute(\"select id from barsa where nat_=:nat_\", {\"nat_\": s})\n res = cur.fetchall()\n out = [item for t in res for item in t]\n out = [s.replace(\" \", \"\") for s in out]\n os.chdir(idfolder)\n for f in out:\n print(f)\n if f == '472-641218' or f == '757-067985' or f == '642-741978' or f == '696-082959' or f == '442-446766' or f == '702-973965':\n cur.execute(\"select * from barsa where id=:id\", {\"id\": f})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[\n 2] + '\\n' + 'Номер Паспорта:' + ' ' + res[3] + '\\n' + 'Национальность:' + ' ' + res[\n 4] + '\\n' + \"Номер \" \\\n \"лицензии на \" \\\n \"оружие:\" + \" \" + \\\n res[5] + '\\n' + \\\n \"Преступление:\" + \" \" + res[6]\n await bot.send_message(message.from_user.id, req)\n else:\n img = open(f + '.jpg', \"rb\")\n print('ok')\n await bot.send_photo(message.from_user.id, img)\n\n\n@dp.message_handler(commands=['add_person'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s)\n sqlite_insert_query = \"\"\"INSERT INTO barsa\n (name, lastname, id, nat_,gunlic,crime)\n VALUES\n (?,?,?,?,?,?)\"\"\"\n data_tuple = (s[0], s[1], s[2], s[3], s[4], s[5])\n cur.execute(sqlite_insert_query, data_tuple)\n conn.commit()\n\n print(\"Запись о гражданине успешно добавлена \")\n cur.execute(\"select * from barsa where id=:id\", {\"id\": s[2]})\n res = cur.fetchone()\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[\n 2] + '\\n' + 'Номер Паспорта:' + ' ' + res[3] + '\\n' + 'Национальность:' + ' ' + res[4] + '\\n' + \"Номер \" \\\n \"лицензии на \" \\\n \"оружие:\" + \" \" + \\\n res[5] + '\\n' + \\\n \"Преступление:\" + \" \" + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_person'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n print(\"попросили удалить гражаднина с 
ID:\", arguments)\n s = arguments\n cur.execute(\"delete from barsa where id=:id\", {\"id\": s})\n conn.commit()\n print(\"Гражданин с ID :\", arguments, 'удален')\n res = \"Гражданин с ID :\", arguments, 'удален'\n await bot.send_message(message.from_user.id, res)\n\n\n@dp.message_handler(commands=['add_gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s[0], s[1])\n cur.execute(\" update barsa set gunlic=:gunlic where id=:id\", {\"gunlic\": s[1], \"id\": s[0]})\n conn.commit()\n print(\"Record Updated successfully \")\n cur.execute(\"select * from barsa where id=:id\", {\"id\": s[0]})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[\n 2] + '\\n' + 'Номер Паспорта:' + ' ' + res[3] + '\\n' + 'Национальность:' + ' ' + res[4] + '\\n' + \"Номер \" \\\n \"лицензии на \" \\\n \"оружие:\" + \" \" + \\\n res[5]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n cur.execute(\"select * from barsa where gunlic=:gunlic\", {\"gunlic\": s})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[\n 2] + '\\n' + 'Номер Паспорта:' + ' ' + res[3] + '\\n' + 'Национальность:' + ' ' + res[4] + '\\n' + \"Номер \" \\\n \"лицензии на \" \\\n \"оружие:\" + \" \" + \\\n res[5] + '\\n' + \\\n \"Преступление:\" + \" \" + res[6]\n await bot.send_message(message.from_user.id, req)\n\n\n@dp.message_handler(commands=['delete_gun_lic'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n no = 'нет'\n cur.execute(\"update barsa set gunlic=:gunlic1 where id=:id\", {\"gunlic1\": no, \"id\": s})\n res1 = cur.fetchone()\n conn.commit()\n ans = \"Оружейная лицения гражданина:\",s, \" удалена\"\n await bot.send_message(message.from_user.id, ans)\n\n\n@dp.message_handler(commands=['add_crime'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments.split()\n print(s[0], s[1])\n cur.execute(\" update barsa set crime=:crime where id=:id\", {\"crime\": s[1], \"id\": s[0]})\n conn.commit()\n print(\"Record Updated successfully \")\n cur.execute(\"select * from barsa where id=:id\", {\"id\": s[0]})\n res = cur.fetchone()\n print(res)\n req = 'Имя:' + ' ' + res[1] + '\\n' + 'Фамилия:' + ' ' + res[\n 2] + '\\n' + 'Номер Паспорта:' + ' ' + res[3] + '\\n' + 'Национальность:' + ' ' + res[4] + '\\n' + \"Номер \" \\\n \"лицензии на \" \\\n \"оружие:\" + \" \" + \\\n res[5] + '\\n' + \\\n \"Преступление:\" + \" \" + res[6]\n await bot.send_message(message.from_user.id, req)\n\n@dp.message_handler(commands=['delete_crime'])\nasync def echo(message: types.Message):\n arguments = message.get_args()\n s = arguments\n print(s)\n no = 'нет'\n cur.execute(\"update barsa set crime=:crime where id=:id\", {\"crime\": no, \"id\": s})\n res1 = cur.fetchone()\n conn.commit()\n ans = \"Преступление гражданина:\",s, \" удалено\"\n await bot.send_message(message.from_user.id, ans)\n\n\nif __name__ == '__main__':\n executor.start_polling(dp, skip_updates=False)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import inspect import json import os import re import urllib.request from functools import wraps from ..errors import NotFoundError class API: def __init__(self, base_url, version=1): self.BASE = base_url or 'https://api.starlist.pro/v{}'.format(version) self.PROFILE = self.BASE + '/player' self.CLUB = self.BASE + '/club' self.LEADERBOARD = self.BASE + '/leaderboards' self.EVENTS = self.BASE + '/events' self.MISC = self.BASE + '/misc' self.BATTLELOG = self.PROFILE + '/battlelog' self.CLUB_SEARCH = self.CLUB + '/search' self.CONSTANTS = 'https://fourjr.herokuapp.com/bs/constants/' # self.BRAWLERS = [ # 'shelly', 'nita', 'colt', 'bull', 'jessie', # league reward 0-500 # 'brock', 'dynamike', 'bo', 'tick', '8-bit' # league reward 1000+ # 'el primo', 'barley', 'poco', 'rosa', # rare # 'rico', 'penny', 'darryl', 'carl', # super rare # 'frank', 'pam', 'piper', 'bibi', # epic # 'mortis', 'tara', 'gene', # mythic # 'spike', 'crow', 'leon' # legendary # ] path = os.path.join(os.path.dirname(__file__), os.path.pardir) with open(os.path.join(path, '__init__.py')) as f: self.VERSION = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read(), re.MULTILINE).group(1) try: data = json.loads(urllib.request.urlopen(self.CONSTANTS).read()) except (TypeError, urllib.error.HTTPError, urllib.error.URLError): self.BRAWLERS = {} else: if data: self.BRAWLERS = {b['tID'].lower(): str(b['scId'])[:2] + '0' + str(b['scId'])[2:] for b in data['characters'] if b['tID']} else: self.BRAWLERS = {} def bstag(tag): tag = tag.strip('#').upper().replace('O', '0') allowed = '0289PYLQGRJCUV' if len(tag) < 3: raise NotFoundError('Tag less than 3 characters.', 404) invalid = [c for c in tag if c not in allowed] if invalid: raise NotFoundError(invalid, 404) return tag def typecasted(func): '''Decorator that converts arguments via annotations. Source: https://github.com/cgrok/clashroyale/blob/master/clashroyale/official_api/utils.py#L11''' signature = inspect.signature(func).parameters.items() @wraps(func) def wrapper(*args, **kwargs): args = list(args) new_args = [] new_kwargs = {} for _, param in signature: converter = param.annotation if converter is inspect._empty: converter = lambda a: a # do nothing if param.kind is param.POSITIONAL_OR_KEYWORD: if args: to_conv = args.pop(0) new_args.append(converter(to_conv)) elif param.kind is param.VAR_POSITIONAL: for a in args: new_args.append(converter(a)) else: for k, v in kwargs.items(): nk, nv = converter(k, v) new_kwargs[nk] = nv return func(*new_args, **new_kwargs) return wrapper
normal
{ "blob_id": "3f3db7e8813f49fe0265e110236b6dc4fed6cd1b", "index": 7214, "step-1": "<mask token>\n\n\nclass API:\n\n def __init__(self, base_url, version=1):\n self.BASE = base_url or 'https://api.starlist.pro/v{}'.format(version)\n self.PROFILE = self.BASE + '/player'\n self.CLUB = self.BASE + '/club'\n self.LEADERBOARD = self.BASE + '/leaderboards'\n self.EVENTS = self.BASE + '/events'\n self.MISC = self.BASE + '/misc'\n self.BATTLELOG = self.PROFILE + '/battlelog'\n self.CLUB_SEARCH = self.CLUB + '/search'\n self.CONSTANTS = 'https://fourjr.herokuapp.com/bs/constants/'\n path = os.path.join(os.path.dirname(__file__), os.path.pardir)\n with open(os.path.join(path, '__init__.py')) as f:\n self.VERSION = re.search('^__version__ = [\\\\\\'\"]([^\\\\\\'\"]*)[\\\\\\'\"]'\n , f.read(), re.MULTILINE).group(1)\n try:\n data = json.loads(urllib.request.urlopen(self.CONSTANTS).read())\n except (TypeError, urllib.error.HTTPError, urllib.error.URLError):\n self.BRAWLERS = {}\n else:\n if data:\n self.BRAWLERS = {b['tID'].lower(): (str(b['scId'])[:2] +\n '0' + str(b['scId'])[2:]) for b in data['characters'] if\n b['tID']}\n else:\n self.BRAWLERS = {}\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass API:\n\n def __init__(self, base_url, version=1):\n self.BASE = base_url or 'https://api.starlist.pro/v{}'.format(version)\n self.PROFILE = self.BASE + '/player'\n self.CLUB = self.BASE + '/club'\n self.LEADERBOARD = self.BASE + '/leaderboards'\n self.EVENTS = self.BASE + '/events'\n self.MISC = self.BASE + '/misc'\n self.BATTLELOG = self.PROFILE + '/battlelog'\n self.CLUB_SEARCH = self.CLUB + '/search'\n self.CONSTANTS = 'https://fourjr.herokuapp.com/bs/constants/'\n path = os.path.join(os.path.dirname(__file__), os.path.pardir)\n with open(os.path.join(path, '__init__.py')) as f:\n self.VERSION = re.search('^__version__ = [\\\\\\'\"]([^\\\\\\'\"]*)[\\\\\\'\"]'\n , f.read(), re.MULTILINE).group(1)\n try:\n data = json.loads(urllib.request.urlopen(self.CONSTANTS).read())\n except (TypeError, urllib.error.HTTPError, urllib.error.URLError):\n self.BRAWLERS = {}\n else:\n if data:\n self.BRAWLERS = {b['tID'].lower(): (str(b['scId'])[:2] +\n '0' + str(b['scId'])[2:]) for b in data['characters'] if\n b['tID']}\n else:\n self.BRAWLERS = {}\n\n\n<mask token>\n\n\ndef typecasted(func):\n \"\"\"Decorator that converts arguments via annotations.\n Source: https://github.com/cgrok/clashroyale/blob/master/clashroyale/official_api/utils.py#L11\"\"\"\n signature = inspect.signature(func).parameters.items()\n\n @wraps(func)\n def wrapper(*args, **kwargs):\n args = list(args)\n new_args = []\n new_kwargs = {}\n for _, param in signature:\n converter = param.annotation\n if converter is inspect._empty:\n converter = lambda a: a\n if param.kind is param.POSITIONAL_OR_KEYWORD:\n if args:\n to_conv = args.pop(0)\n new_args.append(converter(to_conv))\n elif param.kind is param.VAR_POSITIONAL:\n for a in args:\n new_args.append(converter(a))\n else:\n for k, v in kwargs.items():\n nk, nv = converter(k, v)\n new_kwargs[nk] = nv\n return func(*new_args, **new_kwargs)\n return wrapper\n", "step-3": "<mask token>\n\n\nclass API:\n\n def __init__(self, base_url, version=1):\n self.BASE = base_url or 'https://api.starlist.pro/v{}'.format(version)\n self.PROFILE = self.BASE + '/player'\n self.CLUB = self.BASE + '/club'\n self.LEADERBOARD = self.BASE + '/leaderboards'\n self.EVENTS = self.BASE + '/events'\n self.MISC = self.BASE + '/misc'\n self.BATTLELOG = self.PROFILE + '/battlelog'\n self.CLUB_SEARCH = self.CLUB + '/search'\n 
self.CONSTANTS = 'https://fourjr.herokuapp.com/bs/constants/'\n path = os.path.join(os.path.dirname(__file__), os.path.pardir)\n with open(os.path.join(path, '__init__.py')) as f:\n self.VERSION = re.search('^__version__ = [\\\\\\'\"]([^\\\\\\'\"]*)[\\\\\\'\"]'\n , f.read(), re.MULTILINE).group(1)\n try:\n data = json.loads(urllib.request.urlopen(self.CONSTANTS).read())\n except (TypeError, urllib.error.HTTPError, urllib.error.URLError):\n self.BRAWLERS = {}\n else:\n if data:\n self.BRAWLERS = {b['tID'].lower(): (str(b['scId'])[:2] +\n '0' + str(b['scId'])[2:]) for b in data['characters'] if\n b['tID']}\n else:\n self.BRAWLERS = {}\n\n\ndef bstag(tag):\n tag = tag.strip('#').upper().replace('O', '0')\n allowed = '0289PYLQGRJCUV'\n if len(tag) < 3:\n raise NotFoundError('Tag less than 3 characters.', 404)\n invalid = [c for c in tag if c not in allowed]\n if invalid:\n raise NotFoundError(invalid, 404)\n return tag\n\n\ndef typecasted(func):\n \"\"\"Decorator that converts arguments via annotations.\n Source: https://github.com/cgrok/clashroyale/blob/master/clashroyale/official_api/utils.py#L11\"\"\"\n signature = inspect.signature(func).parameters.items()\n\n @wraps(func)\n def wrapper(*args, **kwargs):\n args = list(args)\n new_args = []\n new_kwargs = {}\n for _, param in signature:\n converter = param.annotation\n if converter is inspect._empty:\n converter = lambda a: a\n if param.kind is param.POSITIONAL_OR_KEYWORD:\n if args:\n to_conv = args.pop(0)\n new_args.append(converter(to_conv))\n elif param.kind is param.VAR_POSITIONAL:\n for a in args:\n new_args.append(converter(a))\n else:\n for k, v in kwargs.items():\n nk, nv = converter(k, v)\n new_kwargs[nk] = nv\n return func(*new_args, **new_kwargs)\n return wrapper\n", "step-4": "import inspect\nimport json\nimport os\nimport re\nimport urllib.request\nfrom functools import wraps\nfrom ..errors import NotFoundError\n\n\nclass API:\n\n def __init__(self, base_url, version=1):\n self.BASE = base_url or 'https://api.starlist.pro/v{}'.format(version)\n self.PROFILE = self.BASE + '/player'\n self.CLUB = self.BASE + '/club'\n self.LEADERBOARD = self.BASE + '/leaderboards'\n self.EVENTS = self.BASE + '/events'\n self.MISC = self.BASE + '/misc'\n self.BATTLELOG = self.PROFILE + '/battlelog'\n self.CLUB_SEARCH = self.CLUB + '/search'\n self.CONSTANTS = 'https://fourjr.herokuapp.com/bs/constants/'\n path = os.path.join(os.path.dirname(__file__), os.path.pardir)\n with open(os.path.join(path, '__init__.py')) as f:\n self.VERSION = re.search('^__version__ = [\\\\\\'\"]([^\\\\\\'\"]*)[\\\\\\'\"]'\n , f.read(), re.MULTILINE).group(1)\n try:\n data = json.loads(urllib.request.urlopen(self.CONSTANTS).read())\n except (TypeError, urllib.error.HTTPError, urllib.error.URLError):\n self.BRAWLERS = {}\n else:\n if data:\n self.BRAWLERS = {b['tID'].lower(): (str(b['scId'])[:2] +\n '0' + str(b['scId'])[2:]) for b in data['characters'] if\n b['tID']}\n else:\n self.BRAWLERS = {}\n\n\ndef bstag(tag):\n tag = tag.strip('#').upper().replace('O', '0')\n allowed = '0289PYLQGRJCUV'\n if len(tag) < 3:\n raise NotFoundError('Tag less than 3 characters.', 404)\n invalid = [c for c in tag if c not in allowed]\n if invalid:\n raise NotFoundError(invalid, 404)\n return tag\n\n\ndef typecasted(func):\n \"\"\"Decorator that converts arguments via annotations.\n Source: https://github.com/cgrok/clashroyale/blob/master/clashroyale/official_api/utils.py#L11\"\"\"\n signature = inspect.signature(func).parameters.items()\n\n @wraps(func)\n def wrapper(*args, **kwargs):\n 
args = list(args)\n new_args = []\n new_kwargs = {}\n for _, param in signature:\n converter = param.annotation\n if converter is inspect._empty:\n converter = lambda a: a\n if param.kind is param.POSITIONAL_OR_KEYWORD:\n if args:\n to_conv = args.pop(0)\n new_args.append(converter(to_conv))\n elif param.kind is param.VAR_POSITIONAL:\n for a in args:\n new_args.append(converter(a))\n else:\n for k, v in kwargs.items():\n nk, nv = converter(k, v)\n new_kwargs[nk] = nv\n return func(*new_args, **new_kwargs)\n return wrapper\n", "step-5": "import inspect\nimport json\nimport os\nimport re\nimport urllib.request\nfrom functools import wraps\n\nfrom ..errors import NotFoundError\n\n\nclass API:\n def __init__(self, base_url, version=1):\n self.BASE = base_url or 'https://api.starlist.pro/v{}'.format(version)\n self.PROFILE = self.BASE + '/player'\n self.CLUB = self.BASE + '/club'\n self.LEADERBOARD = self.BASE + '/leaderboards'\n self.EVENTS = self.BASE + '/events'\n self.MISC = self.BASE + '/misc'\n self.BATTLELOG = self.PROFILE + '/battlelog'\n self.CLUB_SEARCH = self.CLUB + '/search'\n self.CONSTANTS = 'https://fourjr.herokuapp.com/bs/constants/'\n # self.BRAWLERS = [\n # 'shelly', 'nita', 'colt', 'bull', 'jessie', # league reward 0-500\n # 'brock', 'dynamike', 'bo', 'tick', '8-bit' # league reward 1000+\n # 'el primo', 'barley', 'poco', 'rosa', # rare\n # 'rico', 'penny', 'darryl', 'carl', # super rare\n # 'frank', 'pam', 'piper', 'bibi', # epic\n # 'mortis', 'tara', 'gene', # mythic\n # 'spike', 'crow', 'leon' # legendary\n # ]\n\n path = os.path.join(os.path.dirname(__file__), os.path.pardir)\n with open(os.path.join(path, '__init__.py')) as f:\n self.VERSION = re.search(r'^__version__ = [\\'\"]([^\\'\"]*)[\\'\"]', f.read(), re.MULTILINE).group(1)\n\n try:\n data = json.loads(urllib.request.urlopen(self.CONSTANTS).read())\n except (TypeError, urllib.error.HTTPError, urllib.error.URLError):\n self.BRAWLERS = {}\n else:\n if data:\n self.BRAWLERS = {b['tID'].lower(): str(b['scId'])[:2] + '0' + str(b['scId'])[2:] for b in data['characters'] if b['tID']}\n else:\n self.BRAWLERS = {}\n\n\ndef bstag(tag):\n tag = tag.strip('#').upper().replace('O', '0')\n allowed = '0289PYLQGRJCUV'\n if len(tag) < 3:\n raise NotFoundError('Tag less than 3 characters.', 404)\n invalid = [c for c in tag if c not in allowed]\n if invalid:\n raise NotFoundError(invalid, 404)\n return tag\n\ndef typecasted(func):\n '''Decorator that converts arguments via annotations.\n Source: https://github.com/cgrok/clashroyale/blob/master/clashroyale/official_api/utils.py#L11'''\n signature = inspect.signature(func).parameters.items()\n\n @wraps(func)\n def wrapper(*args, **kwargs):\n args = list(args)\n new_args = []\n new_kwargs = {}\n for _, param in signature:\n converter = param.annotation\n if converter is inspect._empty:\n converter = lambda a: a # do nothing\n if param.kind is param.POSITIONAL_OR_KEYWORD:\n if args:\n to_conv = args.pop(0)\n new_args.append(converter(to_conv))\n elif param.kind is param.VAR_POSITIONAL:\n for a in args:\n new_args.append(converter(a))\n else:\n for k, v in kwargs.items():\n nk, nv = converter(k, v)\n new_kwargs[nk] = nv\n return func(*new_args, **new_kwargs)\n return wrapper\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
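The `typecasted` decorator in the record above converts positional arguments through their annotations, which is how `bstag` normalizes player tags. A minimal usage sketch relying on those two definitions from the row; `get_player` is a hypothetical stand-in, not part of the source library:

@typecasted
def get_player(tag: bstag):  # the annotation doubles as the converter
    return tag

print(get_player('#2ppO'))  # '2PP0' -- '#' stripped, upper-cased, 'O' -> '0'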
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> reload(sys) sys.setdefaultencoding('utf-8') <|reserved_special_token_0|> for l in root.iter('l'): file.write(''.join(l.itertext()) + '\n') file.close() <|reserved_special_token_1|> <|reserved_special_token_0|> reload(sys) sys.setdefaultencoding('utf-8') <|reserved_special_token_0|> tree = ET.parse('iliad1.xml') root = tree.getroot() file = open('iliad1_clean.txt', 'w') for l in root.iter('l'): file.write(''.join(l.itertext()) + '\n') file.close() <|reserved_special_token_1|> import sys reload(sys) sys.setdefaultencoding('utf-8') import xml.etree.ElementTree as ET tree = ET.parse('iliad1.xml') root = tree.getroot() file = open('iliad1_clean.txt', 'w') for l in root.iter('l'): file.write(''.join(l.itertext()) + '\n') file.close() <|reserved_special_token_1|> import sys reload(sys) sys.setdefaultencoding('utf-8') import xml.etree.ElementTree as ET tree = ET.parse('iliad1.xml') root = tree.getroot() file = open('iliad1_clean.txt','w') for l in root.iter('l'): file.write(''.join(l.itertext()) + "\n") file.close()
flexible
{ "blob_id": "cfea7848dfb41c913e5d8fec2f0f4f8afaaa09f3", "index": 5928, "step-1": "<mask token>\n", "step-2": "<mask token>\nreload(sys)\nsys.setdefaultencoding('utf-8')\n<mask token>\nfor l in root.iter('l'):\n file.write(''.join(l.itertext()) + '\\n')\nfile.close()\n", "step-3": "<mask token>\nreload(sys)\nsys.setdefaultencoding('utf-8')\n<mask token>\ntree = ET.parse('iliad1.xml')\nroot = tree.getroot()\nfile = open('iliad1_clean.txt', 'w')\nfor l in root.iter('l'):\n file.write(''.join(l.itertext()) + '\\n')\nfile.close()\n", "step-4": "import sys\nreload(sys)\nsys.setdefaultencoding('utf-8')\nimport xml.etree.ElementTree as ET\ntree = ET.parse('iliad1.xml')\nroot = tree.getroot()\nfile = open('iliad1_clean.txt', 'w')\nfor l in root.iter('l'):\n file.write(''.join(l.itertext()) + '\\n')\nfile.close()\n", "step-5": "import sys\nreload(sys)\nsys.setdefaultencoding('utf-8')\n\nimport xml.etree.ElementTree as ET\ntree = ET.parse('iliad1.xml')\nroot = tree.getroot()\n\nfile = open('iliad1_clean.txt','w')\nfor l in root.iter('l'):\n\tfile.write(''.join(l.itertext()) + \"\\n\")\nfile.close()", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
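The steps in the row above are Python 2 (`reload(sys)` plus `sys.setdefaultencoding` is a Python 2-only encoding hack). A Python 3 sketch of the same extraction, assuming the same `iliad1.xml` input:

import xml.etree.ElementTree as ET

tree = ET.parse('iliad1.xml')
root = tree.getroot()

# open() takes the encoding directly in Python 3, so no sys-level hack is needed
with open('iliad1_clean.txt', 'w', encoding='utf-8') as f:
    for l in root.iter('l'):
        f.write(''.join(l.itertext()) + '\n')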
<|reserved_special_token_0|> class QNetwork: <|reserved_special_token_0|> def __init__(self, state_dim: int, action_dim: int=3, hidden_layer_sizes: List=[128, 256, 256, 128], activation: str='relu'): self._state_dim = state_dim self._action_dim = action_dim self._hidden_layer_sizes = hidden_layer_sizes self._activation = activation self._model = tf.keras.Sequential() self._model.add(Dense(units=self._hidden_layer_sizes[0], input_dim= self._state_dim, activation=self._activation)) for i in range(2, len(self._hidden_layer_sizes)): self._model.add(Dense(self._hidden_layer_sizes[i], activation= self._activation)) self._model.add(Dense(self._action_dim, activation='linear')) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class QNetwork: <|reserved_special_token_0|> def __init__(self, state_dim: int, action_dim: int=3, hidden_layer_sizes: List=[128, 256, 256, 128], activation: str='relu'): self._state_dim = state_dim self._action_dim = action_dim self._hidden_layer_sizes = hidden_layer_sizes self._activation = activation self._model = tf.keras.Sequential() self._model.add(Dense(units=self._hidden_layer_sizes[0], input_dim= self._state_dim, activation=self._activation)) for i in range(2, len(self._hidden_layer_sizes)): self._model.add(Dense(self._hidden_layer_sizes[i], activation= self._activation)) self._model.add(Dense(self._action_dim, activation='linear')) def get_model(self) ->tf.keras.Model: return self._model <|reserved_special_token_1|> <|reserved_special_token_0|> class QNetwork: """Create the neural network architecture for the DQN agent.""" def __init__(self, state_dim: int, action_dim: int=3, hidden_layer_sizes: List=[128, 256, 256, 128], activation: str='relu'): self._state_dim = state_dim self._action_dim = action_dim self._hidden_layer_sizes = hidden_layer_sizes self._activation = activation self._model = tf.keras.Sequential() self._model.add(Dense(units=self._hidden_layer_sizes[0], input_dim= self._state_dim, activation=self._activation)) for i in range(2, len(self._hidden_layer_sizes)): self._model.add(Dense(self._hidden_layer_sizes[i], activation= self._activation)) self._model.add(Dense(self._action_dim, activation='linear')) def get_model(self) ->tf.keras.Model: return self._model <|reserved_special_token_1|> from typing import List import tensorflow as tf from tensorflow.keras.layers import Dense <|reserved_special_token_0|> class QNetwork: """Create the neural network architecture for the DQN agent.""" def __init__(self, state_dim: int, action_dim: int=3, hidden_layer_sizes: List=[128, 256, 256, 128], activation: str='relu'): self._state_dim = state_dim self._action_dim = action_dim self._hidden_layer_sizes = hidden_layer_sizes self._activation = activation self._model = tf.keras.Sequential() self._model.add(Dense(units=self._hidden_layer_sizes[0], input_dim= self._state_dim, activation=self._activation)) for i in range(2, len(self._hidden_layer_sizes)): self._model.add(Dense(self._hidden_layer_sizes[i], activation= self._activation)) self._model.add(Dense(self._action_dim, activation='linear')) def get_model(self) ->tf.keras.Model: return self._model <|reserved_special_token_1|> from typing import List import tensorflow as tf from tensorflow.keras.layers import Dense """Possible agent network structures implemented as Tensorflow Modules""" class QNetwork: """Create the neural network architecture for the DQN agent.""" def __init__( self, state_dim: int, action_dim: int = 3, # Default: agents can hold=0, buy=1, or sell=2. 
hidden_layer_sizes: List = [128, 256, 256, 128], activation: str = "relu", ): self._state_dim = state_dim self._action_dim = action_dim self._hidden_layer_sizes = hidden_layer_sizes self._activation = activation self._model = tf.keras.Sequential() self._model.add( Dense( units=self._hidden_layer_sizes[0], input_dim=self._state_dim, activation=self._activation, ) ) for i in range(2, len(self._hidden_layer_sizes)): self._model.add( Dense(self._hidden_layer_sizes[i], activation=self._activation) ) self._model.add(Dense(self._action_dim, activation="linear")) def get_model(self) -> tf.keras.Model: return self._model
flexible
{ "blob_id": "a3e655350fb5fe7999bea4a87fb62c7698fb63f1", "index": 6663, "step-1": "<mask token>\n\n\nclass QNetwork:\n <mask token>\n\n def __init__(self, state_dim: int, action_dim: int=3,\n hidden_layer_sizes: List=[128, 256, 256, 128], activation: str='relu'):\n self._state_dim = state_dim\n self._action_dim = action_dim\n self._hidden_layer_sizes = hidden_layer_sizes\n self._activation = activation\n self._model = tf.keras.Sequential()\n self._model.add(Dense(units=self._hidden_layer_sizes[0], input_dim=\n self._state_dim, activation=self._activation))\n for i in range(2, len(self._hidden_layer_sizes)):\n self._model.add(Dense(self._hidden_layer_sizes[i], activation=\n self._activation))\n self._model.add(Dense(self._action_dim, activation='linear'))\n <mask token>\n", "step-2": "<mask token>\n\n\nclass QNetwork:\n <mask token>\n\n def __init__(self, state_dim: int, action_dim: int=3,\n hidden_layer_sizes: List=[128, 256, 256, 128], activation: str='relu'):\n self._state_dim = state_dim\n self._action_dim = action_dim\n self._hidden_layer_sizes = hidden_layer_sizes\n self._activation = activation\n self._model = tf.keras.Sequential()\n self._model.add(Dense(units=self._hidden_layer_sizes[0], input_dim=\n self._state_dim, activation=self._activation))\n for i in range(2, len(self._hidden_layer_sizes)):\n self._model.add(Dense(self._hidden_layer_sizes[i], activation=\n self._activation))\n self._model.add(Dense(self._action_dim, activation='linear'))\n\n def get_model(self) ->tf.keras.Model:\n return self._model\n", "step-3": "<mask token>\n\n\nclass QNetwork:\n \"\"\"Create the neural network architecture for the DQN agent.\"\"\"\n\n def __init__(self, state_dim: int, action_dim: int=3,\n hidden_layer_sizes: List=[128, 256, 256, 128], activation: str='relu'):\n self._state_dim = state_dim\n self._action_dim = action_dim\n self._hidden_layer_sizes = hidden_layer_sizes\n self._activation = activation\n self._model = tf.keras.Sequential()\n self._model.add(Dense(units=self._hidden_layer_sizes[0], input_dim=\n self._state_dim, activation=self._activation))\n for i in range(2, len(self._hidden_layer_sizes)):\n self._model.add(Dense(self._hidden_layer_sizes[i], activation=\n self._activation))\n self._model.add(Dense(self._action_dim, activation='linear'))\n\n def get_model(self) ->tf.keras.Model:\n return self._model\n", "step-4": "from typing import List\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Dense\n<mask token>\n\n\nclass QNetwork:\n \"\"\"Create the neural network architecture for the DQN agent.\"\"\"\n\n def __init__(self, state_dim: int, action_dim: int=3,\n hidden_layer_sizes: List=[128, 256, 256, 128], activation: str='relu'):\n self._state_dim = state_dim\n self._action_dim = action_dim\n self._hidden_layer_sizes = hidden_layer_sizes\n self._activation = activation\n self._model = tf.keras.Sequential()\n self._model.add(Dense(units=self._hidden_layer_sizes[0], input_dim=\n self._state_dim, activation=self._activation))\n for i in range(2, len(self._hidden_layer_sizes)):\n self._model.add(Dense(self._hidden_layer_sizes[i], activation=\n self._activation))\n self._model.add(Dense(self._action_dim, activation='linear'))\n\n def get_model(self) ->tf.keras.Model:\n return self._model\n", "step-5": "from typing import List\n\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Dense\n\n\"\"\"Possible agent network structures implemented as Tensorflow Modules\"\"\"\n\n\nclass QNetwork:\n \"\"\"Create the neural network architecture for the DQN 
agent.\"\"\"\n\n def __init__(\n self,\n state_dim: int,\n action_dim: int = 3, # Default: agents can hold=0, buy=1, or sell=2.\n hidden_layer_sizes: List = [128, 256, 256, 128],\n activation: str = \"relu\",\n ):\n\n self._state_dim = state_dim\n self._action_dim = action_dim\n self._hidden_layer_sizes = hidden_layer_sizes\n self._activation = activation\n\n self._model = tf.keras.Sequential()\n self._model.add(\n Dense(\n units=self._hidden_layer_sizes[0],\n input_dim=self._state_dim,\n activation=self._activation,\n )\n )\n\n for i in range(2, len(self._hidden_layer_sizes)):\n self._model.add(\n Dense(self._hidden_layer_sizes[i], activation=self._activation)\n )\n\n self._model.add(Dense(self._action_dim, activation=\"linear\"))\n\n def get_model(self) -> tf.keras.Model:\n return self._model\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
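One detail worth flagging in the QNetwork row above: `for i in range(2, len(self._hidden_layer_sizes))` starts at index 2, so the hidden layer at index 1 is never added -- `[128, 256, 256, 128]` yields layers of 128, 256 and 128 units. If every listed size is intended, the usual pattern looks like this (a sketch with placeholder dimensions, not the author's code):

import tensorflow as tf
from tensorflow.keras.layers import Dense

hidden_layer_sizes = [128, 256, 256, 128]
state_dim, action_dim = 4, 3  # placeholders, not values from the source

model = tf.keras.Sequential()
model.add(Dense(hidden_layer_sizes[0], input_dim=state_dim, activation='relu'))
for i in range(1, len(hidden_layer_sizes)):  # start at 1: layer 0 is already added
    model.add(Dense(hidden_layer_sizes[i], activation='relu'))
model.add(Dense(action_dim, activation='linear'))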
<|reserved_special_token_0|> <|reserved_special_token_1|> db = {'host': 'localhost', 'user': 'root', 'passwd': 'm74e71', 'database': 'dw_toner'} data_inicial = '1990-01-01' ano_final = 2018 feriados = 'feriados.csv' meses_de_ferias = 1, 2, 7, 12 dias_final_semana = 1, 6, 7 <|reserved_special_token_1|> #!/usr/bin/env python3 # -*- coding: utf-8 -*- db = { 'host': "localhost", 'user': "root", 'passwd': "m74e71", 'database': "dw_toner" } data_inicial = '1990-01-01' ano_final = 2018 feriados = "feriados.csv" meses_de_ferias = (1, 2, 7, 12) #Janeiro, Fevereiro, Julho, Dezembro dias_final_semana = (1, 6, 7) #Domingo, sexta e sábado
flexible
{ "blob_id": "360881cecbad88ea5d150548fba6a39d8dc30681", "index": 8598, "step-1": "<mask token>\n", "step-2": "db = {'host': 'localhost', 'user': 'root', 'passwd': 'm74e71', 'database':\n 'dw_toner'}\ndata_inicial = '1990-01-01'\nano_final = 2018\nferiados = 'feriados.csv'\nmeses_de_ferias = 1, 2, 7, 12\ndias_final_semana = 1, 6, 7\n", "step-3": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\ndb = {\n 'host': \"localhost\",\n 'user': \"root\",\n 'passwd': \"m74e71\",\n 'database': \"dw_toner\"\n}\n\ndata_inicial = '1990-01-01'\nano_final = 2018\n\nferiados = \"feriados.csv\"\n\nmeses_de_ferias = (1, 2, 7, 12) #Janeiro, Fevereiro, Julho, Dezembro\n\ndias_final_semana = (1, 6, 7) #Domingo, sexta e sábado\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
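A sketch of how a config dict shaped like `db` above is typically consumed. The driver choice is an assumption -- the source only defines the constants -- but `mysql-connector-python` accepts these exact key names, with `passwd` as an alias for `password`:

import mysql.connector

db = {'host': 'localhost', 'user': 'root', 'passwd': '...', 'database': 'dw_toner'}  # as above
conn = mysql.connector.connect(**db)
cur = conn.cursor()
cur.execute('SELECT 1')
conn.close()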
""" quiz materials for feature scaling clustering """ # FYI, the most straightforward implementation might # throw a divide-by-zero error, if the min and max # values are the same # but think about this for a second--that means that every # data point has the same value for that feature! # why would you rescale it? Or even use it at all? from __future__ import division data = [115, 140, 175] def featureScaling(arr): x_max = max(*arr) x_min = min(*arr) if (x_max == x_min): return None return map(lambda x: (x - x_min)/(x_max - x_min), arr) # tests of your feature scaler--line below is input data print featureScaling(data)
normal
{ "blob_id": "6a6a7cc6d4f601f4461488d02e03e832bc7ab634", "index": 2928, "step-1": "\"\"\" quiz materials for feature scaling clustering \"\"\"\n\n# FYI, the most straightforward implementation might\n# throw a divide-by-zero error, if the min and max\n# values are the same\n# but think about this for a second--that means that every\n# data point has the same value for that feature!\n# why would you rescale it? Or even use it at all?\n\nfrom __future__ import division\n\n\ndata = [115, 140, 175]\n\n\ndef featureScaling(arr):\n x_max = max(*arr)\n x_min = min(*arr)\n if (x_max == x_min):\n return None\n return map(lambda x: (x - x_min)/(x_max - x_min), arr)\n\n# tests of your feature scaler--line below is input data\n\n\nprint featureScaling(data)\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
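The quiz solution above is Python 2 (`from __future__ import division`, a `print` statement, and an eagerly-evaluated `map`). A Python 3 sketch of the same min-max scaling:

def feature_scaling(arr):
    x_max, x_min = max(arr), min(arr)
    if x_max == x_min:
        return None  # constant feature: every point is identical, nothing to rescale
    return [(x - x_min) / (x_max - x_min) for x in arr]

print(feature_scaling([115, 140, 175]))  # [0.0, 0.4166..., 1.0]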
import numpy as np import random with open("./roc.txt", "r") as fin: with open("./roc_shuffle.txt", "w") as fout: tmp = [] for k, line in enumerate(fin): i = k + 1 if i % 6 == 0: idx = [0] + np.random.permutation(range(1,5)).tolist() for sen in np.take(tmp, idx).tolist(): fout.write(sen+"\n") tmp = [] fout.write(line.strip()+"\n") else: tmp.append(line.strip()) with open("./roc.txt", "r") as fin: with open("./roc_repeat.txt", "w") as fout: tmp = [] for k, line in enumerate(fin): i = k + 1 if i % 6 == 0: idx = random.randint(1,4) tmp[idx] = tmp[idx][:-1] + tmp[idx] for sen in tmp: fout.write(sen+"\n") tmp = [] fout.write(line.strip()+"\n") else: tmp.append(line.strip()) with open("./roc.txt", "r") as fin: with open("./roc_replace.txt", "w") as fout: post, tmp = [], [] for k, line in enumerate(fin): i = k + 1 if i % 6 == 0: post.append(tmp) tmp = [] else: tmp.append(line.strip().split()) data = {"1":[], "2":[], "3":[], "4":[], "5":[]} for p in post: for i in range(5): data["%d"%(i+1)].append(p[i]) random_data = data.copy() for i in range(5): random_data["%d"%(i+1)] = np.random.permutation(random_data["%d"%(i+1)]) for k in range(len(post)): idx = np.random.permutation(range(1,5))[0] for i in range(5): if i == idx: fout.write(' '.join(random_data["%d"%(i+1)][k])+"\n") else: fout.write(' '.join(data["%d"%(i+1)][k])+"\n") fout.write("------\n")
normal
{ "blob_id": "2aec0581413d4fb0ffb4090231fde0fed974bf18", "index": 27, "step-1": "<mask token>\n", "step-2": "<mask token>\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_shuffle.txt', 'w') as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = [0] + np.random.permutation(range(1, 5)).tolist()\n for sen in np.take(tmp, idx).tolist():\n fout.write(sen + '\\n')\n tmp = []\n fout.write(line.strip() + '\\n')\n else:\n tmp.append(line.strip())\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_repeat.txt', 'w') as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = random.randint(1, 4)\n tmp[idx] = tmp[idx][:-1] + tmp[idx]\n for sen in tmp:\n fout.write(sen + '\\n')\n tmp = []\n fout.write(line.strip() + '\\n')\n else:\n tmp.append(line.strip())\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_replace.txt', 'w') as fout:\n post, tmp = [], []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n post.append(tmp)\n tmp = []\n else:\n tmp.append(line.strip().split())\n data = {'1': [], '2': [], '3': [], '4': [], '5': []}\n for p in post:\n for i in range(5):\n data['%d' % (i + 1)].append(p[i])\n random_data = data.copy()\n for i in range(5):\n random_data['%d' % (i + 1)] = np.random.permutation(random_data\n ['%d' % (i + 1)])\n for k in range(len(post)):\n idx = np.random.permutation(range(1, 5))[0]\n for i in range(5):\n if i == idx:\n fout.write(' '.join(random_data['%d' % (i + 1)][k]) + '\\n')\n else:\n fout.write(' '.join(data['%d' % (i + 1)][k]) + '\\n')\n fout.write('------\\n')\n", "step-3": "import numpy as np\nimport random\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_shuffle.txt', 'w') as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = [0] + np.random.permutation(range(1, 5)).tolist()\n for sen in np.take(tmp, idx).tolist():\n fout.write(sen + '\\n')\n tmp = []\n fout.write(line.strip() + '\\n')\n else:\n tmp.append(line.strip())\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_repeat.txt', 'w') as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = random.randint(1, 4)\n tmp[idx] = tmp[idx][:-1] + tmp[idx]\n for sen in tmp:\n fout.write(sen + '\\n')\n tmp = []\n fout.write(line.strip() + '\\n')\n else:\n tmp.append(line.strip())\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_replace.txt', 'w') as fout:\n post, tmp = [], []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n post.append(tmp)\n tmp = []\n else:\n tmp.append(line.strip().split())\n data = {'1': [], '2': [], '3': [], '4': [], '5': []}\n for p in post:\n for i in range(5):\n data['%d' % (i + 1)].append(p[i])\n random_data = data.copy()\n for i in range(5):\n random_data['%d' % (i + 1)] = np.random.permutation(random_data\n ['%d' % (i + 1)])\n for k in range(len(post)):\n idx = np.random.permutation(range(1, 5))[0]\n for i in range(5):\n if i == idx:\n fout.write(' '.join(random_data['%d' % (i + 1)][k]) + '\\n')\n else:\n fout.write(' '.join(data['%d' % (i + 1)][k]) + '\\n')\n fout.write('------\\n')\n", "step-4": "import numpy as np\nimport random\n\nwith open(\"./roc.txt\", \"r\") as fin:\n with open(\"./roc_shuffle.txt\", \"w\") as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = [0] + np.random.permutation(range(1,5)).tolist()\n for sen in np.take(tmp, idx).tolist():\n fout.write(sen+\"\\n\")\n tmp = []\n fout.write(line.strip()+\"\\n\")\n else:\n 
tmp.append(line.strip())\nwith open(\"./roc.txt\", \"r\") as fin:\n with open(\"./roc_repeat.txt\", \"w\") as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = random.randint(1,4)\n tmp[idx] = tmp[idx][:-1] + tmp[idx]\n for sen in tmp:\n fout.write(sen+\"\\n\")\n tmp = []\n fout.write(line.strip()+\"\\n\")\n else:\n tmp.append(line.strip())\nwith open(\"./roc.txt\", \"r\") as fin:\n with open(\"./roc_replace.txt\", \"w\") as fout:\n post, tmp = [], []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n post.append(tmp)\n tmp = []\n else:\n tmp.append(line.strip().split())\n data = {\"1\":[], \"2\":[], \"3\":[], \"4\":[], \"5\":[]}\n for p in post:\n for i in range(5):\n data[\"%d\"%(i+1)].append(p[i])\n random_data = data.copy()\n for i in range(5):\n random_data[\"%d\"%(i+1)] = np.random.permutation(random_data[\"%d\"%(i+1)])\n\n for k in range(len(post)):\n idx = np.random.permutation(range(1,5))[0]\n for i in range(5):\n if i == idx:\n fout.write(' '.join(random_data[\"%d\"%(i+1)][k])+\"\\n\")\n else:\n fout.write(' '.join(data[\"%d\"%(i+1)][k])+\"\\n\")\n fout.write(\"------\\n\")", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
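All three passes in the row above assume the same layout for `roc.txt`: one story per six lines, five sentences followed by a separator line (the replace pass writes it back as `------`). A sketch that reads that layout into memory, under the same assumption about the separator:

stories = []
with open('./roc.txt') as fin:
    block = []
    for line in fin:
        if line.strip() == '------':  # separator closes a five-sentence story
            stories.append(block)
            block = []
        else:
            block.append(line.strip())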
from graph import Graph import ast import itertools def add_nodes(g): nodes = ['a', 'b', 'c', 'd'] for n in nodes: g.add_node(n) def add_desc(g): desc = [('b', 'a'), ('b', 'c'), ('d', 'c')] for d in desc: g.add_desc(d) def add_edges(g): edges = [('b', 'a'), ('b', 'c'), ('d', 'c')] for e in edges: g.add_edge(e) def read_all_paths(n): all_paths = {} with open(n+'.txt', 'r') as infile: for line in infile: path = ast.literal_eval(line) if path: dest = path[-1][0] if dest in all_paths: all_paths[dest].append(path) else: all_paths[dest] = [path] return all_paths def is_blocked(path, obs_dict, g): prev_edge = [] for cur_edge in path: # try to find blocking transitions - either non-observed v-structures, or observed regulars if prev_edge: prev_node, prev_dir = prev_edge cur_node, cur_dir = cur_edge if prev_dir == 1 and cur_dir == 0: # V-structure blocking_v = True for n in g.nodes[prev_node].desc: if obs_dict[n]: blocking_v = False if blocking_v: return True else: # not V-structure if obs_dict[prev_node]: return True prev_edge = cur_edge return False def is_indep(obs_dict, all_paths, g): for path in all_paths: block = is_blocked(path, obs_dict, g) if block: continue else: # we have found a non-blocked path, so indep does not hold return False return True if __name__=='__main__': g = Graph() add_nodes(g) add_edges(g) add_desc(g) g.print_all_edges() g.print_all_descs() for n in g.nodes.keys(): g.get_all_paths(n, n) all_nodes = list(g.nodes.keys()) all_paths = {} for n in all_nodes: all_paths[n] = read_all_paths(n) s = len(all_nodes) obs_dict = {} combs = list(itertools.product([0,1], repeat = s)) for c in combs: for n, val in zip(all_nodes, c): obs_dict[n] = val for i, j in itertools.combinations(all_nodes, 2): indep = is_indep(obs_dict, all_paths[i][j], g) if indep: observed = [all_nodes[idx] for idx, val in enumerate(c) if val] if (not (i in observed)) and (not (j in observed)): print(i, j, str(observed)) # print(i, j, str([all_nodes[idx] for idx, val in enumerate(c) if val])) g.reset_files()
normal
{ "blob_id": "8efee4ad16e938e85a500e5aebf5154b5708b277", "index": 9287, "step-1": "<mask token>\n\n\ndef add_nodes(g):\n nodes = ['a', 'b', 'c', 'd']\n for n in nodes:\n g.add_node(n)\n\n\ndef add_desc(g):\n desc = [('b', 'a'), ('b', 'c'), ('d', 'c')]\n for d in desc:\n g.add_desc(d)\n\n\ndef add_edges(g):\n edges = [('b', 'a'), ('b', 'c'), ('d', 'c')]\n for e in edges:\n g.add_edge(e)\n\n\n<mask token>\n\n\ndef is_blocked(path, obs_dict, g):\n prev_edge = []\n for cur_edge in path:\n if prev_edge:\n prev_node, prev_dir = prev_edge\n cur_node, cur_dir = cur_edge\n if prev_dir == 1 and cur_dir == 0:\n blocking_v = True\n for n in g.nodes[prev_node].desc:\n if obs_dict[n]:\n blocking_v = False\n if blocking_v:\n return True\n elif obs_dict[prev_node]:\n return True\n prev_edge = cur_edge\n return False\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef add_nodes(g):\n nodes = ['a', 'b', 'c', 'd']\n for n in nodes:\n g.add_node(n)\n\n\ndef add_desc(g):\n desc = [('b', 'a'), ('b', 'c'), ('d', 'c')]\n for d in desc:\n g.add_desc(d)\n\n\ndef add_edges(g):\n edges = [('b', 'a'), ('b', 'c'), ('d', 'c')]\n for e in edges:\n g.add_edge(e)\n\n\ndef read_all_paths(n):\n all_paths = {}\n with open(n + '.txt', 'r') as infile:\n for line in infile:\n path = ast.literal_eval(line)\n if path:\n dest = path[-1][0]\n if dest in all_paths:\n all_paths[dest].append(path)\n else:\n all_paths[dest] = [path]\n return all_paths\n\n\ndef is_blocked(path, obs_dict, g):\n prev_edge = []\n for cur_edge in path:\n if prev_edge:\n prev_node, prev_dir = prev_edge\n cur_node, cur_dir = cur_edge\n if prev_dir == 1 and cur_dir == 0:\n blocking_v = True\n for n in g.nodes[prev_node].desc:\n if obs_dict[n]:\n blocking_v = False\n if blocking_v:\n return True\n elif obs_dict[prev_node]:\n return True\n prev_edge = cur_edge\n return False\n\n\ndef is_indep(obs_dict, all_paths, g):\n for path in all_paths:\n block = is_blocked(path, obs_dict, g)\n if block:\n continue\n else:\n return False\n return True\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef add_nodes(g):\n nodes = ['a', 'b', 'c', 'd']\n for n in nodes:\n g.add_node(n)\n\n\ndef add_desc(g):\n desc = [('b', 'a'), ('b', 'c'), ('d', 'c')]\n for d in desc:\n g.add_desc(d)\n\n\ndef add_edges(g):\n edges = [('b', 'a'), ('b', 'c'), ('d', 'c')]\n for e in edges:\n g.add_edge(e)\n\n\ndef read_all_paths(n):\n all_paths = {}\n with open(n + '.txt', 'r') as infile:\n for line in infile:\n path = ast.literal_eval(line)\n if path:\n dest = path[-1][0]\n if dest in all_paths:\n all_paths[dest].append(path)\n else:\n all_paths[dest] = [path]\n return all_paths\n\n\ndef is_blocked(path, obs_dict, g):\n prev_edge = []\n for cur_edge in path:\n if prev_edge:\n prev_node, prev_dir = prev_edge\n cur_node, cur_dir = cur_edge\n if prev_dir == 1 and cur_dir == 0:\n blocking_v = True\n for n in g.nodes[prev_node].desc:\n if obs_dict[n]:\n blocking_v = False\n if blocking_v:\n return True\n elif obs_dict[prev_node]:\n return True\n prev_edge = cur_edge\n return False\n\n\ndef is_indep(obs_dict, all_paths, g):\n for path in all_paths:\n block = is_blocked(path, obs_dict, g)\n if block:\n continue\n else:\n return False\n return True\n\n\nif __name__ == '__main__':\n g = Graph()\n add_nodes(g)\n add_edges(g)\n add_desc(g)\n g.print_all_edges()\n g.print_all_descs()\n for n in g.nodes.keys():\n g.get_all_paths(n, n)\n all_nodes = list(g.nodes.keys())\n all_paths = {}\n for n in all_nodes:\n all_paths[n] = read_all_paths(n)\n s = len(all_nodes)\n obs_dict = {}\n combs = 
list(itertools.product([0, 1], repeat=s))\n for c in combs:\n for n, val in zip(all_nodes, c):\n obs_dict[n] = val\n for i, j in itertools.combinations(all_nodes, 2):\n indep = is_indep(obs_dict, all_paths[i][j], g)\n if indep:\n observed = [all_nodes[idx] for idx, val in enumerate(c) if val]\n if not i in observed and not j in observed:\n print(i, j, str(observed))\n g.reset_files()\n", "step-4": "from graph import Graph\nimport ast\nimport itertools\n\n\ndef add_nodes(g):\n nodes = ['a', 'b', 'c', 'd']\n for n in nodes:\n g.add_node(n)\n\n\ndef add_desc(g):\n desc = [('b', 'a'), ('b', 'c'), ('d', 'c')]\n for d in desc:\n g.add_desc(d)\n\n\ndef add_edges(g):\n edges = [('b', 'a'), ('b', 'c'), ('d', 'c')]\n for e in edges:\n g.add_edge(e)\n\n\ndef read_all_paths(n):\n all_paths = {}\n with open(n + '.txt', 'r') as infile:\n for line in infile:\n path = ast.literal_eval(line)\n if path:\n dest = path[-1][0]\n if dest in all_paths:\n all_paths[dest].append(path)\n else:\n all_paths[dest] = [path]\n return all_paths\n\n\ndef is_blocked(path, obs_dict, g):\n prev_edge = []\n for cur_edge in path:\n if prev_edge:\n prev_node, prev_dir = prev_edge\n cur_node, cur_dir = cur_edge\n if prev_dir == 1 and cur_dir == 0:\n blocking_v = True\n for n in g.nodes[prev_node].desc:\n if obs_dict[n]:\n blocking_v = False\n if blocking_v:\n return True\n elif obs_dict[prev_node]:\n return True\n prev_edge = cur_edge\n return False\n\n\ndef is_indep(obs_dict, all_paths, g):\n for path in all_paths:\n block = is_blocked(path, obs_dict, g)\n if block:\n continue\n else:\n return False\n return True\n\n\nif __name__ == '__main__':\n g = Graph()\n add_nodes(g)\n add_edges(g)\n add_desc(g)\n g.print_all_edges()\n g.print_all_descs()\n for n in g.nodes.keys():\n g.get_all_paths(n, n)\n all_nodes = list(g.nodes.keys())\n all_paths = {}\n for n in all_nodes:\n all_paths[n] = read_all_paths(n)\n s = len(all_nodes)\n obs_dict = {}\n combs = list(itertools.product([0, 1], repeat=s))\n for c in combs:\n for n, val in zip(all_nodes, c):\n obs_dict[n] = val\n for i, j in itertools.combinations(all_nodes, 2):\n indep = is_indep(obs_dict, all_paths[i][j], g)\n if indep:\n observed = [all_nodes[idx] for idx, val in enumerate(c) if val]\n if not i in observed and not j in observed:\n print(i, j, str(observed))\n g.reset_files()\n", "step-5": "from graph import Graph\nimport ast\nimport itertools\n\n\ndef add_nodes(g):\n\n nodes = ['a', 'b', 'c', 'd']\n for n in nodes:\n g.add_node(n)\n\ndef add_desc(g):\n desc = [('b', 'a'), ('b', 'c'), ('d', 'c')]\n\n for d in desc:\n g.add_desc(d)\n\ndef add_edges(g):\n\n edges = [('b', 'a'), ('b', 'c'), ('d', 'c')]\n\n for e in edges:\n g.add_edge(e)\n\ndef read_all_paths(n):\n all_paths = {}\n with open(n+'.txt', 'r') as infile:\n for line in infile:\n path = ast.literal_eval(line)\n if path:\n dest = path[-1][0]\n if dest in all_paths:\n all_paths[dest].append(path)\n else:\n all_paths[dest] = [path]\n\n return all_paths\n\ndef is_blocked(path, obs_dict, g):\n\n prev_edge = []\n\n for cur_edge in path:\n # try to find blocking transitions - either non-observed v-structures, or observed regulars\n\n if prev_edge:\n prev_node, prev_dir = prev_edge\n cur_node, cur_dir = cur_edge\n\n if prev_dir == 1 and cur_dir == 0:\n # V-structure\n\n blocking_v = True\n for n in g.nodes[prev_node].desc:\n if obs_dict[n]:\n blocking_v = False\n\n if blocking_v:\n return True\n\n\n else:\n # not V-structure\n\n if obs_dict[prev_node]:\n return True\n\n prev_edge = cur_edge\n\n return False\n\n\n\ndef 
is_indep(obs_dict, all_paths, g):\n\n for path in all_paths:\n block = is_blocked(path, obs_dict, g)\n if block:\n continue\n else:\n # we have found a non-blocked path, so indep does not hold\n return False\n\n return True\n\nif __name__=='__main__':\n\n g = Graph()\n add_nodes(g)\n add_edges(g)\n add_desc(g)\n g.print_all_edges()\n g.print_all_descs()\n for n in g.nodes.keys():\n g.get_all_paths(n, n)\n\n all_nodes = list(g.nodes.keys())\n\n all_paths = {}\n for n in all_nodes:\n all_paths[n] = read_all_paths(n)\n\n s = len(all_nodes)\n obs_dict = {}\n\n combs = list(itertools.product([0,1], repeat = s))\n for c in combs:\n for n, val in zip(all_nodes, c):\n obs_dict[n] = val\n\n for i, j in itertools.combinations(all_nodes, 2):\n\n indep = is_indep(obs_dict, all_paths[i][j], g)\n if indep:\n observed = [all_nodes[idx] for idx, val in enumerate(c) if val]\n if (not (i in observed)) and (not (j in observed)):\n print(i, j, str(observed))\n # print(i, j, str([all_nodes[idx] for idx, val in enumerate(c) if val]))\n\n\n g.reset_files()\n\n\n", "step-ids": [ 4, 6, 7, 8, 9 ] }
[ 4, 6, 7, 8, 9 ]
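For the graph in the row above (edges b->a, b->c, d->c, so c is a collider), standard d-separation pins down what the script should report: with nothing observed, every path between a and d, or between b and d, runs through the unobserved collider c and is blocked, while a and c stay dependent through the unobserved fork b; observing c opens the collider. A few hand-checked cases, assuming the path files produced by `graph.py` (not shown) encode paths the way `is_blocked` expects:

# observed = []     -> prints 'a d []' and 'b d []'
#                      (collider c blocks a<-b->c<-d and b->c<-d)
# observed = ['b']  -> prints "a c ['b']" and "a d ['b']"
#                      (the fork at b is blocked once b is observed)
# observed = ['c']  -> prints nothing: observing the collider opens b->c<-d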
# -*- coding: utf-8 -*- """ Created on Fri Jul 3 18:27:30 2020 @author: PREET MODH """ for _ in range(int(input())): n=int(input()) xco,yco=[],[] flagx,flagy,xans,yans=1,1,0,0 for x in range(4*n-1): x,y=input().split() xco.append(int(x)) yco.append(int(y)) xco.sort(),yco.sort() xco.append(xco[-1]+1),yco.append(yco[-1]+1) countx,county,i=1,1,0 while(i<len(xco)-1): if flagx==1: if xco[i]==xco[i+1]: countx+=1 else: if countx%2!=0: xans=xco[i] flagx=0 countx=1 if flagy==1: if yco[i]==yco[i+1]: county+=1 else: if county%2!=0: yans=yco[i] flagy=0 county=1 if flagx==0 and flagy==0: break i=i+1 print(xans,yans,end=' ')
normal
{ "blob_id": "d3b0a1d8b9f800c5d34732f4701ea2183405e5b4", "index": 9523, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor _ in range(int(input())):\n n = int(input())\n xco, yco = [], []\n flagx, flagy, xans, yans = 1, 1, 0, 0\n for x in range(4 * n - 1):\n x, y = input().split()\n xco.append(int(x))\n yco.append(int(y))\n xco.sort(), yco.sort()\n xco.append(xco[-1] + 1), yco.append(yco[-1] + 1)\n countx, county, i = 1, 1, 0\n while i < len(xco) - 1:\n if flagx == 1:\n if xco[i] == xco[i + 1]:\n countx += 1\n else:\n if countx % 2 != 0:\n xans = xco[i]\n flagx = 0\n countx = 1\n if flagy == 1:\n if yco[i] == yco[i + 1]:\n county += 1\n else:\n if county % 2 != 0:\n yans = yco[i]\n flagy = 0\n county = 1\n if flagx == 0 and flagy == 0:\n break\n i = i + 1\n print(xans, yans, end=' ')\n", "step-3": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Jul 3 18:27:30 2020\r\n\r\n@author: PREET MODH\r\n\"\"\"\r\n\r\n\r\nfor _ in range(int(input())):\r\n n=int(input())\r\n xco,yco=[],[]\r\n flagx,flagy,xans,yans=1,1,0,0\r\n for x in range(4*n-1):\r\n x,y=input().split()\r\n xco.append(int(x))\r\n yco.append(int(y))\r\n \r\n xco.sort(),yco.sort()\r\n xco.append(xco[-1]+1),yco.append(yco[-1]+1)\r\n \r\n countx,county,i=1,1,0\r\n while(i<len(xco)-1):\r\n if flagx==1:\r\n if xco[i]==xco[i+1]:\r\n countx+=1\r\n else:\r\n if countx%2!=0:\r\n xans=xco[i]\r\n flagx=0\r\n countx=1\r\n if flagy==1:\r\n if yco[i]==yco[i+1]:\r\n county+=1\r\n else:\r\n if county%2!=0:\r\n yans=yco[i]\r\n flagy=0\r\n county=1\r\n if flagx==0 and flagy==0:\r\n break\r\n i=i+1\r\n print(xans,yans,end=' ')\r\n ", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
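The odd-count scan in the row above works because, over all 4n rectangle corners, every x- (and y-) coordinate occurs an even number of times, so removing one point leaves exactly one x and one y with odd multiplicity. Since XOR cancels pairs, the same answer drops out without sorting -- a sketch under that reading of the problem:

for _ in range(int(input())):
    n = int(input())
    xans = yans = 0
    for _ in range(4 * n - 1):
        x, y = map(int, input().split())
        xans ^= x  # coordinates seen an even number of times cancel to 0
        yans ^= y
    print(xans, yans)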
<|reserved_special_token_0|> class tax(osv.Model): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class tax(osv.Model): _inherit = 'sgr.tax' def send_alerts(self, cr, uid, context=None): self.send_alerts_with_upcoming_days(cr, uid, 2, context=context) def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days, context=None): now = datetime.now() now_plus_upcoming_days = now + timedelta(days=upcoming_days) tax_to_paid_ids = self.search(cr, uid, [('state', '=', 'to_pay')], context=context) tax_due_date_soon = [] taxs_due = [] overdue_taxs = [] for tax in self.browse(cr, uid, tax_to_paid_ids, context=context): if not tax.approval_date: continue approval_date = datetime.strptime(tax.approval_date, date_format) if approval_date <= now: overdue_taxs.append(tax) elif now < approval_date and approval_date <= now_plus_upcoming_days: taxs_due.append(tax) for tax in taxs_due: self.message_post(cr, uid, [tax.id], body= 'Tax payment deadline soon', subtype= 'sgr_alerts.mt_tax_due_date_soon', context=context) for tax in overdue_taxs: self.message_post(cr, uid, [tax.id], body= 'Tax payment deadline expired', subtype= 'sgr_alerts.mt_tax_due_date', context=context) return True tax() <|reserved_special_token_1|> <|reserved_special_token_0|> date_format = '%Y-%m-%d' class tax(osv.Model): _inherit = 'sgr.tax' def send_alerts(self, cr, uid, context=None): self.send_alerts_with_upcoming_days(cr, uid, 2, context=context) def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days, context=None): now = datetime.now() now_plus_upcoming_days = now + timedelta(days=upcoming_days) tax_to_paid_ids = self.search(cr, uid, [('state', '=', 'to_pay')], context=context) tax_due_date_soon = [] taxs_due = [] overdue_taxs = [] for tax in self.browse(cr, uid, tax_to_paid_ids, context=context): if not tax.approval_date: continue approval_date = datetime.strptime(tax.approval_date, date_format) if approval_date <= now: overdue_taxs.append(tax) elif now < approval_date and approval_date <= now_plus_upcoming_days: taxs_due.append(tax) for tax in taxs_due: self.message_post(cr, uid, [tax.id], body= 'Tax payment deadline soon', subtype= 'sgr_alerts.mt_tax_due_date_soon', context=context) for tax in overdue_taxs: self.message_post(cr, uid, [tax.id], body= 'Tax payment deadline expired', subtype= 'sgr_alerts.mt_tax_due_date', context=context) return True tax() <|reserved_special_token_1|> import tools from osv import fields, osv from tools.translate import _ from datetime import datetime, timedelta date_format = '%Y-%m-%d' class tax(osv.Model): _inherit = 'sgr.tax' def send_alerts(self, cr, uid, context=None): self.send_alerts_with_upcoming_days(cr, uid, 2, context=context) def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days, context=None): now = datetime.now() now_plus_upcoming_days = now + timedelta(days=upcoming_days) tax_to_paid_ids = self.search(cr, uid, [('state', '=', 'to_pay')], context=context) tax_due_date_soon = [] taxs_due = [] overdue_taxs = [] for tax in self.browse(cr, uid, tax_to_paid_ids, context=context): if not tax.approval_date: continue approval_date = datetime.strptime(tax.approval_date, date_format) if approval_date <= now: overdue_taxs.append(tax) elif now < approval_date and approval_date <= now_plus_upcoming_days: taxs_due.append(tax) for tax in taxs_due: self.message_post(cr, uid, [tax.id], body= 'Tax payment deadline soon', subtype= 
'sgr_alerts.mt_tax_due_date_soon', context=context) for tax in overdue_taxs: self.message_post(cr, uid, [tax.id], body= 'Tax payment deadline expired', subtype= 'sgr_alerts.mt_tax_due_date', context=context) return True tax() <|reserved_special_token_1|> # -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2011 Eficent (<http://www.eficent.com/>) # Jordi Ballester Alomar <jordi.ballester@eficent.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import tools from osv import fields, osv from tools.translate import _ from datetime import datetime, timedelta date_format = '%Y-%m-%d' class tax(osv.Model): _inherit = 'sgr.tax' def send_alerts(self, cr, uid, context=None): self.send_alerts_with_upcoming_days(cr, uid, 2, context=context) def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days, context=None): now = datetime.now() now_plus_upcoming_days = now + timedelta(days=upcoming_days) tax_to_paid_ids = self.search(cr, uid, [('state','=','to_pay')], context=context) tax_due_date_soon = [] taxs_due = [] overdue_taxs = [] for tax in self.browse(cr, uid, tax_to_paid_ids, context=context): if not tax.approval_date: continue approval_date = datetime.strptime(tax.approval_date, date_format) if approval_date <= now: overdue_taxs.append(tax) elif now < approval_date and approval_date <= now_plus_upcoming_days: taxs_due.append(tax) for tax in taxs_due: self.message_post(cr, uid, [tax.id], body="Tax payment deadline soon", subtype="sgr_alerts.mt_tax_due_date_soon", context=context) for tax in overdue_taxs: self.message_post(cr, uid, [tax.id], body="Tax payment deadline expired", subtype="sgr_alerts.mt_tax_due_date", context=context) #all_tax_ids = self.search(cr, uid, [], context=context) #for tax in self.browse(cr, uid, all_tax_ids, context=context): # print 'tax: ' + str(tax.id) # self.message_post(cr, uid, [tax.id], body="Due Date Soon", subtype="sgr_alerts.mt_tax_due_date_soon", context=context) return True tax()
flexible
{ "blob_id": "1ddec426e4ad50f1d0e8a57ed841fbdf8c51b00f", "index": 9871, "step-1": "<mask token>\n\n\nclass tax(osv.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass tax(osv.Model):\n _inherit = 'sgr.tax'\n\n def send_alerts(self, cr, uid, context=None):\n self.send_alerts_with_upcoming_days(cr, uid, 2, context=context)\n\n def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days,\n context=None):\n now = datetime.now()\n now_plus_upcoming_days = now + timedelta(days=upcoming_days)\n tax_to_paid_ids = self.search(cr, uid, [('state', '=', 'to_pay')],\n context=context)\n tax_due_date_soon = []\n taxs_due = []\n overdue_taxs = []\n for tax in self.browse(cr, uid, tax_to_paid_ids, context=context):\n if not tax.approval_date:\n continue\n approval_date = datetime.strptime(tax.approval_date, date_format)\n if approval_date <= now:\n overdue_taxs.append(tax)\n elif now < approval_date and approval_date <= now_plus_upcoming_days:\n taxs_due.append(tax)\n for tax in taxs_due:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline soon', subtype=\n 'sgr_alerts.mt_tax_due_date_soon', context=context)\n for tax in overdue_taxs:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline expired', subtype=\n 'sgr_alerts.mt_tax_due_date', context=context)\n return True\n\n\ntax()\n", "step-3": "<mask token>\ndate_format = '%Y-%m-%d'\n\n\nclass tax(osv.Model):\n _inherit = 'sgr.tax'\n\n def send_alerts(self, cr, uid, context=None):\n self.send_alerts_with_upcoming_days(cr, uid, 2, context=context)\n\n def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days,\n context=None):\n now = datetime.now()\n now_plus_upcoming_days = now + timedelta(days=upcoming_days)\n tax_to_paid_ids = self.search(cr, uid, [('state', '=', 'to_pay')],\n context=context)\n tax_due_date_soon = []\n taxs_due = []\n overdue_taxs = []\n for tax in self.browse(cr, uid, tax_to_paid_ids, context=context):\n if not tax.approval_date:\n continue\n approval_date = datetime.strptime(tax.approval_date, date_format)\n if approval_date <= now:\n overdue_taxs.append(tax)\n elif now < approval_date and approval_date <= now_plus_upcoming_days:\n taxs_due.append(tax)\n for tax in taxs_due:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline soon', subtype=\n 'sgr_alerts.mt_tax_due_date_soon', context=context)\n for tax in overdue_taxs:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline expired', subtype=\n 'sgr_alerts.mt_tax_due_date', context=context)\n return True\n\n\ntax()\n", "step-4": "import tools\nfrom osv import fields, osv\nfrom tools.translate import _\nfrom datetime import datetime, timedelta\ndate_format = '%Y-%m-%d'\n\n\nclass tax(osv.Model):\n _inherit = 'sgr.tax'\n\n def send_alerts(self, cr, uid, context=None):\n self.send_alerts_with_upcoming_days(cr, uid, 2, context=context)\n\n def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days,\n context=None):\n now = datetime.now()\n now_plus_upcoming_days = now + timedelta(days=upcoming_days)\n tax_to_paid_ids = self.search(cr, uid, [('state', '=', 'to_pay')],\n context=context)\n tax_due_date_soon = []\n taxs_due = []\n overdue_taxs = []\n for tax in self.browse(cr, uid, tax_to_paid_ids, context=context):\n if not tax.approval_date:\n continue\n approval_date = datetime.strptime(tax.approval_date, date_format)\n if approval_date <= now:\n overdue_taxs.append(tax)\n elif now < approval_date and approval_date <= now_plus_upcoming_days:\n 
taxs_due.append(tax)\n for tax in taxs_due:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline soon', subtype=\n 'sgr_alerts.mt_tax_due_date_soon', context=context)\n for tax in overdue_taxs:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline expired', subtype=\n 'sgr_alerts.mt_tax_due_date', context=context)\n return True\n\n\ntax()\n", "step-5": "# -*- coding: utf-8 -*-\n##############################################################################\n#\n# Copyright (C) 2011 Eficent (<http://www.eficent.com/>)\n# Jordi Ballester Alomar <jordi.ballester@eficent.com>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\n##############################################################################\n\nimport tools\nfrom osv import fields, osv\nfrom tools.translate import _\n\nfrom datetime import datetime, timedelta\n\ndate_format = '%Y-%m-%d'\n\nclass tax(osv.Model):\n _inherit = 'sgr.tax'\n \n def send_alerts(self, cr, uid, context=None):\n self.send_alerts_with_upcoming_days(cr, uid, 2, context=context)\n \n def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days, context=None):\n now = datetime.now()\n now_plus_upcoming_days = now + timedelta(days=upcoming_days)\n \n tax_to_paid_ids = self.search(cr, uid, [('state','=','to_pay')], context=context)\n tax_due_date_soon = []\n \n taxs_due = []\n overdue_taxs = []\n \n for tax in self.browse(cr, uid, tax_to_paid_ids, context=context):\n if not tax.approval_date:\n continue\n \n approval_date = datetime.strptime(tax.approval_date, date_format)\n \n if approval_date <= now:\n overdue_taxs.append(tax)\n elif now < approval_date and approval_date <= now_plus_upcoming_days:\n taxs_due.append(tax)\n \n for tax in taxs_due:\n self.message_post(cr, uid, [tax.id], body=\"Tax payment deadline soon\", subtype=\"sgr_alerts.mt_tax_due_date_soon\", context=context)\n \n for tax in overdue_taxs:\n self.message_post(cr, uid, [tax.id], body=\"Tax payment deadline expired\", subtype=\"sgr_alerts.mt_tax_due_date\", context=context)\n \n #all_tax_ids = self.search(cr, uid, [], context=context)\n #for tax in self.browse(cr, uid, all_tax_ids, context=context):\n # print 'tax: ' + str(tax.id)\n # self.message_post(cr, uid, [tax.id], body=\"Due Date Soon\", subtype=\"sgr_alerts.mt_tax_due_date_soon\", context=context)\n \n \n return True\n \n \n \ntax()\n\n\n\n", "step-ids": [ 1, 5, 6, 7, 8 ] }
[ 1, 5, 6, 7, 8 ]
<|reserved_special_token_0|> class User(MBase): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class UserTimeLine(MBase): """ POSTs that user will see in their timeline """ user_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class UserProject(MBase): """ Projects that user follows """ user_id = columns.Integer(primary_key=True) project_id = columns.Integer(primary_key=True) class UserPost(MBase): """ All the POSTs of a user """ user_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class UserFollower(MBase): """ Followers of a user """ user_id = columns.Integer(primary_key=True) follower_id = columns.Integer(primary_key=True) class UserFollowing(MBase): """ A user follows another user """ user_id = columns.Integer(primary_key=True) following_id = columns.Integer(primary_key=True) class ProjectFollower(MBase): project_id = columns.Integer(primary_key=True) user_id = columns.Integer(primary_key=True) class PostFollower(MBase): post_id = columns.TimeUUID(primary_key=True) user_id = columns.Integer(primary_key=True) class ChannelFollower(MBase): channel_id = columns.Integer(primary_key=True) user_id = columns.Integer(primary_key=True) class ChannelTimeLine(MBase): channel_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class ProjectTimeLine(MBase): project_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class PostLike(MBase): post_id = columns.BigInt(primary_key=True) user_id = columns.Integer(primary_key=True) class PostComment(MBase): post_id = columns.BigInt(primary_key=True) comment_id = columns.BigInt(primary_key=True) <|reserved_special_token_1|> <|reserved_special_token_0|> class Channel(MBase): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class User(MBase): id = columns.Integer(primary_key=True) nick = columns.Text(required=True, index=True) follower_count = columns.Counter following_count = columns.Counter extended = columns.Map(columns.Text, columns.Text) class UserTimeLine(MBase): """ POSTs that user will see in their timeline """ user_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class UserProject(MBase): """ Projects that user follows """ user_id = columns.Integer(primary_key=True) project_id = columns.Integer(primary_key=True) class UserPost(MBase): """ All the POSTs of a user """ user_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class UserFollower(MBase): """ Followers of a user """ user_id = columns.Integer(primary_key=True) follower_id = columns.Integer(primary_key=True) class UserFollowing(MBase): """ A user follows another user """ user_id = columns.Integer(primary_key=True) following_id = columns.Integer(primary_key=True) class ProjectFollower(MBase): project_id = columns.Integer(primary_key=True) user_id = columns.Integer(primary_key=True) class PostFollower(MBase): post_id = columns.TimeUUID(primary_key=True) user_id = columns.Integer(primary_key=True) class ChannelFollower(MBase): channel_id = columns.Integer(primary_key=True) user_id = columns.Integer(primary_key=True) class ChannelTimeLine(MBase): channel_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class ProjectTimeLine(MBase): project_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class PostLike(MBase): 
post_id = columns.BigInt(primary_key=True) user_id = columns.Integer(primary_key=True) class PostComment(MBase): post_id = columns.BigInt(primary_key=True) comment_id = columns.BigInt(primary_key=True) <|reserved_special_token_1|> <|reserved_special_token_0|> class Project(MBase): id = columns.Integer(primary_key=True) follower_count = columns.Counter class Channel(MBase): id = columns.Integer(primary_key=True) slug = columns.Text(required=True, index=True) name = columns.Text(required=True) class User(MBase): id = columns.Integer(primary_key=True) nick = columns.Text(required=True, index=True) follower_count = columns.Counter following_count = columns.Counter extended = columns.Map(columns.Text, columns.Text) class UserTimeLine(MBase): """ POSTs that user will see in their timeline """ user_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class UserProject(MBase): """ Projects that user follows """ user_id = columns.Integer(primary_key=True) project_id = columns.Integer(primary_key=True) class UserPost(MBase): """ All the POSTs of a user """ user_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class UserFollower(MBase): """ Followers of a user """ user_id = columns.Integer(primary_key=True) follower_id = columns.Integer(primary_key=True) class UserFollowing(MBase): """ A user follows another user """ user_id = columns.Integer(primary_key=True) following_id = columns.Integer(primary_key=True) class ProjectFollower(MBase): project_id = columns.Integer(primary_key=True) user_id = columns.Integer(primary_key=True) class PostFollower(MBase): post_id = columns.TimeUUID(primary_key=True) user_id = columns.Integer(primary_key=True) class ChannelFollower(MBase): channel_id = columns.Integer(primary_key=True) user_id = columns.Integer(primary_key=True) class ChannelTimeLine(MBase): channel_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class ProjectTimeLine(MBase): project_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class PostLike(MBase): post_id = columns.BigInt(primary_key=True) user_id = columns.Integer(primary_key=True) class PostComment(MBase): post_id = columns.BigInt(primary_key=True) comment_id = columns.BigInt(primary_key=True) <|reserved_special_token_1|> <|reserved_special_token_0|> class Post(MBase): id = columns.BigInt(index=True, primary_key=True) user_id = columns.Integer(required=True, index=True) text = columns.Text(required=True) likes = columns.Counter class Project(MBase): id = columns.Integer(primary_key=True) follower_count = columns.Counter class Channel(MBase): id = columns.Integer(primary_key=True) slug = columns.Text(required=True, index=True) name = columns.Text(required=True) class User(MBase): id = columns.Integer(primary_key=True) nick = columns.Text(required=True, index=True) follower_count = columns.Counter following_count = columns.Counter extended = columns.Map(columns.Text, columns.Text) class UserTimeLine(MBase): """ POSTs that user will see in their timeline """ user_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class UserProject(MBase): """ Projects that user follows """ user_id = columns.Integer(primary_key=True) project_id = columns.Integer(primary_key=True) class UserPost(MBase): """ All the POSTs of a user """ user_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class UserFollower(MBase): """ Followers of a user """ user_id = 
columns.Integer(primary_key=True) follower_id = columns.Integer(primary_key=True) class UserFollowing(MBase): """ A user follows another user """ user_id = columns.Integer(primary_key=True) following_id = columns.Integer(primary_key=True) class ProjectFollower(MBase): project_id = columns.Integer(primary_key=True) user_id = columns.Integer(primary_key=True) class PostFollower(MBase): post_id = columns.TimeUUID(primary_key=True) user_id = columns.Integer(primary_key=True) class ChannelFollower(MBase): channel_id = columns.Integer(primary_key=True) user_id = columns.Integer(primary_key=True) class ChannelTimeLine(MBase): channel_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class ProjectTimeLine(MBase): project_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class PostLike(MBase): post_id = columns.BigInt(primary_key=True) user_id = columns.Integer(primary_key=True) class PostComment(MBase): post_id = columns.BigInt(primary_key=True) comment_id = columns.BigInt(primary_key=True) <|reserved_special_token_1|> import uuid from cqlengine import columns from cqlengine.models import Model from datetime import datetime as dt class MBase(Model): __abstract__ = True #__keyspace__ = model_keyspace class Post(MBase): id = columns.BigInt(index=True, primary_key=True) user_id = columns.Integer(required=True, index=True) text = columns.Text(required=True) likes = columns.Counter class Project(MBase): id = columns.Integer(primary_key=True) follower_count = columns.Counter class Channel(MBase): id = columns.Integer(primary_key=True) slug = columns.Text(required=True, index=True) name = columns.Text(required=True) class User(MBase): id = columns.Integer(primary_key=True) nick = columns.Text(required=True, index=True) follower_count = columns.Counter following_count = columns.Counter extended = columns.Map(columns.Text, columns.Text) class UserTimeLine(MBase): """ POSTs that user will see in their timeline """ user_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class UserProject(MBase): """ Projects that user follows """ user_id = columns.Integer(primary_key=True) project_id = columns.Integer(primary_key=True) class UserPost(MBase): """ All the POSTs of a user """ user_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class UserFollower(MBase): """ Followers of a user """ user_id = columns.Integer(primary_key=True) follower_id = columns.Integer(primary_key=True) class UserFollowing(MBase): """ A user follows another user """ user_id = columns.Integer(primary_key=True) following_id = columns.Integer(primary_key=True) class ProjectFollower(MBase): project_id = columns.Integer(primary_key=True) user_id = columns.Integer(primary_key=True) class PostFollower(MBase): post_id = columns.TimeUUID(primary_key=True) user_id = columns.Integer(primary_key=True) class ChannelFollower(MBase): channel_id = columns.Integer(primary_key=True) user_id = columns.Integer(primary_key=True) class ChannelTimeLine(MBase): channel_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class ProjectTimeLine(MBase): project_id = columns.Integer(primary_key=True) post_id = columns.BigInt(primary_key=True) class PostLike(MBase): post_id = columns.BigInt(primary_key=True) user_id = columns.Integer(primary_key=True) class PostComment(MBase): post_id = columns.BigInt(primary_key=True) comment_id = columns.BigInt(primary_key=True)
flexible
{ "blob_id": "9cb734f67d5149b052ff1d412d446aea1654fa69", "index": 9543, "step-1": "<mask token>\n\n\nclass User(MBase):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass UserTimeLine(MBase):\n \"\"\"\n POSTs that user will see in their timeline\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass UserProject(MBase):\n \"\"\"\n Projects that user follows\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n project_id = columns.Integer(primary_key=True)\n\n\nclass UserPost(MBase):\n \"\"\"\n All the POSTs of a user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass UserFollower(MBase):\n \"\"\"\n Followers of a user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n follower_id = columns.Integer(primary_key=True)\n\n\nclass UserFollowing(MBase):\n \"\"\"\n A user follows another user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n following_id = columns.Integer(primary_key=True)\n\n\nclass ProjectFollower(MBase):\n project_id = columns.Integer(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass PostFollower(MBase):\n post_id = columns.TimeUUID(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass ChannelFollower(MBase):\n channel_id = columns.Integer(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass ChannelTimeLine(MBase):\n channel_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass ProjectTimeLine(MBase):\n project_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass PostLike(MBase):\n post_id = columns.BigInt(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass PostComment(MBase):\n post_id = columns.BigInt(primary_key=True)\n comment_id = columns.BigInt(primary_key=True)\n", "step-2": "<mask token>\n\n\nclass Channel(MBase):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass User(MBase):\n id = columns.Integer(primary_key=True)\n nick = columns.Text(required=True, index=True)\n follower_count = columns.Counter\n following_count = columns.Counter\n extended = columns.Map(columns.Text, columns.Text)\n\n\nclass UserTimeLine(MBase):\n \"\"\"\n POSTs that user will see in their timeline\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass UserProject(MBase):\n \"\"\"\n Projects that user follows\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n project_id = columns.Integer(primary_key=True)\n\n\nclass UserPost(MBase):\n \"\"\"\n All the POSTs of a user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass UserFollower(MBase):\n \"\"\"\n Followers of a user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n follower_id = columns.Integer(primary_key=True)\n\n\nclass UserFollowing(MBase):\n \"\"\"\n A user follows another user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n following_id = columns.Integer(primary_key=True)\n\n\nclass ProjectFollower(MBase):\n project_id = columns.Integer(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass PostFollower(MBase):\n post_id = columns.TimeUUID(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass ChannelFollower(MBase):\n channel_id = columns.Integer(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass 
ChannelTimeLine(MBase):\n channel_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass ProjectTimeLine(MBase):\n project_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass PostLike(MBase):\n post_id = columns.BigInt(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass PostComment(MBase):\n post_id = columns.BigInt(primary_key=True)\n comment_id = columns.BigInt(primary_key=True)\n", "step-3": "<mask token>\n\n\nclass Project(MBase):\n id = columns.Integer(primary_key=True)\n follower_count = columns.Counter\n\n\nclass Channel(MBase):\n id = columns.Integer(primary_key=True)\n slug = columns.Text(required=True, index=True)\n name = columns.Text(required=True)\n\n\nclass User(MBase):\n id = columns.Integer(primary_key=True)\n nick = columns.Text(required=True, index=True)\n follower_count = columns.Counter\n following_count = columns.Counter\n extended = columns.Map(columns.Text, columns.Text)\n\n\nclass UserTimeLine(MBase):\n \"\"\"\n POSTs that user will see in their timeline\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass UserProject(MBase):\n \"\"\"\n Projects that user follows\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n project_id = columns.Integer(primary_key=True)\n\n\nclass UserPost(MBase):\n \"\"\"\n All the POSTs of a user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass UserFollower(MBase):\n \"\"\"\n Followers of a user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n follower_id = columns.Integer(primary_key=True)\n\n\nclass UserFollowing(MBase):\n \"\"\"\n A user follows another user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n following_id = columns.Integer(primary_key=True)\n\n\nclass ProjectFollower(MBase):\n project_id = columns.Integer(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass PostFollower(MBase):\n post_id = columns.TimeUUID(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass ChannelFollower(MBase):\n channel_id = columns.Integer(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass ChannelTimeLine(MBase):\n channel_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass ProjectTimeLine(MBase):\n project_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass PostLike(MBase):\n post_id = columns.BigInt(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass PostComment(MBase):\n post_id = columns.BigInt(primary_key=True)\n comment_id = columns.BigInt(primary_key=True)\n", "step-4": "<mask token>\n\n\nclass Post(MBase):\n id = columns.BigInt(index=True, primary_key=True)\n user_id = columns.Integer(required=True, index=True)\n text = columns.Text(required=True)\n likes = columns.Counter\n\n\nclass Project(MBase):\n id = columns.Integer(primary_key=True)\n follower_count = columns.Counter\n\n\nclass Channel(MBase):\n id = columns.Integer(primary_key=True)\n slug = columns.Text(required=True, index=True)\n name = columns.Text(required=True)\n\n\nclass User(MBase):\n id = columns.Integer(primary_key=True)\n nick = columns.Text(required=True, index=True)\n follower_count = columns.Counter\n following_count = columns.Counter\n extended = columns.Map(columns.Text, columns.Text)\n\n\nclass UserTimeLine(MBase):\n \"\"\"\n POSTs that user will see in 
their timeline\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass UserProject(MBase):\n \"\"\"\n Projects that user follows\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n project_id = columns.Integer(primary_key=True)\n\n\nclass UserPost(MBase):\n \"\"\"\n All the POSTs of a user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass UserFollower(MBase):\n \"\"\"\n Followers of a user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n follower_id = columns.Integer(primary_key=True)\n\n\nclass UserFollowing(MBase):\n \"\"\"\n A user follows another user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n following_id = columns.Integer(primary_key=True)\n\n\nclass ProjectFollower(MBase):\n project_id = columns.Integer(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass PostFollower(MBase):\n post_id = columns.TimeUUID(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass ChannelFollower(MBase):\n channel_id = columns.Integer(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass ChannelTimeLine(MBase):\n channel_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass ProjectTimeLine(MBase):\n project_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass PostLike(MBase):\n post_id = columns.BigInt(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass PostComment(MBase):\n post_id = columns.BigInt(primary_key=True)\n comment_id = columns.BigInt(primary_key=True)\n", "step-5": "import uuid\nfrom cqlengine import columns\nfrom cqlengine.models import Model\nfrom datetime import datetime as dt\n\n\nclass MBase(Model):\n __abstract__ = True\n #__keyspace__ = model_keyspace\n\n\nclass Post(MBase):\n id = columns.BigInt(index=True, primary_key=True)\n user_id = columns.Integer(required=True, index=True)\n text = columns.Text(required=True)\n likes = columns.Counter\n\n\nclass Project(MBase):\n id = columns.Integer(primary_key=True)\n follower_count = columns.Counter\n\n\nclass Channel(MBase):\n id = columns.Integer(primary_key=True)\n slug = columns.Text(required=True, index=True)\n name = columns.Text(required=True)\n\n\nclass User(MBase):\n id = columns.Integer(primary_key=True)\n nick = columns.Text(required=True, index=True)\n follower_count = columns.Counter\n following_count = columns.Counter\n extended = columns.Map(columns.Text, columns.Text)\n\n\nclass UserTimeLine(MBase):\n \"\"\"\n POSTs that user will see in their timeline\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass UserProject(MBase):\n \"\"\"\n Projects that user follows\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n project_id = columns.Integer(primary_key=True)\n\n\nclass UserPost(MBase):\n \"\"\"\n All the POSTs of a user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass UserFollower(MBase):\n \"\"\"\n Followers of a user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n follower_id = columns.Integer(primary_key=True)\n\n\nclass UserFollowing(MBase):\n \"\"\"\n A user follows another user\n \"\"\"\n user_id = columns.Integer(primary_key=True)\n following_id = columns.Integer(primary_key=True)\n\n\nclass ProjectFollower(MBase):\n project_id = columns.Integer(primary_key=True)\n user_id = 
columns.Integer(primary_key=True)\n\n\nclass PostFollower(MBase):\n post_id = columns.TimeUUID(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass ChannelFollower(MBase):\n channel_id = columns.Integer(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass ChannelTimeLine(MBase):\n channel_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass ProjectTimeLine(MBase):\n project_id = columns.Integer(primary_key=True)\n post_id = columns.BigInt(primary_key=True)\n\n\nclass PostLike(MBase):\n post_id = columns.BigInt(primary_key=True)\n user_id = columns.Integer(primary_key=True)\n\n\nclass PostComment(MBase):\n post_id = columns.BigInt(primary_key=True)\n comment_id = columns.BigInt(primary_key=True)\n", "step-ids": [ 30, 32, 35, 37, 41 ] }
[ 30, 32, 35, 37, 41 ] }
<|reserved_special_token_0|> class model_objectdetection_ppm_centernet_v1: <|reserved_special_token_0|> def _build_net(self): self.learning_rate_tensor = tf.compat.v1.placeholder(tf.float32, shape=[], name='learning_rate') print(self.learning_rate_tensor) self.X = tf.compat.v1.placeholder(tf.float32, [None, 512, 512, 3], name='X') print(self.X) self.keep_layer = tf.compat.v1.placeholder(tf.bool, name='phase') print(self.keep_layer) self.Y = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, self .class_count], 'Y') self.SIZE = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, 2 ], 'Y') print(self.Y) with tf.variable_scope('downsamples'): stage_1_1 = conv_block(self.X, conv_type='conv', filters=16, kernel_size=3, strides=2, training=self.keep_layer) stage_1_2 = conv_block(stage_1_1, conv_type='ds', filters=32, kernel_size=3, strides=2, training=self.keep_layer) stage_1_3 = conv_block(stage_1_2, conv_type='ds', filters=64, kernel_size=3, strides=2, training=self.keep_layer) with tf.variable_scope('feature_extraction'): feature1 = bottlenect_block_v1(inputs=stage_1_3, filters=64, kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual1') feature2 = bottlenect_block_v1(inputs=feature1, filters=64, kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual2') feature3 = bottlenect_block_v1(inputs=feature2, filters=32, kernel_size=3, upsample_rate=2, strides=1, repeat=2, training=self.keep_layer, name='residual3') with tf.variable_scope('pyramid_pooling'): pyramid = pyramid_pooling_block(feature3, kernel_size=32, input_width=32, input_height=32, bin_sizes=[2, 4, 6, 8]) with tf.variable_scope('featurefuse'): feature_fuse_layer1 = conv_block(stage_1_3, conv_type='conv', filters=160, kernel_size=1, strides=1, training=self.keep_layer ) print('test', feature_fuse_layer1) feature_fuse_layer2 = upsample_layer(pyramid, [128, 128]) depthwise_filter = tf.compat.v1.get_variable('feature_fuse_layer2', [3, 3, 32 * 5, 1], initializer=tf.compat.v1. variance_scaling_initializer()) feature_fuse_layer2 = tf.compat.v1.nn.depthwise_conv2d(input= feature_fuse_layer2, filter=depthwise_filter, strides=[1, 1, 1, 1], padding='SAME') print('feature_deptiwise conv=', feature_fuse_layer2) feature_fuse_layer2 = tf.compat.v1.layers.batch_normalization( feature_fuse_layer2, scale=True, center=True, momentum=0.9, training=self.keep_layer) feature_fuse_layer2 = tf.compat.v1.nn.relu(feature_fuse_layer2) feature_fuse_layer2 = tf.compat.v1.layers.conv2d(inputs= feature_fuse_layer2, filters=1, kernel_size=1, strides=1, padding='same', kernel_initializer=tf.compat.v1. 
variance_scaling_initializer()) final_feature = feature_fuse_layer2 + feature_fuse_layer1 final_feature = tf.compat.v1.layers.batch_normalization( final_feature, scale=True, center=True, momentum=0.9, training=self.keep_layer) final_feature = tf.compat.v1.nn.relu(final_feature) with tf.variable_scope('classifier'): classifiter = conv_block(final_feature, conv_type='ds', filters =64, kernel_size=3, strides=1, training=self.keep_layer) print('=== network structure ===') with tf.variable_scope('detector'): self.cls = conv(classifiter, filters=self.class_count, kernel_size=1, strides=1, name='detector_conv1') self.cls = tf.compat.v1.nn.sigmoid(self.cls, name='heatmap') self.size = conv(classifiter, filters=2, kernel_size=1, strides =1, name='detector_conv2') self.size = tf.compat.v1.nn.relu(self.size, name='sizemap') print('heatmap sigmoid=', self.cls) self.output = self.cls print('=== network structure ===') self.heatmap_loss = focal_loss(self.output, self.Y) self.size_loss = reg_l1_loss(self.size, self.SIZE) self.cost = self.heatmap_loss + 0.1 * self.size_loss update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys. UPDATE_OPS) with tf.compat.v1.control_dependencies(update_ops): self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate =self.learning_rate_tensor).minimize(self.cost, name= 'AdamMinimize') print('==============Node Name List==============') print('learning rate tensor : ', self.learning_rate_tensor) print('Input Node Name : ', self.X) print('Output 4 Train Node Name : ', self.Y) print('Phase Node Name', self.keep_layer) print('Output Node Name (heatmap) : ', self.output) print('Output Node Name (sizemap) : ', self.size) print('Cost Function Node Name : ', self.cost) print('Run this operation for a train step :', self. optimizer.name) print('==============Node Name List==============') def predict(self, x_test, keep_prop=False): return self.sess.run([self.output, self.size], feed_dict={self.X: x_test, self.keep_layer: keep_prop}) <|reserved_special_token_0|> def train(self, x_data, y_data, y_size, keep_prop=True, learn_rate=0.003): return self.sess.run(self.optimizer, feed_dict={self.X: x_data, self.Y: y_data, self.SIZE: y_size, self.keep_layer: keep_prop, self.learning_rate_tensor: learn_rate}) <|reserved_special_token_1|> <|reserved_special_token_0|> class model_objectdetection_ppm_centernet_v1: def __init__(self, sess, class_count): self.sess = sess self.class_count = class_count self.up_sample_rate = 1 self.feature_channels = 32 with tf.variable_scope('CenterNet'): self._build_net() def _build_net(self): self.learning_rate_tensor = tf.compat.v1.placeholder(tf.float32, shape=[], name='learning_rate') print(self.learning_rate_tensor) self.X = tf.compat.v1.placeholder(tf.float32, [None, 512, 512, 3], name='X') print(self.X) self.keep_layer = tf.compat.v1.placeholder(tf.bool, name='phase') print(self.keep_layer) self.Y = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, self .class_count], 'Y') self.SIZE = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, 2 ], 'Y') print(self.Y) with tf.variable_scope('downsamples'): stage_1_1 = conv_block(self.X, conv_type='conv', filters=16, kernel_size=3, strides=2, training=self.keep_layer) stage_1_2 = conv_block(stage_1_1, conv_type='ds', filters=32, kernel_size=3, strides=2, training=self.keep_layer) stage_1_3 = conv_block(stage_1_2, conv_type='ds', filters=64, kernel_size=3, strides=2, training=self.keep_layer) with tf.variable_scope('feature_extraction'): feature1 = bottlenect_block_v1(inputs=stage_1_3, filters=64, 
kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual1') feature2 = bottlenect_block_v1(inputs=feature1, filters=64, kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual2') feature3 = bottlenect_block_v1(inputs=feature2, filters=32, kernel_size=3, upsample_rate=2, strides=1, repeat=2, training=self.keep_layer, name='residual3') with tf.variable_scope('pyramid_pooling'): pyramid = pyramid_pooling_block(feature3, kernel_size=32, input_width=32, input_height=32, bin_sizes=[2, 4, 6, 8]) with tf.variable_scope('featurefuse'): feature_fuse_layer1 = conv_block(stage_1_3, conv_type='conv', filters=160, kernel_size=1, strides=1, training=self.keep_layer ) print('test', feature_fuse_layer1) feature_fuse_layer2 = upsample_layer(pyramid, [128, 128]) depthwise_filter = tf.compat.v1.get_variable('feature_fuse_layer2', [3, 3, 32 * 5, 1], initializer=tf.compat.v1. variance_scaling_initializer()) feature_fuse_layer2 = tf.compat.v1.nn.depthwise_conv2d(input= feature_fuse_layer2, filter=depthwise_filter, strides=[1, 1, 1, 1], padding='SAME') print('feature_deptiwise conv=', feature_fuse_layer2) feature_fuse_layer2 = tf.compat.v1.layers.batch_normalization( feature_fuse_layer2, scale=True, center=True, momentum=0.9, training=self.keep_layer) feature_fuse_layer2 = tf.compat.v1.nn.relu(feature_fuse_layer2) feature_fuse_layer2 = tf.compat.v1.layers.conv2d(inputs= feature_fuse_layer2, filters=1, kernel_size=1, strides=1, padding='same', kernel_initializer=tf.compat.v1. variance_scaling_initializer()) final_feature = feature_fuse_layer2 + feature_fuse_layer1 final_feature = tf.compat.v1.layers.batch_normalization( final_feature, scale=True, center=True, momentum=0.9, training=self.keep_layer) final_feature = tf.compat.v1.nn.relu(final_feature) with tf.variable_scope('classifier'): classifiter = conv_block(final_feature, conv_type='ds', filters =64, kernel_size=3, strides=1, training=self.keep_layer) print('=== network structure ===') with tf.variable_scope('detector'): self.cls = conv(classifiter, filters=self.class_count, kernel_size=1, strides=1, name='detector_conv1') self.cls = tf.compat.v1.nn.sigmoid(self.cls, name='heatmap') self.size = conv(classifiter, filters=2, kernel_size=1, strides =1, name='detector_conv2') self.size = tf.compat.v1.nn.relu(self.size, name='sizemap') print('heatmap sigmoid=', self.cls) self.output = self.cls print('=== network structure ===') self.heatmap_loss = focal_loss(self.output, self.Y) self.size_loss = reg_l1_loss(self.size, self.SIZE) self.cost = self.heatmap_loss + 0.1 * self.size_loss update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys. UPDATE_OPS) with tf.compat.v1.control_dependencies(update_ops): self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate =self.learning_rate_tensor).minimize(self.cost, name= 'AdamMinimize') print('==============Node Name List==============') print('learning rate tensor : ', self.learning_rate_tensor) print('Input Node Name : ', self.X) print('Output 4 Train Node Name : ', self.Y) print('Phase Node Name', self.keep_layer) print('Output Node Name (heatmap) : ', self.output) print('Output Node Name (sizemap) : ', self.size) print('Cost Function Node Name : ', self.cost) print('Run this operation for a train step :', self. 
optimizer.name) print('==============Node Name List==============') def predict(self, x_test, keep_prop=False): return self.sess.run([self.output, self.size], feed_dict={self.X: x_test, self.keep_layer: keep_prop}) <|reserved_special_token_0|> def train(self, x_data, y_data, y_size, keep_prop=True, learn_rate=0.003): return self.sess.run(self.optimizer, feed_dict={self.X: x_data, self.Y: y_data, self.SIZE: y_size, self.keep_layer: keep_prop, self.learning_rate_tensor: learn_rate}) <|reserved_special_token_1|> <|reserved_special_token_0|> class model_objectdetection_ppm_centernet_v1: def __init__(self, sess, class_count): self.sess = sess self.class_count = class_count self.up_sample_rate = 1 self.feature_channels = 32 with tf.variable_scope('CenterNet'): self._build_net() def _build_net(self): self.learning_rate_tensor = tf.compat.v1.placeholder(tf.float32, shape=[], name='learning_rate') print(self.learning_rate_tensor) self.X = tf.compat.v1.placeholder(tf.float32, [None, 512, 512, 3], name='X') print(self.X) self.keep_layer = tf.compat.v1.placeholder(tf.bool, name='phase') print(self.keep_layer) self.Y = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, self .class_count], 'Y') self.SIZE = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, 2 ], 'Y') print(self.Y) with tf.variable_scope('downsamples'): stage_1_1 = conv_block(self.X, conv_type='conv', filters=16, kernel_size=3, strides=2, training=self.keep_layer) stage_1_2 = conv_block(stage_1_1, conv_type='ds', filters=32, kernel_size=3, strides=2, training=self.keep_layer) stage_1_3 = conv_block(stage_1_2, conv_type='ds', filters=64, kernel_size=3, strides=2, training=self.keep_layer) with tf.variable_scope('feature_extraction'): feature1 = bottlenect_block_v1(inputs=stage_1_3, filters=64, kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual1') feature2 = bottlenect_block_v1(inputs=feature1, filters=64, kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual2') feature3 = bottlenect_block_v1(inputs=feature2, filters=32, kernel_size=3, upsample_rate=2, strides=1, repeat=2, training=self.keep_layer, name='residual3') with tf.variable_scope('pyramid_pooling'): pyramid = pyramid_pooling_block(feature3, kernel_size=32, input_width=32, input_height=32, bin_sizes=[2, 4, 6, 8]) with tf.variable_scope('featurefuse'): feature_fuse_layer1 = conv_block(stage_1_3, conv_type='conv', filters=160, kernel_size=1, strides=1, training=self.keep_layer ) print('test', feature_fuse_layer1) feature_fuse_layer2 = upsample_layer(pyramid, [128, 128]) depthwise_filter = tf.compat.v1.get_variable('feature_fuse_layer2', [3, 3, 32 * 5, 1], initializer=tf.compat.v1. variance_scaling_initializer()) feature_fuse_layer2 = tf.compat.v1.nn.depthwise_conv2d(input= feature_fuse_layer2, filter=depthwise_filter, strides=[1, 1, 1, 1], padding='SAME') print('feature_deptiwise conv=', feature_fuse_layer2) feature_fuse_layer2 = tf.compat.v1.layers.batch_normalization( feature_fuse_layer2, scale=True, center=True, momentum=0.9, training=self.keep_layer) feature_fuse_layer2 = tf.compat.v1.nn.relu(feature_fuse_layer2) feature_fuse_layer2 = tf.compat.v1.layers.conv2d(inputs= feature_fuse_layer2, filters=1, kernel_size=1, strides=1, padding='same', kernel_initializer=tf.compat.v1. 
variance_scaling_initializer()) final_feature = feature_fuse_layer2 + feature_fuse_layer1 final_feature = tf.compat.v1.layers.batch_normalization( final_feature, scale=True, center=True, momentum=0.9, training=self.keep_layer) final_feature = tf.compat.v1.nn.relu(final_feature) with tf.variable_scope('classifier'): classifiter = conv_block(final_feature, conv_type='ds', filters =64, kernel_size=3, strides=1, training=self.keep_layer) print('=== network structure ===') with tf.variable_scope('detector'): self.cls = conv(classifiter, filters=self.class_count, kernel_size=1, strides=1, name='detector_conv1') self.cls = tf.compat.v1.nn.sigmoid(self.cls, name='heatmap') self.size = conv(classifiter, filters=2, kernel_size=1, strides =1, name='detector_conv2') self.size = tf.compat.v1.nn.relu(self.size, name='sizemap') print('heatmap sigmoid=', self.cls) self.output = self.cls print('=== network structure ===') self.heatmap_loss = focal_loss(self.output, self.Y) self.size_loss = reg_l1_loss(self.size, self.SIZE) self.cost = self.heatmap_loss + 0.1 * self.size_loss update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys. UPDATE_OPS) with tf.compat.v1.control_dependencies(update_ops): self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate =self.learning_rate_tensor).minimize(self.cost, name= 'AdamMinimize') print('==============Node Name List==============') print('learning rate tensor : ', self.learning_rate_tensor) print('Input Node Name : ', self.X) print('Output 4 Train Node Name : ', self.Y) print('Phase Node Name', self.keep_layer) print('Output Node Name (heatmap) : ', self.output) print('Output Node Name (sizemap) : ', self.size) print('Cost Function Node Name : ', self.cost) print('Run this operation for a train step :', self. optimizer.name) print('==============Node Name List==============') def predict(self, x_test, keep_prop=False): return self.sess.run([self.output, self.size], feed_dict={self.X: x_test, self.keep_layer: keep_prop}) def get_cost(self, x_test, y_test, y_size, keep_prop=False): return self.sess.run(self.cost, feed_dict={self.X: x_test, self.Y: y_test, self.SIZE: y_size, self.keep_layer: keep_prop}) def train(self, x_data, y_data, y_size, keep_prop=True, learn_rate=0.003): return self.sess.run(self.optimizer, feed_dict={self.X: x_data, self.Y: y_data, self.SIZE: y_size, self.keep_layer: keep_prop, self.learning_rate_tensor: learn_rate}) <|reserved_special_token_1|> import tensorflow as tf from util.helper import focal_loss from util.helper import conv_elu_bn from util.helper import deconv_elu_bn from util.helper import residual_block_elu from util.helper import conv_elu from util.helper import conv from util.helper import reg_l1_loss from util.helper import conv_bn from util.helper import deconv from util.helper import max_pool2d from util.helper import upsample_layer from util.helper import hourglass_module from util.helper import conv_block from util.helper import bottlenect_block_v1 from util.helper import pyramid_pooling_block class model_objectdetection_ppm_centernet_v1: def __init__(self, sess, class_count): self.sess = sess self.class_count = class_count self.up_sample_rate = 1 self.feature_channels = 32 with tf.variable_scope('CenterNet'): self._build_net() def _build_net(self): self.learning_rate_tensor = tf.compat.v1.placeholder(tf.float32, shape=[], name='learning_rate') print(self.learning_rate_tensor) self.X = tf.compat.v1.placeholder(tf.float32, [None, 512, 512, 3], name='X') print(self.X) self.keep_layer = 
tf.compat.v1.placeholder(tf.bool, name='phase') print(self.keep_layer) self.Y = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, self .class_count], 'Y') self.SIZE = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, 2 ], 'Y') print(self.Y) with tf.variable_scope('downsamples'): stage_1_1 = conv_block(self.X, conv_type='conv', filters=16, kernel_size=3, strides=2, training=self.keep_layer) stage_1_2 = conv_block(stage_1_1, conv_type='ds', filters=32, kernel_size=3, strides=2, training=self.keep_layer) stage_1_3 = conv_block(stage_1_2, conv_type='ds', filters=64, kernel_size=3, strides=2, training=self.keep_layer) with tf.variable_scope('feature_extraction'): feature1 = bottlenect_block_v1(inputs=stage_1_3, filters=64, kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual1') feature2 = bottlenect_block_v1(inputs=feature1, filters=64, kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual2') feature3 = bottlenect_block_v1(inputs=feature2, filters=32, kernel_size=3, upsample_rate=2, strides=1, repeat=2, training=self.keep_layer, name='residual3') with tf.variable_scope('pyramid_pooling'): pyramid = pyramid_pooling_block(feature3, kernel_size=32, input_width=32, input_height=32, bin_sizes=[2, 4, 6, 8]) with tf.variable_scope('featurefuse'): feature_fuse_layer1 = conv_block(stage_1_3, conv_type='conv', filters=160, kernel_size=1, strides=1, training=self.keep_layer ) print('test', feature_fuse_layer1) feature_fuse_layer2 = upsample_layer(pyramid, [128, 128]) depthwise_filter = tf.compat.v1.get_variable('feature_fuse_layer2', [3, 3, 32 * 5, 1], initializer=tf.compat.v1. variance_scaling_initializer()) feature_fuse_layer2 = tf.compat.v1.nn.depthwise_conv2d(input= feature_fuse_layer2, filter=depthwise_filter, strides=[1, 1, 1, 1], padding='SAME') print('feature_deptiwise conv=', feature_fuse_layer2) feature_fuse_layer2 = tf.compat.v1.layers.batch_normalization( feature_fuse_layer2, scale=True, center=True, momentum=0.9, training=self.keep_layer) feature_fuse_layer2 = tf.compat.v1.nn.relu(feature_fuse_layer2) feature_fuse_layer2 = tf.compat.v1.layers.conv2d(inputs= feature_fuse_layer2, filters=1, kernel_size=1, strides=1, padding='same', kernel_initializer=tf.compat.v1. variance_scaling_initializer()) final_feature = feature_fuse_layer2 + feature_fuse_layer1 final_feature = tf.compat.v1.layers.batch_normalization( final_feature, scale=True, center=True, momentum=0.9, training=self.keep_layer) final_feature = tf.compat.v1.nn.relu(final_feature) with tf.variable_scope('classifier'): classifiter = conv_block(final_feature, conv_type='ds', filters =64, kernel_size=3, strides=1, training=self.keep_layer) print('=== network structure ===') with tf.variable_scope('detector'): self.cls = conv(classifiter, filters=self.class_count, kernel_size=1, strides=1, name='detector_conv1') self.cls = tf.compat.v1.nn.sigmoid(self.cls, name='heatmap') self.size = conv(classifiter, filters=2, kernel_size=1, strides =1, name='detector_conv2') self.size = tf.compat.v1.nn.relu(self.size, name='sizemap') print('heatmap sigmoid=', self.cls) self.output = self.cls print('=== network structure ===') self.heatmap_loss = focal_loss(self.output, self.Y) self.size_loss = reg_l1_loss(self.size, self.SIZE) self.cost = self.heatmap_loss + 0.1 * self.size_loss update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys. 
UPDATE_OPS) with tf.compat.v1.control_dependencies(update_ops): self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate =self.learning_rate_tensor).minimize(self.cost, name= 'AdamMinimize') print('==============Node Name List==============') print('learning rate tensor : ', self.learning_rate_tensor) print('Input Node Name : ', self.X) print('Output 4 Train Node Name : ', self.Y) print('Phase Node Name', self.keep_layer) print('Output Node Name (heatmap) : ', self.output) print('Output Node Name (sizemap) : ', self.size) print('Cost Function Node Name : ', self.cost) print('Run this operation for a train step :', self. optimizer.name) print('==============Node Name List==============') def predict(self, x_test, keep_prop=False): return self.sess.run([self.output, self.size], feed_dict={self.X: x_test, self.keep_layer: keep_prop}) def get_cost(self, x_test, y_test, y_size, keep_prop=False): return self.sess.run(self.cost, feed_dict={self.X: x_test, self.Y: y_test, self.SIZE: y_size, self.keep_layer: keep_prop}) def train(self, x_data, y_data, y_size, keep_prop=True, learn_rate=0.003): return self.sess.run(self.optimizer, feed_dict={self.X: x_data, self.Y: y_data, self.SIZE: y_size, self.keep_layer: keep_prop, self.learning_rate_tensor: learn_rate}) <|reserved_special_token_1|> import tensorflow as tf from util.helper import focal_loss from util.helper import conv_elu_bn from util.helper import deconv_elu_bn from util.helper import residual_block_elu from util.helper import conv_elu from util.helper import conv from util.helper import reg_l1_loss from util.helper import conv_bn from util.helper import deconv from util.helper import max_pool2d from util.helper import upsample_layer from util.helper import hourglass_module from util.helper import conv_block from util.helper import bottlenect_block_v1 from util.helper import pyramid_pooling_block # 0 cat , 1 dog, class model_objectdetection_ppm_centernet_v1: def __init__(self, sess, class_count): self.sess = sess self.class_count = class_count self.up_sample_rate = 1 self.feature_channels = 32 #self.hourglass_channel = 32 with tf.variable_scope('CenterNet'): self._build_net() def _build_net(self): self.learning_rate_tensor = tf.compat.v1.placeholder(tf.float32, shape=[], name='learning_rate') print(self.learning_rate_tensor) self.X = tf.compat.v1.placeholder(tf.float32, [None, 512, 512, 3], name='X') print(self.X) self.keep_layer = tf.compat.v1.placeholder(tf.bool, name='phase') print(self.keep_layer) self.Y = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, self.class_count], 'Y') self.SIZE = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, 2], 'Y') print(self.Y) ## Batch , Height , Width, Class #X_input = tf.compat.v1.reshape(self.X, [-1, 512, 512, 3]) #Y_input = tf.compat.v1.reshape(self.Y, [-1, 128, 128, self.class_count]) # 512 512 -> 256x 256 with tf.variable_scope('downsamples'): stage_1_1 = conv_block(self.X, conv_type='conv', filters=16, kernel_size=3, strides=2, training=self.keep_layer) stage_1_2 = conv_block(stage_1_1, conv_type='ds', filters=32, kernel_size=3, strides=2, training=self.keep_layer) stage_1_3 = conv_block(stage_1_2, conv_type='ds', filters=64, kernel_size=3, strides=2, training=self.keep_layer) with tf.variable_scope('feature_extraction'): feature1 = bottlenect_block_v1(inputs=stage_1_3, filters=64, kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual1') feature2 = bottlenect_block_v1(inputs=feature1, filters=64, kernel_size=3, upsample_rate=2, strides=2, 
repeat=2, training=self.keep_layer, name='residual2') feature3 = bottlenect_block_v1(inputs=feature2, filters=32, kernel_size=3, upsample_rate=2, strides=1, repeat=2, training=self.keep_layer, name='residual3') with tf.variable_scope('pyramid_pooling'): pyramid = pyramid_pooling_block(feature3, kernel_size=32, input_width=32, input_height=32, bin_sizes=[2, 4, 6, 8]) with tf.variable_scope('featurefuse'): feature_fuse_layer1 = conv_block(stage_1_3, conv_type='conv', filters=160, kernel_size=1, strides=1, training=self.keep_layer) print('test',feature_fuse_layer1) feature_fuse_layer2 = upsample_layer(pyramid, [128, 128]) depthwise_filter = tf.compat.v1.get_variable('feature_fuse_layer2', [3, 3, 32 * 5, 1], initializer=tf.compat.v1.variance_scaling_initializer()) feature_fuse_layer2 = tf.compat.v1.nn.depthwise_conv2d(input=feature_fuse_layer2, filter=depthwise_filter, strides=[1, 1, 1, 1], padding='SAME') print('feature_deptiwise conv=', feature_fuse_layer2) feature_fuse_layer2 = tf.compat.v1.layers.batch_normalization(feature_fuse_layer2, scale=True, center=True, momentum=0.9, training=self.keep_layer) feature_fuse_layer2 = tf.compat.v1.nn.relu(feature_fuse_layer2) feature_fuse_layer2 = tf.compat.v1.layers.conv2d(inputs=feature_fuse_layer2, filters=1, kernel_size=1, strides=1, padding='same', kernel_initializer=tf.compat.v1.variance_scaling_initializer()) final_feature = feature_fuse_layer2 + feature_fuse_layer1 final_feature = tf.compat.v1.layers.batch_normalization(final_feature, scale=True, center=True, momentum=0.9, training=self.keep_layer) final_feature = tf.compat.v1.nn.relu(final_feature) with tf.variable_scope('classifier'): classifiter = conv_block(final_feature, conv_type='ds', filters=64, kernel_size=3, strides=1, training=self.keep_layer) #classifiter = conv_block(classifiter, conv_type='ds', filters=64, kernel_size=3, strides=1, training=self.keep_layer) print("=== network structure ===") with tf.variable_scope("detector"): #self.cls = conv_elu_bn(feature_fuse_layer2, filters=self.feature_channels, training=self.keep_layer, kernel_size=3, strides=1, name='detector_convelu1') self.cls = conv(classifiter, filters=self.class_count, kernel_size=1, strides=1, name='detector_conv1') self.cls = tf.compat.v1.nn.sigmoid(self.cls, name="heatmap") #self.size = conv_elu_bn(feature_fuse_layer2, filters=self.feature_channels, training=self.keep_layer, kernel_size=3, strides=1, name='detector_convelu2') self.size = conv(classifiter, filters=2, kernel_size=1, strides=1, name='detector_conv2') self.size = tf.compat.v1.nn.relu(self.size, name='sizemap') print("heatmap sigmoid=", self.cls) self.output = self.cls; print("=== network structure ===") self.heatmap_loss = focal_loss(self.output, self.Y) self.size_loss = reg_l1_loss(self.size, self.SIZE) self.cost = self.heatmap_loss + 0.1 * self.size_loss # define cost/loss & optimizer update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.UPDATE_OPS) with tf.compat.v1.control_dependencies(update_ops): self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate=self.learning_rate_tensor).minimize(self.cost, name='AdamMinimize') print("==============Node Name List==============") print("learning rate tensor : ", self.learning_rate_tensor) print("Input Node Name : ", self.X) print("Output 4 Train Node Name : ", self.Y) print("Phase Node Name", self.keep_layer) print("Output Node Name (heatmap) : ", self.output) print("Output Node Name (sizemap) : ", self.size) print("Cost Function Node Name : ", self.cost) print("Run this operation for a 
train step :", self.optimizer.name) print("==============Node Name List==============") def predict(self, x_test, keep_prop=False): return self.sess.run([self.output, self.size], feed_dict={self.X: x_test, self.keep_layer: keep_prop}) def get_cost(self, x_test, y_test, y_size, keep_prop=False): # print(self.sess.run(self.output, feed_dict={self.X: x_test, self.keep_layer: keep_prop})) return self.sess.run(self.cost, feed_dict={self.X: x_test, self.Y: y_test, self.SIZE:y_size, self.keep_layer: keep_prop}) def train(self, x_data, y_data, y_size, keep_prop=True, learn_rate=0.003): return self.sess.run(self.optimizer, feed_dict={self.X: x_data, self.Y: y_data, self.SIZE:y_size, self.keep_layer: keep_prop, self.learning_rate_tensor: learn_rate})
flexible
{ "blob_id": "e24a62f2a3ff0122922f472a7b37f1773dfe9c11", "index": 7605, "step-1": "<mask token>\n\n\nclass model_objectdetection_ppm_centernet_v1:\n <mask token>\n\n def _build_net(self):\n self.learning_rate_tensor = tf.compat.v1.placeholder(tf.float32,\n shape=[], name='learning_rate')\n print(self.learning_rate_tensor)\n self.X = tf.compat.v1.placeholder(tf.float32, [None, 512, 512, 3],\n name='X')\n print(self.X)\n self.keep_layer = tf.compat.v1.placeholder(tf.bool, name='phase')\n print(self.keep_layer)\n self.Y = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, self\n .class_count], 'Y')\n self.SIZE = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, 2\n ], 'Y')\n print(self.Y)\n with tf.variable_scope('downsamples'):\n stage_1_1 = conv_block(self.X, conv_type='conv', filters=16,\n kernel_size=3, strides=2, training=self.keep_layer)\n stage_1_2 = conv_block(stage_1_1, conv_type='ds', filters=32,\n kernel_size=3, strides=2, training=self.keep_layer)\n stage_1_3 = conv_block(stage_1_2, conv_type='ds', filters=64,\n kernel_size=3, strides=2, training=self.keep_layer)\n with tf.variable_scope('feature_extraction'):\n feature1 = bottlenect_block_v1(inputs=stage_1_3, filters=64,\n kernel_size=3, upsample_rate=2, strides=2, repeat=2,\n training=self.keep_layer, name='residual1')\n feature2 = bottlenect_block_v1(inputs=feature1, filters=64,\n kernel_size=3, upsample_rate=2, strides=2, repeat=2,\n training=self.keep_layer, name='residual2')\n feature3 = bottlenect_block_v1(inputs=feature2, filters=32,\n kernel_size=3, upsample_rate=2, strides=1, repeat=2,\n training=self.keep_layer, name='residual3')\n with tf.variable_scope('pyramid_pooling'):\n pyramid = pyramid_pooling_block(feature3, kernel_size=32,\n input_width=32, input_height=32, bin_sizes=[2, 4, 6, 8])\n with tf.variable_scope('featurefuse'):\n feature_fuse_layer1 = conv_block(stage_1_3, conv_type='conv',\n filters=160, kernel_size=1, strides=1, training=self.keep_layer\n )\n print('test', feature_fuse_layer1)\n feature_fuse_layer2 = upsample_layer(pyramid, [128, 128])\n depthwise_filter = tf.compat.v1.get_variable('feature_fuse_layer2',\n [3, 3, 32 * 5, 1], initializer=tf.compat.v1.\n variance_scaling_initializer())\n feature_fuse_layer2 = tf.compat.v1.nn.depthwise_conv2d(input=\n feature_fuse_layer2, filter=depthwise_filter, strides=[1, 1,\n 1, 1], padding='SAME')\n print('feature_deptiwise conv=', feature_fuse_layer2)\n feature_fuse_layer2 = tf.compat.v1.layers.batch_normalization(\n feature_fuse_layer2, scale=True, center=True, momentum=0.9,\n training=self.keep_layer)\n feature_fuse_layer2 = tf.compat.v1.nn.relu(feature_fuse_layer2)\n feature_fuse_layer2 = tf.compat.v1.layers.conv2d(inputs=\n feature_fuse_layer2, filters=1, kernel_size=1, strides=1,\n padding='same', kernel_initializer=tf.compat.v1.\n variance_scaling_initializer())\n final_feature = feature_fuse_layer2 + feature_fuse_layer1\n final_feature = tf.compat.v1.layers.batch_normalization(\n final_feature, scale=True, center=True, momentum=0.9,\n training=self.keep_layer)\n final_feature = tf.compat.v1.nn.relu(final_feature)\n with tf.variable_scope('classifier'):\n classifiter = conv_block(final_feature, conv_type='ds', filters\n =64, kernel_size=3, strides=1, training=self.keep_layer)\n print('=== network structure ===')\n with tf.variable_scope('detector'):\n self.cls = conv(classifiter, filters=self.class_count,\n kernel_size=1, strides=1, name='detector_conv1')\n self.cls = tf.compat.v1.nn.sigmoid(self.cls, name='heatmap')\n self.size = conv(classifiter, 
filters=2, kernel_size=1, strides\n =1, name='detector_conv2')\n self.size = tf.compat.v1.nn.relu(self.size, name='sizemap')\n print('heatmap sigmoid=', self.cls)\n self.output = self.cls\n print('=== network structure ===')\n self.heatmap_loss = focal_loss(self.output, self.Y)\n self.size_loss = reg_l1_loss(self.size, self.SIZE)\n self.cost = self.heatmap_loss + 0.1 * self.size_loss\n update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.\n UPDATE_OPS)\n with tf.compat.v1.control_dependencies(update_ops):\n self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate\n =self.learning_rate_tensor).minimize(self.cost, name=\n 'AdamMinimize')\n print('==============Node Name List==============')\n print('learning rate tensor : ', self.learning_rate_tensor)\n print('Input Node Name : ', self.X)\n print('Output 4 Train Node Name : ', self.Y)\n print('Phase Node Name', self.keep_layer)\n print('Output Node Name (heatmap) : ', self.output)\n print('Output Node Name (sizemap) : ', self.size)\n print('Cost Function Node Name : ', self.cost)\n print('Run this operation for a train step :', self.\n optimizer.name)\n print('==============Node Name List==============')\n\n def predict(self, x_test, keep_prop=False):\n return self.sess.run([self.output, self.size], feed_dict={self.X:\n x_test, self.keep_layer: keep_prop})\n <mask token>\n\n def train(self, x_data, y_data, y_size, keep_prop=True, learn_rate=0.003):\n return self.sess.run(self.optimizer, feed_dict={self.X: x_data,\n self.Y: y_data, self.SIZE: y_size, self.keep_layer: keep_prop,\n self.learning_rate_tensor: learn_rate})\n", "step-2": "<mask token>\n\n\nclass model_objectdetection_ppm_centernet_v1:\n\n def __init__(self, sess, class_count):\n self.sess = sess\n self.class_count = class_count\n self.up_sample_rate = 1\n self.feature_channels = 32\n with tf.variable_scope('CenterNet'):\n self._build_net()\n\n def _build_net(self):\n self.learning_rate_tensor = tf.compat.v1.placeholder(tf.float32,\n shape=[], name='learning_rate')\n print(self.learning_rate_tensor)\n self.X = tf.compat.v1.placeholder(tf.float32, [None, 512, 512, 3],\n name='X')\n print(self.X)\n self.keep_layer = tf.compat.v1.placeholder(tf.bool, name='phase')\n print(self.keep_layer)\n self.Y = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, self\n .class_count], 'Y')\n self.SIZE = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, 2\n ], 'Y')\n print(self.Y)\n with tf.variable_scope('downsamples'):\n stage_1_1 = conv_block(self.X, conv_type='conv', filters=16,\n kernel_size=3, strides=2, training=self.keep_layer)\n stage_1_2 = conv_block(stage_1_1, conv_type='ds', filters=32,\n kernel_size=3, strides=2, training=self.keep_layer)\n stage_1_3 = conv_block(stage_1_2, conv_type='ds', filters=64,\n kernel_size=3, strides=2, training=self.keep_layer)\n with tf.variable_scope('feature_extraction'):\n feature1 = bottlenect_block_v1(inputs=stage_1_3, filters=64,\n kernel_size=3, upsample_rate=2, strides=2, repeat=2,\n training=self.keep_layer, name='residual1')\n feature2 = bottlenect_block_v1(inputs=feature1, filters=64,\n kernel_size=3, upsample_rate=2, strides=2, repeat=2,\n training=self.keep_layer, name='residual2')\n feature3 = bottlenect_block_v1(inputs=feature2, filters=32,\n kernel_size=3, upsample_rate=2, strides=1, repeat=2,\n training=self.keep_layer, name='residual3')\n with tf.variable_scope('pyramid_pooling'):\n pyramid = pyramid_pooling_block(feature3, kernel_size=32,\n input_width=32, input_height=32, bin_sizes=[2, 4, 6, 8])\n with 
tf.variable_scope('featurefuse'):\n feature_fuse_layer1 = conv_block(stage_1_3, conv_type='conv',\n filters=160, kernel_size=1, strides=1, training=self.keep_layer\n )\n print('test', feature_fuse_layer1)\n feature_fuse_layer2 = upsample_layer(pyramid, [128, 128])\n depthwise_filter = tf.compat.v1.get_variable('feature_fuse_layer2',\n [3, 3, 32 * 5, 1], initializer=tf.compat.v1.\n variance_scaling_initializer())\n feature_fuse_layer2 = tf.compat.v1.nn.depthwise_conv2d(input=\n feature_fuse_layer2, filter=depthwise_filter, strides=[1, 1,\n 1, 1], padding='SAME')\n print('feature_deptiwise conv=', feature_fuse_layer2)\n feature_fuse_layer2 = tf.compat.v1.layers.batch_normalization(\n feature_fuse_layer2, scale=True, center=True, momentum=0.9,\n training=self.keep_layer)\n feature_fuse_layer2 = tf.compat.v1.nn.relu(feature_fuse_layer2)\n feature_fuse_layer2 = tf.compat.v1.layers.conv2d(inputs=\n feature_fuse_layer2, filters=1, kernel_size=1, strides=1,\n padding='same', kernel_initializer=tf.compat.v1.\n variance_scaling_initializer())\n final_feature = feature_fuse_layer2 + feature_fuse_layer1\n final_feature = tf.compat.v1.layers.batch_normalization(\n final_feature, scale=True, center=True, momentum=0.9,\n training=self.keep_layer)\n final_feature = tf.compat.v1.nn.relu(final_feature)\n with tf.variable_scope('classifier'):\n classifiter = conv_block(final_feature, conv_type='ds', filters\n =64, kernel_size=3, strides=1, training=self.keep_layer)\n print('=== network structure ===')\n with tf.variable_scope('detector'):\n self.cls = conv(classifiter, filters=self.class_count,\n kernel_size=1, strides=1, name='detector_conv1')\n self.cls = tf.compat.v1.nn.sigmoid(self.cls, name='heatmap')\n self.size = conv(classifiter, filters=2, kernel_size=1, strides\n =1, name='detector_conv2')\n self.size = tf.compat.v1.nn.relu(self.size, name='sizemap')\n print('heatmap sigmoid=', self.cls)\n self.output = self.cls\n print('=== network structure ===')\n self.heatmap_loss = focal_loss(self.output, self.Y)\n self.size_loss = reg_l1_loss(self.size, self.SIZE)\n self.cost = self.heatmap_loss + 0.1 * self.size_loss\n update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.\n UPDATE_OPS)\n with tf.compat.v1.control_dependencies(update_ops):\n self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate\n =self.learning_rate_tensor).minimize(self.cost, name=\n 'AdamMinimize')\n print('==============Node Name List==============')\n print('learning rate tensor : ', self.learning_rate_tensor)\n print('Input Node Name : ', self.X)\n print('Output 4 Train Node Name : ', self.Y)\n print('Phase Node Name', self.keep_layer)\n print('Output Node Name (heatmap) : ', self.output)\n print('Output Node Name (sizemap) : ', self.size)\n print('Cost Function Node Name : ', self.cost)\n print('Run this operation for a train step :', self.\n optimizer.name)\n print('==============Node Name List==============')\n\n def predict(self, x_test, keep_prop=False):\n return self.sess.run([self.output, self.size], feed_dict={self.X:\n x_test, self.keep_layer: keep_prop})\n <mask token>\n\n def train(self, x_data, y_data, y_size, keep_prop=True, learn_rate=0.003):\n return self.sess.run(self.optimizer, feed_dict={self.X: x_data,\n self.Y: y_data, self.SIZE: y_size, self.keep_layer: keep_prop,\n self.learning_rate_tensor: learn_rate})\n", "step-3": "<mask token>\n\n\nclass model_objectdetection_ppm_centernet_v1:\n\n def __init__(self, sess, class_count):\n self.sess = sess\n self.class_count = class_count\n 
self.up_sample_rate = 1\n self.feature_channels = 32\n with tf.variable_scope('CenterNet'):\n self._build_net()\n\n def _build_net(self):\n self.learning_rate_tensor = tf.compat.v1.placeholder(tf.float32,\n shape=[], name='learning_rate')\n print(self.learning_rate_tensor)\n self.X = tf.compat.v1.placeholder(tf.float32, [None, 512, 512, 3],\n name='X')\n print(self.X)\n self.keep_layer = tf.compat.v1.placeholder(tf.bool, name='phase')\n print(self.keep_layer)\n self.Y = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, self\n .class_count], 'Y')\n self.SIZE = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, 2\n ], 'Y')\n print(self.Y)\n with tf.variable_scope('downsamples'):\n stage_1_1 = conv_block(self.X, conv_type='conv', filters=16,\n kernel_size=3, strides=2, training=self.keep_layer)\n stage_1_2 = conv_block(stage_1_1, conv_type='ds', filters=32,\n kernel_size=3, strides=2, training=self.keep_layer)\n stage_1_3 = conv_block(stage_1_2, conv_type='ds', filters=64,\n kernel_size=3, strides=2, training=self.keep_layer)\n with tf.variable_scope('feature_extraction'):\n feature1 = bottlenect_block_v1(inputs=stage_1_3, filters=64,\n kernel_size=3, upsample_rate=2, strides=2, repeat=2,\n training=self.keep_layer, name='residual1')\n feature2 = bottlenect_block_v1(inputs=feature1, filters=64,\n kernel_size=3, upsample_rate=2, strides=2, repeat=2,\n training=self.keep_layer, name='residual2')\n feature3 = bottlenect_block_v1(inputs=feature2, filters=32,\n kernel_size=3, upsample_rate=2, strides=1, repeat=2,\n training=self.keep_layer, name='residual3')\n with tf.variable_scope('pyramid_pooling'):\n pyramid = pyramid_pooling_block(feature3, kernel_size=32,\n input_width=32, input_height=32, bin_sizes=[2, 4, 6, 8])\n with tf.variable_scope('featurefuse'):\n feature_fuse_layer1 = conv_block(stage_1_3, conv_type='conv',\n filters=160, kernel_size=1, strides=1, training=self.keep_layer\n )\n print('test', feature_fuse_layer1)\n feature_fuse_layer2 = upsample_layer(pyramid, [128, 128])\n depthwise_filter = tf.compat.v1.get_variable('feature_fuse_layer2',\n [3, 3, 32 * 5, 1], initializer=tf.compat.v1.\n variance_scaling_initializer())\n feature_fuse_layer2 = tf.compat.v1.nn.depthwise_conv2d(input=\n feature_fuse_layer2, filter=depthwise_filter, strides=[1, 1,\n 1, 1], padding='SAME')\n print('feature_deptiwise conv=', feature_fuse_layer2)\n feature_fuse_layer2 = tf.compat.v1.layers.batch_normalization(\n feature_fuse_layer2, scale=True, center=True, momentum=0.9,\n training=self.keep_layer)\n feature_fuse_layer2 = tf.compat.v1.nn.relu(feature_fuse_layer2)\n feature_fuse_layer2 = tf.compat.v1.layers.conv2d(inputs=\n feature_fuse_layer2, filters=1, kernel_size=1, strides=1,\n padding='same', kernel_initializer=tf.compat.v1.\n variance_scaling_initializer())\n final_feature = feature_fuse_layer2 + feature_fuse_layer1\n final_feature = tf.compat.v1.layers.batch_normalization(\n final_feature, scale=True, center=True, momentum=0.9,\n training=self.keep_layer)\n final_feature = tf.compat.v1.nn.relu(final_feature)\n with tf.variable_scope('classifier'):\n classifiter = conv_block(final_feature, conv_type='ds', filters\n =64, kernel_size=3, strides=1, training=self.keep_layer)\n print('=== network structure ===')\n with tf.variable_scope('detector'):\n self.cls = conv(classifiter, filters=self.class_count,\n kernel_size=1, strides=1, name='detector_conv1')\n self.cls = tf.compat.v1.nn.sigmoid(self.cls, name='heatmap')\n self.size = conv(classifiter, filters=2, kernel_size=1, strides\n =1, 
name='detector_conv2')\n self.size = tf.compat.v1.nn.relu(self.size, name='sizemap')\n print('heatmap sigmoid=', self.cls)\n self.output = self.cls\n print('=== network structure ===')\n self.heatmap_loss = focal_loss(self.output, self.Y)\n self.size_loss = reg_l1_loss(self.size, self.SIZE)\n self.cost = self.heatmap_loss + 0.1 * self.size_loss\n update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.\n UPDATE_OPS)\n with tf.compat.v1.control_dependencies(update_ops):\n self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate\n =self.learning_rate_tensor).minimize(self.cost, name=\n 'AdamMinimize')\n print('==============Node Name List==============')\n print('learning rate tensor : ', self.learning_rate_tensor)\n print('Input Node Name : ', self.X)\n print('Output 4 Train Node Name : ', self.Y)\n print('Phase Node Name', self.keep_layer)\n print('Output Node Name (heatmap) : ', self.output)\n print('Output Node Name (sizemap) : ', self.size)\n print('Cost Function Node Name : ', self.cost)\n print('Run this operation for a train step :', self.\n optimizer.name)\n print('==============Node Name List==============')\n\n def predict(self, x_test, keep_prop=False):\n return self.sess.run([self.output, self.size], feed_dict={self.X:\n x_test, self.keep_layer: keep_prop})\n\n def get_cost(self, x_test, y_test, y_size, keep_prop=False):\n return self.sess.run(self.cost, feed_dict={self.X: x_test, self.Y:\n y_test, self.SIZE: y_size, self.keep_layer: keep_prop})\n\n def train(self, x_data, y_data, y_size, keep_prop=True, learn_rate=0.003):\n return self.sess.run(self.optimizer, feed_dict={self.X: x_data,\n self.Y: y_data, self.SIZE: y_size, self.keep_layer: keep_prop,\n self.learning_rate_tensor: learn_rate})\n", "step-4": "import tensorflow as tf\nfrom util.helper import focal_loss\nfrom util.helper import conv_elu_bn\nfrom util.helper import deconv_elu_bn\nfrom util.helper import residual_block_elu\nfrom util.helper import conv_elu\nfrom util.helper import conv\nfrom util.helper import reg_l1_loss\nfrom util.helper import conv_bn\nfrom util.helper import deconv\nfrom util.helper import max_pool2d\nfrom util.helper import upsample_layer\nfrom util.helper import hourglass_module\nfrom util.helper import conv_block\nfrom util.helper import bottlenect_block_v1\nfrom util.helper import pyramid_pooling_block\n\n\nclass model_objectdetection_ppm_centernet_v1:\n\n def __init__(self, sess, class_count):\n self.sess = sess\n self.class_count = class_count\n self.up_sample_rate = 1\n self.feature_channels = 32\n with tf.variable_scope('CenterNet'):\n self._build_net()\n\n def _build_net(self):\n self.learning_rate_tensor = tf.compat.v1.placeholder(tf.float32,\n shape=[], name='learning_rate')\n print(self.learning_rate_tensor)\n self.X = tf.compat.v1.placeholder(tf.float32, [None, 512, 512, 3],\n name='X')\n print(self.X)\n self.keep_layer = tf.compat.v1.placeholder(tf.bool, name='phase')\n print(self.keep_layer)\n self.Y = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, self\n .class_count], 'Y')\n self.SIZE = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, 2\n ], 'Y')\n print(self.Y)\n with tf.variable_scope('downsamples'):\n stage_1_1 = conv_block(self.X, conv_type='conv', filters=16,\n kernel_size=3, strides=2, training=self.keep_layer)\n stage_1_2 = conv_block(stage_1_1, conv_type='ds', filters=32,\n kernel_size=3, strides=2, training=self.keep_layer)\n stage_1_3 = conv_block(stage_1_2, conv_type='ds', filters=64,\n kernel_size=3, strides=2, training=self.keep_layer)\n 
with tf.variable_scope('feature_extraction'):\n feature1 = bottlenect_block_v1(inputs=stage_1_3, filters=64,\n kernel_size=3, upsample_rate=2, strides=2, repeat=2,\n training=self.keep_layer, name='residual1')\n feature2 = bottlenect_block_v1(inputs=feature1, filters=64,\n kernel_size=3, upsample_rate=2, strides=2, repeat=2,\n training=self.keep_layer, name='residual2')\n feature3 = bottlenect_block_v1(inputs=feature2, filters=32,\n kernel_size=3, upsample_rate=2, strides=1, repeat=2,\n training=self.keep_layer, name='residual3')\n with tf.variable_scope('pyramid_pooling'):\n pyramid = pyramid_pooling_block(feature3, kernel_size=32,\n input_width=32, input_height=32, bin_sizes=[2, 4, 6, 8])\n with tf.variable_scope('featurefuse'):\n feature_fuse_layer1 = conv_block(stage_1_3, conv_type='conv',\n filters=160, kernel_size=1, strides=1, training=self.keep_layer\n )\n print('test', feature_fuse_layer1)\n feature_fuse_layer2 = upsample_layer(pyramid, [128, 128])\n depthwise_filter = tf.compat.v1.get_variable('feature_fuse_layer2',\n [3, 3, 32 * 5, 1], initializer=tf.compat.v1.\n variance_scaling_initializer())\n feature_fuse_layer2 = tf.compat.v1.nn.depthwise_conv2d(input=\n feature_fuse_layer2, filter=depthwise_filter, strides=[1, 1,\n 1, 1], padding='SAME')\n print('feature_deptiwise conv=', feature_fuse_layer2)\n feature_fuse_layer2 = tf.compat.v1.layers.batch_normalization(\n feature_fuse_layer2, scale=True, center=True, momentum=0.9,\n training=self.keep_layer)\n feature_fuse_layer2 = tf.compat.v1.nn.relu(feature_fuse_layer2)\n feature_fuse_layer2 = tf.compat.v1.layers.conv2d(inputs=\n feature_fuse_layer2, filters=1, kernel_size=1, strides=1,\n padding='same', kernel_initializer=tf.compat.v1.\n variance_scaling_initializer())\n final_feature = feature_fuse_layer2 + feature_fuse_layer1\n final_feature = tf.compat.v1.layers.batch_normalization(\n final_feature, scale=True, center=True, momentum=0.9,\n training=self.keep_layer)\n final_feature = tf.compat.v1.nn.relu(final_feature)\n with tf.variable_scope('classifier'):\n classifiter = conv_block(final_feature, conv_type='ds', filters\n =64, kernel_size=3, strides=1, training=self.keep_layer)\n print('=== network structure ===')\n with tf.variable_scope('detector'):\n self.cls = conv(classifiter, filters=self.class_count,\n kernel_size=1, strides=1, name='detector_conv1')\n self.cls = tf.compat.v1.nn.sigmoid(self.cls, name='heatmap')\n self.size = conv(classifiter, filters=2, kernel_size=1, strides\n =1, name='detector_conv2')\n self.size = tf.compat.v1.nn.relu(self.size, name='sizemap')\n print('heatmap sigmoid=', self.cls)\n self.output = self.cls\n print('=== network structure ===')\n self.heatmap_loss = focal_loss(self.output, self.Y)\n self.size_loss = reg_l1_loss(self.size, self.SIZE)\n self.cost = self.heatmap_loss + 0.1 * self.size_loss\n update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.\n UPDATE_OPS)\n with tf.compat.v1.control_dependencies(update_ops):\n self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate\n =self.learning_rate_tensor).minimize(self.cost, name=\n 'AdamMinimize')\n print('==============Node Name List==============')\n print('learning rate tensor : ', self.learning_rate_tensor)\n print('Input Node Name : ', self.X)\n print('Output 4 Train Node Name : ', self.Y)\n print('Phase Node Name', self.keep_layer)\n print('Output Node Name (heatmap) : ', self.output)\n print('Output Node Name (sizemap) : ', self.size)\n print('Cost Function Node Name : ', self.cost)\n print('Run this operation for a 
train step :', self.\n optimizer.name)\n print('==============Node Name List==============')\n\n def predict(self, x_test, keep_prop=False):\n return self.sess.run([self.output, self.size], feed_dict={self.X:\n x_test, self.keep_layer: keep_prop})\n\n def get_cost(self, x_test, y_test, y_size, keep_prop=False):\n return self.sess.run(self.cost, feed_dict={self.X: x_test, self.Y:\n y_test, self.SIZE: y_size, self.keep_layer: keep_prop})\n\n def train(self, x_data, y_data, y_size, keep_prop=True, learn_rate=0.003):\n return self.sess.run(self.optimizer, feed_dict={self.X: x_data,\n self.Y: y_data, self.SIZE: y_size, self.keep_layer: keep_prop,\n self.learning_rate_tensor: learn_rate})\n", "step-5": "import tensorflow as tf\n\nfrom util.helper import focal_loss\nfrom util.helper import conv_elu_bn\nfrom util.helper import deconv_elu_bn\nfrom util.helper import residual_block_elu\nfrom util.helper import conv_elu\nfrom util.helper import conv\nfrom util.helper import reg_l1_loss\nfrom util.helper import conv_bn\nfrom util.helper import deconv\nfrom util.helper import max_pool2d\nfrom util.helper import upsample_layer\nfrom util.helper import hourglass_module\n\n\nfrom util.helper import conv_block\nfrom util.helper import bottlenect_block_v1\nfrom util.helper import pyramid_pooling_block\n\n# 0 cat , 1 dog,\n\nclass model_objectdetection_ppm_centernet_v1:\n\n def __init__(self, sess, class_count):\n self.sess = sess\n self.class_count = class_count\n self.up_sample_rate = 1\n self.feature_channels = 32\n #self.hourglass_channel = 32\n\n with tf.variable_scope('CenterNet'):\n self._build_net()\n\n def _build_net(self):\n self.learning_rate_tensor = tf.compat.v1.placeholder(tf.float32, shape=[], name='learning_rate')\n print(self.learning_rate_tensor)\n\n self.X = tf.compat.v1.placeholder(tf.float32, [None, 512, 512, 3], name='X')\n print(self.X)\n\n self.keep_layer = tf.compat.v1.placeholder(tf.bool, name='phase')\n print(self.keep_layer)\n\n self.Y = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, self.class_count], 'Y')\n self.SIZE = tf.compat.v1.placeholder(tf.float32, [None, 128, 128, 2], 'Y')\n print(self.Y)\n\n ## Batch , Height , Width, Class\n #X_input = tf.compat.v1.reshape(self.X, [-1, 512, 512, 3])\n #Y_input = tf.compat.v1.reshape(self.Y, [-1, 128, 128, self.class_count])\n\n\n # 512 512 -> 256x 256\n with tf.variable_scope('downsamples'):\n stage_1_1 = conv_block(self.X, conv_type='conv', filters=16, kernel_size=3, strides=2, training=self.keep_layer)\n stage_1_2 = conv_block(stage_1_1, conv_type='ds', filters=32, kernel_size=3, strides=2, training=self.keep_layer)\n stage_1_3 = conv_block(stage_1_2, conv_type='ds', filters=64, kernel_size=3, strides=2, training=self.keep_layer)\n\n\n\n with tf.variable_scope('feature_extraction'):\n feature1 = bottlenect_block_v1(inputs=stage_1_3, filters=64, kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual1')\n feature2 = bottlenect_block_v1(inputs=feature1, filters=64, kernel_size=3, upsample_rate=2, strides=2, repeat=2, training=self.keep_layer, name='residual2')\n feature3 = bottlenect_block_v1(inputs=feature2, filters=32, kernel_size=3, upsample_rate=2, strides=1, repeat=2, training=self.keep_layer, name='residual3')\n\n\n with tf.variable_scope('pyramid_pooling'):\n pyramid = pyramid_pooling_block(feature3, kernel_size=32, input_width=32, input_height=32, bin_sizes=[2, 4, 6, 8])\n\n\n with tf.variable_scope('featurefuse'):\n feature_fuse_layer1 = conv_block(stage_1_3, conv_type='conv', 
filters=160, kernel_size=1, strides=1, training=self.keep_layer)\n print('test',feature_fuse_layer1)\n\n feature_fuse_layer2 = upsample_layer(pyramid, [128, 128])\n depthwise_filter = tf.compat.v1.get_variable('feature_fuse_layer2', [3, 3, 32 * 5, 1], initializer=tf.compat.v1.variance_scaling_initializer())\n feature_fuse_layer2 = tf.compat.v1.nn.depthwise_conv2d(input=feature_fuse_layer2, filter=depthwise_filter, strides=[1, 1, 1, 1], padding='SAME')\n print('feature_deptiwise conv=', feature_fuse_layer2)\n feature_fuse_layer2 = tf.compat.v1.layers.batch_normalization(feature_fuse_layer2, scale=True, center=True, momentum=0.9, training=self.keep_layer)\n feature_fuse_layer2 = tf.compat.v1.nn.relu(feature_fuse_layer2)\n feature_fuse_layer2 = tf.compat.v1.layers.conv2d(inputs=feature_fuse_layer2, filters=1, kernel_size=1, strides=1, padding='same', kernel_initializer=tf.compat.v1.variance_scaling_initializer())\n\n final_feature = feature_fuse_layer2 + feature_fuse_layer1\n final_feature = tf.compat.v1.layers.batch_normalization(final_feature, scale=True, center=True, momentum=0.9, training=self.keep_layer)\n final_feature = tf.compat.v1.nn.relu(final_feature)\n\n\n with tf.variable_scope('classifier'):\n classifiter = conv_block(final_feature, conv_type='ds', filters=64, kernel_size=3, strides=1, training=self.keep_layer)\n #classifiter = conv_block(classifiter, conv_type='ds', filters=64, kernel_size=3, strides=1, training=self.keep_layer)\n\n\n print(\"=== network structure ===\")\n\n with tf.variable_scope(\"detector\"):\n #self.cls = conv_elu_bn(feature_fuse_layer2, filters=self.feature_channels, training=self.keep_layer, kernel_size=3, strides=1, name='detector_convelu1')\n self.cls = conv(classifiter, filters=self.class_count, kernel_size=1, strides=1, name='detector_conv1')\n self.cls = tf.compat.v1.nn.sigmoid(self.cls, name=\"heatmap\")\n\n #self.size = conv_elu_bn(feature_fuse_layer2, filters=self.feature_channels, training=self.keep_layer, kernel_size=3, strides=1, name='detector_convelu2')\n self.size = conv(classifiter, filters=2, kernel_size=1, strides=1, name='detector_conv2')\n self.size = tf.compat.v1.nn.relu(self.size, name='sizemap')\n\n\n print(\"heatmap sigmoid=\", self.cls)\n\n self.output = self.cls;\n print(\"=== network structure ===\")\n\n\n self.heatmap_loss = focal_loss(self.output, self.Y)\n self.size_loss = reg_l1_loss(self.size, self.SIZE)\n self.cost = self.heatmap_loss + 0.1 * self.size_loss\n # define cost/loss & optimizer\n update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.UPDATE_OPS)\n with tf.compat.v1.control_dependencies(update_ops):\n self.optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate=self.learning_rate_tensor).minimize(self.cost, name='AdamMinimize')\n\n print(\"==============Node Name List==============\")\n print(\"learning rate tensor : \", self.learning_rate_tensor)\n print(\"Input Node Name : \", self.X)\n print(\"Output 4 Train Node Name : \", self.Y)\n print(\"Phase Node Name\", self.keep_layer)\n print(\"Output Node Name (heatmap) : \", self.output)\n print(\"Output Node Name (sizemap) : \", self.size)\n print(\"Cost Function Node Name : \", self.cost)\n print(\"Run this operation for a train step :\", self.optimizer.name)\n print(\"==============Node Name List==============\")\n\n def predict(self, x_test, keep_prop=False):\n return self.sess.run([self.output, self.size], feed_dict={self.X: x_test, self.keep_layer: keep_prop})\n\n def get_cost(self, x_test, y_test, y_size, keep_prop=False):\n # 
print(self.sess.run(self.output, feed_dict={self.X: x_test, self.keep_layer: keep_prop}))\n return self.sess.run(self.cost, feed_dict={self.X: x_test, self.Y: y_test, self.SIZE:y_size, self.keep_layer: keep_prop})\n\n def train(self, x_data, y_data, y_size, keep_prop=True, learn_rate=0.003):\n return self.sess.run(self.optimizer, feed_dict={self.X: x_data, self.Y: y_data, self.SIZE:y_size, self.keep_layer: keep_prop, self.learning_rate_tensor: learn_rate})", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ] }
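
The CenterNet head above imports focal_loss and reg_l1_loss from util.helper without showing them; below is a minimal sketch of what such helpers typically compute, not the repo's actual implementation: a penalty-reduced focal loss over the sigmoid heatmap and a positives-only L1 loss over the size map, each normalized by the number of object centers. The head itself combines them as heatmap_loss + 0.1 * size_loss.

import tensorflow as tf

def focal_loss(pred, gt, alpha=2.0, beta=4.0, eps=1e-6):
    # Sketch only: gt is typically a Gaussian-splatted heatmap whose peaks (== 1) mark object centers.
    pos_mask = tf.cast(tf.equal(gt, 1.0), tf.float32)
    neg_mask = 1.0 - pos_mask
    pos_loss = -tf.pow(1.0 - pred, alpha) * tf.math.log(pred + eps) * pos_mask
    # Penalty reduction: negatives near a center are down-weighted by (1 - gt)^beta.
    neg_loss = -tf.pow(1.0 - gt, beta) * tf.pow(pred, alpha) * tf.math.log(1.0 - pred + eps) * neg_mask
    num_pos = tf.maximum(tf.reduce_sum(pos_mask), 1.0)
    return (tf.reduce_sum(pos_loss) + tf.reduce_sum(neg_loss)) / num_pos

def reg_l1_loss(pred, gt):
    # Sketch only: L1 on the 2-channel size map, restricted to cells that carry a ground-truth size.
    mask = tf.cast(tf.reduce_sum(tf.abs(gt), axis=-1, keepdims=True) > 0.0, tf.float32)
    num_pos = tf.maximum(tf.reduce_sum(mask), 1.0)
    return tf.reduce_sum(tf.abs(pred - gt) * mask) / num_pos
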
<|reserved_special_token_0|> def decrypt_string(password_string, need_return=False): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) Cipher = ARC4.new(getCipherKey()) try: if ver < 5.1: de_password = Cipher.decrypt(b64decode(password_string)).decode() else: data = b64decode(password_string) ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256 .digest_size:] plaintext = Cipher.decrypt(ciphertext) if SHA256.new(plaintext).digest() != checksum: raise ValueError('Cannot decrypt string. The key is wrong!') de_password = plaintext.decode('ascii') if need_return: return de_password else: print('%-20s : %s' % ('Version', VERSION)) print('%-20s : %s' % ('Password', password_string)) print('%-20s : %s' % ('Decrypted Password', de_password)) except Exception as e: print('Password is invalid') def decrypt_file(filepath: str=''): if not os.path.isfile(filepath): print(f'{filepath:=^100}\nError: No file') return file = os.path.basename(os.path.realpath(filepath)) if file.endswith('.xsh') or file.endswith('.xfp'): cfg = configparser.ConfigParser() try: cfg.read(filepath) except UnicodeDecodeError: cfg.read(filepath, encoding='utf-16') try: if file.endswith('.xsh'): host = cfg['CONNECTION']['Host'] port = cfg['CONNECTION']['Port'] username = cfg['CONNECTION:AUTHENTICATION']['UserName'] password = cfg['CONNECTION:AUTHENTICATION']['Password'] version = cfg['SessionInfo']['Version'] de_password = decrypt_string(password, True) else: host = cfg['Connection']['Host'] port = cfg['Connection']['Port'] username = cfg['Connection']['UserName'] password = cfg['Connection']['Password'] version = cfg['SessionInfo']['Version'] de_password = decrypt_string(password, True) print(f'{filepath:=^100}') print('%-20s : %s' % ('Host', host)) print('%-20s : %s' % ('Port', port)) print('%-20s : %s' % ('Version', version)) print('%-20s : %s' % ('UserName', username)) print('%-20s : %s' % ('Password', de_password)) print('%-20s : %s' % ('Encrypted Password', password)) except Exception as e: print(f'{filepath:=^100}\nError:{e}') def decrypt_dir(): for root, dirs, files in os.walk(KEY): for f in files: decrypt_file(os.path.join(root, f)) def setDefaultSessionDirByVer(): if not is_number(VERSION): return ver = float(VERSION) dir = 'Xshell' if IS_XSH else 'Xftp' global KEY if ver < 6: KEY = os.path.join(os.environ['USERPROFILE'], 'Documents\\NetSarang\\%s\\Sessions' % dir) elif ver == 6: KEY = os.path.join(os.environ['USERPROFILE'], 'Documents\\NetSarang Computer\\6\\%s\\Sessions' % dir) def is_number(s): try: float(s) return True except ValueError: pass try: unicodedata.numeric(s) return True except (TypeError, ValueError): pass return False <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def getCipherKey(): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) if 0 < ver and ver < 5.1: if IS_XSH: return MD5.new(b'!X@s#h$e%l^l&').digest() else: return MD5.new(b'!X@s#c$e%l^l&').digest() elif 5.1 <= ver and ver <= 5.2: return SHA256.new(SID.encode()).digest() elif 5.2 < ver: if MASTER_PWD == None: return SHA256.new((USERNAME + SID).encode()).digest() else: return SHA256.new(MASTER_PWD.encode()).digest() else: raise ValueError('Invalid argument: --Version') def encrypt_string(password_string, need_return=False): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) Cipher = ARC4.new(getCipherKey()) if ver < 5.1: en_password = 
b64encode(Cipher.encrypt(password_string.encode()) ).decode() else: checksum = SHA256.new(password_string.encode()).digest() ciphertext = Cipher.encrypt(password_string.encode()) en_password = b64encode(ciphertext + checksum).decode() if need_return: return en_password else: print('%-20s : %s' % ('Version', VERSION)) print('%-20s : %s' % ('Password', password_string)) print('%-20s : %s' % ('Encrypted Password', en_password)) def decrypt_string(password_string, need_return=False): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) Cipher = ARC4.new(getCipherKey()) try: if ver < 5.1: de_password = Cipher.decrypt(b64decode(password_string)).decode() else: data = b64decode(password_string) ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256 .digest_size:] plaintext = Cipher.decrypt(ciphertext) if SHA256.new(plaintext).digest() != checksum: raise ValueError('Cannot decrypt string. The key is wrong!') de_password = plaintext.decode('ascii') if need_return: return de_password else: print('%-20s : %s' % ('Version', VERSION)) print('%-20s : %s' % ('Password', password_string)) print('%-20s : %s' % ('Decrypted Password', de_password)) except Exception as e: print('Password is invalid') def decrypt_file(filepath: str=''): if not os.path.isfile(filepath): print(f'{filepath:=^100}\nError: No file') return file = os.path.basename(os.path.realpath(filepath)) if file.endswith('.xsh') or file.endswith('.xfp'): cfg = configparser.ConfigParser() try: cfg.read(filepath) except UnicodeDecodeError: cfg.read(filepath, encoding='utf-16') try: if file.endswith('.xsh'): host = cfg['CONNECTION']['Host'] port = cfg['CONNECTION']['Port'] username = cfg['CONNECTION:AUTHENTICATION']['UserName'] password = cfg['CONNECTION:AUTHENTICATION']['Password'] version = cfg['SessionInfo']['Version'] de_password = decrypt_string(password, True) else: host = cfg['Connection']['Host'] port = cfg['Connection']['Port'] username = cfg['Connection']['UserName'] password = cfg['Connection']['Password'] version = cfg['SessionInfo']['Version'] de_password = decrypt_string(password, True) print(f'{filepath:=^100}') print('%-20s : %s' % ('Host', host)) print('%-20s : %s' % ('Port', port)) print('%-20s : %s' % ('Version', version)) print('%-20s : %s' % ('UserName', username)) print('%-20s : %s' % ('Password', de_password)) print('%-20s : %s' % ('Encrypted Password', password)) except Exception as e: print(f'{filepath:=^100}\nError:{e}') def decrypt_dir(): for root, dirs, files in os.walk(KEY): for f in files: decrypt_file(os.path.join(root, f)) def setDefaultSessionDirByVer(): if not is_number(VERSION): return ver = float(VERSION) dir = 'Xshell' if IS_XSH else 'Xftp' global KEY if ver < 6: KEY = os.path.join(os.environ['USERPROFILE'], 'Documents\\NetSarang\\%s\\Sessions' % dir) elif ver == 6: KEY = os.path.join(os.environ['USERPROFILE'], 'Documents\\NetSarang Computer\\6\\%s\\Sessions' % dir) def is_number(s): try: float(s) return True except ValueError: pass try: unicodedata.numeric(s) return True except (TypeError, ValueError): pass return False if __name__ == '__main__': parser = argparse.ArgumentParser(description='xsh, xfp password decrypt') group = parser.add_mutually_exclusive_group(required=False) group.add_argument('-e', '--encrypt', default=False, help= '<-e | -d> encrypt password, default -d', action='store_true') group.add_argument('-d', '--decrypt', default=True, help= '<-e | -d> decrypt encrypted password, default -d', action='store_true' ) parser.add_argument('-f', 
'--ftp', default=False, help= 'xftp or xshell. Ignore if it is xshell', action='store_true') parser.add_argument('-u', '--username', default='', type=str, help= 'user `whoami /user` in command. Ignore if it is local. Used by version >= 5.1' ) parser.add_argument('-m', '--master_pwd', default='', type=str, help= "user's master password. Used by version >= 6") parser.add_argument('-s', '--sid', default='', type=str, help= 'SID `whoami /user` in command. Ignore if it is local. Used by version >= 5.1' ) parser.add_argument('-v', '--version', default='', type=str, help= 'xsh or xfp version. If not specified, 5.2 will be used.') parser.add_argument('-k', '--key', default='', nargs='?', help= 'the path of sessions directory or file of xsh or xfp, or password or other key' ) args = parser.parse_args() if args.encrypt: IS_DECRYPT = False if args.sid: SID = args.sid if args.username: USERNAME = args.username if args.master_pwd: MASTER_PWD = args.master_pwd if args.ftp: IS_XSH = False if is_number(args.version): VERSION = args.version if args.key: KEY = args.key if not args.key and (is_number(args.version) or args.ftp): setDefaultSessionDirByVer() if IS_DECRYPT: if os.path.isdir(KEY): decrypt_dir() elif os.path.isfile(KEY): decrypt_file(KEY) else: decrypt_string(KEY) else: encrypt_string(KEY) <|reserved_special_token_1|> <|reserved_special_token_0|> USERNAME = GetUserName() MASTER_PWD = None SID = ConvertSidToStringSid(LookupAccountName(GetComputerName(), GetUserName())[0]) IS_XSH = True VERSION = '5.2' KEY = os.path.join(os.environ['USERPROFILE'], 'Documents\\NetSarang\\Xshell\\Sessions') IS_DECRYPT = True def getCipherKey(): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) if 0 < ver and ver < 5.1: if IS_XSH: return MD5.new(b'!X@s#h$e%l^l&').digest() else: return MD5.new(b'!X@s#c$e%l^l&').digest() elif 5.1 <= ver and ver <= 5.2: return SHA256.new(SID.encode()).digest() elif 5.2 < ver: if MASTER_PWD == None: return SHA256.new((USERNAME + SID).encode()).digest() else: return SHA256.new(MASTER_PWD.encode()).digest() else: raise ValueError('Invalid argument: --Version') def encrypt_string(password_string, need_return=False): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) Cipher = ARC4.new(getCipherKey()) if ver < 5.1: en_password = b64encode(Cipher.encrypt(password_string.encode()) ).decode() else: checksum = SHA256.new(password_string.encode()).digest() ciphertext = Cipher.encrypt(password_string.encode()) en_password = b64encode(ciphertext + checksum).decode() if need_return: return en_password else: print('%-20s : %s' % ('Version', VERSION)) print('%-20s : %s' % ('Password', password_string)) print('%-20s : %s' % ('Encrypted Password', en_password)) def decrypt_string(password_string, need_return=False): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) Cipher = ARC4.new(getCipherKey()) try: if ver < 5.1: de_password = Cipher.decrypt(b64decode(password_string)).decode() else: data = b64decode(password_string) ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256 .digest_size:] plaintext = Cipher.decrypt(ciphertext) if SHA256.new(plaintext).digest() != checksum: raise ValueError('Cannot decrypt string. 
The key is wrong!') de_password = plaintext.decode('ascii') if need_return: return de_password else: print('%-20s : %s' % ('Version', VERSION)) print('%-20s : %s' % ('Password', password_string)) print('%-20s : %s' % ('Decrypted Password', de_password)) except Exception as e: print('Password is invalid') def decrypt_file(filepath: str=''): if not os.path.isfile(filepath): print(f'{filepath:=^100}\nError: No file') return file = os.path.basename(os.path.realpath(filepath)) if file.endswith('.xsh') or file.endswith('.xfp'): cfg = configparser.ConfigParser() try: cfg.read(filepath) except UnicodeDecodeError: cfg.read(filepath, encoding='utf-16') try: if file.endswith('.xsh'): host = cfg['CONNECTION']['Host'] port = cfg['CONNECTION']['Port'] username = cfg['CONNECTION:AUTHENTICATION']['UserName'] password = cfg['CONNECTION:AUTHENTICATION']['Password'] version = cfg['SessionInfo']['Version'] de_password = decrypt_string(password, True) else: host = cfg['Connection']['Host'] port = cfg['Connection']['Port'] username = cfg['Connection']['UserName'] password = cfg['Connection']['Password'] version = cfg['SessionInfo']['Version'] de_password = decrypt_string(password, True) print(f'{filepath:=^100}') print('%-20s : %s' % ('Host', host)) print('%-20s : %s' % ('Port', port)) print('%-20s : %s' % ('Version', version)) print('%-20s : %s' % ('UserName', username)) print('%-20s : %s' % ('Password', de_password)) print('%-20s : %s' % ('Encrypted Password', password)) except Exception as e: print(f'{filepath:=^100}\nError:{e}') def decrypt_dir(): for root, dirs, files in os.walk(KEY): for f in files: decrypt_file(os.path.join(root, f)) def setDefaultSessionDirByVer(): if not is_number(VERSION): return ver = float(VERSION) dir = 'Xshell' if IS_XSH else 'Xftp' global KEY if ver < 6: KEY = os.path.join(os.environ['USERPROFILE'], 'Documents\\NetSarang\\%s\\Sessions' % dir) elif ver == 6: KEY = os.path.join(os.environ['USERPROFILE'], 'Documents\\NetSarang Computer\\6\\%s\\Sessions' % dir) def is_number(s): try: float(s) return True except ValueError: pass try: unicodedata.numeric(s) return True except (TypeError, ValueError): pass return False if __name__ == '__main__': parser = argparse.ArgumentParser(description='xsh, xfp password decrypt') group = parser.add_mutually_exclusive_group(required=False) group.add_argument('-e', '--encrypt', default=False, help= '<-e | -d> encrypt password, default -d', action='store_true') group.add_argument('-d', '--decrypt', default=True, help= '<-e | -d> decrypt encrypted password, default -d', action='store_true' ) parser.add_argument('-f', '--ftp', default=False, help= 'xftp or xshell. Ignore if it is xshell', action='store_true') parser.add_argument('-u', '--username', default='', type=str, help= 'user `whoami /user` in command. Ignore if it is local. Used by version >= 5.1' ) parser.add_argument('-m', '--master_pwd', default='', type=str, help= "user's master password. Used by version >= 6") parser.add_argument('-s', '--sid', default='', type=str, help= 'SID `whoami /user` in command. Ignore if it is local. Used by version >= 5.1' ) parser.add_argument('-v', '--version', default='', type=str, help= 'xsh or xfp version. 
If not specified, 5.2 will be used.') parser.add_argument('-k', '--key', default='', nargs='?', help= 'the path of sessions directory or file of xsh or xfp, or password or other key' ) args = parser.parse_args() if args.encrypt: IS_DECRYPT = False if args.sid: SID = args.sid if args.username: USERNAME = args.username if args.master_pwd: MASTER_PWD = args.master_pwd if args.ftp: IS_XSH = False if is_number(args.version): VERSION = args.version if args.key: KEY = args.key if not args.key and (is_number(args.version) or args.ftp): setDefaultSessionDirByVer() if IS_DECRYPT: if os.path.isdir(KEY): decrypt_dir() elif os.path.isfile(KEY): decrypt_file(KEY) else: decrypt_string(KEY) else: encrypt_string(KEY) <|reserved_special_token_1|> import os import argparse import configparser import unicodedata from win32api import GetComputerName, GetUserName from win32security import LookupAccountName, ConvertSidToStringSid from base64 import b64encode, b64decode from Cryptodome.Hash import MD5, SHA256 from Cryptodome.Cipher import ARC4 USERNAME = GetUserName() MASTER_PWD = None SID = ConvertSidToStringSid(LookupAccountName(GetComputerName(), GetUserName())[0]) IS_XSH = True VERSION = '5.2' KEY = os.path.join(os.environ['USERPROFILE'], 'Documents\\NetSarang\\Xshell\\Sessions') IS_DECRYPT = True def getCipherKey(): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) if 0 < ver and ver < 5.1: if IS_XSH: return MD5.new(b'!X@s#h$e%l^l&').digest() else: return MD5.new(b'!X@s#c$e%l^l&').digest() elif 5.1 <= ver and ver <= 5.2: return SHA256.new(SID.encode()).digest() elif 5.2 < ver: if MASTER_PWD == None: return SHA256.new((USERNAME + SID).encode()).digest() else: return SHA256.new(MASTER_PWD.encode()).digest() else: raise ValueError('Invalid argument: --Version') def encrypt_string(password_string, need_return=False): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) Cipher = ARC4.new(getCipherKey()) if ver < 5.1: en_password = b64encode(Cipher.encrypt(password_string.encode()) ).decode() else: checksum = SHA256.new(password_string.encode()).digest() ciphertext = Cipher.encrypt(password_string.encode()) en_password = b64encode(ciphertext + checksum).decode() if need_return: return en_password else: print('%-20s : %s' % ('Version', VERSION)) print('%-20s : %s' % ('Password', password_string)) print('%-20s : %s' % ('Encrypted Password', en_password)) def decrypt_string(password_string, need_return=False): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) Cipher = ARC4.new(getCipherKey()) try: if ver < 5.1: de_password = Cipher.decrypt(b64decode(password_string)).decode() else: data = b64decode(password_string) ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256 .digest_size:] plaintext = Cipher.decrypt(ciphertext) if SHA256.new(plaintext).digest() != checksum: raise ValueError('Cannot decrypt string. 
The key is wrong!') de_password = plaintext.decode('ascii') if need_return: return de_password else: print('%-20s : %s' % ('Version', VERSION)) print('%-20s : %s' % ('Password', password_string)) print('%-20s : %s' % ('Decrypted Password', de_password)) except Exception as e: print('Password is invalid') def decrypt_file(filepath: str=''): if not os.path.isfile(filepath): print(f'{filepath:=^100}\nError: No file') return file = os.path.basename(os.path.realpath(filepath)) if file.endswith('.xsh') or file.endswith('.xfp'): cfg = configparser.ConfigParser() try: cfg.read(filepath) except UnicodeDecodeError: cfg.read(filepath, encoding='utf-16') try: if file.endswith('.xsh'): host = cfg['CONNECTION']['Host'] port = cfg['CONNECTION']['Port'] username = cfg['CONNECTION:AUTHENTICATION']['UserName'] password = cfg['CONNECTION:AUTHENTICATION']['Password'] version = cfg['SessionInfo']['Version'] de_password = decrypt_string(password, True) else: host = cfg['Connection']['Host'] port = cfg['Connection']['Port'] username = cfg['Connection']['UserName'] password = cfg['Connection']['Password'] version = cfg['SessionInfo']['Version'] de_password = decrypt_string(password, True) print(f'{filepath:=^100}') print('%-20s : %s' % ('Host', host)) print('%-20s : %s' % ('Port', port)) print('%-20s : %s' % ('Version', version)) print('%-20s : %s' % ('UserName', username)) print('%-20s : %s' % ('Password', de_password)) print('%-20s : %s' % ('Encrypted Password', password)) except Exception as e: print(f'{filepath:=^100}\nError:{e}') def decrypt_dir(): for root, dirs, files in os.walk(KEY): for f in files: decrypt_file(os.path.join(root, f)) def setDefaultSessionDirByVer(): if not is_number(VERSION): return ver = float(VERSION) dir = 'Xshell' if IS_XSH else 'Xftp' global KEY if ver < 6: KEY = os.path.join(os.environ['USERPROFILE'], 'Documents\\NetSarang\\%s\\Sessions' % dir) elif ver == 6: KEY = os.path.join(os.environ['USERPROFILE'], 'Documents\\NetSarang Computer\\6\\%s\\Sessions' % dir) def is_number(s): try: float(s) return True except ValueError: pass try: unicodedata.numeric(s) return True except (TypeError, ValueError): pass return False if __name__ == '__main__': parser = argparse.ArgumentParser(description='xsh, xfp password decrypt') group = parser.add_mutually_exclusive_group(required=False) group.add_argument('-e', '--encrypt', default=False, help= '<-e | -d> encrypt password, default -d', action='store_true') group.add_argument('-d', '--decrypt', default=True, help= '<-e | -d> decrypt encrypted password, default -d', action='store_true' ) parser.add_argument('-f', '--ftp', default=False, help= 'xftp or xshell. Ignore if it is xshell', action='store_true') parser.add_argument('-u', '--username', default='', type=str, help= 'user `whoami /user` in command. Ignore if it is local. Used by version >= 5.1' ) parser.add_argument('-m', '--master_pwd', default='', type=str, help= "user's master password. Used by version >= 6") parser.add_argument('-s', '--sid', default='', type=str, help= 'SID `whoami /user` in command. Ignore if it is local. Used by version >= 5.1' ) parser.add_argument('-v', '--version', default='', type=str, help= 'xsh or xfp version. 
If not specified, 5.2 will be used.') parser.add_argument('-k', '--key', default='', nargs='?', help= 'the path of sessions directory or file of xsh or xfp, or password or other key' ) args = parser.parse_args() if args.encrypt: IS_DECRYPT = False if args.sid: SID = args.sid if args.username: USERNAME = args.username if args.master_pwd: MASTER_PWD = args.master_pwd if args.ftp: IS_XSH = False if is_number(args.version): VERSION = args.version if args.key: KEY = args.key if not args.key and (is_number(args.version) or args.ftp): setDefaultSessionDirByVer() if IS_DECRYPT: if os.path.isdir(KEY): decrypt_dir() elif os.path.isfile(KEY): decrypt_file(KEY) else: decrypt_string(KEY) else: encrypt_string(KEY) <|reserved_special_token_1|> # -*- coding: utf-8 -*- # python >= 3.7 # supported xmanager version <5.1, 5.1, 5.2, 6 import os import argparse import configparser import unicodedata from win32api import GetComputerName, GetUserName from win32security import LookupAccountName, ConvertSidToStringSid from base64 import b64encode, b64decode from Cryptodome.Hash import MD5, SHA256 from Cryptodome.Cipher import ARC4 USERNAME = GetUserName() MASTER_PWD = None SID = ConvertSidToStringSid(LookupAccountName(GetComputerName(), GetUserName())[0]) IS_XSH = True VERSION = '5.2' KEY = os.path.join(os.environ["USERPROFILE"], r"Documents\NetSarang\Xshell\Sessions") IS_DECRYPT = True def getCipherKey(): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) if 0 < ver and ver < 5.1: if IS_XSH: return MD5.new(b'!X@s#h$e%l^l&').digest() else: return MD5.new(b'!X@s#c$e%l^l&').digest() elif 5.1 <= ver and ver <= 5.2: return SHA256.new(SID.encode()).digest() elif 5.2 < ver: if MASTER_PWD == None: return SHA256.new((USERNAME + SID).encode()).digest() else: return SHA256.new(MASTER_PWD.encode()).digest() else: raise ValueError('Invalid argument: --Version') def encrypt_string(password_string, need_return=False): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) Cipher = ARC4.new(getCipherKey()) if ver < 5.1: en_password = b64encode(Cipher.encrypt(password_string.encode())).decode() else: checksum = SHA256.new(password_string.encode()).digest() ciphertext = Cipher.encrypt(password_string.encode()) en_password = b64encode(ciphertext + checksum).decode() if need_return: return en_password else: print('%-20s : %s' % ('Version', VERSION)) print('%-20s : %s' % ('Password', password_string)) print('%-20s : %s' % ('Encrypted Password', en_password)) def decrypt_string(password_string, need_return=False): if not is_number(VERSION): raise ValueError('Invalid argument: --Version') ver = float(VERSION) Cipher = ARC4.new(getCipherKey()) try: if ver < 5.1: de_password = Cipher.decrypt(b64decode(password_string)).decode() else: data = b64decode(password_string) ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256.digest_size:] plaintext = Cipher.decrypt(ciphertext) if SHA256.new(plaintext).digest() != checksum: raise ValueError('Cannot decrypt string. 
The key is wrong!') de_password = plaintext.decode('ascii') if need_return: return de_password else: print('%-20s : %s' % ('Version', VERSION)) print('%-20s : %s' % ('Password', password_string)) print('%-20s : %s' % ('Decrypted Password', de_password)) except Exception as e: print("Password is invalid") def decrypt_file(filepath: str = ''): if not os.path.isfile(filepath): print(f"{filepath:=^100}\nError: No file") return file = os.path.basename(os.path.realpath(filepath)) if file.endswith(".xsh") or file.endswith(".xfp"): cfg = configparser.ConfigParser() try: cfg.read(filepath) except UnicodeDecodeError: cfg.read(filepath, encoding="utf-16") try: if file.endswith(".xsh"): host = cfg["CONNECTION"]["Host"] port = cfg["CONNECTION"]["Port"] username = cfg["CONNECTION:AUTHENTICATION"]["UserName"] password = cfg["CONNECTION:AUTHENTICATION"]["Password"] version = cfg["SessionInfo"]["Version"] de_password = decrypt_string(password, True) else: host = cfg["Connection"]["Host"] port = cfg["Connection"]["Port"] username = cfg["Connection"]["UserName"] password = cfg["Connection"]["Password"] version = cfg["SessionInfo"]["Version"] de_password = decrypt_string(password, True) print(f"{filepath:=^100}") print('%-20s : %s' % ('Host', host)) print('%-20s : %s' % ('Port', port)) print('%-20s : %s' % ('Version', version)) print('%-20s : %s' % ('UserName', username)) print('%-20s : %s' % ('Password', de_password)) print('%-20s : %s' % ('Encrypted Password', password)) except Exception as e: print(f"{filepath:=^100}\nError:{e}") def decrypt_dir(): for root, dirs, files in os.walk(KEY): for f in files: decrypt_file(os.path.join(root, f)) def setDefaultSessionDirByVer(): if not is_number(VERSION): return ver = float(VERSION) dir = 'Xshell' if IS_XSH else 'Xftp'; global KEY if ver < 6: KEY = os.path.join(os.environ["USERPROFILE"], r"Documents\NetSarang\%s\Sessions" % dir) elif ver == 6: KEY = os.path.join(os.environ["USERPROFILE"], r"Documents\NetSarang Computer\6\%s\Sessions" % dir) def is_number(s): try: float(s) return True except ValueError: pass try: unicodedata.numeric(s) return True except (TypeError, ValueError): pass return False if __name__ == '__main__': parser = argparse.ArgumentParser(description="xsh, xfp password decrypt") group = parser.add_mutually_exclusive_group(required=False) group.add_argument("-e", "--encrypt", default=False, help="<-e | -d> encrypt password, default -d", action="store_true") group.add_argument("-d", "--decrypt", default=True, help="<-e | -d> decrypt encrypted password, default -d", action="store_true") parser.add_argument("-f", "--ftp", default=False, help="xftp or xshell. Ignore if it is xshell", action="store_true") parser.add_argument("-u", "--username", default="", type=str, help="user `whoami /user` in command. Ignore if it is local. Used by version >= 5.1") parser.add_argument("-m", "--master_pwd", default="", type=str, help="user\'s master password. Used by version >= 6") parser.add_argument("-s", "--sid", default="", type=str, help="SID `whoami /user` in command. Ignore if it is local. Used by version >= 5.1") parser.add_argument("-v", "--version", default="", type=str, help="xsh or xfp version. 
If not specified, 5.2 will be used.") parser.add_argument("-k", "--key", default="", nargs='?', help="the path of sessions directory or file of xsh or xfp, or password or other key") args = parser.parse_args() #print(args) if args.encrypt: IS_DECRYPT = False if args.sid: SID = args.sid if args.username: USERNAME = args.username if args.master_pwd: MASTER_PWD = args.master_pwd if args.ftp: IS_XSH = False if is_number(args.version): VERSION = args.version if args.key: KEY = args.key if not args.key and (is_number(args.version) or args.ftp): setDefaultSessionDirByVer() if IS_DECRYPT: if os.path.isdir(KEY): decrypt_dir() elif os.path.isfile(KEY): decrypt_file(KEY) else: decrypt_string(KEY) else: encrypt_string(KEY)
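
For reference, the version >= 5.1 scheme that encrypt_string and decrypt_string implement round-trips as follows: ARC4 keyed with SHA-256 of the SID, the ciphertext concatenated with a SHA-256 checksum of the plaintext, then Base64. The SID and password below are made-up example values; for version > 5.2 the key would instead be SHA-256 of username + SID, or of the master password.

from base64 import b64encode, b64decode
from Cryptodome.Cipher import ARC4
from Cryptodome.Hash import SHA256

sid = 'S-1-5-21-1111111111-2222222222-3333333333-1001'    # assumed example SID, not a real one
key = SHA256.new(sid.encode()).digest()                   # key derivation for 5.1 <= version <= 5.2

plaintext = b'example-password'
token = b64encode(ARC4.new(key).encrypt(plaintext) + SHA256.new(plaintext).digest())

data = b64decode(token)
ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256.digest_size:]
recovered = ARC4.new(key).decrypt(ciphertext)
assert SHA256.new(recovered).digest() == checksum         # same integrity check as decrypt_string
print(recovered.decode())                                 # example-password
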
flexible
{ "blob_id": "5f2427c077d460d109f5a3e94b93f72c090f036d", "index": 7181, "step-1": "<mask token>\n\n\ndef decrypt_string(password_string, need_return=False):\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n ver = float(VERSION)\n Cipher = ARC4.new(getCipherKey())\n try:\n if ver < 5.1:\n de_password = Cipher.decrypt(b64decode(password_string)).decode()\n else:\n data = b64decode(password_string)\n ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256\n .digest_size:]\n plaintext = Cipher.decrypt(ciphertext)\n if SHA256.new(plaintext).digest() != checksum:\n raise ValueError('Cannot decrypt string. The key is wrong!')\n de_password = plaintext.decode('ascii')\n if need_return:\n return de_password\n else:\n print('%-20s : %s' % ('Version', VERSION))\n print('%-20s : %s' % ('Password', password_string))\n print('%-20s : %s' % ('Decrypted Password', de_password))\n except Exception as e:\n print('Password is invalid')\n\n\ndef decrypt_file(filepath: str=''):\n if not os.path.isfile(filepath):\n print(f'{filepath:=^100}\\nError: No file')\n return\n file = os.path.basename(os.path.realpath(filepath))\n if file.endswith('.xsh') or file.endswith('.xfp'):\n cfg = configparser.ConfigParser()\n try:\n cfg.read(filepath)\n except UnicodeDecodeError:\n cfg.read(filepath, encoding='utf-16')\n try:\n if file.endswith('.xsh'):\n host = cfg['CONNECTION']['Host']\n port = cfg['CONNECTION']['Port']\n username = cfg['CONNECTION:AUTHENTICATION']['UserName']\n password = cfg['CONNECTION:AUTHENTICATION']['Password']\n version = cfg['SessionInfo']['Version']\n de_password = decrypt_string(password, True)\n else:\n host = cfg['Connection']['Host']\n port = cfg['Connection']['Port']\n username = cfg['Connection']['UserName']\n password = cfg['Connection']['Password']\n version = cfg['SessionInfo']['Version']\n de_password = decrypt_string(password, True)\n print(f'{filepath:=^100}')\n print('%-20s : %s' % ('Host', host))\n print('%-20s : %s' % ('Port', port))\n print('%-20s : %s' % ('Version', version))\n print('%-20s : %s' % ('UserName', username))\n print('%-20s : %s' % ('Password', de_password))\n print('%-20s : %s' % ('Encrypted Password', password))\n except Exception as e:\n print(f'{filepath:=^100}\\nError:{e}')\n\n\ndef decrypt_dir():\n for root, dirs, files in os.walk(KEY):\n for f in files:\n decrypt_file(os.path.join(root, f))\n\n\ndef setDefaultSessionDirByVer():\n if not is_number(VERSION):\n return\n ver = float(VERSION)\n dir = 'Xshell' if IS_XSH else 'Xftp'\n global KEY\n if ver < 6:\n KEY = os.path.join(os.environ['USERPROFILE'], \n 'Documents\\\\NetSarang\\\\%s\\\\Sessions' % dir)\n elif ver == 6:\n KEY = os.path.join(os.environ['USERPROFILE'], \n 'Documents\\\\NetSarang Computer\\\\6\\\\%s\\\\Sessions' % dir)\n\n\ndef is_number(s):\n try:\n float(s)\n return True\n except ValueError:\n pass\n try:\n unicodedata.numeric(s)\n return True\n except (TypeError, ValueError):\n pass\n return False\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef getCipherKey():\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n ver = float(VERSION)\n if 0 < ver and ver < 5.1:\n if IS_XSH:\n return MD5.new(b'!X@s#h$e%l^l&').digest()\n else:\n return MD5.new(b'!X@s#c$e%l^l&').digest()\n elif 5.1 <= ver and ver <= 5.2:\n return SHA256.new(SID.encode()).digest()\n elif 5.2 < ver:\n if MASTER_PWD == None:\n return SHA256.new((USERNAME + SID).encode()).digest()\n else:\n return SHA256.new(MASTER_PWD.encode()).digest()\n else:\n raise 
ValueError('Invalid argument: --Version')\n\n\ndef encrypt_string(password_string, need_return=False):\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n ver = float(VERSION)\n Cipher = ARC4.new(getCipherKey())\n if ver < 5.1:\n en_password = b64encode(Cipher.encrypt(password_string.encode())\n ).decode()\n else:\n checksum = SHA256.new(password_string.encode()).digest()\n ciphertext = Cipher.encrypt(password_string.encode())\n en_password = b64encode(ciphertext + checksum).decode()\n if need_return:\n return en_password\n else:\n print('%-20s : %s' % ('Version', VERSION))\n print('%-20s : %s' % ('Password', password_string))\n print('%-20s : %s' % ('Encrypted Password', en_password))\n\n\ndef decrypt_string(password_string, need_return=False):\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n ver = float(VERSION)\n Cipher = ARC4.new(getCipherKey())\n try:\n if ver < 5.1:\n de_password = Cipher.decrypt(b64decode(password_string)).decode()\n else:\n data = b64decode(password_string)\n ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256\n .digest_size:]\n plaintext = Cipher.decrypt(ciphertext)\n if SHA256.new(plaintext).digest() != checksum:\n raise ValueError('Cannot decrypt string. The key is wrong!')\n de_password = plaintext.decode('ascii')\n if need_return:\n return de_password\n else:\n print('%-20s : %s' % ('Version', VERSION))\n print('%-20s : %s' % ('Password', password_string))\n print('%-20s : %s' % ('Decrypted Password', de_password))\n except Exception as e:\n print('Password is invalid')\n\n\ndef decrypt_file(filepath: str=''):\n if not os.path.isfile(filepath):\n print(f'{filepath:=^100}\\nError: No file')\n return\n file = os.path.basename(os.path.realpath(filepath))\n if file.endswith('.xsh') or file.endswith('.xfp'):\n cfg = configparser.ConfigParser()\n try:\n cfg.read(filepath)\n except UnicodeDecodeError:\n cfg.read(filepath, encoding='utf-16')\n try:\n if file.endswith('.xsh'):\n host = cfg['CONNECTION']['Host']\n port = cfg['CONNECTION']['Port']\n username = cfg['CONNECTION:AUTHENTICATION']['UserName']\n password = cfg['CONNECTION:AUTHENTICATION']['Password']\n version = cfg['SessionInfo']['Version']\n de_password = decrypt_string(password, True)\n else:\n host = cfg['Connection']['Host']\n port = cfg['Connection']['Port']\n username = cfg['Connection']['UserName']\n password = cfg['Connection']['Password']\n version = cfg['SessionInfo']['Version']\n de_password = decrypt_string(password, True)\n print(f'{filepath:=^100}')\n print('%-20s : %s' % ('Host', host))\n print('%-20s : %s' % ('Port', port))\n print('%-20s : %s' % ('Version', version))\n print('%-20s : %s' % ('UserName', username))\n print('%-20s : %s' % ('Password', de_password))\n print('%-20s : %s' % ('Encrypted Password', password))\n except Exception as e:\n print(f'{filepath:=^100}\\nError:{e}')\n\n\ndef decrypt_dir():\n for root, dirs, files in os.walk(KEY):\n for f in files:\n decrypt_file(os.path.join(root, f))\n\n\ndef setDefaultSessionDirByVer():\n if not is_number(VERSION):\n return\n ver = float(VERSION)\n dir = 'Xshell' if IS_XSH else 'Xftp'\n global KEY\n if ver < 6:\n KEY = os.path.join(os.environ['USERPROFILE'], \n 'Documents\\\\NetSarang\\\\%s\\\\Sessions' % dir)\n elif ver == 6:\n KEY = os.path.join(os.environ['USERPROFILE'], \n 'Documents\\\\NetSarang Computer\\\\6\\\\%s\\\\Sessions' % dir)\n\n\ndef is_number(s):\n try:\n float(s)\n return True\n except ValueError:\n pass\n try:\n unicodedata.numeric(s)\n return True\n 
except (TypeError, ValueError):\n pass\n return False\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='xsh, xfp password decrypt')\n group = parser.add_mutually_exclusive_group(required=False)\n group.add_argument('-e', '--encrypt', default=False, help=\n '<-e | -d> encrypt password, default -d', action='store_true')\n group.add_argument('-d', '--decrypt', default=True, help=\n '<-e | -d> decrypt encrypted password, default -d', action='store_true'\n )\n parser.add_argument('-f', '--ftp', default=False, help=\n 'xftp or xshell. Ignore if it is xshell', action='store_true')\n parser.add_argument('-u', '--username', default='', type=str, help=\n 'user `whoami /user` in command. Ignore if it is local. Used by version >= 5.1'\n )\n parser.add_argument('-m', '--master_pwd', default='', type=str, help=\n \"user's master password. Used by version >= 6\")\n parser.add_argument('-s', '--sid', default='', type=str, help=\n 'SID `whoami /user` in command. Ignore if it is local. Used by version >= 5.1'\n )\n parser.add_argument('-v', '--version', default='', type=str, help=\n 'xsh or xfp version. If not specified, 5.2 will be used.')\n parser.add_argument('-k', '--key', default='', nargs='?', help=\n 'the path of sessions directory or file of xsh or xfp, or password or other key'\n )\n args = parser.parse_args()\n if args.encrypt:\n IS_DECRYPT = False\n if args.sid:\n SID = args.sid\n if args.username:\n USERNAME = args.username\n if args.master_pwd:\n MASTER_PWD = args.master_pwd\n if args.ftp:\n IS_XSH = False\n if is_number(args.version):\n VERSION = args.version\n if args.key:\n KEY = args.key\n if not args.key and (is_number(args.version) or args.ftp):\n setDefaultSessionDirByVer()\n if IS_DECRYPT:\n if os.path.isdir(KEY):\n decrypt_dir()\n elif os.path.isfile(KEY):\n decrypt_file(KEY)\n else:\n decrypt_string(KEY)\n else:\n encrypt_string(KEY)\n", "step-3": "<mask token>\nUSERNAME = GetUserName()\nMASTER_PWD = None\nSID = ConvertSidToStringSid(LookupAccountName(GetComputerName(),\n GetUserName())[0])\nIS_XSH = True\nVERSION = '5.2'\nKEY = os.path.join(os.environ['USERPROFILE'],\n 'Documents\\\\NetSarang\\\\Xshell\\\\Sessions')\nIS_DECRYPT = True\n\n\ndef getCipherKey():\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n ver = float(VERSION)\n if 0 < ver and ver < 5.1:\n if IS_XSH:\n return MD5.new(b'!X@s#h$e%l^l&').digest()\n else:\n return MD5.new(b'!X@s#c$e%l^l&').digest()\n elif 5.1 <= ver and ver <= 5.2:\n return SHA256.new(SID.encode()).digest()\n elif 5.2 < ver:\n if MASTER_PWD == None:\n return SHA256.new((USERNAME + SID).encode()).digest()\n else:\n return SHA256.new(MASTER_PWD.encode()).digest()\n else:\n raise ValueError('Invalid argument: --Version')\n\n\ndef encrypt_string(password_string, need_return=False):\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n ver = float(VERSION)\n Cipher = ARC4.new(getCipherKey())\n if ver < 5.1:\n en_password = b64encode(Cipher.encrypt(password_string.encode())\n ).decode()\n else:\n checksum = SHA256.new(password_string.encode()).digest()\n ciphertext = Cipher.encrypt(password_string.encode())\n en_password = b64encode(ciphertext + checksum).decode()\n if need_return:\n return en_password\n else:\n print('%-20s : %s' % ('Version', VERSION))\n print('%-20s : %s' % ('Password', password_string))\n print('%-20s : %s' % ('Encrypted Password', en_password))\n\n\ndef decrypt_string(password_string, need_return=False):\n if not is_number(VERSION):\n raise 
ValueError('Invalid argument: --Version')\n ver = float(VERSION)\n Cipher = ARC4.new(getCipherKey())\n try:\n if ver < 5.1:\n de_password = Cipher.decrypt(b64decode(password_string)).decode()\n else:\n data = b64decode(password_string)\n ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256\n .digest_size:]\n plaintext = Cipher.decrypt(ciphertext)\n if SHA256.new(plaintext).digest() != checksum:\n raise ValueError('Cannot decrypt string. The key is wrong!')\n de_password = plaintext.decode('ascii')\n if need_return:\n return de_password\n else:\n print('%-20s : %s' % ('Version', VERSION))\n print('%-20s : %s' % ('Password', password_string))\n print('%-20s : %s' % ('Decrypted Password', de_password))\n except Exception as e:\n print('Password is invalid')\n\n\ndef decrypt_file(filepath: str=''):\n if not os.path.isfile(filepath):\n print(f'{filepath:=^100}\\nError: No file')\n return\n file = os.path.basename(os.path.realpath(filepath))\n if file.endswith('.xsh') or file.endswith('.xfp'):\n cfg = configparser.ConfigParser()\n try:\n cfg.read(filepath)\n except UnicodeDecodeError:\n cfg.read(filepath, encoding='utf-16')\n try:\n if file.endswith('.xsh'):\n host = cfg['CONNECTION']['Host']\n port = cfg['CONNECTION']['Port']\n username = cfg['CONNECTION:AUTHENTICATION']['UserName']\n password = cfg['CONNECTION:AUTHENTICATION']['Password']\n version = cfg['SessionInfo']['Version']\n de_password = decrypt_string(password, True)\n else:\n host = cfg['Connection']['Host']\n port = cfg['Connection']['Port']\n username = cfg['Connection']['UserName']\n password = cfg['Connection']['Password']\n version = cfg['SessionInfo']['Version']\n de_password = decrypt_string(password, True)\n print(f'{filepath:=^100}')\n print('%-20s : %s' % ('Host', host))\n print('%-20s : %s' % ('Port', port))\n print('%-20s : %s' % ('Version', version))\n print('%-20s : %s' % ('UserName', username))\n print('%-20s : %s' % ('Password', de_password))\n print('%-20s : %s' % ('Encrypted Password', password))\n except Exception as e:\n print(f'{filepath:=^100}\\nError:{e}')\n\n\ndef decrypt_dir():\n for root, dirs, files in os.walk(KEY):\n for f in files:\n decrypt_file(os.path.join(root, f))\n\n\ndef setDefaultSessionDirByVer():\n if not is_number(VERSION):\n return\n ver = float(VERSION)\n dir = 'Xshell' if IS_XSH else 'Xftp'\n global KEY\n if ver < 6:\n KEY = os.path.join(os.environ['USERPROFILE'], \n 'Documents\\\\NetSarang\\\\%s\\\\Sessions' % dir)\n elif ver == 6:\n KEY = os.path.join(os.environ['USERPROFILE'], \n 'Documents\\\\NetSarang Computer\\\\6\\\\%s\\\\Sessions' % dir)\n\n\ndef is_number(s):\n try:\n float(s)\n return True\n except ValueError:\n pass\n try:\n unicodedata.numeric(s)\n return True\n except (TypeError, ValueError):\n pass\n return False\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='xsh, xfp password decrypt')\n group = parser.add_mutually_exclusive_group(required=False)\n group.add_argument('-e', '--encrypt', default=False, help=\n '<-e | -d> encrypt password, default -d', action='store_true')\n group.add_argument('-d', '--decrypt', default=True, help=\n '<-e | -d> decrypt encrypted password, default -d', action='store_true'\n )\n parser.add_argument('-f', '--ftp', default=False, help=\n 'xftp or xshell. Ignore if it is xshell', action='store_true')\n parser.add_argument('-u', '--username', default='', type=str, help=\n 'user `whoami /user` in command. Ignore if it is local. 
Used by version >= 5.1'\n )\n parser.add_argument('-m', '--master_pwd', default='', type=str, help=\n \"user's master password. Used by version >= 6\")\n parser.add_argument('-s', '--sid', default='', type=str, help=\n 'SID `whoami /user` in command. Ignore if it is local. Used by version >= 5.1'\n )\n parser.add_argument('-v', '--version', default='', type=str, help=\n 'xsh or xfp version. If not specified, 5.2 will be used.')\n parser.add_argument('-k', '--key', default='', nargs='?', help=\n 'the path of sessions directory or file of xsh or xfp, or password or other key'\n )\n args = parser.parse_args()\n if args.encrypt:\n IS_DECRYPT = False\n if args.sid:\n SID = args.sid\n if args.username:\n USERNAME = args.username\n if args.master_pwd:\n MASTER_PWD = args.master_pwd\n if args.ftp:\n IS_XSH = False\n if is_number(args.version):\n VERSION = args.version\n if args.key:\n KEY = args.key\n if not args.key and (is_number(args.version) or args.ftp):\n setDefaultSessionDirByVer()\n if IS_DECRYPT:\n if os.path.isdir(KEY):\n decrypt_dir()\n elif os.path.isfile(KEY):\n decrypt_file(KEY)\n else:\n decrypt_string(KEY)\n else:\n encrypt_string(KEY)\n", "step-4": "import os\nimport argparse\nimport configparser\nimport unicodedata\nfrom win32api import GetComputerName, GetUserName\nfrom win32security import LookupAccountName, ConvertSidToStringSid\nfrom base64 import b64encode, b64decode\nfrom Cryptodome.Hash import MD5, SHA256\nfrom Cryptodome.Cipher import ARC4\nUSERNAME = GetUserName()\nMASTER_PWD = None\nSID = ConvertSidToStringSid(LookupAccountName(GetComputerName(),\n GetUserName())[0])\nIS_XSH = True\nVERSION = '5.2'\nKEY = os.path.join(os.environ['USERPROFILE'],\n 'Documents\\\\NetSarang\\\\Xshell\\\\Sessions')\nIS_DECRYPT = True\n\n\ndef getCipherKey():\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n ver = float(VERSION)\n if 0 < ver and ver < 5.1:\n if IS_XSH:\n return MD5.new(b'!X@s#h$e%l^l&').digest()\n else:\n return MD5.new(b'!X@s#c$e%l^l&').digest()\n elif 5.1 <= ver and ver <= 5.2:\n return SHA256.new(SID.encode()).digest()\n elif 5.2 < ver:\n if MASTER_PWD == None:\n return SHA256.new((USERNAME + SID).encode()).digest()\n else:\n return SHA256.new(MASTER_PWD.encode()).digest()\n else:\n raise ValueError('Invalid argument: --Version')\n\n\ndef encrypt_string(password_string, need_return=False):\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n ver = float(VERSION)\n Cipher = ARC4.new(getCipherKey())\n if ver < 5.1:\n en_password = b64encode(Cipher.encrypt(password_string.encode())\n ).decode()\n else:\n checksum = SHA256.new(password_string.encode()).digest()\n ciphertext = Cipher.encrypt(password_string.encode())\n en_password = b64encode(ciphertext + checksum).decode()\n if need_return:\n return en_password\n else:\n print('%-20s : %s' % ('Version', VERSION))\n print('%-20s : %s' % ('Password', password_string))\n print('%-20s : %s' % ('Encrypted Password', en_password))\n\n\ndef decrypt_string(password_string, need_return=False):\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n ver = float(VERSION)\n Cipher = ARC4.new(getCipherKey())\n try:\n if ver < 5.1:\n de_password = Cipher.decrypt(b64decode(password_string)).decode()\n else:\n data = b64decode(password_string)\n ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256\n .digest_size:]\n plaintext = Cipher.decrypt(ciphertext)\n if SHA256.new(plaintext).digest() != checksum:\n raise ValueError('Cannot decrypt string. 
The key is wrong!')\n de_password = plaintext.decode('ascii')\n if need_return:\n return de_password\n else:\n print('%-20s : %s' % ('Version', VERSION))\n print('%-20s : %s' % ('Password', password_string))\n print('%-20s : %s' % ('Decrypted Password', de_password))\n except Exception as e:\n print('Password is invalid')\n\n\ndef decrypt_file(filepath: str=''):\n if not os.path.isfile(filepath):\n print(f'{filepath:=^100}\\nError: No file')\n return\n file = os.path.basename(os.path.realpath(filepath))\n if file.endswith('.xsh') or file.endswith('.xfp'):\n cfg = configparser.ConfigParser()\n try:\n cfg.read(filepath)\n except UnicodeDecodeError:\n cfg.read(filepath, encoding='utf-16')\n try:\n if file.endswith('.xsh'):\n host = cfg['CONNECTION']['Host']\n port = cfg['CONNECTION']['Port']\n username = cfg['CONNECTION:AUTHENTICATION']['UserName']\n password = cfg['CONNECTION:AUTHENTICATION']['Password']\n version = cfg['SessionInfo']['Version']\n de_password = decrypt_string(password, True)\n else:\n host = cfg['Connection']['Host']\n port = cfg['Connection']['Port']\n username = cfg['Connection']['UserName']\n password = cfg['Connection']['Password']\n version = cfg['SessionInfo']['Version']\n de_password = decrypt_string(password, True)\n print(f'{filepath:=^100}')\n print('%-20s : %s' % ('Host', host))\n print('%-20s : %s' % ('Port', port))\n print('%-20s : %s' % ('Version', version))\n print('%-20s : %s' % ('UserName', username))\n print('%-20s : %s' % ('Password', de_password))\n print('%-20s : %s' % ('Encrypted Password', password))\n except Exception as e:\n print(f'{filepath:=^100}\\nError:{e}')\n\n\ndef decrypt_dir():\n for root, dirs, files in os.walk(KEY):\n for f in files:\n decrypt_file(os.path.join(root, f))\n\n\ndef setDefaultSessionDirByVer():\n if not is_number(VERSION):\n return\n ver = float(VERSION)\n dir = 'Xshell' if IS_XSH else 'Xftp'\n global KEY\n if ver < 6:\n KEY = os.path.join(os.environ['USERPROFILE'], \n 'Documents\\\\NetSarang\\\\%s\\\\Sessions' % dir)\n elif ver == 6:\n KEY = os.path.join(os.environ['USERPROFILE'], \n 'Documents\\\\NetSarang Computer\\\\6\\\\%s\\\\Sessions' % dir)\n\n\ndef is_number(s):\n try:\n float(s)\n return True\n except ValueError:\n pass\n try:\n unicodedata.numeric(s)\n return True\n except (TypeError, ValueError):\n pass\n return False\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='xsh, xfp password decrypt')\n group = parser.add_mutually_exclusive_group(required=False)\n group.add_argument('-e', '--encrypt', default=False, help=\n '<-e | -d> encrypt password, default -d', action='store_true')\n group.add_argument('-d', '--decrypt', default=True, help=\n '<-e | -d> decrypt encrypted password, default -d', action='store_true'\n )\n parser.add_argument('-f', '--ftp', default=False, help=\n 'xftp or xshell. Ignore if it is xshell', action='store_true')\n parser.add_argument('-u', '--username', default='', type=str, help=\n 'user `whoami /user` in command. Ignore if it is local. Used by version >= 5.1'\n )\n parser.add_argument('-m', '--master_pwd', default='', type=str, help=\n \"user's master password. Used by version >= 6\")\n parser.add_argument('-s', '--sid', default='', type=str, help=\n 'SID `whoami /user` in command. Ignore if it is local. Used by version >= 5.1'\n )\n parser.add_argument('-v', '--version', default='', type=str, help=\n 'xsh or xfp version. 
If not specified, 5.2 will be used.')\n parser.add_argument('-k', '--key', default='', nargs='?', help=\n 'the path of sessions directory or file of xsh or xfp, or password or other key'\n )\n args = parser.parse_args()\n if args.encrypt:\n IS_DECRYPT = False\n if args.sid:\n SID = args.sid\n if args.username:\n USERNAME = args.username\n if args.master_pwd:\n MASTER_PWD = args.master_pwd\n if args.ftp:\n IS_XSH = False\n if is_number(args.version):\n VERSION = args.version\n if args.key:\n KEY = args.key\n if not args.key and (is_number(args.version) or args.ftp):\n setDefaultSessionDirByVer()\n if IS_DECRYPT:\n if os.path.isdir(KEY):\n decrypt_dir()\n elif os.path.isfile(KEY):\n decrypt_file(KEY)\n else:\n decrypt_string(KEY)\n else:\n encrypt_string(KEY)\n", "step-5": "# -*- coding: utf-8 -*-\n# python >= 3.7\n# supported xmanager version <5.1, 5.1, 5.2, 6\n\nimport os\nimport argparse\nimport configparser\nimport unicodedata\n\nfrom win32api import GetComputerName, GetUserName\nfrom win32security import LookupAccountName, ConvertSidToStringSid\nfrom base64 import b64encode, b64decode\nfrom Cryptodome.Hash import MD5, SHA256\nfrom Cryptodome.Cipher import ARC4\n\nUSERNAME = GetUserName()\nMASTER_PWD = None\nSID = ConvertSidToStringSid(LookupAccountName(GetComputerName(), GetUserName())[0])\nIS_XSH = True\nVERSION = '5.2'\nKEY = os.path.join(os.environ[\"USERPROFILE\"], r\"Documents\\NetSarang\\Xshell\\Sessions\")\nIS_DECRYPT = True\n\ndef getCipherKey():\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n\n ver = float(VERSION)\n if 0 < ver and ver < 5.1:\n if IS_XSH:\n return MD5.new(b'!X@s#h$e%l^l&').digest()\n else:\n return MD5.new(b'!X@s#c$e%l^l&').digest()\n elif 5.1 <= ver and ver <= 5.2:\n return SHA256.new(SID.encode()).digest()\n elif 5.2 < ver:\n if MASTER_PWD == None:\n return SHA256.new((USERNAME + SID).encode()).digest()\n else:\n return SHA256.new(MASTER_PWD.encode()).digest()\n else:\n raise ValueError('Invalid argument: --Version')\n\ndef encrypt_string(password_string, need_return=False):\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n\n ver = float(VERSION)\n\n Cipher = ARC4.new(getCipherKey())\n if ver < 5.1:\n en_password = b64encode(Cipher.encrypt(password_string.encode())).decode()\n else:\n checksum = SHA256.new(password_string.encode()).digest()\n ciphertext = Cipher.encrypt(password_string.encode())\n en_password = b64encode(ciphertext + checksum).decode()\n if need_return:\n return en_password\n else:\n print('%-20s : %s' % ('Version', VERSION))\n print('%-20s : %s' % ('Password', password_string))\n print('%-20s : %s' % ('Encrypted Password', en_password))\n\ndef decrypt_string(password_string, need_return=False):\n if not is_number(VERSION):\n raise ValueError('Invalid argument: --Version')\n\n ver = float(VERSION)\n\n Cipher = ARC4.new(getCipherKey())\n\n try:\n if ver < 5.1:\n de_password = Cipher.decrypt(b64decode(password_string)).decode()\n else:\n data = b64decode(password_string)\n ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256.digest_size:]\n plaintext = Cipher.decrypt(ciphertext)\n if SHA256.new(plaintext).digest() != checksum:\n raise ValueError('Cannot decrypt string. 
The key is wrong!')\n de_password = plaintext.decode('ascii')\n if need_return:\n return de_password\n else:\n print('%-20s : %s' % ('Version', VERSION))\n print('%-20s : %s' % ('Password', password_string))\n print('%-20s : %s' % ('Decrypted Password', de_password))\n\n except Exception as e:\n print(\"Password is invalid\")\n\ndef decrypt_file(filepath: str = ''):\n if not os.path.isfile(filepath):\n print(f\"{filepath:=^100}\\nError: No file\")\n return\n\n file = os.path.basename(os.path.realpath(filepath))\n\n if file.endswith(\".xsh\") or file.endswith(\".xfp\"):\n cfg = configparser.ConfigParser()\n try:\n cfg.read(filepath)\n except UnicodeDecodeError:\n cfg.read(filepath, encoding=\"utf-16\")\n\n try:\n if file.endswith(\".xsh\"):\n host = cfg[\"CONNECTION\"][\"Host\"]\n port = cfg[\"CONNECTION\"][\"Port\"]\n username = cfg[\"CONNECTION:AUTHENTICATION\"][\"UserName\"]\n password = cfg[\"CONNECTION:AUTHENTICATION\"][\"Password\"]\n version = cfg[\"SessionInfo\"][\"Version\"]\n\n de_password = decrypt_string(password, True)\n else:\n host = cfg[\"Connection\"][\"Host\"]\n port = cfg[\"Connection\"][\"Port\"]\n username = cfg[\"Connection\"][\"UserName\"]\n password = cfg[\"Connection\"][\"Password\"]\n version = cfg[\"SessionInfo\"][\"Version\"]\n\n de_password = decrypt_string(password, True)\n\n print(f\"{filepath:=^100}\")\n print('%-20s : %s' % ('Host', host))\n print('%-20s : %s' % ('Port', port))\n print('%-20s : %s' % ('Version', version))\n print('%-20s : %s' % ('UserName', username))\n print('%-20s : %s' % ('Password', de_password))\n print('%-20s : %s' % ('Encrypted Password', password))\n except Exception as e:\n print(f\"{filepath:=^100}\\nError:{e}\")\n\ndef decrypt_dir():\n for root, dirs, files in os.walk(KEY):\n for f in files:\n decrypt_file(os.path.join(root, f))\n\ndef setDefaultSessionDirByVer():\n if not is_number(VERSION):\n return\n ver = float(VERSION)\n dir = 'Xshell' if IS_XSH else 'Xftp';\n global KEY\n if ver < 6:\n KEY = os.path.join(os.environ[\"USERPROFILE\"], r\"Documents\\NetSarang\\%s\\Sessions\" % dir)\n elif ver == 6:\n KEY = os.path.join(os.environ[\"USERPROFILE\"], r\"Documents\\NetSarang Computer\\6\\%s\\Sessions\" % dir)\n\ndef is_number(s):\n try:\n float(s)\n return True\n except ValueError:\n pass\n\n try:\n unicodedata.numeric(s)\n return True\n except (TypeError, ValueError):\n pass\n\n return False\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\"xsh, xfp password decrypt\")\n group = parser.add_mutually_exclusive_group(required=False)\n group.add_argument(\"-e\", \"--encrypt\", default=False,\n help=\"<-e | -d> encrypt password, default -d\", action=\"store_true\")\n group.add_argument(\"-d\", \"--decrypt\", default=True,\n help=\"<-e | -d> decrypt encrypted password, default -d\", action=\"store_true\")\n parser.add_argument(\"-f\", \"--ftp\", default=False,\n help=\"xftp or xshell. Ignore if it is xshell\", action=\"store_true\")\n parser.add_argument(\"-u\", \"--username\", default=\"\", type=str,\n help=\"user `whoami /user` in command. Ignore if it is local. Used by version >= 5.1\")\n parser.add_argument(\"-m\", \"--master_pwd\", default=\"\", type=str,\n help=\"user\\'s master password. Used by version >= 6\")\n parser.add_argument(\"-s\", \"--sid\", default=\"\", type=str,\n help=\"SID `whoami /user` in command. Ignore if it is local. Used by version >= 5.1\")\n parser.add_argument(\"-v\", \"--version\", default=\"\", type=str,\n help=\"xsh or xfp version. 
If not specified, 5.2 will be used.\")\n parser.add_argument(\"-k\", \"--key\", default=\"\", nargs='?',\n help=\"the path of sessions directory or file of xsh or xfp, or password or other key\")\n\n args = parser.parse_args()\n\n #print(args)\n\n if args.encrypt:\n IS_DECRYPT = False\n if args.sid:\n SID = args.sid\n if args.username:\n USERNAME = args.username\n if args.master_pwd:\n MASTER_PWD = args.master_pwd\n if args.ftp:\n IS_XSH = False\n if is_number(args.version):\n VERSION = args.version\n if args.key:\n KEY = args.key\n\n if not args.key and (is_number(args.version) or args.ftp):\n setDefaultSessionDirByVer()\n\n if IS_DECRYPT:\n if os.path.isdir(KEY):\n decrypt_dir()\n elif os.path.isfile(KEY):\n decrypt_file(KEY)\n else:\n decrypt_string(KEY)\n else:\n encrypt_string(KEY)", "step-ids": [ 5, 8, 9, 10, 11 ] }
[ 5, 8, 9, 10, 11 ]
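# Editor's note: a minimal round-trip sketch for the decryption record above,
# assuming the same pycryptodomex dependency; the SID below is a made-up
# placeholder, and the version 5.1/5.2 key scheme (SHA-256 of the SID string)
# is the one exercised.
from base64 import b64encode, b64decode
from Cryptodome.Hash import SHA256
from Cryptodome.Cipher import ARC4

sid = 'S-1-5-21-1111111111-2222222222-3333333333-1001'  # hypothetical SID
key = SHA256.new(sid.encode()).digest()

plaintext = 'hunter2'
checksum = SHA256.new(plaintext.encode()).digest()
token = b64encode(ARC4.new(key).encrypt(plaintext.encode()) + checksum).decode()

data = b64decode(token)
body, digest = data[:-SHA256.digest_size], data[-SHA256.digest_size:]
recovered = ARC4.new(key).decrypt(body)  # RC4 is symmetric, so this round-trips
assert SHA256.new(recovered).digest() == digest
assert recovered.decode() == plaintext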
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
random.shuffle(lista)
print('A ordem de apresentacao será')
print(lista)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
a1 = input('Primeiro aluno: ')
a2 = input('Primeiro segundo: ')
a3 = input('Primeiro terceiro: ')
a4 = input('Primeiro quarto: ')
lista = [a1, a2, a3, a4]
random.shuffle(lista)
print('A ordem de apresentacao será')
print(lista)
<|reserved_special_token_1|>
import random
a1 = input('Primeiro aluno: ')
a2 = input('Primeiro segundo: ')
a3 = input('Primeiro terceiro: ')
a4 = input('Primeiro quarto: ')
lista = [a1, a2, a3, a4]
random.shuffle(lista)
print('A ordem de apresentacao será')
print(lista)
<|reserved_special_token_1|>
#embaralhar sorteio
import random
a1 = input('Primeiro aluno: ')
a2 = input('Primeiro segundo: ')
a3 = input('Primeiro terceiro: ')
a4 = input('Primeiro quarto: ')
lista = [a1, a2, a3, a4]
random.shuffle(lista)
print('A ordem de apresentacao será')
print(lista)
flexible
{ "blob_id": "9a0e24fbe9f51dc914d891e90196c2ff4e65f04a", "index": 9652, "step-1": "<mask token>\n", "step-2": "<mask token>\nrandom.shuffle(lista)\nprint('A ordem de apresentacao será')\nprint(lista)\n", "step-3": "<mask token>\na1 = input('Primeiro aluno: ')\na2 = input('Primeiro segundo: ')\na3 = input('Primeiro terceiro: ')\na4 = input('Primeiro quarto: ')\nlista = [a1, a2, a3, a4]\nrandom.shuffle(lista)\nprint('A ordem de apresentacao será')\nprint(lista)\n", "step-4": "import random\na1 = input('Primeiro aluno: ')\na2 = input('Primeiro segundo: ')\na3 = input('Primeiro terceiro: ')\na4 = input('Primeiro quarto: ')\nlista = [a1, a2, a3, a4]\nrandom.shuffle(lista)\nprint('A ordem de apresentacao será')\nprint(lista)\n", "step-5": "#embaralhar sorteio\nimport random\na1 = input('Primeiro aluno: ')\na2 = input('Primeiro segundo: ')\na3 = input('Primeiro terceiro: ')\na4 = input('Primeiro quarto: ')\nlista = [a1, a2, a3, a4]\nrandom.shuffle(lista)\nprint('A ordem de apresentacao será')\nprint(lista)", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
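# Editor's note: random.shuffle in the record above mutates the list in place
# and returns None, a common pitfall; random.sample yields a shuffled copy
# instead, and seeding makes the draw reproducible. The names below are
# made-up stand-ins for the interactive input() calls.
import random

alunos = ['Ana', 'Bruno', 'Carla', 'Davi']
random.seed(42)
ordem = random.sample(alunos, k=len(alunos))
print(ordem)   # shuffled copy
print(alunos)  # original order preserved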
<|reserved_special_token_0|>


def plot_img():
    """
    plot ground truth (left) and reconstruction (right)
    showing b/w image data of mnist
    """
    plt.subplot(121)
    plt.imshow(data.data.numpy()[0,].squeeze())
    plt.subplot(122)
    plt.imshow(dec_mean.view(-1, 28, 28).data.numpy()[0,].squeeze())
    plt.show()
    plt.pause(1e-06)
    plt.gcf().clear()
    sample = model.sample_z(data)
    plt.imshow(sample)


<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


def plot_img():
    """
    plot ground truth (left) and reconstruction (right)
    showing b/w image data of mnist
    """
    plt.subplot(121)
    plt.imshow(data.data.numpy()[0,].squeeze())
    plt.subplot(122)
    plt.imshow(dec_mean.view(-1, 28, 28).data.numpy()[0,].squeeze())
    plt.show()
    plt.pause(1e-06)
    plt.gcf().clear()
    sample = model.sample_z(data)
    plt.imshow(sample)


def plot_kde():
    """
    plot the kernel density estimation for 2d distributions
    """
    f, (ax1, ax2) = plt.subplots(1, 2, sharey=True, sharex=True)
    sns.kdeplot(data.data.numpy()[:, 0], data.data.numpy()[:, 1], color='r',
        shade=True, ax=ax1)
    sns.kdeplot(dec_mean.data.numpy()[:, 0], dec_mean.data.numpy()[:, 1],
        color='b', shade=True, ax=ax2)
    plt.show()
    plt.pause(1e-06)
    plt.gcf().clear()


<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


def plot_img():
    """
    plot ground truth (left) and reconstruction (right)
    showing b/w image data of mnist
    """
    plt.subplot(121)
    plt.imshow(data.data.numpy()[0,].squeeze())
    plt.subplot(122)
    plt.imshow(dec_mean.view(-1, 28, 28).data.numpy()[0,].squeeze())
    plt.show()
    plt.pause(1e-06)
    plt.gcf().clear()
    sample = model.sample_z(data)
    plt.imshow(sample)


def plot_kde():
    """
    plot the kernel density estimation for 2d distributions
    """
    f, (ax1, ax2) = plt.subplots(1, 2, sharey=True, sharex=True)
    sns.kdeplot(data.data.numpy()[:, 0], data.data.numpy()[:, 1], color='r',
        shade=True, ax=ax1)
    sns.kdeplot(dec_mean.data.numpy()[:, 0], dec_mean.data.numpy()[:, 1],
        color='b', shade=True, ax=ax2)
    plt.show()
    plt.pause(1e-06)
    plt.gcf().clear()


def plot_ts(data, enc_mean, dec_mean):
    """
    plot time series with uncertainty
    """
    batch_size = data.size()[0]
    D = 2
    N = int(data.size()[1] / D)
    f, (ax1, ax2, ax3) = plt.subplots(1, 3, sharey=False, sharex=True)
    plt.axes(ax1)
    ax1.set_ylim(-0.1, 0.1)
    sns.tsplot(data.view(batch_size, N, -1).data.numpy())
    plt.axes(ax2)
    ax2.set_ylim(-0.1, 0.1)
    sns.tsplot(dec_mean.view(batch_size, N, -1).data.numpy())
    plt.axes(ax3)
    sample_Sigma = bivech2(enc_mean.view(batch_size, N, -1))
    sample_vechSigma = bvech(sample_Sigma).data.numpy()
    sns.tsplot(sample_vechSigma)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import matplotlib.pyplot as plt
import seaborn as sns
from util.matutil import *
from util.batchutil import *


def plot_img():
    """
    plot ground truth (left) and reconstruction (right)
    showing b/w image data of mnist
    """
    plt.subplot(121)
    plt.imshow(data.data.numpy()[0,].squeeze())
    plt.subplot(122)
    plt.imshow(dec_mean.view(-1, 28, 28).data.numpy()[0,].squeeze())
    plt.show()
    plt.pause(1e-06)
    plt.gcf().clear()
    sample = model.sample_z(data)
    plt.imshow(sample)


def plot_kde():
    """
    plot the kernel density estimation for 2d distributions
    """
    f, (ax1, ax2) = plt.subplots(1, 2, sharey=True, sharex=True)
    sns.kdeplot(data.data.numpy()[:, 0], data.data.numpy()[:, 1], color='r',
        shade=True, ax=ax1)
    sns.kdeplot(dec_mean.data.numpy()[:, 0], dec_mean.data.numpy()[:, 1],
        color='b', shade=True, ax=ax2)
    plt.show()
    plt.pause(1e-06)
    plt.gcf().clear()


def plot_ts(data, enc_mean, dec_mean):
    """
    plot time series with uncertainty
    """
    batch_size = data.size()[0]
    D = 2
    N = int(data.size()[1] / D)
    f, (ax1, ax2, ax3) = plt.subplots(1, 3, sharey=False, sharex=True)
    plt.axes(ax1)
    ax1.set_ylim(-0.1, 0.1)
    sns.tsplot(data.view(batch_size, N, -1).data.numpy())
    plt.axes(ax2)
    ax2.set_ylim(-0.1, 0.1)
    sns.tsplot(dec_mean.view(batch_size, N, -1).data.numpy())
    plt.axes(ax3)
    sample_Sigma = bivech2(enc_mean.view(batch_size, N, -1))
    sample_vechSigma = bvech(sample_Sigma).data.numpy()
    sns.tsplot(sample_vechSigma)
flexible
{ "blob_id": "cca9d91fe20e58f233ccfc4100edb748356ed234", "index": 6311, "step-1": "<mask token>\n\n\ndef plot_img():\n \"\"\" \n plot ground truth (left) and reconstruction (right)\n showing b/w image data of mnist\n \"\"\"\n plt.subplot(121)\n plt.imshow(data.data.numpy()[0,].squeeze())\n plt.subplot(122)\n plt.imshow(dec_mean.view(-1, 28, 28).data.numpy()[0,].squeeze())\n plt.show()\n plt.pause(1e-06)\n plt.gcf().clear()\n sample = model.sample_z(data)\n plt.imshow(sample)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef plot_img():\n \"\"\" \n plot ground truth (left) and reconstruction (right)\n showing b/w image data of mnist\n \"\"\"\n plt.subplot(121)\n plt.imshow(data.data.numpy()[0,].squeeze())\n plt.subplot(122)\n plt.imshow(dec_mean.view(-1, 28, 28).data.numpy()[0,].squeeze())\n plt.show()\n plt.pause(1e-06)\n plt.gcf().clear()\n sample = model.sample_z(data)\n plt.imshow(sample)\n\n\ndef plot_kde():\n \"\"\"\n plot the kernel density estimation for 2d distributions\n \"\"\"\n f, (ax1, ax2) = plt.subplots(1, 2, sharey=True, sharex=True)\n sns.kdeplot(data.data.numpy()[:, 0], data.data.numpy()[:, 1], color='r',\n shade=True, ax=ax1)\n sns.kdeplot(dec_mean.data.numpy()[:, 0], dec_mean.data.numpy()[:, 1],\n color='b', shade=True, ax=ax2)\n plt.show()\n plt.pause(1e-06)\n plt.gcf().clear()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef plot_img():\n \"\"\" \n plot ground truth (left) and reconstruction (right)\n showing b/w image data of mnist\n \"\"\"\n plt.subplot(121)\n plt.imshow(data.data.numpy()[0,].squeeze())\n plt.subplot(122)\n plt.imshow(dec_mean.view(-1, 28, 28).data.numpy()[0,].squeeze())\n plt.show()\n plt.pause(1e-06)\n plt.gcf().clear()\n sample = model.sample_z(data)\n plt.imshow(sample)\n\n\ndef plot_kde():\n \"\"\"\n plot the kernel density estimation for 2d distributions\n \"\"\"\n f, (ax1, ax2) = plt.subplots(1, 2, sharey=True, sharex=True)\n sns.kdeplot(data.data.numpy()[:, 0], data.data.numpy()[:, 1], color='r',\n shade=True, ax=ax1)\n sns.kdeplot(dec_mean.data.numpy()[:, 0], dec_mean.data.numpy()[:, 1],\n color='b', shade=True, ax=ax2)\n plt.show()\n plt.pause(1e-06)\n plt.gcf().clear()\n\n\ndef plot_ts(data, enc_mean, dec_mean):\n \"\"\"\n plot time series with uncertainty\n \"\"\"\n batch_size = data.size()[0]\n D = 2\n N = int(data.size()[1] / D)\n f, (ax1, ax2, ax3) = plt.subplots(1, 3, sharey=False, sharex=True)\n plt.axes(ax1)\n ax1.set_ylim(-0.1, 0.1)\n sns.tsplot(data.view(batch_size, N, -1).data.numpy())\n plt.axes(ax2)\n ax2.set_ylim(-0.1, 0.1)\n sns.tsplot(dec_mean.view(batch_size, N, -1).data.numpy())\n plt.axes(ax3)\n sample_Sigma = bivech2(enc_mean.view(batch_size, N, -1))\n sample_vechSigma = bvech(sample_Sigma).data.numpy()\n sns.tsplot(sample_vechSigma)\n", "step-4": "<mask token>\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom util.matutil import *\nfrom util.batchutil import *\n\n\ndef plot_img():\n \"\"\" \n plot ground truth (left) and reconstruction (right)\n showing b/w image data of mnist\n \"\"\"\n plt.subplot(121)\n plt.imshow(data.data.numpy()[0,].squeeze())\n plt.subplot(122)\n plt.imshow(dec_mean.view(-1, 28, 28).data.numpy()[0,].squeeze())\n plt.show()\n plt.pause(1e-06)\n plt.gcf().clear()\n sample = model.sample_z(data)\n plt.imshow(sample)\n\n\ndef plot_kde():\n \"\"\"\n plot the kernel density estimation for 2d distributions\n \"\"\"\n f, (ax1, ax2) = plt.subplots(1, 2, sharey=True, sharex=True)\n sns.kdeplot(data.data.numpy()[:, 0], data.data.numpy()[:, 1], color='r',\n shade=True, ax=ax1)\n 
sns.kdeplot(dec_mean.data.numpy()[:, 0], dec_mean.data.numpy()[:, 1],\n color='b', shade=True, ax=ax2)\n plt.show()\n plt.pause(1e-06)\n plt.gcf().clear()\n\n\ndef plot_ts(data, enc_mean, dec_mean):\n \"\"\"\n plot time series with uncertainty\n \"\"\"\n batch_size = data.size()[0]\n D = 2\n N = int(data.size()[1] / D)\n f, (ax1, ax2, ax3) = plt.subplots(1, 3, sharey=False, sharex=True)\n plt.axes(ax1)\n ax1.set_ylim(-0.1, 0.1)\n sns.tsplot(data.view(batch_size, N, -1).data.numpy())\n plt.axes(ax2)\n ax2.set_ylim(-0.1, 0.1)\n sns.tsplot(dec_mean.view(batch_size, N, -1).data.numpy())\n plt.axes(ax3)\n sample_Sigma = bivech2(enc_mean.view(batch_size, N, -1))\n sample_vechSigma = bvech(sample_Sigma).data.numpy()\n sns.tsplot(sample_vechSigma)\n", "step-5": "\"\"\"\nPlot funcs \nJan, 2018 Rose Yu @Caltech \n\"\"\"\nimport matplotlib.pyplot as plt \nimport seaborn as sns\nfrom util.matutil import *\nfrom util.batchutil import *\n\ndef plot_img():\n \"\"\" \n plot ground truth (left) and reconstruction (right)\n showing b/w image data of mnist\n \"\"\"\n plt.subplot(121)\n plt.imshow(data.data.numpy()[0,].squeeze())\n plt.subplot(122)\n plt.imshow(dec_mean.view(-1,28,28).data.numpy()[0,].squeeze())\n\n plt.show()\n plt.pause(1e-6)\n plt.gcf().clear()\n sample = model.sample_z(data) \n plt.imshow(sample)\n\ndef plot_kde():\n \"\"\"\n plot the kernel density estimation for 2d distributions\n \"\"\"\n f, (ax1, ax2) = plt.subplots(1, 2, sharey=True, sharex=True)\n sns.kdeplot(data.data.numpy()[:,0], data.data.numpy()[:,1], color=\"r\", shade=True, ax=ax1)\n sns.kdeplot(dec_mean.data.numpy()[:,0], dec_mean.data.numpy()[:,1], color=\"b\", shade=True, ax=ax2)\n plt.show()\n plt.pause(1e-6)\n plt.gcf().clear()\n\ndef plot_ts(data, enc_mean, dec_mean):\n \"\"\"\n plot time series with uncertainty\n \"\"\"\n # enc_mean, enc_cov = enc\n # dec_mean, dec_cov = dec\n\n batch_size = data.size()[0]\n D = 2\n N = int(data.size()[1]/D)\n\n f, (ax1, ax2, ax3) = plt.subplots(1, 3, sharey=False, sharex=True)\n # plot data\n plt.axes(ax1)\n ax1.set_ylim(-0.1,0.1)\n\n sns.tsplot(data.view(batch_size,N,-1).data.numpy())\n\n # plot reconstruction\n plt.axes(ax2)\n ax2.set_ylim(-0.1,0.1)\n sns.tsplot(dec_mean.view(batch_size,N,-1).data.numpy())\n\n plt.axes(ax3)\n sample_Sigma = bivech2(enc_mean.view(batch_size,N,-1))\n sample_vechSigma = bvech(sample_Sigma).data.numpy()\n \n sns.tsplot(sample_vechSigma)\n\n # plot latent variables\n # sample_Sigma = ivech2x(enc_cov.data.numpy())\n # sample_vechSigma = vechx(sample_Sigma.reshape((-1,N,N)))\n # sns.tsplot(sample_vechSigma)\n \n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
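# Editor's note: sns.tsplot used by plot_ts above was deprecated in seaborn
# 0.8 and removed in later releases; a rough stand-in for the plot_kde
# comparison using the current kdeplot API, with random arrays replacing the
# record's model tensors (data, dec_mean), which are not defined here.
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns

rng = np.random.default_rng(0)
truth = rng.normal(size=(500, 2))
recon = truth + rng.normal(scale=0.3, size=(500, 2))
f, (ax1, ax2) = plt.subplots(1, 2, sharey=True, sharex=True)
sns.kdeplot(x=truth[:, 0], y=truth[:, 1], color='r', fill=True, ax=ax1)
sns.kdeplot(x=recon[:, 0], y=recon[:, 1], color='b', fill=True, ax=ax2)
plt.show()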
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
ax.set_xticks(x + width / 2)
ax.set_xticklabels(x_axis)
plt.legend((p_rf[0], p_anns[0], p_adaboost[0]), ('RF', 'ANNs', 'AdaBoost'),
    loc='best', fontsize=20)
plt.xticks(fontsize=18)
plt.yticks(fontsize=18)
plt.ylim(150, 200)
plt.xlabel('训练集大小(%)', fontsize=20)
plt.ylabel('MAE(s)', fontsize=20)
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
plt.rcParams['savefig.dpi'] = 300
plt.rcParams['figure.dpi'] = 300
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
x_axis = [20, 40, 60, 80, 100]
rf = [184, 174, 166, 159, 157.5]
anns = [186, 179, 170, 164, 161]
adaboost = [187.5, 176, 172, 163, 162]
x = np.arange(len(x_axis))
width = 0.2
fig, ax = plt.subplots()
p_rf = ax.bar(x - width, rf, width, alpha=0.9)
p_anns = ax.bar(x, anns, width, alpha=0.9, color='red')
p_adaboost = ax.bar(x + width, adaboost, width, alpha=0.9, color='green')
ax.set_xticks(x + width / 2)
ax.set_xticklabels(x_axis)
plt.legend((p_rf[0], p_anns[0], p_adaboost[0]), ('RF', 'ANNs', 'AdaBoost'),
    loc='best', fontsize=20)
plt.xticks(fontsize=18)
plt.yticks(fontsize=18)
plt.ylim(150, 200)
plt.xlabel('训练集大小(%)', fontsize=20)
plt.ylabel('MAE(s)', fontsize=20)
plt.show()
<|reserved_special_token_1|>
import matplotlib.pyplot as plt
import numpy as np
plt.rcParams['savefig.dpi'] = 300
plt.rcParams['figure.dpi'] = 300
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
x_axis = [20, 40, 60, 80, 100]
rf = [184, 174, 166, 159, 157.5]
anns = [186, 179, 170, 164, 161]
adaboost = [187.5, 176, 172, 163, 162]
x = np.arange(len(x_axis))
width = 0.2
fig, ax = plt.subplots()
p_rf = ax.bar(x - width, rf, width, alpha=0.9)
p_anns = ax.bar(x, anns, width, alpha=0.9, color='red')
p_adaboost = ax.bar(x + width, adaboost, width, alpha=0.9, color='green')
ax.set_xticks(x + width / 2)
ax.set_xticklabels(x_axis)
plt.legend((p_rf[0], p_anns[0], p_adaboost[0]), ('RF', 'ANNs', 'AdaBoost'),
    loc='best', fontsize=20)
plt.xticks(fontsize=18)
plt.yticks(fontsize=18)
plt.ylim(150, 200)
plt.xlabel('训练集大小(%)', fontsize=20)
plt.ylabel('MAE(s)', fontsize=20)
plt.show()
flexible
{ "blob_id": "13342922022f0a0e8928c81c1c4716125af0b2c4", "index": 418, "step-1": "<mask token>\n", "step-2": "<mask token>\nax.set_xticks(x + width / 2)\nax.set_xticklabels(x_axis)\nplt.legend((p_rf[0], p_anns[0], p_adaboost[0]), ('RF', 'ANNs', 'AdaBoost'),\n loc='best', fontsize=20)\nplt.xticks(fontsize=18)\nplt.yticks(fontsize=18)\nplt.ylim(150, 200)\nplt.xlabel('训练集大小(%)', fontsize=20)\nplt.ylabel('MAE(s)', fontsize=20)\nplt.show()\n", "step-3": "<mask token>\nplt.rcParams['savefig.dpi'] = 300\nplt.rcParams['figure.dpi'] = 300\nplt.rcParams['font.sans-serif'] = ['SimHei']\nplt.rcParams['axes.unicode_minus'] = False\nx_axis = [20, 40, 60, 80, 100]\nrf = [184, 174, 166, 159, 157.5]\nanns = [186, 179, 170, 164, 161]\nadaboost = [187.5, 176, 172, 163, 162]\nx = np.arange(len(x_axis))\nwidth = 0.2\nfig, ax = plt.subplots()\np_rf = ax.bar(x - width, rf, width, alpha=0.9)\np_anns = ax.bar(x, anns, width, alpha=0.9, color='red')\np_adaboost = ax.bar(x + width, adaboost, width, alpha=0.9, color='green')\nax.set_xticks(x + width / 2)\nax.set_xticklabels(x_axis)\nplt.legend((p_rf[0], p_anns[0], p_adaboost[0]), ('RF', 'ANNs', 'AdaBoost'),\n loc='best', fontsize=20)\nplt.xticks(fontsize=18)\nplt.yticks(fontsize=18)\nplt.ylim(150, 200)\nplt.xlabel('训练集大小(%)', fontsize=20)\nplt.ylabel('MAE(s)', fontsize=20)\nplt.show()\n", "step-4": "import matplotlib.pyplot as plt\nimport numpy as np\nplt.rcParams['savefig.dpi'] = 300\nplt.rcParams['figure.dpi'] = 300\nplt.rcParams['font.sans-serif'] = ['SimHei']\nplt.rcParams['axes.unicode_minus'] = False\nx_axis = [20, 40, 60, 80, 100]\nrf = [184, 174, 166, 159, 157.5]\nanns = [186, 179, 170, 164, 161]\nadaboost = [187.5, 176, 172, 163, 162]\nx = np.arange(len(x_axis))\nwidth = 0.2\nfig, ax = plt.subplots()\np_rf = ax.bar(x - width, rf, width, alpha=0.9)\np_anns = ax.bar(x, anns, width, alpha=0.9, color='red')\np_adaboost = ax.bar(x + width, adaboost, width, alpha=0.9, color='green')\nax.set_xticks(x + width / 2)\nax.set_xticklabels(x_axis)\nplt.legend((p_rf[0], p_anns[0], p_adaboost[0]), ('RF', 'ANNs', 'AdaBoost'),\n loc='best', fontsize=20)\nplt.xticks(fontsize=18)\nplt.yticks(fontsize=18)\nplt.ylim(150, 200)\nplt.xlabel('训练集大小(%)', fontsize=20)\nplt.ylabel('MAE(s)', fontsize=20)\nplt.show()\n", "step-5": "import matplotlib.pyplot as plt\nimport numpy as np\nplt.rcParams['savefig.dpi'] = 300 #图片像素\nplt.rcParams['figure.dpi'] = 300 #分辨率\nplt.rcParams['font.sans-serif']=['SimHei']\nplt.rcParams['axes.unicode_minus'] = False\nx_axis = [20,40,60,80,100]\n\nrf = [184,174,166,159,157.5]\nanns = [186,179,170,164,161]\nadaboost = [187.5,176,172,163,162]\n\n\nx = np.arange(len(x_axis)) #首先用第一个的长度作为横坐标\nwidth = 0.2 #设置柱与柱之间的宽度\nfig,ax = plt.subplots()\np_rf = ax.bar(x-width,rf,width,alpha = 0.9,)\np_anns = ax.bar(x,anns,width,alpha = 0.9,color= 'red')\np_adaboost = ax.bar(x+width,adaboost,width,alpha = 0.9,color= 'green')\nax.set_xticks(x +width/2)#将坐标设置在指定位置\nax.set_xticklabels(x_axis)#将横坐标替换成\nplt.legend((p_rf[0],p_anns[0],p_adaboost[0]),('RF','ANNs','AdaBoost'),loc='best',fontsize=20)\nplt.xticks(fontsize=18)\nplt.yticks(fontsize=18)\nplt.ylim(150,200) # 指定Y轴的高度\nplt.xlabel('训练集大小(%)',fontsize=20)\nplt.ylabel('MAE(s)',fontsize=20)\nplt.show()\n#plt.savefig('MAE.png', dpi=3600)", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
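# Editor's note: the fixed offsets above (x - width, x, x + width) generalize
# to any number of series; a small sketch of that generalization using the
# record's own MAE values, with offsets centered on each tick.
import numpy as np
import matplotlib.pyplot as plt

labels = [20, 40, 60, 80, 100]
series = {'RF': [184, 174, 166, 159, 157.5],
    'ANNs': [186, 179, 170, 164, 161],
    'AdaBoost': [187.5, 176, 172, 163, 162]}
x = np.arange(len(labels))
width = 0.8 / len(series)  # keep each group inside one x-axis unit
fig, ax = plt.subplots()
for k, (name, vals) in enumerate(series.items()):
    ax.bar(x + (k - (len(series) - 1) / 2) * width, vals, width, label=name)
ax.set_xticks(x)
ax.set_xticklabels(labels)
ax.legend()
plt.show()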
import datetime,os

def GetDatetimeFromMyFormat(l):
    # l = "2018-5-17 19:18:45"
    l_words = l.split()
    l_days = l_words[0].split('-')
    l_times = l_words[1].split(':')
    out = datetime.datetime(int(l_days[0]),int(l_days[1]),int(l_days[2]),int(l_times[0]),int(l_times[1]),int(l_times[2]))
    return out
normal
{ "blob_id": "6767302869d73d041e2d7061722e05484d19f3e0", "index": 4752, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef GetDatetimeFromMyFormat(l):\n l_words = l.split()\n l_days = l_words[0].split('-')\n l_times = l_words[1].split(':')\n out = datetime.datetime(int(l_days[0]), int(l_days[1]), int(l_days[2]),\n int(l_times[0]), int(l_times[1]), int(l_times[2]))\n return out\n", "step-3": "import datetime, os\n\n\ndef GetDatetimeFromMyFormat(l):\n l_words = l.split()\n l_days = l_words[0].split('-')\n l_times = l_words[1].split(':')\n out = datetime.datetime(int(l_days[0]), int(l_days[1]), int(l_days[2]),\n int(l_times[0]), int(l_times[1]), int(l_times[2]))\n return out\n", "step-4": "import datetime,os\n\ndef GetDatetimeFromMyFormat(l):\n # l = \"2018-5-17 19:18:45\"\n l_words = l.split()\n l_days = l_words[0].split('-')\n l_times = l_words[1].split(':')\n out = datetime.datetime(int(l_days[0]),int(l_days[1]),int(l_days[2]),int(l_times[0]),int(l_times[1]),int(l_times[2]))\n return out\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
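# Editor's note: the same "2018-5-17 19:18:45" format can be parsed in one
# call with datetime.strptime, which also validates each field; %m, %d and %H
# accept the non-zero-padded values used in the record above.
import datetime

parsed = datetime.datetime.strptime('2018-5-17 19:18:45', '%Y-%m-%d %H:%M:%S')
assert parsed == datetime.datetime(2018, 5, 17, 19, 18, 45)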
<|reserved_special_token_0|>


class SelectSubsDialog(QDialog):
    <|reserved_special_token_0|>

    def enable_save_buttons(self):
        self.confirm_button.setEnabled(True)
        self.save_as_set_button.setEnabled(True)

    def get_substituents(self):
        self.substituents = list(dict.fromkeys([item.text() for item in
            self.select_subs_table.selectedItems()]))
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>


class SelectSubsForNewSetDialog(SelectSubsDialog):

    def __init__(self):
        super().__init__(r_group='New Set')
        self.confirm_button.setVisible(False)


class SelectSubsEditSetDialog(SelectSubsDialog):

    def __init__(self, set_name):
        super().__init__(r_group=None)
        self.set_name = set_name
        self.setWindowTitle(f'Select Groups for {self.set_name}')
        self.save_as_set_button.setVisible(False)
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class SelectSubsDialog(QDialog):
    <|reserved_special_token_0|>

    def enable_save_buttons(self):
        self.confirm_button.setEnabled(True)
        self.save_as_set_button.setEnabled(True)

    def get_substituents(self):
        self.substituents = list(dict.fromkeys([item.text() for item in
            self.select_subs_table.selectedItems()]))

    def save_substituents(self):
        self.get_substituents()
        self.close()
    <|reserved_special_token_0|>


class SelectSubsForNewSetDialog(SelectSubsDialog):

    def __init__(self):
        super().__init__(r_group='New Set')
        self.confirm_button.setVisible(False)


class SelectSubsEditSetDialog(SelectSubsDialog):

    def __init__(self, set_name):
        super().__init__(r_group=None)
        self.set_name = set_name
        self.setWindowTitle(f'Select Groups for {self.set_name}')
        self.save_as_set_button.setVisible(False)
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class SelectSubsDialog(QDialog):

    def __init__(self, r_group):
        super().__init__()
        self.r_group = r_group
        self.substituents = None
        self.new_set_saved = False
        self.setWindowTitle(f'Select Substituents for {self.r_group}')
        self.instructions_label = QLabel(
            'Click row heading to select functional group set. Ctrl + click or Shift + click to select multiple items. Double click functional group name to view SMILES.'
            )
        self.select_subs_table = SelectSubsTable()
        self.confirm_button = QPushButton('Confirm Selection')
        self.confirm_button.setEnabled(False)
        self.save_as_set_button = QPushButton('Save Selection as Set')
        self.save_as_set_button.setEnabled(False)
        self.cancel_button = QPushButton('Cancel')
        self.select_subs_button_layout = QHBoxLayout()
        self.select_subs_button_layout.addWidget(self.confirm_button)
        self.select_subs_button_layout.addWidget(self.save_as_set_button)
        self.select_subs_button_layout.addWidget(self.cancel_button)
        self.select_subs_layout = QVBoxLayout()
        self.select_subs_layout.addWidget(self.instructions_label)
        self.select_subs_layout.addWidget(self.select_subs_table)
        self.select_subs_layout.addLayout(self.select_subs_button_layout)
        self.setLayout(self.select_subs_layout)
        self.select_subs_table.itemSelectionChanged.connect(self.
            enable_save_buttons)
        self.confirm_button.clicked.connect(self.save_substituents)
        self.save_as_set_button.clicked.connect(self.save_selection)
        self.cancel_button.clicked.connect(self.close)

    def enable_save_buttons(self):
        self.confirm_button.setEnabled(True)
        self.save_as_set_button.setEnabled(True)

    def get_substituents(self):
        self.substituents = list(dict.fromkeys([item.text() for item in
            self.select_subs_table.selectedItems()]))

    def save_substituents(self):
        self.get_substituents()
        self.close()
    <|reserved_special_token_0|>


class SelectSubsForNewSetDialog(SelectSubsDialog):

    def __init__(self):
        super().__init__(r_group='New Set')
        self.confirm_button.setVisible(False)


class SelectSubsEditSetDialog(SelectSubsDialog):

    def __init__(self, set_name):
        super().__init__(r_group=None)
        self.set_name = set_name
        self.setWindowTitle(f'Select Groups for {self.set_name}')
        self.save_as_set_button.setVisible(False)
<|reserved_special_token_1|>
from PyQt5.QtWidgets import *
from select_substituents_table import *
from save_selection_dialog import *


class SelectSubsDialog(QDialog):

    def __init__(self, r_group):
        super().__init__()
        self.r_group = r_group
        self.substituents = None
        self.new_set_saved = False
        self.setWindowTitle(f'Select Substituents for {self.r_group}')
        self.instructions_label = QLabel(
            'Click row heading to select functional group set. Ctrl + click or Shift + click to select multiple items. Double click functional group name to view SMILES.'
            )
        self.select_subs_table = SelectSubsTable()
        self.confirm_button = QPushButton('Confirm Selection')
        self.confirm_button.setEnabled(False)
        self.save_as_set_button = QPushButton('Save Selection as Set')
        self.save_as_set_button.setEnabled(False)
        self.cancel_button = QPushButton('Cancel')
        self.select_subs_button_layout = QHBoxLayout()
        self.select_subs_button_layout.addWidget(self.confirm_button)
        self.select_subs_button_layout.addWidget(self.save_as_set_button)
        self.select_subs_button_layout.addWidget(self.cancel_button)
        self.select_subs_layout = QVBoxLayout()
        self.select_subs_layout.addWidget(self.instructions_label)
        self.select_subs_layout.addWidget(self.select_subs_table)
        self.select_subs_layout.addLayout(self.select_subs_button_layout)
        self.setLayout(self.select_subs_layout)
        self.select_subs_table.itemSelectionChanged.connect(self.
            enable_save_buttons)
        self.confirm_button.clicked.connect(self.save_substituents)
        self.save_as_set_button.clicked.connect(self.save_selection)
        self.cancel_button.clicked.connect(self.close)

    def enable_save_buttons(self):
        self.confirm_button.setEnabled(True)
        self.save_as_set_button.setEnabled(True)

    def get_substituents(self):
        self.substituents = list(dict.fromkeys([item.text() for item in
            self.select_subs_table.selectedItems()]))

    def save_substituents(self):
        self.get_substituents()
        self.close()

    def save_selection(self):
        self.get_substituents()
        save_selection_dialog = SaveSelectionDialog(self.substituents)
        save_selection_dialog.exec_()
        if save_selection_dialog.new_set_saved:
            self.new_set_saved = True
        self.close()


class SelectSubsForNewSetDialog(SelectSubsDialog):

    def __init__(self):
        super().__init__(r_group='New Set')
        self.confirm_button.setVisible(False)


class SelectSubsEditSetDialog(SelectSubsDialog):

    def __init__(self, set_name):
        super().__init__(r_group=None)
        self.set_name = set_name
        self.setWindowTitle(f'Select Groups for {self.set_name}')
        self.save_as_set_button.setVisible(False)
flexible
{ "blob_id": "849db3a92e0544661dd465b3e7f6949f8de5633b", "index": 5099, "step-1": "<mask token>\n\n\nclass SelectSubsDialog(QDialog):\n <mask token>\n\n def enable_save_buttons(self):\n self.confirm_button.setEnabled(True)\n self.save_as_set_button.setEnabled(True)\n\n def get_substituents(self):\n self.substituents = list(dict.fromkeys([item.text() for item in\n self.select_subs_table.selectedItems()]))\n <mask token>\n <mask token>\n\n\nclass SelectSubsForNewSetDialog(SelectSubsDialog):\n\n def __init__(self):\n super().__init__(r_group='New Set')\n self.confirm_button.setVisible(False)\n\n\nclass SelectSubsEditSetDialog(SelectSubsDialog):\n\n def __init__(self, set_name):\n super().__init__(r_group=None)\n self.set_name = set_name\n self.setWindowTitle(f'Select Groups for {self.set_name}')\n self.save_as_set_button.setVisible(False)\n", "step-2": "<mask token>\n\n\nclass SelectSubsDialog(QDialog):\n <mask token>\n\n def enable_save_buttons(self):\n self.confirm_button.setEnabled(True)\n self.save_as_set_button.setEnabled(True)\n\n def get_substituents(self):\n self.substituents = list(dict.fromkeys([item.text() for item in\n self.select_subs_table.selectedItems()]))\n\n def save_substituents(self):\n self.get_substituents()\n self.close()\n <mask token>\n\n\nclass SelectSubsForNewSetDialog(SelectSubsDialog):\n\n def __init__(self):\n super().__init__(r_group='New Set')\n self.confirm_button.setVisible(False)\n\n\nclass SelectSubsEditSetDialog(SelectSubsDialog):\n\n def __init__(self, set_name):\n super().__init__(r_group=None)\n self.set_name = set_name\n self.setWindowTitle(f'Select Groups for {self.set_name}')\n self.save_as_set_button.setVisible(False)\n", "step-3": "<mask token>\n\n\nclass SelectSubsDialog(QDialog):\n\n def __init__(self, r_group):\n super().__init__()\n self.r_group = r_group\n self.substituents = None\n self.new_set_saved = False\n self.setWindowTitle(f'Select Substituents for {self.r_group}')\n self.instructions_label = QLabel(\n 'Click row heading to select functional group set. Ctrl + click or Shift + click to select multiple items. 
Double click functional group name to view SMILES.'\n )\n self.select_subs_table = SelectSubsTable()\n self.confirm_button = QPushButton('Confirm Selection')\n self.confirm_button.setEnabled(False)\n self.save_as_set_button = QPushButton('Save Selection as Set')\n self.save_as_set_button.setEnabled(False)\n self.cancel_button = QPushButton('Cancel')\n self.select_subs_button_layout = QHBoxLayout()\n self.select_subs_button_layout.addWidget(self.confirm_button)\n self.select_subs_button_layout.addWidget(self.save_as_set_button)\n self.select_subs_button_layout.addWidget(self.cancel_button)\n self.select_subs_layout = QVBoxLayout()\n self.select_subs_layout.addWidget(self.instructions_label)\n self.select_subs_layout.addWidget(self.select_subs_table)\n self.select_subs_layout.addLayout(self.select_subs_button_layout)\n self.setLayout(self.select_subs_layout)\n self.select_subs_table.itemSelectionChanged.connect(self.\n enable_save_buttons)\n self.confirm_button.clicked.connect(self.save_substituents)\n self.save_as_set_button.clicked.connect(self.save_selection)\n self.cancel_button.clicked.connect(self.close)\n\n def enable_save_buttons(self):\n self.confirm_button.setEnabled(True)\n self.save_as_set_button.setEnabled(True)\n\n def get_substituents(self):\n self.substituents = list(dict.fromkeys([item.text() for item in\n self.select_subs_table.selectedItems()]))\n\n def save_substituents(self):\n self.get_substituents()\n self.close()\n <mask token>\n\n\nclass SelectSubsForNewSetDialog(SelectSubsDialog):\n\n def __init__(self):\n super().__init__(r_group='New Set')\n self.confirm_button.setVisible(False)\n\n\nclass SelectSubsEditSetDialog(SelectSubsDialog):\n\n def __init__(self, set_name):\n super().__init__(r_group=None)\n self.set_name = set_name\n self.setWindowTitle(f'Select Groups for {self.set_name}')\n self.save_as_set_button.setVisible(False)\n", "step-4": "from PyQt5.QtWidgets import *\nfrom select_substituents_table import *\nfrom save_selection_dialog import *\n\n\nclass SelectSubsDialog(QDialog):\n\n def __init__(self, r_group):\n super().__init__()\n self.r_group = r_group\n self.substituents = None\n self.new_set_saved = False\n self.setWindowTitle(f'Select Substituents for {self.r_group}')\n self.instructions_label = QLabel(\n 'Click row heading to select functional group set. Ctrl + click or Shift + click to select multiple items. 
Double click functional group name to view SMILES.'\n )\n self.select_subs_table = SelectSubsTable()\n self.confirm_button = QPushButton('Confirm Selection')\n self.confirm_button.setEnabled(False)\n self.save_as_set_button = QPushButton('Save Selection as Set')\n self.save_as_set_button.setEnabled(False)\n self.cancel_button = QPushButton('Cancel')\n self.select_subs_button_layout = QHBoxLayout()\n self.select_subs_button_layout.addWidget(self.confirm_button)\n self.select_subs_button_layout.addWidget(self.save_as_set_button)\n self.select_subs_button_layout.addWidget(self.cancel_button)\n self.select_subs_layout = QVBoxLayout()\n self.select_subs_layout.addWidget(self.instructions_label)\n self.select_subs_layout.addWidget(self.select_subs_table)\n self.select_subs_layout.addLayout(self.select_subs_button_layout)\n self.setLayout(self.select_subs_layout)\n self.select_subs_table.itemSelectionChanged.connect(self.\n enable_save_buttons)\n self.confirm_button.clicked.connect(self.save_substituents)\n self.save_as_set_button.clicked.connect(self.save_selection)\n self.cancel_button.clicked.connect(self.close)\n\n def enable_save_buttons(self):\n self.confirm_button.setEnabled(True)\n self.save_as_set_button.setEnabled(True)\n\n def get_substituents(self):\n self.substituents = list(dict.fromkeys([item.text() for item in\n self.select_subs_table.selectedItems()]))\n\n def save_substituents(self):\n self.get_substituents()\n self.close()\n\n def save_selection(self):\n self.get_substituents()\n save_selection_dialog = SaveSelectionDialog(self.substituents)\n save_selection_dialog.exec_()\n if save_selection_dialog.new_set_saved:\n self.new_set_saved = True\n self.close()\n\n\nclass SelectSubsForNewSetDialog(SelectSubsDialog):\n\n def __init__(self):\n super().__init__(r_group='New Set')\n self.confirm_button.setVisible(False)\n\n\nclass SelectSubsEditSetDialog(SelectSubsDialog):\n\n def __init__(self, set_name):\n super().__init__(r_group=None)\n self.set_name = set_name\n self.setWindowTitle(f'Select Groups for {self.set_name}')\n self.save_as_set_button.setVisible(False)\n", "step-5": "from PyQt5.QtWidgets import *\n\nfrom select_substituents_table import *\nfrom save_selection_dialog import *\n\nclass SelectSubsDialog(QDialog):\n\n def __init__(self, r_group):\n super().__init__()\n self.r_group = r_group\n self.substituents = None\n self.new_set_saved = False\n\n self.setWindowTitle(f\"Select Substituents for {self.r_group}\")\n\n self.instructions_label = QLabel(\"Click row heading to select functional group set. Ctrl + click or Shift + click to select multiple items. 
Double click functional group name to view SMILES.\")\n\n self.select_subs_table = SelectSubsTable()\n\n self.confirm_button = QPushButton(\"Confirm Selection\")\n self.confirm_button.setEnabled(False)\n self.save_as_set_button = QPushButton(\"Save Selection as Set\")\n self.save_as_set_button.setEnabled(False)\n self.cancel_button = QPushButton(\"Cancel\")\n\n self.select_subs_button_layout = QHBoxLayout()\n self.select_subs_button_layout.addWidget(self.confirm_button)\n self.select_subs_button_layout.addWidget(self.save_as_set_button)\n self.select_subs_button_layout.addWidget(self.cancel_button)\n\n self.select_subs_layout = QVBoxLayout()\n self.select_subs_layout.addWidget(self.instructions_label)\n self.select_subs_layout.addWidget(self.select_subs_table)\n self.select_subs_layout.addLayout(self.select_subs_button_layout)\n self.setLayout(self.select_subs_layout)\n\n self.select_subs_table.itemSelectionChanged.connect(self.enable_save_buttons)\n self.confirm_button.clicked.connect(self.save_substituents)\n self.save_as_set_button.clicked.connect(self.save_selection)\n self.cancel_button.clicked.connect(self.close)\n\n def enable_save_buttons(self):\n self.confirm_button.setEnabled(True)\n self.save_as_set_button.setEnabled(True)\n\n def get_substituents(self):\n self.substituents = list(dict.fromkeys([item.text() for item in self.select_subs_table.selectedItems()]))\n \n def save_substituents(self):\n self.get_substituents()\n self.close()\n\n def save_selection(self):\n self.get_substituents()\n save_selection_dialog = SaveSelectionDialog(self.substituents)\n save_selection_dialog.exec_()\n if save_selection_dialog.new_set_saved:\n self.new_set_saved = True\n self.close()\n\nclass SelectSubsForNewSetDialog(SelectSubsDialog):\n\n def __init__(self):\n super().__init__(r_group = \"New Set\")\n\n self.confirm_button.setVisible(False)\n\nclass SelectSubsEditSetDialog(SelectSubsDialog):\n\n def __init__(self, set_name):\n super().__init__(r_group = None)\n self.set_name = set_name\n \n self.setWindowTitle(f\"Select Groups for {self.set_name}\")\n\n self.save_as_set_button.setVisible(False)\n\n\n\n\n\n\n\n\n\n", "step-ids": [ 7, 8, 9, 11, 12 ] }
[ 7, 8, 9, 11, 12 ]
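# Editor's note: get_substituents above de-duplicates the selected items with
# dict.fromkeys, which keeps first-seen order on Python 3.7+; a standalone
# illustration with hypothetical functional-group names.
items = ['OH', 'CH3', 'OH', 'NH2', 'CH3']
unique = list(dict.fromkeys(items))
assert unique == ['OH', 'CH3', 'NH2']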
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if len(dr_items):
    with open('calls.csv', 'w', encoding='UTF8') as csv_file:
        csv_writer = csv.writer(csv_file)
        csv_header = ['dr_sid', 'date_start', 'number_src', 'number_dst',
            'direction', 'duration', 'price']
        csv_writer.writerow(csv_header)
        dr_sid = dr_items[0]['dr_sid']
        csv_row = [dr_items[0]['dr_sid'], dr_items[0]['date_start'],
            dr_items[0]['number_src'], dr_items[0]['number_dst'], dr_items[
            0]['direction'], dr_items[0]['duration'], dr_items[0]['price']]
        csv_writer.writerow(csv_row)
        print(f"{i}. {dr_items[0]['dr_sid']}")
        while True:
            params = {'limit': '100', 'order': 'date_stop asc', 'after': dr_sid
                }
            r = requests.get(url, headers=headers, params=params)
            if len(r.json()['items']):
                dr_items = r.json()['items']
                for item in dr_items:
                    i += 1
                    dr_sid = dr_items[len(r.json()['items']) - 1]['dr_sid']
                    csv_row = [item['dr_sid'], item['date_start'], item[
                        'number_src'], item['number_dst'], item['direction'
                        ], item['duration'], item['price']]
                    csv_writer.writerow(csv_row)
                    print(f"{i}. {item['dr_sid']}")
            else:
                print('No more new calls')
                break
else:
    print(f'No calls since {date}')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
headers = {'Authorization': f'Bearer {configuration.CARRIERX_API_TOKEN}'}
url = f'{configuration.BASE_CARRIERX_API_URL}/core/v2/calls/call_drs'
date = configuration.DATE
i = 1
params = {'limit': '1', 'order': 'date_stop asc', 'filter':
    f'date_stop ge {date}'}
r = requests.get(url, headers=headers, params=params)
dr_items = r.json()['items']
if len(dr_items):
    with open('calls.csv', 'w', encoding='UTF8') as csv_file:
        csv_writer = csv.writer(csv_file)
        csv_header = ['dr_sid', 'date_start', 'number_src', 'number_dst',
            'direction', 'duration', 'price']
        csv_writer.writerow(csv_header)
        dr_sid = dr_items[0]['dr_sid']
        csv_row = [dr_items[0]['dr_sid'], dr_items[0]['date_start'],
            dr_items[0]['number_src'], dr_items[0]['number_dst'], dr_items[
            0]['direction'], dr_items[0]['duration'], dr_items[0]['price']]
        csv_writer.writerow(csv_row)
        print(f"{i}. {dr_items[0]['dr_sid']}")
        while True:
            params = {'limit': '100', 'order': 'date_stop asc', 'after': dr_sid
                }
            r = requests.get(url, headers=headers, params=params)
            if len(r.json()['items']):
                dr_items = r.json()['items']
                for item in dr_items:
                    i += 1
                    dr_sid = dr_items[len(r.json()['items']) - 1]['dr_sid']
                    csv_row = [item['dr_sid'], item['date_start'], item[
                        'number_src'], item['number_dst'], item['direction'
                        ], item['duration'], item['price']]
                    csv_writer.writerow(csv_row)
                    print(f"{i}. {item['dr_sid']}")
            else:
                print('No more new calls')
                break
else:
    print(f'No calls since {date}')
<|reserved_special_token_1|>
import requests, csv, configuration
headers = {'Authorization': f'Bearer {configuration.CARRIERX_API_TOKEN}'}
url = f'{configuration.BASE_CARRIERX_API_URL}/core/v2/calls/call_drs'
date = configuration.DATE
i = 1
params = {'limit': '1', 'order': 'date_stop asc', 'filter':
    f'date_stop ge {date}'}
r = requests.get(url, headers=headers, params=params)
dr_items = r.json()['items']
if len(dr_items):
    with open('calls.csv', 'w', encoding='UTF8') as csv_file:
        csv_writer = csv.writer(csv_file)
        csv_header = ['dr_sid', 'date_start', 'number_src', 'number_dst',
            'direction', 'duration', 'price']
        csv_writer.writerow(csv_header)
        dr_sid = dr_items[0]['dr_sid']
        csv_row = [dr_items[0]['dr_sid'], dr_items[0]['date_start'],
            dr_items[0]['number_src'], dr_items[0]['number_dst'], dr_items[
            0]['direction'], dr_items[0]['duration'], dr_items[0]['price']]
        csv_writer.writerow(csv_row)
        print(f"{i}. {dr_items[0]['dr_sid']}")
        while True:
            params = {'limit': '100', 'order': 'date_stop asc', 'after': dr_sid
                }
            r = requests.get(url, headers=headers, params=params)
            if len(r.json()['items']):
                dr_items = r.json()['items']
                for item in dr_items:
                    i += 1
                    dr_sid = dr_items[len(r.json()['items']) - 1]['dr_sid']
                    csv_row = [item['dr_sid'], item['date_start'], item[
                        'number_src'], item['number_dst'], item['direction'
                        ], item['duration'], item['price']]
                    csv_writer.writerow(csv_row)
                    print(f"{i}. {item['dr_sid']}")
            else:
                print('No more new calls')
                break
else:
    print(f'No calls since {date}')
flexible
{ "blob_id": "8262d8b5bbb156eccae021c1c9333d3cd1a6260f", "index": 9030, "step-1": "<mask token>\n", "step-2": "<mask token>\nif len(dr_items):\n with open('calls.csv', 'w', encoding='UTF8') as csv_file:\n csv_writer = csv.writer(csv_file)\n csv_header = ['dr_sid', 'date_start', 'number_src', 'number_dst',\n 'direction', 'duration', 'price']\n csv_writer.writerow(csv_header)\n dr_sid = dr_items[0]['dr_sid']\n csv_row = [dr_items[0]['dr_sid'], dr_items[0]['date_start'],\n dr_items[0]['number_src'], dr_items[0]['number_dst'], dr_items[\n 0]['direction'], dr_items[0]['duration'], dr_items[0]['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {dr_items[0]['dr_sid']}\")\n while True:\n params = {'limit': '100', 'order': 'date_stop asc', 'after': dr_sid\n }\n r = requests.get(url, headers=headers, params=params)\n if len(r.json()['items']):\n dr_items = r.json()['items']\n for item in dr_items:\n i += 1\n dr_sid = dr_items[len(r.json()['items']) - 1]['dr_sid']\n csv_row = [item['dr_sid'], item['date_start'], item[\n 'number_src'], item['number_dst'], item['direction'\n ], item['duration'], item['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {item['dr_sid']}\")\n else:\n print('No more new calls')\n break\nelse:\n print(f'No calls since {date}')\n", "step-3": "<mask token>\nheaders = {'Authorization': f'Bearer {configuration.CARRIERX_API_TOKEN}'}\nurl = f'{configuration.BASE_CARRIERX_API_URL}/core/v2/calls/call_drs'\ndate = configuration.DATE\ni = 1\nparams = {'limit': '1', 'order': 'date_stop asc', 'filter':\n f'date_stop ge {date}'}\nr = requests.get(url, headers=headers, params=params)\ndr_items = r.json()['items']\nif len(dr_items):\n with open('calls.csv', 'w', encoding='UTF8') as csv_file:\n csv_writer = csv.writer(csv_file)\n csv_header = ['dr_sid', 'date_start', 'number_src', 'number_dst',\n 'direction', 'duration', 'price']\n csv_writer.writerow(csv_header)\n dr_sid = dr_items[0]['dr_sid']\n csv_row = [dr_items[0]['dr_sid'], dr_items[0]['date_start'],\n dr_items[0]['number_src'], dr_items[0]['number_dst'], dr_items[\n 0]['direction'], dr_items[0]['duration'], dr_items[0]['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {dr_items[0]['dr_sid']}\")\n while True:\n params = {'limit': '100', 'order': 'date_stop asc', 'after': dr_sid\n }\n r = requests.get(url, headers=headers, params=params)\n if len(r.json()['items']):\n dr_items = r.json()['items']\n for item in dr_items:\n i += 1\n dr_sid = dr_items[len(r.json()['items']) - 1]['dr_sid']\n csv_row = [item['dr_sid'], item['date_start'], item[\n 'number_src'], item['number_dst'], item['direction'\n ], item['duration'], item['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. 
{item['dr_sid']}\")\n else:\n print('No more new calls')\n break\nelse:\n print(f'No calls since {date}')\n", "step-4": "import requests, csv, configuration\nheaders = {'Authorization': f'Bearer {configuration.CARRIERX_API_TOKEN}'}\nurl = f'{configuration.BASE_CARRIERX_API_URL}/core/v2/calls/call_drs'\ndate = configuration.DATE\ni = 1\nparams = {'limit': '1', 'order': 'date_stop asc', 'filter':\n f'date_stop ge {date}'}\nr = requests.get(url, headers=headers, params=params)\ndr_items = r.json()['items']\nif len(dr_items):\n with open('calls.csv', 'w', encoding='UTF8') as csv_file:\n csv_writer = csv.writer(csv_file)\n csv_header = ['dr_sid', 'date_start', 'number_src', 'number_dst',\n 'direction', 'duration', 'price']\n csv_writer.writerow(csv_header)\n dr_sid = dr_items[0]['dr_sid']\n csv_row = [dr_items[0]['dr_sid'], dr_items[0]['date_start'],\n dr_items[0]['number_src'], dr_items[0]['number_dst'], dr_items[\n 0]['direction'], dr_items[0]['duration'], dr_items[0]['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {dr_items[0]['dr_sid']}\")\n while True:\n params = {'limit': '100', 'order': 'date_stop asc', 'after': dr_sid\n }\n r = requests.get(url, headers=headers, params=params)\n if len(r.json()['items']):\n dr_items = r.json()['items']\n for item in dr_items:\n i += 1\n dr_sid = dr_items[len(r.json()['items']) - 1]['dr_sid']\n csv_row = [item['dr_sid'], item['date_start'], item[\n 'number_src'], item['number_dst'], item['direction'\n ], item['duration'], item['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {item['dr_sid']}\")\n else:\n print('No more new calls')\n break\nelse:\n print(f'No calls since {date}')\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
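# Editor's note: the loop above is keyset pagination (the last dr_sid is fed
# back as `after`); a compact generator form of the same idea. The URL shape
# and parameter names are taken from the record itself, not verified against
# the CarrierX documentation.
import requests

def iter_calls(url, headers, first_params):
    r = requests.get(url, headers=headers, params=first_params)
    items = r.json()['items']
    while items:
        yield from items
        params = {'limit': '100', 'order': 'date_stop asc',
            'after': items[-1]['dr_sid']}
        r = requests.get(url, headers=headers, params=params)
        items = r.json()['items']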
from django.db import models

# Create your models here.
class Remedio(models.Model):
    nome = models.CharField(max_length=100, unique=True, help_text='Nome')
    valor = models.FloatField(null=False, help_text='Valor')
    detalhe = models.CharField(max_length=500, null=True)
    foto = models.ImageField(upload_to='media')

    def __str__(self):
        return self.nome
normal
{ "blob_id": "07cce6802ab3259dbc78ab86a8dd6d6a4a617c7e", "index": 5242, "step-1": "<mask token>\n\n\nclass Remedio(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass Remedio(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.nome\n", "step-3": "<mask token>\n\n\nclass Remedio(models.Model):\n nome = models.CharField(max_length=100, unique=True, help_text='Nome')\n valor = models.FloatField(null=False, help_text='Valor')\n detalhe = models.CharField(max_length=500, null=True)\n foto = models.ImageField(upload_to='media')\n\n def __str__(self):\n return self.nome\n", "step-4": "from django.db import models\n\n\nclass Remedio(models.Model):\n nome = models.CharField(max_length=100, unique=True, help_text='Nome')\n valor = models.FloatField(null=False, help_text='Valor')\n detalhe = models.CharField(max_length=500, null=True)\n foto = models.ImageField(upload_to='media')\n\n def __str__(self):\n return self.nome\n", "step-5": "from django.db import models\n\n# Create your models here.\nclass Remedio(models.Model):\n nome = models.CharField(max_length=100, unique=True, help_text='Nome')\n valor = models.FloatField(null=False, help_text='Valor')\n detalhe = models.CharField(max_length=500, null=True)\n foto = models.ImageField(upload_to='media')\n\n def __str__(self):\n return self.nome", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> class PillListView(ListView): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class PillDetailView(DetailView): model = Pills template_name = 'pills/pill_detail.html' <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class PillListView(ListView): model = Pills template_name = 'pills/pill_list.html' form_class = CommentForm def get_context_data(self, **kwargs): context = super(PillListView, self).get_context_data(**kwargs) return context def get_queryset(self, *args, **kwargs): qs = Pills.objects.prefetch_related('category_body', 'category_gender', 'like_user_set').all() print(self.request.GET) query = self.request.GET.get('q', None) if query is not None: qs = qs.filter(Q(name__icontains=query) | Q( category_body__name__icontains=query)) return qs <|reserved_special_token_0|> class PillDetailView(DetailView): model = Pills template_name = 'pills/pill_detail.html' <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class PillListView(ListView): model = Pills template_name = 'pills/pill_list.html' form_class = CommentForm def get_context_data(self, **kwargs): context = super(PillListView, self).get_context_data(**kwargs) return context def get_queryset(self, *args, **kwargs): qs = Pills.objects.prefetch_related('category_body', 'category_gender', 'like_user_set').all() print(self.request.GET) query = self.request.GET.get('q', None) if query is not None: qs = qs.filter(Q(name__icontains=query) | Q( category_body__name__icontains=query)) return qs @login_required def comment_new(request): pk = request.POST.get('pk') pill = get_object_or_404(Pills, pk=pk) form = CommentForm if request.method == 'POST': form = CommentForm(request.POST) if form.is_valid(): comment = form.save(commit=False) comment.author = request.user comment.pills = pill comment.save() return render(request, 'pills/comment_new_ajax.html', { 'comment': comment, 'form': form}) return redirect('pills:pill_list') @login_required def comment_delete(request, pill_pk, pk): comment = get_object_or_404(Comment, pk=pk) if request.method == 'POST' and request.user == comment.author: comment.delete() messages.success(request, '삭제했습니다.') return redirect('pills:pill_list') messages.warning('권한이 없습니다.') return redirect('pills:pill_list') class PillDetailView(DetailView): model = Pills template_name = 'pills/pill_detail.html' <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class PillListView(ListView): model = Pills template_name = 'pills/pill_list.html' form_class = CommentForm def get_context_data(self, **kwargs): context = super(PillListView, self).get_context_data(**kwargs) return context def get_queryset(self, *args, **kwargs): qs = Pills.objects.prefetch_related('category_body', 'category_gender', 'like_user_set').all() print(self.request.GET) query = self.request.GET.get('q', None) if query is not None: qs = qs.filter(Q(name__icontains=query) | Q( category_body__name__icontains=query)) return qs @login_required def comment_new(request): pk = request.POST.get('pk') pill = get_object_or_404(Pills, pk=pk) form = CommentForm if request.method == 'POST': form = CommentForm(request.POST) if form.is_valid(): comment = form.save(commit=False) comment.author = request.user comment.pills = pill comment.save() return render(request, 'pills/comment_new_ajax.html', { 'comment': comment, 
'form': form}) return redirect('pills:pill_list') @login_required def comment_delete(request, pill_pk, pk): comment = get_object_or_404(Comment, pk=pk) if request.method == 'POST' and request.user == comment.author: comment.delete() messages.success(request, '삭제했습니다.') return redirect('pills:pill_list') messages.warning('권한이 없습니다.') return redirect('pills:pill_list') class PillDetailView(DetailView): model = Pills template_name = 'pills/pill_detail.html' @login_required @require_POST def pill_like(request): pk = request.POST.get('pk', None) pill = get_object_or_404(Pills, pk=pk) pill_like, pill_like_created = pill.like_set.get_or_create(user=request .user) if not pill_like_created: pill_like.delete() message = '좋아요 취소' else: message = '좋아요' context = {'like_count': pill.like_count, 'message': message, 'username': request.user.username} return HttpResponse(json.dumps(context)) <|reserved_special_token_1|> from django.db.models import Q from django.contrib import messages from django.views.generic import ListView, DetailView from django.shortcuts import get_object_or_404, redirect, render from django.contrib.auth.decorators import login_required from django.http import HttpResponse from django.views.decorators.http import require_POST from .models import Pills, Like, Comment from .forms import CommentForm import json class PillListView(ListView): model = Pills template_name = "pills/pill_list.html" form_class = CommentForm def get_context_data(self, **kwargs): context = super(PillListView, self).get_context_data(**kwargs) return context def get_queryset(self, *args, **kwargs): qs = Pills.objects.prefetch_related('category_body','category_gender','like_user_set').all() print(self.request.GET) query = self.request.GET.get("q", None) if query is not None: qs = qs.filter( Q(name__icontains=query) | Q(category_body__name__icontains=query) ) return qs # def PillCategory_SearchList(request): # qs = Pills.objects.prefetch_related('category_body').all() # query = self.request.GET.get("q", None) # if query is not None: # qs = qs.filter( # Q(name__icontains=query) # ) # return qs # context = { # 'qs' : qs, # } # return render(request, "categorysearch.html", context) @login_required def comment_new(request): pk = request.POST.get('pk') pill = get_object_or_404(Pills, pk=pk) form = CommentForm if request.method == 'POST': form = CommentForm(request.POST) if form.is_valid(): comment = form.save(commit=False) comment.author = request.user comment.pills = pill comment.save() return render(request, 'pills/comment_new_ajax.html', {'comment':comment, 'form':form,}) return redirect("pills:pill_list") @login_required def comment_delete(request, pill_pk, pk): comment = get_object_or_404(Comment, pk=pk) if request.method == 'POST' and request.user == comment.author: comment.delete() messages.success(request, '삭제했습니다.') return redirect('pills:pill_list') messages.warning('권한이 없습니다.') return redirect('pills:pill_list') class PillDetailView(DetailView): model = Pills template_name = 'pills/pill_detail.html' # context_object_name = 'pills' @login_required @require_POST # POST method만 받음 def pill_like(request): pk = request.POST.get('pk', None) pill = get_object_or_404(Pills, pk=pk) pill_like, pill_like_created = pill.like_set.get_or_create(user=request.user) if not pill_like_created: pill_like.delete() message = "좋아요 취소" else: message = "좋아요" context = { 'like_count': pill.like_count, 'message': message, 'username': request.user.username } return HttpResponse(json.dumps(context))
flexible
{ "blob_id": "3c193decc4a1f284de953003fbba434d6e798b24", "index": 2827, "step-1": "<mask token>\n\n\nclass PillListView(ListView):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n\n\nclass PillDetailView(DetailView):\n model = Pills\n template_name = 'pills/pill_detail.html'\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass PillListView(ListView):\n model = Pills\n template_name = 'pills/pill_list.html'\n form_class = CommentForm\n\n def get_context_data(self, **kwargs):\n context = super(PillListView, self).get_context_data(**kwargs)\n return context\n\n def get_queryset(self, *args, **kwargs):\n qs = Pills.objects.prefetch_related('category_body',\n 'category_gender', 'like_user_set').all()\n print(self.request.GET)\n query = self.request.GET.get('q', None)\n if query is not None:\n qs = qs.filter(Q(name__icontains=query) | Q(\n category_body__name__icontains=query))\n return qs\n\n\n<mask token>\n\n\nclass PillDetailView(DetailView):\n model = Pills\n template_name = 'pills/pill_detail.html'\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass PillListView(ListView):\n model = Pills\n template_name = 'pills/pill_list.html'\n form_class = CommentForm\n\n def get_context_data(self, **kwargs):\n context = super(PillListView, self).get_context_data(**kwargs)\n return context\n\n def get_queryset(self, *args, **kwargs):\n qs = Pills.objects.prefetch_related('category_body',\n 'category_gender', 'like_user_set').all()\n print(self.request.GET)\n query = self.request.GET.get('q', None)\n if query is not None:\n qs = qs.filter(Q(name__icontains=query) | Q(\n category_body__name__icontains=query))\n return qs\n\n\n@login_required\ndef comment_new(request):\n pk = request.POST.get('pk')\n pill = get_object_or_404(Pills, pk=pk)\n form = CommentForm\n if request.method == 'POST':\n form = CommentForm(request.POST)\n if form.is_valid():\n comment = form.save(commit=False)\n comment.author = request.user\n comment.pills = pill\n comment.save()\n return render(request, 'pills/comment_new_ajax.html', {\n 'comment': comment, 'form': form})\n return redirect('pills:pill_list')\n\n\n@login_required\ndef comment_delete(request, pill_pk, pk):\n comment = get_object_or_404(Comment, pk=pk)\n if request.method == 'POST' and request.user == comment.author:\n comment.delete()\n messages.success(request, '삭제했습니다.')\n return redirect('pills:pill_list')\n messages.warning('권한이 없습니다.')\n return redirect('pills:pill_list')\n\n\nclass PillDetailView(DetailView):\n model = Pills\n template_name = 'pills/pill_detail.html'\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\nclass PillListView(ListView):\n model = Pills\n template_name = 'pills/pill_list.html'\n form_class = CommentForm\n\n def get_context_data(self, **kwargs):\n context = super(PillListView, self).get_context_data(**kwargs)\n return context\n\n def get_queryset(self, *args, **kwargs):\n qs = Pills.objects.prefetch_related('category_body',\n 'category_gender', 'like_user_set').all()\n print(self.request.GET)\n query = self.request.GET.get('q', None)\n if query is not None:\n qs = qs.filter(Q(name__icontains=query) | Q(\n category_body__name__icontains=query))\n return qs\n\n\n@login_required\ndef comment_new(request):\n pk = request.POST.get('pk')\n pill = get_object_or_404(Pills, pk=pk)\n form = CommentForm\n if request.method == 'POST':\n form = CommentForm(request.POST)\n if form.is_valid():\n comment = form.save(commit=False)\n comment.author = request.user\n comment.pills = pill\n 
comment.save()\n return render(request, 'pills/comment_new_ajax.html', {\n 'comment': comment, 'form': form})\n return redirect('pills:pill_list')\n\n\n@login_required\ndef comment_delete(request, pill_pk, pk):\n comment = get_object_or_404(Comment, pk=pk)\n if request.method == 'POST' and request.user == comment.author:\n comment.delete()\n messages.success(request, '삭제했습니다.')\n return redirect('pills:pill_list')\n messages.warning('권한이 없습니다.')\n return redirect('pills:pill_list')\n\n\nclass PillDetailView(DetailView):\n model = Pills\n template_name = 'pills/pill_detail.html'\n\n\n@login_required\n@require_POST\ndef pill_like(request):\n pk = request.POST.get('pk', None)\n pill = get_object_or_404(Pills, pk=pk)\n pill_like, pill_like_created = pill.like_set.get_or_create(user=request\n .user)\n if not pill_like_created:\n pill_like.delete()\n message = '좋아요 취소'\n else:\n message = '좋아요'\n context = {'like_count': pill.like_count, 'message': message,\n 'username': request.user.username}\n return HttpResponse(json.dumps(context))\n", "step-5": "from django.db.models import Q\nfrom django.contrib import messages\n\nfrom django.views.generic import ListView, DetailView\nfrom django.shortcuts import get_object_or_404, redirect, render\n\nfrom django.contrib.auth.decorators import login_required\nfrom django.http import HttpResponse\n\nfrom django.views.decorators.http import require_POST\n\nfrom .models import Pills, Like, Comment\nfrom .forms import CommentForm\nimport json\n\n\nclass PillListView(ListView):\n\tmodel = Pills\n\ttemplate_name = \"pills/pill_list.html\"\n\tform_class = CommentForm\n\n\tdef get_context_data(self, **kwargs):\n\t\tcontext = super(PillListView, self).get_context_data(**kwargs)\n\t\treturn context\n\n\tdef get_queryset(self, *args, **kwargs):\n\t\tqs = Pills.objects.prefetch_related('category_body','category_gender','like_user_set').all()\n\t\tprint(self.request.GET)\n\t\tquery = self.request.GET.get(\"q\", None)\n\t\tif query is not None:\n\t\t\tqs = qs.filter(\n\t\t\t\t\tQ(name__icontains=query) | Q(category_body__name__icontains=query)\n\t\t\t\t)\n\t\treturn qs\n\n\n# def PillCategory_SearchList(request):\n# \tqs = Pills.objects.prefetch_related('category_body').all()\n# \tquery = self.request.GET.get(\"q\", None)\n# \tif query is not None:\n# \t\tqs = qs.filter(\n# \t\t\t\tQ(name__icontains=query)\n# \t\t\t\t)\n# \t\treturn qs\n\n# \tcontext = {\n# \t\t\t\t'qs' : qs,\n\n# \t}\n\n# \treturn render(request, \"categorysearch.html\", context)\n\n\n\n\n\n@login_required\ndef comment_new(request):\n\tpk = request.POST.get('pk')\n\tpill = get_object_or_404(Pills, pk=pk)\n\tform = CommentForm\n\tif request.method == 'POST':\n\t\tform = CommentForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tcomment = form.save(commit=False)\n\t\t\tcomment.author = request.user\n\t\t\tcomment.pills = pill\n\t\t\tcomment.save()\n\t\t\treturn render(request, 'pills/comment_new_ajax.html', {'comment':comment, 'form':form,})\n\treturn redirect(\"pills:pill_list\")\n\n\n@login_required\ndef comment_delete(request, pill_pk, pk):\n\tcomment = get_object_or_404(Comment, pk=pk)\n\tif request.method == 'POST' and request.user == comment.author:\n\t\tcomment.delete()\n\t\tmessages.success(request, '삭제했습니다.')\n\t\treturn redirect('pills:pill_list')\n\n\tmessages.warning('권한이 없습니다.')\n\treturn redirect('pills:pill_list')\n\n\n\nclass PillDetailView(DetailView):\n\tmodel = Pills\n\ttemplate_name = 'pills/pill_detail.html'\n\t# context_object_name = 'pills'\n\n\n@login_required\n@require_POST\t# 
POST method만 받음\ndef pill_like(request):\n\tpk = request.POST.get('pk', None)\n\tpill = get_object_or_404(Pills, pk=pk)\n\n\tpill_like, pill_like_created = pill.like_set.get_or_create(user=request.user)\n\n\tif not pill_like_created:\n\t\tpill_like.delete()\n\t\tmessage = \"좋아요 취소\"\n\telse:\n\t\tmessage = \"좋아요\"\n\n\tcontext = {\n\t\t\t\t'like_count': pill.like_count,\n\t\t\t\t'message': message,\n\t\t\t\t'username': request.user.username\n\t}\n\n\treturn HttpResponse(json.dumps(context))\n\n\n\n\n\n\n", "step-ids": [ 3, 6, 8, 9, 11 ] }
[ 3, 6, 8, 9, 11 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> @decorator def coroutine(f, *a, **kw): """This decorator starts the coroutine for us.""" i = f(*a, **kw) i.next() return i <|reserved_special_token_1|> <|reserved_special_token_0|> from decorator import decorator @decorator def coroutine(f, *a, **kw): """This decorator starts the coroutine for us.""" i = f(*a, **kw) i.next() return i <|reserved_special_token_1|> """Coroutine utilities.""" from decorator import decorator @decorator def coroutine(f, *a, **kw): """This decorator starts the coroutine for us.""" i = f(*a, **kw) i.next() return i
flexible
{ "blob_id": "6bde0ce30f33b155cc4c9ce9aa2ea6a6c5a1231d", "index": 5472, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\n@decorator\ndef coroutine(f, *a, **kw):\n \"\"\"This decorator starts the coroutine for us.\"\"\"\n i = f(*a, **kw)\n i.next()\n return i\n", "step-3": "<mask token>\nfrom decorator import decorator\n\n\n@decorator\ndef coroutine(f, *a, **kw):\n \"\"\"This decorator starts the coroutine for us.\"\"\"\n i = f(*a, **kw)\n i.next()\n return i\n", "step-4": "\"\"\"Coroutine utilities.\"\"\"\n\nfrom decorator import decorator\n\n@decorator\ndef coroutine(f, *a, **kw):\n \"\"\"This decorator starts the coroutine for us.\"\"\"\n i = f(*a, **kw)\n i.next()\n return i\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from django.contrib import admin from lesson.models import ProgrammingEnvironment, Language, Lesson, LessonHint # list_display - Show these fields for each model on the Admin site # search_fields - Allow searching in these fields # Register models for the Admin site class ProgrammingEnvironmentAdmin(admin.ModelAdmin): """ Model for the Admin page """ list_display = ('environment_name', 'description') filter_horizontal = () list_filter = () fieldsets = () class LanguageAdmin(admin.ModelAdmin): """ Model for the Admin page """ list_display = ('language_name', 'description', 'environment') filter_horizontal = () list_filter = () fieldsets = () class LessonAdmin(admin.ModelAdmin): """ Model for the Admin page """ list_display = ('lesson_number', 'lesson_title', 'language', 'lesson_description') filter_horizontal = () list_filter = () fieldsets = () class LessonHintAdmin(admin.ModelAdmin): """ Model for the Admin page """ list_display = ('hint_title', 'lesson', 'hint_description') filter_horizontal = () list_filter = () fieldsets = () admin.site.register(ProgrammingEnvironment, ProgrammingEnvironmentAdmin) admin.site.register(Language, LanguageAdmin) admin.site.register(Lesson, LessonAdmin) admin.site.register(LessonHint, LessonHintAdmin)
normal
{ "blob_id": "2500c3562819e4e85ce3cbc30e0ddf1b8437e0a2", "index": 6448, "step-1": "<mask token>\n\n\nclass LanguageAdmin(admin.ModelAdmin):\n <mask token>\n list_display = 'language_name', 'description', 'environment'\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nclass LessonAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = ('lesson_number', 'lesson_title', 'language',\n 'lesson_description')\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nclass LessonHintAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = 'hint_title', 'lesson', 'hint_description'\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass LanguageAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = 'language_name', 'description', 'environment'\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nclass LessonAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = ('lesson_number', 'lesson_title', 'language',\n 'lesson_description')\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nclass LessonHintAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = 'hint_title', 'lesson', 'hint_description'\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass ProgrammingEnvironmentAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = 'environment_name', 'description'\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nclass LanguageAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = 'language_name', 'description', 'environment'\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nclass LessonAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = ('lesson_number', 'lesson_title', 'language',\n 'lesson_description')\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nclass LessonHintAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = 'hint_title', 'lesson', 'hint_description'\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\n<mask token>\n", "step-4": "from django.contrib import admin\nfrom lesson.models import ProgrammingEnvironment, Language, Lesson, LessonHint\n\n\nclass ProgrammingEnvironmentAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = 'environment_name', 'description'\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nclass LanguageAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = 'language_name', 'description', 'environment'\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nclass LessonAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = ('lesson_number', 'lesson_title', 'language',\n 'lesson_description')\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nclass LessonHintAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = 'hint_title', 'lesson', 'hint_description'\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\n\nadmin.site.register(ProgrammingEnvironment, ProgrammingEnvironmentAdmin)\nadmin.site.register(Language, LanguageAdmin)\nadmin.site.register(Lesson, LessonAdmin)\nadmin.site.register(LessonHint, LessonHintAdmin)\n", "step-5": 
"from django.contrib import admin\nfrom lesson.models import ProgrammingEnvironment, Language, Lesson, LessonHint\n\n# list_display - Show these fields for each model on the Admin site\n# search_fields - Allow searching in these fields\n\n# Register models for the Admin site\nclass ProgrammingEnvironmentAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = ('environment_name', 'description')\n\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\nclass LanguageAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = ('language_name', 'description', 'environment')\n\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\nclass LessonAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = ('lesson_number', 'lesson_title', 'language', 'lesson_description')\n\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\nclass LessonHintAdmin(admin.ModelAdmin):\n \"\"\" Model for the Admin page \"\"\"\n list_display = ('hint_title', 'lesson', 'hint_description')\n\n filter_horizontal = ()\n list_filter = ()\n fieldsets = ()\n\nadmin.site.register(ProgrammingEnvironment, ProgrammingEnvironmentAdmin)\nadmin.site.register(Language, LanguageAdmin)\nadmin.site.register(Lesson, LessonAdmin)\nadmin.site.register(LessonHint, LessonHintAdmin)", "step-ids": [ 8, 9, 12, 14, 15 ] }
[ 8, 9, 12, 14, 15 ]
from .base import paw_test class warning_test(paw_test): def test_warning_badchars(self): self.paw.cset_lookup(self.badchar) self.assertEqual(1, self.paw.wcount)
normal
{ "blob_id": "b4c6075aabe833f6fe23471f608d928edd25ef63", "index": 372, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass warning_test(paw_test):\n <mask token>\n", "step-3": "<mask token>\n\n\nclass warning_test(paw_test):\n\n def test_warning_badchars(self):\n self.paw.cset_lookup(self.badchar)\n self.assertEqual(1, self.paw.wcount)\n", "step-4": "from .base import paw_test\n\n\nclass warning_test(paw_test):\n\n def test_warning_badchars(self):\n self.paw.cset_lookup(self.badchar)\n self.assertEqual(1, self.paw.wcount)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> class Jav_ink: def __init__(self): self.parser_name = 'jav_ink' self.domain = 'https://www.jav.ink' self.album_flag = {} <|reserved_special_token_0|> def album2photos(self, album_url, album_html): photos = [] album_id = album_html.xpath('//article/div/@id') if not album_id: return {'error': {'url': album_url, 'info': 'not supported'}} album_id = album_id[0] if album_id in self.album_flag: return self.album_flag[album_id] = 1 album_name = album_html.xpath( '//*[contains(@class, "article-title")]/text()') photos_html = album_html.xpath('//*[@class="gallery-item"]') for photo_html in photos_html: photo_url = photo_html.xpath('.//a/@href')[0] photo_name = photo_url[photo_url.rfind('/') + 1:] photos.append({'photo_url': photo_url, 'photo_name': photo_name}) if len(album_name) == 0: album_name = album_url.split('/')[-2] else: album_name = album_name[0] album = {'parser_name': self.parser_name, 'album_name': album_name, 'photos': photos} return album <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Jav_ink: def __init__(self): self.parser_name = 'jav_ink' self.domain = 'https://www.jav.ink' self.album_flag = {} <|reserved_special_token_0|> def album2photos(self, album_url, album_html): photos = [] album_id = album_html.xpath('//article/div/@id') if not album_id: return {'error': {'url': album_url, 'info': 'not supported'}} album_id = album_id[0] if album_id in self.album_flag: return self.album_flag[album_id] = 1 album_name = album_html.xpath( '//*[contains(@class, "article-title")]/text()') photos_html = album_html.xpath('//*[@class="gallery-item"]') for photo_html in photos_html: photo_url = photo_html.xpath('.//a/@href')[0] photo_name = photo_url[photo_url.rfind('/') + 1:] photos.append({'photo_url': photo_url, 'photo_name': photo_name}) if len(album_name) == 0: album_name = album_url.split('/')[-2] else: album_name = album_name[0] album = {'parser_name': self.parser_name, 'album_name': album_name, 'photos': photos} return album def url2albums(self, url): albums_url = [] if '/category/' in url or '/?s=' in url: albums_url.extend(self.category2albums(url)) else: albums_url.append(url) albums = [] urls = [{'url': url} for url in albums_url] threads = MultiRequest(urls=urls, name=url).run() for thread in threads: try: album = self.album2photos(thread.url, thread.response) if album is not None: albums.append(album) except SystemExit: sys.exit() except: albums.append({'error': {'url': thread.url, 'info': 'parse error'}}) del thread return albums <|reserved_special_token_1|> <|reserved_special_token_0|> class Jav_ink: def __init__(self): self.parser_name = 'jav_ink' self.domain = 'https://www.jav.ink' self.album_flag = {} @staticmethod def category2albums(category_url): category_url = category_url[:category_url.find('/page/')] category_html = request(category_url) albums = category_html.xpath( '//*[@id="infinite-articles"]/li[contains(@class, "post")]/a/@href' ) pages = category_html.xpath('//*[@class="pages"]/text()') if pages: pages = pages[0] pages = pages[pages.find('of') + 3:] urls = [] for page in range(1, int(pages) + 1): urls.append('%s/page/%d/' % (category_url, page)) urls = [{'url': url} for url in urls] threads = MultiRequest(urls=urls, progress=False).run() for thread in threads: albums.extend(thread.response.xpath( '//*[@id="infinite-articles"] /li[contains(@class, "post")]/a/@href' )) del thread return albums def album2photos(self, album_url, album_html): photos = [] album_id = 
album_html.xpath('//article/div/@id') if not album_id: return {'error': {'url': album_url, 'info': 'not supported'}} album_id = album_id[0] if album_id in self.album_flag: return self.album_flag[album_id] = 1 album_name = album_html.xpath( '//*[contains(@class, "article-title")]/text()') photos_html = album_html.xpath('//*[@class="gallery-item"]') for photo_html in photos_html: photo_url = photo_html.xpath('.//a/@href')[0] photo_name = photo_url[photo_url.rfind('/') + 1:] photos.append({'photo_url': photo_url, 'photo_name': photo_name}) if len(album_name) == 0: album_name = album_url.split('/')[-2] else: album_name = album_name[0] album = {'parser_name': self.parser_name, 'album_name': album_name, 'photos': photos} return album def url2albums(self, url): albums_url = [] if '/category/' in url or '/?s=' in url: albums_url.extend(self.category2albums(url)) else: albums_url.append(url) albums = [] urls = [{'url': url} for url in albums_url] threads = MultiRequest(urls=urls, name=url).run() for thread in threads: try: album = self.album2photos(thread.url, thread.response) if album is not None: albums.append(album) except SystemExit: sys.exit() except: albums.append({'error': {'url': thread.url, 'info': 'parse error'}}) del thread return albums <|reserved_special_token_1|> import sys from photo_dl.request import request from photo_dl.request import MultiRequest class Jav_ink: def __init__(self): self.parser_name = 'jav_ink' self.domain = 'https://www.jav.ink' self.album_flag = {} @staticmethod def category2albums(category_url): category_url = category_url[:category_url.find('/page/')] category_html = request(category_url) albums = category_html.xpath( '//*[@id="infinite-articles"]/li[contains(@class, "post")]/a/@href' ) pages = category_html.xpath('//*[@class="pages"]/text()') if pages: pages = pages[0] pages = pages[pages.find('of') + 3:] urls = [] for page in range(1, int(pages) + 1): urls.append('%s/page/%d/' % (category_url, page)) urls = [{'url': url} for url in urls] threads = MultiRequest(urls=urls, progress=False).run() for thread in threads: albums.extend(thread.response.xpath( '//*[@id="infinite-articles"] /li[contains(@class, "post")]/a/@href' )) del thread return albums def album2photos(self, album_url, album_html): photos = [] album_id = album_html.xpath('//article/div/@id') if not album_id: return {'error': {'url': album_url, 'info': 'not supported'}} album_id = album_id[0] if album_id in self.album_flag: return self.album_flag[album_id] = 1 album_name = album_html.xpath( '//*[contains(@class, "article-title")]/text()') photos_html = album_html.xpath('//*[@class="gallery-item"]') for photo_html in photos_html: photo_url = photo_html.xpath('.//a/@href')[0] photo_name = photo_url[photo_url.rfind('/') + 1:] photos.append({'photo_url': photo_url, 'photo_name': photo_name}) if len(album_name) == 0: album_name = album_url.split('/')[-2] else: album_name = album_name[0] album = {'parser_name': self.parser_name, 'album_name': album_name, 'photos': photos} return album def url2albums(self, url): albums_url = [] if '/category/' in url or '/?s=' in url: albums_url.extend(self.category2albums(url)) else: albums_url.append(url) albums = [] urls = [{'url': url} for url in albums_url] threads = MultiRequest(urls=urls, name=url).run() for thread in threads: try: album = self.album2photos(thread.url, thread.response) if album is not None: albums.append(album) except SystemExit: sys.exit() except: albums.append({'error': {'url': thread.url, 'info': 'parse error'}}) del thread return albums 
<|reserved_special_token_1|> import sys from photo_dl.request import request from photo_dl.request import MultiRequest class Jav_ink: def __init__(self): self.parser_name = 'jav_ink' self.domain = 'https://www.jav.ink' self.album_flag = {} @staticmethod def category2albums(category_url): category_url = category_url[:category_url.find('/page/')] category_html = request(category_url) albums = category_html.xpath('//*[@id="infinite-articles"]/li[contains(@class, "post")]/a/@href') pages = category_html.xpath('//*[@class="pages"]/text()') if pages: pages = pages[0] pages = pages[pages.find('of') + 3:] urls = [] for page in range(1, int(pages) + 1): urls.append('%s/page/%d/' % (category_url, page)) urls = [{'url': url} for url in urls] threads = MultiRequest(urls=urls, progress=False).run() for thread in threads: albums.extend(thread.response.xpath('//*[@id="infinite-articles"]\ /li[contains(@class, "post")]/a/@href')) del thread return albums def album2photos(self, album_url, album_html): photos = [] album_id = album_html.xpath('//article/div/@id') if not album_id: return {'error': {'url': album_url, 'info': 'not supported'}} album_id = album_id[0] if album_id in self.album_flag: return self.album_flag[album_id] = 1 album_name = album_html.xpath('//*[contains(@class, "article-title")]/text()') photos_html = album_html.xpath('//*[@class="gallery-item"]') for photo_html in photos_html: photo_url = photo_html.xpath('.//a/@href')[0] photo_name = photo_url[photo_url.rfind('/') + 1:] photos.append({'photo_url': photo_url, 'photo_name': photo_name}) if len(album_name) == 0: album_name = album_url.split('/')[-2] else: album_name = album_name[0] album = {'parser_name': self.parser_name, 'album_name': album_name, 'photos': photos} return album def url2albums(self, url): albums_url = [] if '/category/' in url or '/?s=' in url: albums_url.extend(self.category2albums(url)) else: albums_url.append(url) albums = [] urls = [{'url': url} for url in albums_url] threads = MultiRequest(urls=urls, name=url).run() for thread in threads: try: album = self.album2photos(thread.url, thread.response) if album is not None: albums.append(album) except SystemExit: sys.exit() except: albums.append({'error': {'url': thread.url, 'info': 'parse error'}}) del thread return albums
flexible
{ "blob_id": "9fff345dedcfc7051a258bc471acf07aece95bcf", "index": 9319, "step-1": "<mask token>\n\n\nclass Jav_ink:\n\n def __init__(self):\n self.parser_name = 'jav_ink'\n self.domain = 'https://www.jav.ink'\n self.album_flag = {}\n <mask token>\n\n def album2photos(self, album_url, album_html):\n photos = []\n album_id = album_html.xpath('//article/div/@id')\n if not album_id:\n return {'error': {'url': album_url, 'info': 'not supported'}}\n album_id = album_id[0]\n if album_id in self.album_flag:\n return\n self.album_flag[album_id] = 1\n album_name = album_html.xpath(\n '//*[contains(@class, \"article-title\")]/text()')\n photos_html = album_html.xpath('//*[@class=\"gallery-item\"]')\n for photo_html in photos_html:\n photo_url = photo_html.xpath('.//a/@href')[0]\n photo_name = photo_url[photo_url.rfind('/') + 1:]\n photos.append({'photo_url': photo_url, 'photo_name': photo_name})\n if len(album_name) == 0:\n album_name = album_url.split('/')[-2]\n else:\n album_name = album_name[0]\n album = {'parser_name': self.parser_name, 'album_name': album_name,\n 'photos': photos}\n return album\n <mask token>\n", "step-2": "<mask token>\n\n\nclass Jav_ink:\n\n def __init__(self):\n self.parser_name = 'jav_ink'\n self.domain = 'https://www.jav.ink'\n self.album_flag = {}\n <mask token>\n\n def album2photos(self, album_url, album_html):\n photos = []\n album_id = album_html.xpath('//article/div/@id')\n if not album_id:\n return {'error': {'url': album_url, 'info': 'not supported'}}\n album_id = album_id[0]\n if album_id in self.album_flag:\n return\n self.album_flag[album_id] = 1\n album_name = album_html.xpath(\n '//*[contains(@class, \"article-title\")]/text()')\n photos_html = album_html.xpath('//*[@class=\"gallery-item\"]')\n for photo_html in photos_html:\n photo_url = photo_html.xpath('.//a/@href')[0]\n photo_name = photo_url[photo_url.rfind('/') + 1:]\n photos.append({'photo_url': photo_url, 'photo_name': photo_name})\n if len(album_name) == 0:\n album_name = album_url.split('/')[-2]\n else:\n album_name = album_name[0]\n album = {'parser_name': self.parser_name, 'album_name': album_name,\n 'photos': photos}\n return album\n\n def url2albums(self, url):\n albums_url = []\n if '/category/' in url or '/?s=' in url:\n albums_url.extend(self.category2albums(url))\n else:\n albums_url.append(url)\n albums = []\n urls = [{'url': url} for url in albums_url]\n threads = MultiRequest(urls=urls, name=url).run()\n for thread in threads:\n try:\n album = self.album2photos(thread.url, thread.response)\n if album is not None:\n albums.append(album)\n except SystemExit:\n sys.exit()\n except:\n albums.append({'error': {'url': thread.url, 'info':\n 'parse error'}})\n del thread\n return albums\n", "step-3": "<mask token>\n\n\nclass Jav_ink:\n\n def __init__(self):\n self.parser_name = 'jav_ink'\n self.domain = 'https://www.jav.ink'\n self.album_flag = {}\n\n @staticmethod\n def category2albums(category_url):\n category_url = category_url[:category_url.find('/page/')]\n category_html = request(category_url)\n albums = category_html.xpath(\n '//*[@id=\"infinite-articles\"]/li[contains(@class, \"post\")]/a/@href'\n )\n pages = category_html.xpath('//*[@class=\"pages\"]/text()')\n if pages:\n pages = pages[0]\n pages = pages[pages.find('of') + 3:]\n urls = []\n for page in range(1, int(pages) + 1):\n urls.append('%s/page/%d/' % (category_url, page))\n urls = [{'url': url} for url in urls]\n threads = MultiRequest(urls=urls, progress=False).run()\n for thread in threads:\n 
albums.extend(thread.response.xpath(\n '//*[@id=\"infinite-articles\"] /li[contains(@class, \"post\")]/a/@href'\n ))\n del thread\n return albums\n\n def album2photos(self, album_url, album_html):\n photos = []\n album_id = album_html.xpath('//article/div/@id')\n if not album_id:\n return {'error': {'url': album_url, 'info': 'not supported'}}\n album_id = album_id[0]\n if album_id in self.album_flag:\n return\n self.album_flag[album_id] = 1\n album_name = album_html.xpath(\n '//*[contains(@class, \"article-title\")]/text()')\n photos_html = album_html.xpath('//*[@class=\"gallery-item\"]')\n for photo_html in photos_html:\n photo_url = photo_html.xpath('.//a/@href')[0]\n photo_name = photo_url[photo_url.rfind('/') + 1:]\n photos.append({'photo_url': photo_url, 'photo_name': photo_name})\n if len(album_name) == 0:\n album_name = album_url.split('/')[-2]\n else:\n album_name = album_name[0]\n album = {'parser_name': self.parser_name, 'album_name': album_name,\n 'photos': photos}\n return album\n\n def url2albums(self, url):\n albums_url = []\n if '/category/' in url or '/?s=' in url:\n albums_url.extend(self.category2albums(url))\n else:\n albums_url.append(url)\n albums = []\n urls = [{'url': url} for url in albums_url]\n threads = MultiRequest(urls=urls, name=url).run()\n for thread in threads:\n try:\n album = self.album2photos(thread.url, thread.response)\n if album is not None:\n albums.append(album)\n except SystemExit:\n sys.exit()\n except:\n albums.append({'error': {'url': thread.url, 'info':\n 'parse error'}})\n del thread\n return albums\n", "step-4": "import sys\nfrom photo_dl.request import request\nfrom photo_dl.request import MultiRequest\n\n\nclass Jav_ink:\n\n def __init__(self):\n self.parser_name = 'jav_ink'\n self.domain = 'https://www.jav.ink'\n self.album_flag = {}\n\n @staticmethod\n def category2albums(category_url):\n category_url = category_url[:category_url.find('/page/')]\n category_html = request(category_url)\n albums = category_html.xpath(\n '//*[@id=\"infinite-articles\"]/li[contains(@class, \"post\")]/a/@href'\n )\n pages = category_html.xpath('//*[@class=\"pages\"]/text()')\n if pages:\n pages = pages[0]\n pages = pages[pages.find('of') + 3:]\n urls = []\n for page in range(1, int(pages) + 1):\n urls.append('%s/page/%d/' % (category_url, page))\n urls = [{'url': url} for url in urls]\n threads = MultiRequest(urls=urls, progress=False).run()\n for thread in threads:\n albums.extend(thread.response.xpath(\n '//*[@id=\"infinite-articles\"] /li[contains(@class, \"post\")]/a/@href'\n ))\n del thread\n return albums\n\n def album2photos(self, album_url, album_html):\n photos = []\n album_id = album_html.xpath('//article/div/@id')\n if not album_id:\n return {'error': {'url': album_url, 'info': 'not supported'}}\n album_id = album_id[0]\n if album_id in self.album_flag:\n return\n self.album_flag[album_id] = 1\n album_name = album_html.xpath(\n '//*[contains(@class, \"article-title\")]/text()')\n photos_html = album_html.xpath('//*[@class=\"gallery-item\"]')\n for photo_html in photos_html:\n photo_url = photo_html.xpath('.//a/@href')[0]\n photo_name = photo_url[photo_url.rfind('/') + 1:]\n photos.append({'photo_url': photo_url, 'photo_name': photo_name})\n if len(album_name) == 0:\n album_name = album_url.split('/')[-2]\n else:\n album_name = album_name[0]\n album = {'parser_name': self.parser_name, 'album_name': album_name,\n 'photos': photos}\n return album\n\n def url2albums(self, url):\n albums_url = []\n if '/category/' in url or '/?s=' in url:\n 
albums_url.extend(self.category2albums(url))\n else:\n albums_url.append(url)\n albums = []\n urls = [{'url': url} for url in albums_url]\n threads = MultiRequest(urls=urls, name=url).run()\n for thread in threads:\n try:\n album = self.album2photos(thread.url, thread.response)\n if album is not None:\n albums.append(album)\n except SystemExit:\n sys.exit()\n except:\n albums.append({'error': {'url': thread.url, 'info':\n 'parse error'}})\n del thread\n return albums\n", "step-5": "import sys\nfrom photo_dl.request import request\nfrom photo_dl.request import MultiRequest\n\n\nclass Jav_ink:\n def __init__(self):\n self.parser_name = 'jav_ink'\n self.domain = 'https://www.jav.ink'\n self.album_flag = {}\n\n @staticmethod\n def category2albums(category_url):\n category_url = category_url[:category_url.find('/page/')]\n category_html = request(category_url)\n albums = category_html.xpath('//*[@id=\"infinite-articles\"]/li[contains(@class, \"post\")]/a/@href')\n pages = category_html.xpath('//*[@class=\"pages\"]/text()')\n if pages:\n pages = pages[0]\n pages = pages[pages.find('of') + 3:]\n urls = []\n for page in range(1, int(pages) + 1):\n urls.append('%s/page/%d/' % (category_url, page))\n urls = [{'url': url} for url in urls]\n threads = MultiRequest(urls=urls, progress=False).run()\n for thread in threads:\n albums.extend(thread.response.xpath('//*[@id=\"infinite-articles\"]\\\n /li[contains(@class, \"post\")]/a/@href'))\n del thread\n return albums\n\n def album2photos(self, album_url, album_html):\n photos = []\n album_id = album_html.xpath('//article/div/@id')\n if not album_id:\n return {'error': {'url': album_url, 'info': 'not supported'}}\n album_id = album_id[0]\n if album_id in self.album_flag:\n return\n self.album_flag[album_id] = 1\n\n album_name = album_html.xpath('//*[contains(@class, \"article-title\")]/text()')\n photos_html = album_html.xpath('//*[@class=\"gallery-item\"]')\n for photo_html in photos_html:\n photo_url = photo_html.xpath('.//a/@href')[0]\n photo_name = photo_url[photo_url.rfind('/') + 1:]\n photos.append({'photo_url': photo_url, 'photo_name': photo_name})\n if len(album_name) == 0:\n album_name = album_url.split('/')[-2]\n else:\n album_name = album_name[0]\n album = {'parser_name': self.parser_name, 'album_name': album_name, 'photos': photos}\n return album\n\n def url2albums(self, url):\n albums_url = []\n if '/category/' in url or '/?s=' in url:\n albums_url.extend(self.category2albums(url))\n else:\n albums_url.append(url)\n\n albums = []\n urls = [{'url': url} for url in albums_url]\n threads = MultiRequest(urls=urls, name=url).run()\n for thread in threads:\n try:\n album = self.album2photos(thread.url, thread.response)\n if album is not None:\n albums.append(album)\n except SystemExit:\n sys.exit()\n except:\n albums.append({'error': {'url': thread.url, 'info': 'parse error'}})\n del thread\n return albums\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
# -*- coding: utf-8 -*-
# @Author: Lich_Amnesia
# @Email: alwaysxiaop@gmail.com
# @Date: 2016-11-17 11:00:33
# @Last Modified time: 2016-11-17 11:00:34
# @FileName: 346.py


class MovingAverage(object):

    def __init__(self, size):
        """
        Initialize your data structure here.
        :type size: int
        """
        self.size = size
        self.q = collections.deque()
        self.sum_ = 0

    def next(self, val):
        """
        :type val: int
        :rtype: float
        """
        if len(self.q) == self.size:
            a = self.q.popleft()
            self.sum_ -= a
        self.q.append(val)
        self.sum_ += val
        return float(self.sum_) / len(self.q)

# Your MovingAverage object will be instantiated and called as such:
# obj = MovingAverage(size)
# param_1 = obj.next(val)
normal
{ "blob_id": "9e37b728d8045726aef7625fccc14111ecb0e1c8", "index": 5578, "step-1": "<mask token>\n", "step-2": "class MovingAverage(object):\n <mask token>\n <mask token>\n", "step-3": "class MovingAverage(object):\n\n def __init__(self, size):\n \"\"\"\n Initialize your data structure here.\n :type size: int\n \"\"\"\n self.size = size\n self.q = collections.deque()\n self.sum_ = 0\n <mask token>\n", "step-4": "class MovingAverage(object):\n\n def __init__(self, size):\n \"\"\"\n Initialize your data structure here.\n :type size: int\n \"\"\"\n self.size = size\n self.q = collections.deque()\n self.sum_ = 0\n\n def next(self, val):\n \"\"\"\n :type val: int\n :rtype: float\n \"\"\"\n if len(self.q) == self.size:\n a = self.q.popleft()\n self.sum_ -= a\n self.q.append(val)\n self.sum_ += val\n return float(self.sum_) / len(self.q)\n", "step-5": "# -*- coding: utf-8 -*-\n# @Author: Lich_Amnesia\n# @Email: alwaysxiaop@gmail.com\n# @Date: 2016-11-17 11:00:33\n# @Last Modified time: 2016-11-17 11:00:34\n# @FileName: 346.py\n\n\nclass MovingAverage(object):\n\n def __init__(self, size):\n \"\"\"\n Initialize your data structure here.\n :type size: int\n \"\"\"\n self.size = size\n self.q = collections.deque()\n self.sum_ = 0\n\n def next(self, val):\n \"\"\"\n :type val: int\n :rtype: float\n \"\"\"\n if len(self.q) == self.size:\n a = self.q.popleft()\n self.sum_ -= a\n self.q.append(val)\n self.sum_ += val\n return float(self.sum_) / len(self.q)\n\n# Your MovingAverage object will be instantiated and called as such:\n# obj = MovingAverage(size)\n# param_1 = obj.next(val)", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# Find sum/count of Prime digits in a number
normal
{ "blob_id": "75217256d88c32ed1c502bc104c30092bf74382d", "index": 9791, "step-1": "# Find sum/count of Prime digits in a number", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 1 ] }
[ 1 ]
from keras.models import *
from keras.layers import *
from keras.optimizers import *
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras import backend as keras

unet_feature_n = 512
unet_feature_nstep_size = 1e-4
unet_input_image_size = 128


def unet(pretrained_weights=None, input_size=(unet_input_image_size, unet_input_image_size, 1)):
    inputs = Input(input_size)
    conv1 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(inputs)
    conv1 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv1)
    pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)

    conv2 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool1)
    conv2 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv2)
    pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)

    conv3 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool2)
    conv3 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv3)
    pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)

    conv4 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool3)
    conv4 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv4)
    drop4 = Dropout(0.5)(conv4)
    pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)

    conv5 = Conv2D(unet_feature_n, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool4)
    conv5 = Conv2D(unet_feature_n, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv5)
    drop5 = Dropout(0.5)(conv5)

    up6 = Conv2D(unet_feature_n // 2, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
        UpSampling2D(size=(2, 2))(drop5))
    merge6 = concatenate([drop4, up6], axis=3)
    conv6 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge6)
    conv6 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv6)

    up7 = Conv2D(unet_feature_n // 4, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
        UpSampling2D(size=(2, 2))(conv6))
    merge7 = concatenate([conv3, up7], axis=3)
    conv7 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge7)
    conv7 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv7)

    up8 = Conv2D(unet_feature_n // 8, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
        UpSampling2D(size=(2, 2))(conv7))
    merge8 = concatenate([conv2, up8], axis=3)
    conv8 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge8)
    conv8 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv8)

    up9 = Conv2D(unet_feature_n // 16, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
        UpSampling2D(size=(2, 2))(conv8))
    merge9 = concatenate([conv1, up9], axis=3)
    conv9 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge9)
    conv9 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)
    conv9 = Conv2D(2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)
    conv10 = Conv2D(1, 1, activation='sigmoid')(conv9)

    model = Model(inputs=inputs, outputs=conv10)
    model.compile(optimizer=Adam(lr=unet_feature_nstep_size), loss='binary_crossentropy', metrics=['accuracy'])

    if (pretrained_weights):
        model.load_weights(pretrained_weights)

    return model


def small_unet(pretrained_weights=False, patch_size=128):
    input_ = Input((patch_size, patch_size, 1))
    skips = []
    output = input_
    for shape, filters in zip([5, 3, 3, 3, 3, 3, 3], [16, 32, 64, 64, 64, 64, 64]):
        skips.append(output)
        print(output.shape)
        output= Conv2D(filters, (shape, shape), strides=2, padding="same", activation="relu")(output)
        #output = BatchNormalization()(output)
        #if shape != 7:
        #    output = BatchNormalization()(output)
    for shape, filters in zip([4, 4, 4, 4, 4, 4, 4, 4], [64, 64, 64, 64,32, 16, 2]):
        output = UpSampling2D()(output)
        skip_output = skips.pop()
        output = concatenate([output, skip_output], axis=3)
        if filters != 2:
            activation = "relu"
        else:
            activation = "softmax"
        output = Conv2D(filters if filters != 2 else 2, (shape, shape), activation=activation, padding="same")(output)
        if filters != 2:
            output = BatchNormalization(momentum=.9)(output)
    assert len(skips) == 0
    m = Model([input_], [output])
    if pretrained_weights:
        m.load_weights(pretrained_weights)
    m.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=['accuracy'])
    return m
normal
{ "blob_id": "b8d45a0028cb4e393ddca9dd6d246289328d1791", "index": 4044, "step-1": "<mask token>\n\n\ndef small_unet(pretrained_weights=False, patch_size=128):\n input_ = Input((patch_size, patch_size, 1))\n skips = []\n output = input_\n for shape, filters in zip([5, 3, 3, 3, 3, 3, 3], [16, 32, 64, 64, 64, \n 64, 64]):\n skips.append(output)\n print(output.shape)\n output = Conv2D(filters, (shape, shape), strides=2, padding='same',\n activation='relu')(output)\n for shape, filters in zip([4, 4, 4, 4, 4, 4, 4, 4], [64, 64, 64, 64, 32,\n 16, 2]):\n output = UpSampling2D()(output)\n skip_output = skips.pop()\n output = concatenate([output, skip_output], axis=3)\n if filters != 2:\n activation = 'relu'\n else:\n activation = 'softmax'\n output = Conv2D(filters if filters != 2 else 2, (shape, shape),\n activation=activation, padding='same')(output)\n if filters != 2:\n output = BatchNormalization(momentum=0.9)(output)\n assert len(skips) == 0\n m = Model([input_], [output])\n if pretrained_weights:\n m.load_weights(pretrained_weights)\n m.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=[\n 'accuracy'])\n return m\n", "step-2": "<mask token>\n\n\ndef unet(pretrained_weights=None, input_size=(unet_input_image_size,\n unet_input_image_size, 1)):\n inputs = Input(input_size)\n conv1 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(inputs)\n conv1 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv1)\n pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)\n conv2 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(pool1)\n conv2 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv2)\n pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)\n conv3 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(pool2)\n conv3 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv3)\n pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)\n conv4 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(pool3)\n conv4 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv4)\n drop4 = Dropout(0.5)(conv4)\n pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)\n conv5 = Conv2D(unet_feature_n, 3, activation='relu', padding='same',\n kernel_initializer='he_normal')(pool4)\n conv5 = Conv2D(unet_feature_n, 3, activation='relu', padding='same',\n kernel_initializer='he_normal')(conv5)\n drop5 = Dropout(0.5)(conv5)\n up6 = Conv2D(unet_feature_n // 2, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(drop5))\n merge6 = concatenate([drop4, up6], axis=3)\n conv6 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge6)\n conv6 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv6)\n up7 = Conv2D(unet_feature_n // 4, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv6))\n merge7 = concatenate([conv3, up7], axis=3)\n conv7 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge7)\n conv7 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', 
kernel_initializer='he_normal')(conv7)\n up8 = Conv2D(unet_feature_n // 8, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv7))\n merge8 = concatenate([conv2, up8], axis=3)\n conv8 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge8)\n conv8 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv8)\n up9 = Conv2D(unet_feature_n // 16, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv8))\n merge9 = concatenate([conv1, up9], axis=3)\n conv9 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge9)\n conv9 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv9)\n conv9 = Conv2D(2, 3, activation='relu', padding='same',\n kernel_initializer='he_normal')(conv9)\n conv10 = Conv2D(1, 1, activation='sigmoid')(conv9)\n model = Model(inputs=inputs, outputs=conv10)\n model.compile(optimizer=Adam(lr=unet_feature_nstep_size), loss=\n 'binary_crossentropy', metrics=['accuracy'])\n if pretrained_weights:\n model.load_weights(pretrained_weights)\n return model\n\n\ndef small_unet(pretrained_weights=False, patch_size=128):\n input_ = Input((patch_size, patch_size, 1))\n skips = []\n output = input_\n for shape, filters in zip([5, 3, 3, 3, 3, 3, 3], [16, 32, 64, 64, 64, \n 64, 64]):\n skips.append(output)\n print(output.shape)\n output = Conv2D(filters, (shape, shape), strides=2, padding='same',\n activation='relu')(output)\n for shape, filters in zip([4, 4, 4, 4, 4, 4, 4, 4], [64, 64, 64, 64, 32,\n 16, 2]):\n output = UpSampling2D()(output)\n skip_output = skips.pop()\n output = concatenate([output, skip_output], axis=3)\n if filters != 2:\n activation = 'relu'\n else:\n activation = 'softmax'\n output = Conv2D(filters if filters != 2 else 2, (shape, shape),\n activation=activation, padding='same')(output)\n if filters != 2:\n output = BatchNormalization(momentum=0.9)(output)\n assert len(skips) == 0\n m = Model([input_], [output])\n if pretrained_weights:\n m.load_weights(pretrained_weights)\n m.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=[\n 'accuracy'])\n return m\n", "step-3": "<mask token>\nunet_feature_n = 512\nunet_feature_nstep_size = 0.0001\nunet_input_image_size = 128\n\n\ndef unet(pretrained_weights=None, input_size=(unet_input_image_size,\n unet_input_image_size, 1)):\n inputs = Input(input_size)\n conv1 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(inputs)\n conv1 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv1)\n pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)\n conv2 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(pool1)\n conv2 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv2)\n pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)\n conv3 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(pool2)\n conv3 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv3)\n pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)\n conv4 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(pool3)\n conv4 
= Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv4)\n drop4 = Dropout(0.5)(conv4)\n pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)\n conv5 = Conv2D(unet_feature_n, 3, activation='relu', padding='same',\n kernel_initializer='he_normal')(pool4)\n conv5 = Conv2D(unet_feature_n, 3, activation='relu', padding='same',\n kernel_initializer='he_normal')(conv5)\n drop5 = Dropout(0.5)(conv5)\n up6 = Conv2D(unet_feature_n // 2, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(drop5))\n merge6 = concatenate([drop4, up6], axis=3)\n conv6 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge6)\n conv6 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv6)\n up7 = Conv2D(unet_feature_n // 4, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv6))\n merge7 = concatenate([conv3, up7], axis=3)\n conv7 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge7)\n conv7 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv7)\n up8 = Conv2D(unet_feature_n // 8, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv7))\n merge8 = concatenate([conv2, up8], axis=3)\n conv8 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge8)\n conv8 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv8)\n up9 = Conv2D(unet_feature_n // 16, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv8))\n merge9 = concatenate([conv1, up9], axis=3)\n conv9 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge9)\n conv9 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv9)\n conv9 = Conv2D(2, 3, activation='relu', padding='same',\n kernel_initializer='he_normal')(conv9)\n conv10 = Conv2D(1, 1, activation='sigmoid')(conv9)\n model = Model(inputs=inputs, outputs=conv10)\n model.compile(optimizer=Adam(lr=unet_feature_nstep_size), loss=\n 'binary_crossentropy', metrics=['accuracy'])\n if pretrained_weights:\n model.load_weights(pretrained_weights)\n return model\n\n\ndef small_unet(pretrained_weights=False, patch_size=128):\n input_ = Input((patch_size, patch_size, 1))\n skips = []\n output = input_\n for shape, filters in zip([5, 3, 3, 3, 3, 3, 3], [16, 32, 64, 64, 64, \n 64, 64]):\n skips.append(output)\n print(output.shape)\n output = Conv2D(filters, (shape, shape), strides=2, padding='same',\n activation='relu')(output)\n for shape, filters in zip([4, 4, 4, 4, 4, 4, 4, 4], [64, 64, 64, 64, 32,\n 16, 2]):\n output = UpSampling2D()(output)\n skip_output = skips.pop()\n output = concatenate([output, skip_output], axis=3)\n if filters != 2:\n activation = 'relu'\n else:\n activation = 'softmax'\n output = Conv2D(filters if filters != 2 else 2, (shape, shape),\n activation=activation, padding='same')(output)\n if filters != 2:\n output = BatchNormalization(momentum=0.9)(output)\n assert len(skips) == 0\n m = Model([input_], [output])\n if pretrained_weights:\n m.load_weights(pretrained_weights)\n m.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=[\n 
'accuracy'])\n return m\n", "step-4": "from keras.models import *\nfrom keras.layers import *\nfrom keras.optimizers import *\nfrom keras.callbacks import ModelCheckpoint, LearningRateScheduler\nfrom keras import backend as keras\nunet_feature_n = 512\nunet_feature_nstep_size = 0.0001\nunet_input_image_size = 128\n\n\ndef unet(pretrained_weights=None, input_size=(unet_input_image_size,\n unet_input_image_size, 1)):\n inputs = Input(input_size)\n conv1 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(inputs)\n conv1 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv1)\n pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)\n conv2 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(pool1)\n conv2 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv2)\n pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)\n conv3 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(pool2)\n conv3 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv3)\n pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)\n conv4 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(pool3)\n conv4 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv4)\n drop4 = Dropout(0.5)(conv4)\n pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)\n conv5 = Conv2D(unet_feature_n, 3, activation='relu', padding='same',\n kernel_initializer='he_normal')(pool4)\n conv5 = Conv2D(unet_feature_n, 3, activation='relu', padding='same',\n kernel_initializer='he_normal')(conv5)\n drop5 = Dropout(0.5)(conv5)\n up6 = Conv2D(unet_feature_n // 2, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(drop5))\n merge6 = concatenate([drop4, up6], axis=3)\n conv6 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge6)\n conv6 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv6)\n up7 = Conv2D(unet_feature_n // 4, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv6))\n merge7 = concatenate([conv3, up7], axis=3)\n conv7 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge7)\n conv7 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv7)\n up8 = Conv2D(unet_feature_n // 8, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv7))\n merge8 = concatenate([conv2, up8], axis=3)\n conv8 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge8)\n conv8 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(conv8)\n up9 = Conv2D(unet_feature_n // 16, 2, activation='relu', padding='same',\n kernel_initializer='he_normal')(UpSampling2D(size=(2, 2))(conv8))\n merge9 = concatenate([conv1, up9], axis=3)\n conv9 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')(merge9)\n conv9 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding=\n 'same', 
kernel_initializer='he_normal')(conv9)\n conv9 = Conv2D(2, 3, activation='relu', padding='same',\n kernel_initializer='he_normal')(conv9)\n conv10 = Conv2D(1, 1, activation='sigmoid')(conv9)\n model = Model(inputs=inputs, outputs=conv10)\n model.compile(optimizer=Adam(lr=unet_feature_nstep_size), loss=\n 'binary_crossentropy', metrics=['accuracy'])\n if pretrained_weights:\n model.load_weights(pretrained_weights)\n return model\n\n\ndef small_unet(pretrained_weights=False, patch_size=128):\n input_ = Input((patch_size, patch_size, 1))\n skips = []\n output = input_\n for shape, filters in zip([5, 3, 3, 3, 3, 3, 3], [16, 32, 64, 64, 64, \n 64, 64]):\n skips.append(output)\n print(output.shape)\n output = Conv2D(filters, (shape, shape), strides=2, padding='same',\n activation='relu')(output)\n for shape, filters in zip([4, 4, 4, 4, 4, 4, 4, 4], [64, 64, 64, 64, 32,\n 16, 2]):\n output = UpSampling2D()(output)\n skip_output = skips.pop()\n output = concatenate([output, skip_output], axis=3)\n if filters != 2:\n activation = 'relu'\n else:\n activation = 'softmax'\n output = Conv2D(filters if filters != 2 else 2, (shape, shape),\n activation=activation, padding='same')(output)\n if filters != 2:\n output = BatchNormalization(momentum=0.9)(output)\n assert len(skips) == 0\n m = Model([input_], [output])\n if pretrained_weights:\n m.load_weights(pretrained_weights)\n m.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=[\n 'accuracy'])\n return m\n", "step-5": "from keras.models import *\nfrom keras.layers import *\nfrom keras.optimizers import *\nfrom keras.callbacks import ModelCheckpoint, LearningRateScheduler\nfrom keras import backend as keras\n\nunet_feature_n = 512\nunet_feature_nstep_size = 1e-4\nunet_input_image_size = 128\n\ndef unet(pretrained_weights=None, input_size=(unet_input_image_size, unet_input_image_size, 1)):\n inputs = Input(input_size)\n conv1 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(inputs)\n conv1 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv1)\n pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)\n conv2 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool1)\n conv2 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv2)\n pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)\n conv3 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool2)\n conv3 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv3)\n pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)\n conv4 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool3)\n conv4 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv4)\n drop4 = Dropout(0.5)(conv4)\n pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)\n\n conv5 = Conv2D(unet_feature_n, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool4)\n conv5 = Conv2D(unet_feature_n, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv5)\n drop5 = Dropout(0.5)(conv5)\n\n up6 = Conv2D(unet_feature_n // 2, 2, activation='relu', padding='same', kernel_initializer='he_normal')(\n UpSampling2D(size=(2, 2))(drop5))\n merge6 = concatenate([drop4, up6], axis=3)\n conv6 = Conv2D(unet_feature_n // 2, 3, activation='relu', 
padding='same', kernel_initializer='he_normal')(merge6)\n conv6 = Conv2D(unet_feature_n // 2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv6)\n\n up7 = Conv2D(unet_feature_n // 4, 2, activation='relu', padding='same', kernel_initializer='he_normal')(\n UpSampling2D(size=(2, 2))(conv6))\n merge7 = concatenate([conv3, up7], axis=3)\n conv7 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge7)\n conv7 = Conv2D(unet_feature_n // 4, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv7)\n\n up8 = Conv2D(unet_feature_n // 8, 2, activation='relu', padding='same', kernel_initializer='he_normal')(\n UpSampling2D(size=(2, 2))(conv7))\n merge8 = concatenate([conv2, up8], axis=3)\n conv8 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge8)\n conv8 = Conv2D(unet_feature_n // 8, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv8)\n\n up9 = Conv2D(unet_feature_n // 16, 2, activation='relu', padding='same', kernel_initializer='he_normal')(\n UpSampling2D(size=(2, 2))(conv8))\n merge9 = concatenate([conv1, up9], axis=3)\n conv9 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge9)\n conv9 = Conv2D(unet_feature_n // 16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)\n conv9 = Conv2D(2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)\n conv10 = Conv2D(1, 1, activation='sigmoid')(conv9)\n\n model = Model(inputs=inputs, outputs=conv10)\n\n model.compile(optimizer=Adam(lr=unet_feature_nstep_size), loss='binary_crossentropy', metrics=['accuracy'])\n\n if (pretrained_weights):\n model.load_weights(pretrained_weights)\n\n return model\n\n\ndef small_unet(pretrained_weights=False, patch_size=128):\n input_ = Input((patch_size, patch_size, 1))\n skips = []\n output = input_\n for shape, filters in zip([5, 3, 3, 3, 3, 3, 3], [16, 32, 64, 64, 64, 64, 64]):\n skips.append(output)\n print(output.shape)\n output= Conv2D(filters, (shape, shape), strides=2, padding=\"same\", activation=\"relu\")(output)\n #output = BatchNormalization()(output)\n #if shape != 7:\n # output = BatchNormalization()(output)\n for shape, filters in zip([4, 4, 4, 4, 4, 4, 4, 4], [64, 64, 64, 64,32, 16, 2]):\n output = UpSampling2D()(output)\n\n skip_output = skips.pop()\n output = concatenate([output, skip_output], axis=3)\n\n if filters != 2:\n activation = \"relu\"\n else:\n activation = \"softmax\"\n output = Conv2D(filters if filters != 2 else 2, (shape, shape), activation=activation, padding=\"same\")(output)\n \n if filters != 2:\n output = BatchNormalization(momentum=.9)(output)\n assert len(skips) == 0\n m = Model([input_], [output])\n\n if pretrained_weights:\n m.load_weights(pretrained_weights)\n\n m.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=['accuracy'])\n return m", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
for row in range(7):
    for col in range(5):
        if col == 0 or row % 3 == 0:
            print('*', end=' ')
        else:
            print(' ', end=' ')
    print()
<|reserved_special_token_1|>
for row in range(7):
    for col in range(5):
        if (col == 0) or (row % 3 == 0):
            print("*", end=" ")
        else:
            print(" ", end=" ")
    print()
flexible
{ "blob_id": "634c826d30b22c6061531c514914e9ca62b21605", "index": 7158, "step-1": "<mask token>\n", "step-2": "for row in range(7):\n for col in range(5):\n if col == 0 or row % 3 == 0:\n print('*', end=' ')\n else:\n print(' ', end=' ')\n print()\n", "step-3": "for row in range(7):\r\n for col in range(5):\r\n if (col == 0) or (row % 3 == 0):\r\n print(\"*\", end=\" \")\r\n else:\r\n print(\" \", end=\" \")\r\n print()\r\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|>


def book_add(request):
    if request.user.is_authenticated:
        context = {}
        if request.method == 'GET':
            form = BookCreateModelForm()
            context['form'] = form
            return render(request, 'addbook.html', context)
        elif request.method == 'POST':
            context = {}
            form = BookCreateModelForm(request.POST)
            if form.is_valid():
                form.save()
                return redirect('index')
            else:
                return render(request, 'addbook.html', context)
    else:
        return redirect('singn')


def get_books(request):
    if request.user.is_authenticated:
        form = SearchForm()
        context = {}
        books = Books.objects.all()
        context['books'] = books
        context['form'] = form
        if request.method == 'POST':
            form = SearchForm(request.POST)
            if form.is_valid():
                book_name = form.cleaned_data['book_name']
                books = Books.objects.filter(book_name__contains=book_name)
                context['books'] = books
                return render(request, 'book_list.html', context)
            else:
                context['form'] = form
                return render(request, 'book_list.html', context)
        return render(request, 'book_list.html', context)
    else:
        return redirect('singn')


<|reserved_special_token_0|>


def remove_book(request, id):
    if request.user.is_authenticated:
        book = Books.objects.get(id=id)
        book.delete()
        return redirect('books')
    else:
        return redirect('singn')


<|reserved_special_token_0|>


def create_account(request):
    form = RegistrationForm()
    context = {'form': form}
    if request.method == 'POST':
        form = RegistrationForm(request.POST)
        if form.is_valid():
            form.save()
            print('account created')
            return redirect('singn')
        else:
            context['form'] = form
            return render(request, 'createaccount.html', context)
    return render(request, 'createaccount.html', context)


<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


def book_add(request):
    if request.user.is_authenticated:
        context = {}
        if request.method == 'GET':
            form = BookCreateModelForm()
            context['form'] = form
            return render(request, 'addbook.html', context)
        elif request.method == 'POST':
            context = {}
            form = BookCreateModelForm(request.POST)
            if form.is_valid():
                form.save()
                return redirect('index')
            else:
                return render(request, 'addbook.html', context)
    else:
        return redirect('singn')


def get_books(request):
    if request.user.is_authenticated:
        form = SearchForm()
        context = {}
        books = Books.objects.all()
        context['books'] = books
        context['form'] = form
        if request.method == 'POST':
            form = SearchForm(request.POST)
            if form.is_valid():
                book_name = form.cleaned_data['book_name']
                books = Books.objects.filter(book_name__contains=book_name)
                context['books'] = books
                return render(request, 'book_list.html', context)
            else:
                context['form'] = form
                return render(request, 'book_list.html', context)
        return render(request, 'book_list.html', context)
    else:
        return redirect('singn')


def book_details(request, id):
    if request.user.is_authenticated:
        book = Books.objects.get(id=id)
        context = {}
        context['book'] = book
        return render(request, 'book_details.html', context)
    else:
        return redirect('singn')


def remove_book(request, id):
    if request.user.is_authenticated:
        book = Books.objects.get(id=id)
        book.delete()
        return redirect('books')
    else:
        return redirect('singn')


<|reserved_special_token_0|>


def create_account(request):
    form = RegistrationForm()
    context = {'form': form}
    if request.method == 'POST':
        form = RegistrationForm(request.POST)
        if form.is_valid():
            form.save()
            print('account created')
            return redirect('singn')
        else:
            context['form'] = form
            return render(request, 'createaccount.html', context)
    return render(request, 'createaccount.html', context)


<|reserved_special_token_0|>


def signout(request):
    if request.user.is_authenticated:
        logout(request)
        return redirect('singn')
    else:
        return redirect('singn')
<|reserved_special_token_1|>
<|reserved_special_token_0|>


def book_add(request):
    if request.user.is_authenticated:
        context = {}
        if request.method == 'GET':
            form = BookCreateModelForm()
            context['form'] = form
            return render(request, 'addbook.html', context)
        elif request.method == 'POST':
            context = {}
            form = BookCreateModelForm(request.POST)
            if form.is_valid():
                form.save()
                return redirect('index')
            else:
                return render(request, 'addbook.html', context)
    else:
        return redirect('singn')


def get_books(request):
    if request.user.is_authenticated:
        form = SearchForm()
        context = {}
        books = Books.objects.all()
        context['books'] = books
        context['form'] = form
        if request.method == 'POST':
            form = SearchForm(request.POST)
            if form.is_valid():
                book_name = form.cleaned_data['book_name']
                books = Books.objects.filter(book_name__contains=book_name)
                context['books'] = books
                return render(request, 'book_list.html', context)
            else:
                context['form'] = form
                return render(request, 'book_list.html', context)
        return render(request, 'book_list.html', context)
    else:
        return redirect('singn')


def book_details(request, id):
    if request.user.is_authenticated:
        book = Books.objects.get(id=id)
        context = {}
        context['book'] = book
        return render(request, 'book_details.html', context)
    else:
        return redirect('singn')


def remove_book(request, id):
    if request.user.is_authenticated:
        book = Books.objects.get(id=id)
        book.delete()
        return redirect('books')
    else:
        return redirect('singn')


def update_book(request, id):
    if request.user.is_authenticated:
        book = Books.objects.get(id=id)
        form = BookCreateModelForm(instance=book)
        context = {}
        context['form'] = form
        if request.method == 'POST':
            book = Books.objects.get(id=id)
            form = BookCreateModelForm(instance=book, data=request.POST)
            if form.is_valid():
                form.save()
                return redirect('books')
            else:
                form = BookCreateModelForm(request.POST)
                context['form'] = form
                print(form)
                return render(request, 'edit.html', context)
        return render(request, 'edit.html', context)
    else:
        return redirect('singn')


def create_account(request):
    form = RegistrationForm()
    context = {'form': form}
    if request.method == 'POST':
        form = RegistrationForm(request.POST)
        if form.is_valid():
            form.save()
            print('account created')
            return redirect('singn')
        else:
            context['form'] = form
            return render(request, 'createaccount.html', context)
    return render(request, 'createaccount.html', context)


<|reserved_special_token_0|>


def signout(request):
    if request.user.is_authenticated:
        logout(request)
        return redirect('singn')
    else:
        return redirect('singn')
<|reserved_special_token_1|>
<|reserved_special_token_0|>


def book_add(request):
    if request.user.is_authenticated:
        context = {}
        if request.method == 'GET':
            form = BookCreateModelForm()
            context['form'] = form
            return render(request, 'addbook.html', context)
        elif request.method == 'POST':
            context = {}
            form = BookCreateModelForm(request.POST)
            if form.is_valid():
                form.save()
                return redirect('index')
            else:
                return render(request, 'addbook.html', context)
    else:
        return redirect('singn')


def get_books(request):
    if request.user.is_authenticated:
        form = SearchForm()
        context = {}
        books = Books.objects.all()
        context['books'] = books
        context['form'] = form
        if request.method == 'POST':
            form = SearchForm(request.POST)
            if form.is_valid():
                book_name = form.cleaned_data['book_name']
                books = Books.objects.filter(book_name__contains=book_name)
                context['books'] = books
                return render(request, 'book_list.html', context)
            else:
                context['form'] = form
                return render(request, 'book_list.html', context)
        return render(request, 'book_list.html', context)
    else:
        return redirect('singn')


def book_details(request, id):
    if request.user.is_authenticated:
        book = Books.objects.get(id=id)
        context = {}
        context['book'] = book
        return render(request, 'book_details.html', context)
    else:
        return redirect('singn')


def remove_book(request, id):
    if request.user.is_authenticated:
        book = Books.objects.get(id=id)
        book.delete()
        return redirect('books')
    else:
        return redirect('singn')


def update_book(request, id):
    if request.user.is_authenticated:
        book = Books.objects.get(id=id)
        form = BookCreateModelForm(instance=book)
        context = {}
        context['form'] = form
        if request.method == 'POST':
            book = Books.objects.get(id=id)
            form = BookCreateModelForm(instance=book, data=request.POST)
            if form.is_valid():
                form.save()
                return redirect('books')
            else:
                form = BookCreateModelForm(request.POST)
                context['form'] = form
                print(form)
                return render(request, 'edit.html', context)
        return render(request, 'edit.html', context)
    else:
        return redirect('singn')


def create_account(request):
    form = RegistrationForm()
    context = {'form': form}
    if request.method == 'POST':
        form = RegistrationForm(request.POST)
        if form.is_valid():
            form.save()
            print('account created')
            return redirect('singn')
        else:
            context['form'] = form
            return render(request, 'createaccount.html', context)
    return render(request, 'createaccount.html', context)


def singn_in(request):
    form = SignInForm()
    context = {'form': form}
    if request.method == 'POST':
        form = SignInForm(request.POST)
        if form.is_valid():
            username = form.cleaned_data['username']
            password = form.cleaned_data['password']
            user = authenticate(request, username=username, password=password)
            if user:
                login(request, user)
                return redirect('index')
        else:
            context['form'] = form
            return render(request, 'signin.html', context)
    return render(request, 'signin.html', context)


def signout(request):
    if request.user.is_authenticated:
        logout(request)
        return redirect('singn')
    else:
        return redirect('singn')
<|reserved_special_token_1|>
from django.shortcuts import render
from django.shortcuts import redirect



# Create your views here.
from .forms import AddBookForm ,UpdateBookForm,BookCreateModelForm,SearchForm,RegistrationForm,SignInForm
from book.models import Books
from django.contrib.auth import authenticate,login,logout



def book_add(request):
    if request.user.is_authenticated:
        context = {}
        if request.method == "GET":
            form = BookCreateModelForm()
            context["form"] = form
            return render(request, "addbook.html", context)
        elif request.method == "POST":
            context = {}
            form = BookCreateModelForm(request.POST)

            if form.is_valid():
                form.save()
                # context["form"] = form
                # book_name = form.cleaned_data["book_name"]
                # author= form.cleaned_data["author"]
                # category=form.cleaned_data["category"]
                # prices=form.cleaned_data["price"]
                # copies=form.cleaned_data["number_copies"]
                # print(book_name,author,category,prices,copies)
                # book=Books(book_name=book_name,author=author,category=category,price=prices,copies=copies)
                # book.save()
                return redirect("index")
            else:
                return render(request, "addbook.html",context)
    else:
        return redirect('singn')

def get_books(request):
    if request.user.is_authenticated:
        form=SearchForm()
        context = {}
        books=Books.objects.all()
        context["books"]=books
        context['form']=form
        if request.method=="POST":
            form=SearchForm(request.POST)
            if form.is_valid():
                book_name=form.cleaned_data["book_name"]
                books=Books.objects.filter(book_name__contains=book_name)
                context['books']=books
                return render(request,"book_list.html",context)
            else:
                context['form']=form
                return render(request, "book_list.html", context)
        return render(request, "book_list.html", context)
    else:
        return redirect('singn')

def book_details(request,id):
    if request.user.is_authenticated:
        book=Books.objects.get(id=id)
        context = {}
        context["book"]=book
        return render(request,"book_details.html",context)
    else:
        return redirect('singn')


def remove_book(request,id):
    if request.user.is_authenticated:
        book=Books.objects.get(id=id)
        book.delete()
        return redirect("books")
    else:
        return redirect('singn')
def update_book(request,id):
    if request.user.is_authenticated:
        book = Books.objects.get(id=id)
        form=BookCreateModelForm(instance=book)

        # form=BookCreateModelForm(initial={
        #     "book_name":book.book_name,
        #     "author":book.author,
        #     "category":book.category,
        #     "price":book.price,
        #     "number_copies":book.copies})
        context = {}
        context['form']=form
        if request.method=="POST":
            book = Books.objects.get(id=id)
            form=BookCreateModelForm(instance=book,data=request.POST)
            if form.is_valid():
                form.save()
                # form=BookCreateModelForm(request.POST)
                #
                # if form.is_valid():
                #     book.book_name=form.cleaned_data["book_name"]
                #     book.author=form.cleaned_data["author"]
                #     book.category=form.cleaned_data["category"]
                #     book.price=form.cleaned_data["price"]
                #     book.copies=form.cleaned_data["number_copies"]
                #     book.save()
                return redirect("books")
            else:
                form=BookCreateModelForm(request.POST)
                context["form"]=form
                print(form)
                return render(request, "edit.html", context)
        return render(request,"edit.html",context)
    else:
        return redirect('singn')


def create_account(request):
    form=RegistrationForm()
    context={'form':form}
    if request.method=="POST":
        form=RegistrationForm(request.POST)
        if form.is_valid():
            form.save()
            print("account created")
            return redirect("singn")
        else:
            context["form"]=form
            return render(request, "createaccount.html", context)

    return render(request,"createaccount.html",context)


def singn_in(request):
    form=SignInForm()
    context={'form':form}
    if request.method=="POST":
        form=SignInForm(request.POST)
        if form.is_valid():
            username=form.cleaned_data["username"]
            password=form.cleaned_data["password"]
            user=authenticate(request,username=username,password=password)
            if user:
                login(request,user)
                return redirect("index")
        else:
            context['form']=form
            return render(request, "signin.html", context)


    return render(request,"signin.html",context)


def signout(request):
    if request.user.is_authenticated:
        logout(request)
        return redirect("singn")
    else:
        return redirect('singn')
flexible
{ "blob_id": "aba2a0a262c14f286c278f21ba42871410c174f0", "index": 953, "step-1": "<mask token>\n\n\ndef book_add(request):\n if request.user.is_authenticated:\n context = {}\n if request.method == 'GET':\n form = BookCreateModelForm()\n context['form'] = form\n return render(request, 'addbook.html', context)\n elif request.method == 'POST':\n context = {}\n form = BookCreateModelForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('index')\n else:\n return render(request, 'addbook.html', context)\n else:\n return redirect('singn')\n\n\ndef get_books(request):\n if request.user.is_authenticated:\n form = SearchForm()\n context = {}\n books = Books.objects.all()\n context['books'] = books\n context['form'] = form\n if request.method == 'POST':\n form = SearchForm(request.POST)\n if form.is_valid():\n book_name = form.cleaned_data['book_name']\n books = Books.objects.filter(book_name__contains=book_name)\n context['books'] = books\n return render(request, 'book_list.html', context)\n else:\n context['form'] = form\n return render(request, 'book_list.html', context)\n return render(request, 'book_list.html', context)\n else:\n return redirect('singn')\n\n\n<mask token>\n\n\ndef remove_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n book.delete()\n return redirect('books')\n else:\n return redirect('singn')\n\n\n<mask token>\n\n\ndef create_account(request):\n form = RegistrationForm()\n context = {'form': form}\n if request.method == 'POST':\n form = RegistrationForm(request.POST)\n if form.is_valid():\n form.save()\n print('account created')\n return redirect('singn')\n else:\n context['form'] = form\n return render(request, 'createaccount.html', context)\n return render(request, 'createaccount.html', context)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef book_add(request):\n if request.user.is_authenticated:\n context = {}\n if request.method == 'GET':\n form = BookCreateModelForm()\n context['form'] = form\n return render(request, 'addbook.html', context)\n elif request.method == 'POST':\n context = {}\n form = BookCreateModelForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('index')\n else:\n return render(request, 'addbook.html', context)\n else:\n return redirect('singn')\n\n\ndef get_books(request):\n if request.user.is_authenticated:\n form = SearchForm()\n context = {}\n books = Books.objects.all()\n context['books'] = books\n context['form'] = form\n if request.method == 'POST':\n form = SearchForm(request.POST)\n if form.is_valid():\n book_name = form.cleaned_data['book_name']\n books = Books.objects.filter(book_name__contains=book_name)\n context['books'] = books\n return render(request, 'book_list.html', context)\n else:\n context['form'] = form\n return render(request, 'book_list.html', context)\n return render(request, 'book_list.html', context)\n else:\n return redirect('singn')\n\n\ndef book_details(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n context = {}\n context['book'] = book\n return render(request, 'book_details.html', context)\n else:\n return redirect('singn')\n\n\ndef remove_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n book.delete()\n return redirect('books')\n else:\n return redirect('singn')\n\n\n<mask token>\n\n\ndef create_account(request):\n form = RegistrationForm()\n context = {'form': form}\n if request.method == 'POST':\n form = RegistrationForm(request.POST)\n if 
form.is_valid():\n form.save()\n print('account created')\n return redirect('singn')\n else:\n context['form'] = form\n return render(request, 'createaccount.html', context)\n return render(request, 'createaccount.html', context)\n\n\n<mask token>\n\n\ndef signout(request):\n if request.user.is_authenticated:\n logout(request)\n return redirect('singn')\n else:\n return redirect('singn')\n", "step-3": "<mask token>\n\n\ndef book_add(request):\n if request.user.is_authenticated:\n context = {}\n if request.method == 'GET':\n form = BookCreateModelForm()\n context['form'] = form\n return render(request, 'addbook.html', context)\n elif request.method == 'POST':\n context = {}\n form = BookCreateModelForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('index')\n else:\n return render(request, 'addbook.html', context)\n else:\n return redirect('singn')\n\n\ndef get_books(request):\n if request.user.is_authenticated:\n form = SearchForm()\n context = {}\n books = Books.objects.all()\n context['books'] = books\n context['form'] = form\n if request.method == 'POST':\n form = SearchForm(request.POST)\n if form.is_valid():\n book_name = form.cleaned_data['book_name']\n books = Books.objects.filter(book_name__contains=book_name)\n context['books'] = books\n return render(request, 'book_list.html', context)\n else:\n context['form'] = form\n return render(request, 'book_list.html', context)\n return render(request, 'book_list.html', context)\n else:\n return redirect('singn')\n\n\ndef book_details(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n context = {}\n context['book'] = book\n return render(request, 'book_details.html', context)\n else:\n return redirect('singn')\n\n\ndef remove_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n book.delete()\n return redirect('books')\n else:\n return redirect('singn')\n\n\ndef update_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n form = BookCreateModelForm(instance=book)\n context = {}\n context['form'] = form\n if request.method == 'POST':\n book = Books.objects.get(id=id)\n form = BookCreateModelForm(instance=book, data=request.POST)\n if form.is_valid():\n form.save()\n return redirect('books')\n else:\n form = BookCreateModelForm(request.POST)\n context['form'] = form\n print(form)\n return render(request, 'edit.html', context)\n return render(request, 'edit.html', context)\n else:\n return redirect('singn')\n\n\ndef create_account(request):\n form = RegistrationForm()\n context = {'form': form}\n if request.method == 'POST':\n form = RegistrationForm(request.POST)\n if form.is_valid():\n form.save()\n print('account created')\n return redirect('singn')\n else:\n context['form'] = form\n return render(request, 'createaccount.html', context)\n return render(request, 'createaccount.html', context)\n\n\n<mask token>\n\n\ndef signout(request):\n if request.user.is_authenticated:\n logout(request)\n return redirect('singn')\n else:\n return redirect('singn')\n", "step-4": "<mask token>\n\n\ndef book_add(request):\n if request.user.is_authenticated:\n context = {}\n if request.method == 'GET':\n form = BookCreateModelForm()\n context['form'] = form\n return render(request, 'addbook.html', context)\n elif request.method == 'POST':\n context = {}\n form = BookCreateModelForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('index')\n else:\n return render(request, 'addbook.html', context)\n else:\n 
return redirect('singn')\n\n\ndef get_books(request):\n if request.user.is_authenticated:\n form = SearchForm()\n context = {}\n books = Books.objects.all()\n context['books'] = books\n context['form'] = form\n if request.method == 'POST':\n form = SearchForm(request.POST)\n if form.is_valid():\n book_name = form.cleaned_data['book_name']\n books = Books.objects.filter(book_name__contains=book_name)\n context['books'] = books\n return render(request, 'book_list.html', context)\n else:\n context['form'] = form\n return render(request, 'book_list.html', context)\n return render(request, 'book_list.html', context)\n else:\n return redirect('singn')\n\n\ndef book_details(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n context = {}\n context['book'] = book\n return render(request, 'book_details.html', context)\n else:\n return redirect('singn')\n\n\ndef remove_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n book.delete()\n return redirect('books')\n else:\n return redirect('singn')\n\n\ndef update_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n form = BookCreateModelForm(instance=book)\n context = {}\n context['form'] = form\n if request.method == 'POST':\n book = Books.objects.get(id=id)\n form = BookCreateModelForm(instance=book, data=request.POST)\n if form.is_valid():\n form.save()\n return redirect('books')\n else:\n form = BookCreateModelForm(request.POST)\n context['form'] = form\n print(form)\n return render(request, 'edit.html', context)\n return render(request, 'edit.html', context)\n else:\n return redirect('singn')\n\n\ndef create_account(request):\n form = RegistrationForm()\n context = {'form': form}\n if request.method == 'POST':\n form = RegistrationForm(request.POST)\n if form.is_valid():\n form.save()\n print('account created')\n return redirect('singn')\n else:\n context['form'] = form\n return render(request, 'createaccount.html', context)\n return render(request, 'createaccount.html', context)\n\n\ndef singn_in(request):\n form = SignInForm()\n context = {'form': form}\n if request.method == 'POST':\n form = SignInForm(request.POST)\n if form.is_valid():\n username = form.cleaned_data['username']\n password = form.cleaned_data['password']\n user = authenticate(request, username=username, password=password)\n if user:\n login(request, user)\n return redirect('index')\n else:\n context['form'] = form\n return render(request, 'signin.html', context)\n return render(request, 'signin.html', context)\n\n\ndef signout(request):\n if request.user.is_authenticated:\n logout(request)\n return redirect('singn')\n else:\n return redirect('singn')\n", "step-5": "from django.shortcuts import render\nfrom django.shortcuts import redirect\n\n\n\n# Create your views here.\nfrom .forms import AddBookForm ,UpdateBookForm,BookCreateModelForm,SearchForm,RegistrationForm,SignInForm\nfrom book.models import Books\nfrom django.contrib.auth import authenticate,login,logout\n\n\n\ndef book_add(request):\n if request.user.is_authenticated:\n context = {}\n if request.method == \"GET\":\n form = BookCreateModelForm()\n context[\"form\"] = form\n return render(request, \"addbook.html\", context)\n elif request.method == \"POST\":\n context = {}\n form = BookCreateModelForm(request.POST)\n \n if form.is_valid():\n form.save()\n # context[\"form\"] = form\n # book_name = form.cleaned_data[\"book_name\"]\n # author= form.cleaned_data[\"author\"]\n # 
category=form.cleaned_data[\"category\"]\n # prices=form.cleaned_data[\"price\"]\n # copies=form.cleaned_data[\"number_copies\"]\n # print(book_name,author,category,prices,copies)\n # book=Books(book_name=book_name,author=author,category=category,price=prices,copies=copies)\n # book.save()\n return redirect(\"index\")\n else:\n return render(request, \"addbook.html\",context)\n else:\n return redirect('singn')\n\ndef get_books(request):\n if request.user.is_authenticated:\n form=SearchForm()\n context = {}\n books=Books.objects.all()\n context[\"books\"]=books\n context['form']=form\n if request.method==\"POST\":\n form=SearchForm(request.POST)\n if form.is_valid():\n book_name=form.cleaned_data[\"book_name\"]\n books=Books.objects.filter(book_name__contains=book_name)\n context['books']=books\n return render(request,\"book_list.html\",context)\n else:\n context['form']=form\n return render(request, \"book_list.html\", context)\n return render(request, \"book_list.html\", context)\n else:\n return redirect('singn')\n\ndef book_details(request,id):\n if request.user.is_authenticated:\n book=Books.objects.get(id=id)\n context = {}\n context[\"book\"]=book\n return render(request,\"book_details.html\",context)\n else:\n return redirect('singn')\n\n\ndef remove_book(request,id):\n if request.user.is_authenticated:\n book=Books.objects.get(id=id)\n book.delete()\n return redirect(\"books\")\n else:\n return redirect('singn')\ndef update_book(request,id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n form=BookCreateModelForm(instance=book)\n\n # form=BookCreateModelForm(initial={\n # \"book_name\":book.book_name,\n # \"author\":book.author,\n # \"category\":book.category,\n # \"price\":book.price,\n # \"number_copies\":book.copies})\n context = {}\n context['form']=form\n if request.method==\"POST\":\n book = Books.objects.get(id=id)\n form=BookCreateModelForm(instance=book,data=request.POST)\n if form.is_valid():\n form.save()\n # form=BookCreateModelForm(request.POST)\n #\n # if form.is_valid():\n # book.book_name=form.cleaned_data[\"book_name\"]\n # book.author=form.cleaned_data[\"author\"]\n # book.category=form.cleaned_data[\"category\"]\n # book.price=form.cleaned_data[\"price\"]\n # book.copies=form.cleaned_data[\"number_copies\"]\n # book.save()\n return redirect(\"books\")\n else:\n form=BookCreateModelForm(request.POST)\n context[\"form\"]=form\n print(form)\n return render(request, \"edit.html\", context)\n return render(request,\"edit.html\",context)\n else:\n return redirect('singn')\n\n\ndef create_account(request):\n form=RegistrationForm()\n context={'form':form}\n if request.method==\"POST\":\n form=RegistrationForm(request.POST)\n if form.is_valid():\n form.save()\n print(\"account created\")\n return redirect(\"singn\")\n else:\n context[\"form\"]=form\n return render(request, \"createaccount.html\", context)\n\n return render(request,\"createaccount.html\",context)\n\n\ndef singn_in(request):\n form=SignInForm()\n context={'form':form}\n if request.method==\"POST\":\n form=SignInForm(request.POST)\n if form.is_valid():\n username=form.cleaned_data[\"username\"]\n password=form.cleaned_data[\"password\"]\n user=authenticate(request,username=username,password=password)\n if user:\n login(request,user)\n return redirect(\"index\")\n else:\n context['form']=form\n return render(request, \"signin.html\", context)\n\n\n \n return render(request,\"signin.html\",context)\n\n\ndef signout(request):\n if request.user.is_authenticated:\n logout(request)\n 
return redirect(\"singn\")\n else:\n return redirect('singn')\n\n\n\n\n\n\n\n\n\n\n", "step-ids": [ 4, 6, 7, 8, 10 ] }
[ 4, 6, 7, 8, 10 ]
from itertools import takewhile

import numpy as np

from .rrt import TreeNode
from .trajectory.linear import get_default_limits, solve_linear
from .trajectory.retime import spline_duration
from .utils import argmin, negate, circular_difference, UNBOUNDED_LIMITS, get_distance, get_delta

ASYMETRIC = True


def asymmetric_extend(q1, q2, extend_fn, backward=False):
    if backward and ASYMETRIC:
        return reversed(list(extend_fn(q2, q1))) # Forward model
    return extend_fn(q1, q2)


def extend_towards(tree, target, distance_fn, extend_fn, collision_fn, swap=False, tree_frequency=1, **kwargs):
    assert tree_frequency >= 1
    last = argmin(lambda n: distance_fn(n.config, target), tree)
    extend = list(asymmetric_extend(last.config, target, extend_fn, backward=swap))
    safe = list(takewhile(negate(collision_fn), extend))
    for i, q in enumerate(safe):
        if (i % tree_frequency == 0) or (i == len(safe) - 1):
            last = TreeNode(q, parent=last)
            tree.append(last)
    success = len(extend) == len(safe)
    return last, success

##################################################

def calculate_radius(d=2):
    # TODO: unify with get_threshold_fn
    # Sampling-based Algorithms for Optimal Motion Planning
    # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.419.5503&rep=rep1&type=pdf
    # https://en.wikipedia.org/wiki/Volume_of_an_n-ball
    interval = (1 - 0)
    vol_free = interval ** d
    radius = 1./2
    vol_ball = np.pi * (radius ** d)
    gamma = 2 * ((1 + 1. / d) * (vol_free / vol_ball)) ** (1. / d)
    # threshold = gamma * (math.log(n) / n) ** (1. / d)
    return gamma


def default_weights(conf, weights=None, scale=1.):
    if weights is not None:
        return weights
    d = len(conf)
    weights = scale*np.ones(d)
    return weights


def get_embed_fn(weights):
    weights = np.array(weights)
    return lambda q: weights * q


def get_distance_fn(weights, p_norm=2):
    embed_fn = get_embed_fn(weights)
    return lambda q1, q2: np.linalg.norm(embed_fn(q2) - embed_fn(q1), ord=p_norm)


def distance_fn_from_extend_fn(extend_fn):
    # TODO: can compute cost between waypoints from extend_fn
    def distance_fn(q1, q2):
        path = list(extend_fn(q1, q2)) # TODO: cache
        return len(path) # TODO: subtract endpoints?
    return distance_fn

##################################################

def get_difference_fn(circular={}):
    def fn(q2, q1):
        return tuple(circular_difference(v2, v1, interval=circular.get(i, UNBOUNDED_LIMITS))
                     for i, (v2, v1) in enumerate(zip(q2, q1)))
    return fn


def get_cost_fn(distance_fn=get_distance, constant=0., coefficient=1.):
    def fn(q1, q2):
        return constant + coefficient*distance_fn(q1, q2)
    return fn


def get_duration_fn(difference_fn=get_delta, t_constant=0., t_min=0., **kwargs):
    v_max, a_max = get_default_limits(d=None, **kwargs)
    def fn(q1, q2):
        # TODO: be careful that not colinear with other waypoints
        difference = difference_fn(q1, q2)
        t_transit = 0.
        if not np.allclose(np.zeros(len(difference)), difference, atol=1e-6, rtol=0):
            t_transit = solve_linear(difference, v_max, a_max, only_duration=True)
            assert t_transit is not None
        #curve = solve_linear(difference, v_max, a_max)
        #t_transit = spline_duration(curve)
        t = t_constant + t_transit
        return max(t_min, t) # TODO: clip function
    return fn
normal
{ "blob_id": "84febcc599aa97858ded3b6f803b6b76960878d4", "index": 7188, "step-1": "<mask token>\n\n\ndef asymmetric_extend(q1, q2, extend_fn, backward=False):\n if backward and ASYMETRIC:\n return reversed(list(extend_fn(q2, q1)))\n return extend_fn(q1, q2)\n\n\n<mask token>\n\n\ndef calculate_radius(d=2):\n interval = 1 - 0\n vol_free = interval ** d\n radius = 1.0 / 2\n vol_ball = np.pi * radius ** d\n gamma = 2 * ((1 + 1.0 / d) * (vol_free / vol_ball)) ** (1.0 / d)\n return gamma\n\n\ndef default_weights(conf, weights=None, scale=1.0):\n if weights is not None:\n return weights\n d = len(conf)\n weights = scale * np.ones(d)\n return weights\n\n\n<mask token>\n\n\ndef get_distance_fn(weights, p_norm=2):\n embed_fn = get_embed_fn(weights)\n return lambda q1, q2: np.linalg.norm(embed_fn(q2) - embed_fn(q1), ord=\n p_norm)\n\n\ndef distance_fn_from_extend_fn(extend_fn):\n\n def distance_fn(q1, q2):\n path = list(extend_fn(q1, q2))\n return len(path)\n return distance_fn\n\n\n<mask token>\n\n\ndef get_cost_fn(distance_fn=get_distance, constant=0.0, coefficient=1.0):\n\n def fn(q1, q2):\n return constant + coefficient * distance_fn(q1, q2)\n return fn\n\n\ndef get_duration_fn(difference_fn=get_delta, t_constant=0.0, t_min=0.0, **\n kwargs):\n v_max, a_max = get_default_limits(d=None, **kwargs)\n\n def fn(q1, q2):\n difference = difference_fn(q1, q2)\n t_transit = 0.0\n if not np.allclose(np.zeros(len(difference)), difference, atol=\n 1e-06, rtol=0):\n t_transit = solve_linear(difference, v_max, a_max,\n only_duration=True)\n assert t_transit is not None\n t = t_constant + t_transit\n return max(t_min, t)\n return fn\n", "step-2": "<mask token>\n\n\ndef asymmetric_extend(q1, q2, extend_fn, backward=False):\n if backward and ASYMETRIC:\n return reversed(list(extend_fn(q2, q1)))\n return extend_fn(q1, q2)\n\n\n<mask token>\n\n\ndef calculate_radius(d=2):\n interval = 1 - 0\n vol_free = interval ** d\n radius = 1.0 / 2\n vol_ball = np.pi * radius ** d\n gamma = 2 * ((1 + 1.0 / d) * (vol_free / vol_ball)) ** (1.0 / d)\n return gamma\n\n\ndef default_weights(conf, weights=None, scale=1.0):\n if weights is not None:\n return weights\n d = len(conf)\n weights = scale * np.ones(d)\n return weights\n\n\n<mask token>\n\n\ndef get_distance_fn(weights, p_norm=2):\n embed_fn = get_embed_fn(weights)\n return lambda q1, q2: np.linalg.norm(embed_fn(q2) - embed_fn(q1), ord=\n p_norm)\n\n\ndef distance_fn_from_extend_fn(extend_fn):\n\n def distance_fn(q1, q2):\n path = list(extend_fn(q1, q2))\n return len(path)\n return distance_fn\n\n\ndef get_difference_fn(circular={}):\n\n def fn(q2, q1):\n return tuple(circular_difference(v2, v1, interval=circular.get(i,\n UNBOUNDED_LIMITS)) for i, (v2, v1) in enumerate(zip(q2, q1)))\n return fn\n\n\ndef get_cost_fn(distance_fn=get_distance, constant=0.0, coefficient=1.0):\n\n def fn(q1, q2):\n return constant + coefficient * distance_fn(q1, q2)\n return fn\n\n\ndef get_duration_fn(difference_fn=get_delta, t_constant=0.0, t_min=0.0, **\n kwargs):\n v_max, a_max = get_default_limits(d=None, **kwargs)\n\n def fn(q1, q2):\n difference = difference_fn(q1, q2)\n t_transit = 0.0\n if not np.allclose(np.zeros(len(difference)), difference, atol=\n 1e-06, rtol=0):\n t_transit = solve_linear(difference, v_max, a_max,\n only_duration=True)\n assert t_transit is not None\n t = t_constant + t_transit\n return max(t_min, t)\n return fn\n", "step-3": "<mask token>\n\n\ndef asymmetric_extend(q1, q2, extend_fn, backward=False):\n if backward and ASYMETRIC:\n return 
reversed(list(extend_fn(q2, q1)))\n return extend_fn(q1, q2)\n\n\ndef extend_towards(tree, target, distance_fn, extend_fn, collision_fn, swap\n =False, tree_frequency=1, **kwargs):\n assert tree_frequency >= 1\n last = argmin(lambda n: distance_fn(n.config, target), tree)\n extend = list(asymmetric_extend(last.config, target, extend_fn,\n backward=swap))\n safe = list(takewhile(negate(collision_fn), extend))\n for i, q in enumerate(safe):\n if i % tree_frequency == 0 or i == len(safe) - 1:\n last = TreeNode(q, parent=last)\n tree.append(last)\n success = len(extend) == len(safe)\n return last, success\n\n\ndef calculate_radius(d=2):\n interval = 1 - 0\n vol_free = interval ** d\n radius = 1.0 / 2\n vol_ball = np.pi * radius ** d\n gamma = 2 * ((1 + 1.0 / d) * (vol_free / vol_ball)) ** (1.0 / d)\n return gamma\n\n\ndef default_weights(conf, weights=None, scale=1.0):\n if weights is not None:\n return weights\n d = len(conf)\n weights = scale * np.ones(d)\n return weights\n\n\ndef get_embed_fn(weights):\n weights = np.array(weights)\n return lambda q: weights * q\n\n\ndef get_distance_fn(weights, p_norm=2):\n embed_fn = get_embed_fn(weights)\n return lambda q1, q2: np.linalg.norm(embed_fn(q2) - embed_fn(q1), ord=\n p_norm)\n\n\ndef distance_fn_from_extend_fn(extend_fn):\n\n def distance_fn(q1, q2):\n path = list(extend_fn(q1, q2))\n return len(path)\n return distance_fn\n\n\ndef get_difference_fn(circular={}):\n\n def fn(q2, q1):\n return tuple(circular_difference(v2, v1, interval=circular.get(i,\n UNBOUNDED_LIMITS)) for i, (v2, v1) in enumerate(zip(q2, q1)))\n return fn\n\n\ndef get_cost_fn(distance_fn=get_distance, constant=0.0, coefficient=1.0):\n\n def fn(q1, q2):\n return constant + coefficient * distance_fn(q1, q2)\n return fn\n\n\ndef get_duration_fn(difference_fn=get_delta, t_constant=0.0, t_min=0.0, **\n kwargs):\n v_max, a_max = get_default_limits(d=None, **kwargs)\n\n def fn(q1, q2):\n difference = difference_fn(q1, q2)\n t_transit = 0.0\n if not np.allclose(np.zeros(len(difference)), difference, atol=\n 1e-06, rtol=0):\n t_transit = solve_linear(difference, v_max, a_max,\n only_duration=True)\n assert t_transit is not None\n t = t_constant + t_transit\n return max(t_min, t)\n return fn\n", "step-4": "from itertools import takewhile\nimport numpy as np\nfrom .rrt import TreeNode\nfrom .trajectory.linear import get_default_limits, solve_linear\nfrom .trajectory.retime import spline_duration\nfrom .utils import argmin, negate, circular_difference, UNBOUNDED_LIMITS, get_distance, get_delta\nASYMETRIC = True\n\n\ndef asymmetric_extend(q1, q2, extend_fn, backward=False):\n if backward and ASYMETRIC:\n return reversed(list(extend_fn(q2, q1)))\n return extend_fn(q1, q2)\n\n\ndef extend_towards(tree, target, distance_fn, extend_fn, collision_fn, swap\n =False, tree_frequency=1, **kwargs):\n assert tree_frequency >= 1\n last = argmin(lambda n: distance_fn(n.config, target), tree)\n extend = list(asymmetric_extend(last.config, target, extend_fn,\n backward=swap))\n safe = list(takewhile(negate(collision_fn), extend))\n for i, q in enumerate(safe):\n if i % tree_frequency == 0 or i == len(safe) - 1:\n last = TreeNode(q, parent=last)\n tree.append(last)\n success = len(extend) == len(safe)\n return last, success\n\n\ndef calculate_radius(d=2):\n interval = 1 - 0\n vol_free = interval ** d\n radius = 1.0 / 2\n vol_ball = np.pi * radius ** d\n gamma = 2 * ((1 + 1.0 / d) * (vol_free / vol_ball)) ** (1.0 / d)\n return gamma\n\n\ndef default_weights(conf, weights=None, scale=1.0):\n if weights is 
not None:\n return weights\n d = len(conf)\n weights = scale * np.ones(d)\n return weights\n\n\ndef get_embed_fn(weights):\n weights = np.array(weights)\n return lambda q: weights * q\n\n\ndef get_distance_fn(weights, p_norm=2):\n embed_fn = get_embed_fn(weights)\n return lambda q1, q2: np.linalg.norm(embed_fn(q2) - embed_fn(q1), ord=\n p_norm)\n\n\ndef distance_fn_from_extend_fn(extend_fn):\n\n def distance_fn(q1, q2):\n path = list(extend_fn(q1, q2))\n return len(path)\n return distance_fn\n\n\ndef get_difference_fn(circular={}):\n\n def fn(q2, q1):\n return tuple(circular_difference(v2, v1, interval=circular.get(i,\n UNBOUNDED_LIMITS)) for i, (v2, v1) in enumerate(zip(q2, q1)))\n return fn\n\n\ndef get_cost_fn(distance_fn=get_distance, constant=0.0, coefficient=1.0):\n\n def fn(q1, q2):\n return constant + coefficient * distance_fn(q1, q2)\n return fn\n\n\ndef get_duration_fn(difference_fn=get_delta, t_constant=0.0, t_min=0.0, **\n kwargs):\n v_max, a_max = get_default_limits(d=None, **kwargs)\n\n def fn(q1, q2):\n difference = difference_fn(q1, q2)\n t_transit = 0.0\n if not np.allclose(np.zeros(len(difference)), difference, atol=\n 1e-06, rtol=0):\n t_transit = solve_linear(difference, v_max, a_max,\n only_duration=True)\n assert t_transit is not None\n t = t_constant + t_transit\n return max(t_min, t)\n return fn\n", "step-5": "from itertools import takewhile\n\nimport numpy as np\n\nfrom .rrt import TreeNode\nfrom .trajectory.linear import get_default_limits, solve_linear\nfrom .trajectory.retime import spline_duration\nfrom .utils import argmin, negate, circular_difference, UNBOUNDED_LIMITS, get_distance, get_delta\n\nASYMETRIC = True\n\n\ndef asymmetric_extend(q1, q2, extend_fn, backward=False):\n if backward and ASYMETRIC:\n return reversed(list(extend_fn(q2, q1))) # Forward model\n return extend_fn(q1, q2)\n\n\ndef extend_towards(tree, target, distance_fn, extend_fn, collision_fn, swap=False, tree_frequency=1, **kwargs):\n assert tree_frequency >= 1\n last = argmin(lambda n: distance_fn(n.config, target), tree)\n extend = list(asymmetric_extend(last.config, target, extend_fn, backward=swap))\n safe = list(takewhile(negate(collision_fn), extend))\n for i, q in enumerate(safe):\n if (i % tree_frequency == 0) or (i == len(safe) - 1):\n last = TreeNode(q, parent=last)\n tree.append(last)\n success = len(extend) == len(safe)\n return last, success\n\n##################################################\n\ndef calculate_radius(d=2):\n # TODO: unify with get_threshold_fn\n # Sampling-based Algorithms for Optimal Motion Planning\n # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.419.5503&rep=rep1&type=pdf\n # https://en.wikipedia.org/wiki/Volume_of_an_n-ball\n interval = (1 - 0)\n vol_free = interval ** d\n radius = 1./2\n vol_ball = np.pi * (radius ** d)\n gamma = 2 * ((1 + 1. / d) * (vol_free / vol_ball)) ** (1. / d)\n # threshold = gamma * (math.log(n) / n) ** (1. 
/ d)\n return gamma\n\n\ndef default_weights(conf, weights=None, scale=1.):\n if weights is not None:\n return weights\n d = len(conf)\n weights = scale*np.ones(d)\n return weights\n\n\ndef get_embed_fn(weights):\n weights = np.array(weights)\n return lambda q: weights * q\n\n\ndef get_distance_fn(weights, p_norm=2):\n embed_fn = get_embed_fn(weights)\n return lambda q1, q2: np.linalg.norm(embed_fn(q2) - embed_fn(q1), ord=p_norm)\n\n\ndef distance_fn_from_extend_fn(extend_fn):\n # TODO: can compute cost between waypoints from extend_fn\n def distance_fn(q1, q2):\n path = list(extend_fn(q1, q2)) # TODO: cache\n return len(path) # TODO: subtract endpoints?\n return distance_fn\n\n##################################################\n\ndef get_difference_fn(circular={}):\n def fn(q2, q1):\n return tuple(circular_difference(v2, v1, interval=circular.get(i, UNBOUNDED_LIMITS))\n for i, (v2, v1) in enumerate(zip(q2, q1)))\n return fn\n\n\ndef get_cost_fn(distance_fn=get_distance, constant=0., coefficient=1.):\n def fn(q1, q2):\n return constant + coefficient*distance_fn(q1, q2)\n return fn\n\n\ndef get_duration_fn(difference_fn=get_delta, t_constant=0., t_min=0., **kwargs):\n v_max, a_max = get_default_limits(d=None, **kwargs)\n def fn(q1, q2):\n # TODO: be careful that not colinear with other waypoints\n difference = difference_fn(q1, q2)\n t_transit = 0.\n if not np.allclose(np.zeros(len(difference)), difference, atol=1e-6, rtol=0):\n t_transit = solve_linear(difference, v_max, a_max, only_duration=True)\n assert t_transit is not None\n #curve = solve_linear(difference, v_max, a_max)\n #t_transit = spline_duration(curve)\n t = t_constant + t_transit\n return max(t_min, t) # TODO: clip function\n return fn", "step-ids": [ 7, 8, 10, 12, 13 ] }
[ 7, 8, 10, 12, 13 ]
from __future__ import absolute_import
from talin.quotations import register_xpath_extensions


def init():
    register_xpath_extensions()
normal
{ "blob_id": "c218428908c28a8c65bd72e66dcddaf7db1909d7", "index": 4325, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef init():\n register_xpath_extensions()\n", "step-3": "from __future__ import absolute_import\nfrom talin.quotations import register_xpath_extensions\n\n\ndef init():\n register_xpath_extensions()\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
default_app_config = 'assistant.additionalpage.apps.AdditionalPageAppConfig'
<|reserved_special_token_1|>
default_app_config = "assistant.additionalpage.apps.AdditionalPageAppConfig"
flexible
{ "blob_id": "0e2c71ab4f194af3c2ee65c2cbd6f36921eb587e", "index": 2079, "step-1": "<mask token>\n", "step-2": "default_app_config = 'assistant.additionalpage.apps.AdditionalPageAppConfig'\n", "step-3": "default_app_config = \"assistant.additionalpage.apps.AdditionalPageAppConfig\"\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
import pyttsx
engine = pyttsx.init()
rate = engine.getProperty('rate')
engine.setProperty('rate', rate-55)
engine.say('Hello , whats your name ?');
engine.say('I am mr. robot. What news would you like to listen to today ?');
#engine.say('Sally sells seashells by the seashore.')
#engine.say('Sally sells seashells by the seashore.')

#voices = engine.getProperty('voices')
#for voice in voices:
#    engine.setProperty('voice', voice.id)
#    engine.say('The quick brown fox jumped over the lazy dog.')

#engine.say('The quick brown fox jumped over the lazy dog.')
engine.runAndWait()
normal
{ "blob_id": "d638194a37dc503b7dfb5410abf264be67c3a4f0", "index": 4126, "step-1": "<mask token>\n", "step-2": "<mask token>\nengine.setProperty('rate', rate - 55)\nengine.say('Hello , whats your name ?')\nengine.say('I am mr. robot. What news would you like to listen to today ?')\nengine.runAndWait()\n", "step-3": "<mask token>\nengine = pyttsx.init()\nrate = engine.getProperty('rate')\nengine.setProperty('rate', rate - 55)\nengine.say('Hello , whats your name ?')\nengine.say('I am mr. robot. What news would you like to listen to today ?')\nengine.runAndWait()\n", "step-4": "import pyttsx\nengine = pyttsx.init()\nrate = engine.getProperty('rate')\nengine.setProperty('rate', rate - 55)\nengine.say('Hello , whats your name ?')\nengine.say('I am mr. robot. What news would you like to listen to today ?')\nengine.runAndWait()\n", "step-5": "import pyttsx\nengine = pyttsx.init()\nrate = engine.getProperty('rate')\nengine.setProperty('rate', rate-55)\nengine.say('Hello , whats your name ?');\nengine.say('I am mr. robot. What news would you like to listen to today ?');\n#engine.say('Sally sells seashells by the seashore.')\n#engine.say('Sally sells seashells by the seashore.')\n\n#voices = engine.getProperty('voices')\n#for voice in voices:\n# engine.setProperty('voice', voice.id)\n# engine.say('The quick brown fox jumped over the lazy dog.')\n\n#engine.say('The quick brown fox jumped over the lazy dog.')\nengine.runAndWait()\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class Migration(migrations.Migration):
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>


class Migration(migrations.Migration):
    dependencies = [('rate', '0003_auto_20210421_1316')]
    operations = [migrations.AlterField(model_name='song', name=
        'overall_rating', field=models.FloatField(default=0)), migrations.
        AlterField(model_name='song', name='rating_count', field=models.
        FloatField(default=0))]
<|reserved_special_token_1|>
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [('rate', '0003_auto_20210421_1316')]
    operations = [migrations.AlterField(model_name='song', name=
        'overall_rating', field=models.FloatField(default=0)), migrations.
        AlterField(model_name='song', name='rating_count', field=models.
        FloatField(default=0))]
<|reserved_special_token_1|>
# Generated by Django 3.2 on 2021-04-21 13:21

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rate', '0003_auto_20210421_1316'),
    ]

    operations = [
        migrations.AlterField(
            model_name='song',
            name='overall_rating',
            field=models.FloatField(default=0),
        ),
        migrations.AlterField(
            model_name='song',
            name='rating_count',
            field=models.FloatField(default=0),
        ),
    ]
flexible
{ "blob_id": "d46cda5354640e1c87432d39a2e949d6db034edc", "index": 6413, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('rate', '0003_auto_20210421_1316')]\n operations = [migrations.AlterField(model_name='song', name=\n 'overall_rating', field=models.FloatField(default=0)), migrations.\n AlterField(model_name='song', name='rating_count', field=models.\n FloatField(default=0))]\n", "step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('rate', '0003_auto_20210421_1316')]\n operations = [migrations.AlterField(model_name='song', name=\n 'overall_rating', field=models.FloatField(default=0)), migrations.\n AlterField(model_name='song', name='rating_count', field=models.\n FloatField(default=0))]\n", "step-5": "# Generated by Django 3.2 on 2021-04-21 13:21\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('rate', '0003_auto_20210421_1316'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='song',\n name='overall_rating',\n field=models.FloatField(default=0),\n ),\n migrations.AlterField(\n model_name='song',\n name='rating_count',\n field=models.FloatField(default=0),\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# Generated by Django 3.1.1 on 2021-03-25 14:42

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name="Experiment",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=255)),
                ("description", models.TextField()),
                ("is_test", models.BooleanField(default=False)),
                ("type", models.CharField(max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("deleted_at", models.DateTimeField(null=True)),
            ],
            options={
                "db_table": "experiment",
            },
        ),
        migrations.CreateModel(
            name="ExperimentPlio",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("split_percentage", models.CharField(max_length=255)),
                (
                    "experiment",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="experiments.experiment",
                    ),
                ),
            ],
            options={
                "db_table": "experiment_plio",
            },
        ),
    ]
normal
{ "blob_id": "b308d81fb8eab9f52aa0ad4f88e25d6757ef703a", "index": 1761, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Experiment', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=\n 255)), ('description', models.TextField()), ('is_test', models.\n BooleanField(default=False)), ('type', models.CharField(max_length=\n 255)), ('created_at', models.DateTimeField(auto_now_add=True)), (\n 'updated_at', models.DateTimeField(auto_now=True)), ('deleted_at',\n models.DateTimeField(null=True))], options={'db_table':\n 'experiment'}), migrations.CreateModel(name='ExperimentPlio',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('split_percentage', models.\n CharField(max_length=255)), ('experiment', models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE, to=\n 'experiments.experiment'))], options={'db_table': 'experiment_plio'})]\n", "step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Experiment', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=\n 255)), ('description', models.TextField()), ('is_test', models.\n BooleanField(default=False)), ('type', models.CharField(max_length=\n 255)), ('created_at', models.DateTimeField(auto_now_add=True)), (\n 'updated_at', models.DateTimeField(auto_now=True)), ('deleted_at',\n models.DateTimeField(null=True))], options={'db_table':\n 'experiment'}), migrations.CreateModel(name='ExperimentPlio',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('split_percentage', models.\n CharField(max_length=255)), ('experiment', models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE, to=\n 'experiments.experiment'))], options={'db_table': 'experiment_plio'})]\n", "step-5": "# Generated by Django 3.1.1 on 2021-03-25 14:42\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = []\n\n operations = [\n migrations.CreateModel(\n name=\"Experiment\",\n fields=[\n (\n \"id\",\n models.AutoField(\n auto_created=True,\n primary_key=True,\n serialize=False,\n verbose_name=\"ID\",\n ),\n ),\n (\"name\", models.CharField(max_length=255)),\n (\"description\", models.TextField()),\n (\"is_test\", models.BooleanField(default=False)),\n (\"type\", models.CharField(max_length=255)),\n (\"created_at\", models.DateTimeField(auto_now_add=True)),\n (\"updated_at\", models.DateTimeField(auto_now=True)),\n (\"deleted_at\", models.DateTimeField(null=True)),\n ],\n options={\n \"db_table\": \"experiment\",\n },\n ),\n migrations.CreateModel(\n name=\"ExperimentPlio\",\n fields=[\n (\n \"id\",\n models.AutoField(\n auto_created=True,\n primary_key=True,\n serialize=False,\n verbose_name=\"ID\",\n ),\n ),\n (\"split_percentage\", models.CharField(max_length=255)),\n (\n \"experiment\",\n models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE,\n 
to=\"experiments.experiment\",\n ),\n ),\n ],\n options={\n \"db_table\": \"experiment_plio\",\n },\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# Generated by Django 3.2.5 on 2021-08-05 23:59

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('lectures', '0003_auto_20210805_1954'),
    ]

    operations = [
        migrations.RenameField(
            model_name='lecture',
            old_name='is_requird',
            new_name='is_required',
        ),
    ]
normal
{ "blob_id": "e5bf4518f3834c73c3743d4c711a8d1a4ce3b944", "index": 6788, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('lectures', '0003_auto_20210805_1954')]\n operations = [migrations.RenameField(model_name='lecture', old_name=\n 'is_requird', new_name='is_required')]\n", "step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('lectures', '0003_auto_20210805_1954')]\n operations = [migrations.RenameField(model_name='lecture', old_name=\n 'is_requird', new_name='is_required')]\n", "step-5": "# Generated by Django 3.2.5 on 2021-08-05 23:59\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('lectures', '0003_auto_20210805_1954'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='lecture',\n old_name='is_requird',\n new_name='is_required',\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# Approximate inference in a small Bayesian network via logic sampling
# (rejection sampling) and likelihood weighting.

import numpy as np
from functools import reduce

class ProbabilityNetwork:
    def __init__(self, n, edges, probs):
        self.nodes = list(range(n))
        self.edges = edges
        self.probs = probs  # probs(node, values) -> P(node is True | parents)

    def parents(self, node):
        return [a for a, b in self.edges if b == node]

    def ancestralOrder(self):
        # Topological order: a node is appended once every parent is placed.
        order = []
        while len(order) < len(self.nodes):
            for node in self.nodes:
                if node in order:
                    continue
                if not any((edge[0] not in order) and (edge[1] == node) for edge in self.edges):
                    order.append(node)
        return order

    def logicSampling(self, evidences, targetNode, niters=10000000):
        # Sample every node in ancestral order; reject samples that
        # contradict the evidence, then estimate P(target | evidence).
        ancestralOrder = self.ancestralOrder()
        hits = 0
        total = 0

        for it in range(niters):
            fail = False
            values = dict([[i, None] for i in self.nodes])  # True: present. False: not present
            for node in ancestralOrder:
                pNode = self.probs(node, values)
                values[node] = np.random.random() < pNode
                if node in evidences and evidences[node] != values[node]:
                    fail = True
                    break

            if fail:
                continue

            total += 1
            if values[targetNode]:
                hits += 1

        return hits / total

    def weightedLikelihood(self, evidences, targetNode, niters=10000000):
        # Clamp the evidence nodes, sample the rest, and weight each sample
        # by the likelihood of the evidence given the sampled values.
        evidenceNodes = evidences.keys()
        ancestralOrder = [node for node in self.ancestralOrder() if node not in evidenceNodes]
        cumsumHit = 0
        cumsumTotal = 0
        for it in range(niters):
            values = dict([[i, None] for i in ancestralOrder])  # True: present. False: not present
            for evNode in evidenceNodes:
                values[evNode] = evidences[evNode]

            for node in ancestralOrder:
                pNode = self.probs(node, values)
                values[node] = np.random.random() < pNode

            currProb = reduce(lambda x, y: x * y,
                              [self.probs(i, values) if values[i] else 1 - self.probs(i, values)
                               for i in evidenceNodes])
            if values[targetNode]:
                cumsumHit += currProb
            cumsumTotal += currProb

        return cumsumHit / cumsumTotal


# Example network: 0 -> {1, 2}, 1 -> {3, 4}, 2 -> {4, 5}.
edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 4), (2, 5)]

def probs(node, evidences):
    # Conditional probability tables for the six-node example.
    if node == 0: return 0.3
    elif node == 1:
        if evidences[0]: return 0.9
        else: return 0.2
    elif node == 2:
        if evidences[0]: return 0.75
        else: return 0.25
    elif node == 3:
        if evidences[1]: return 0.6
        else: return 0.1
    elif node == 4:
        if evidences[1] and evidences[2]: return 0.8
        elif evidences[1] and not evidences[2]: return 0.6
        elif not evidences[1] and evidences[2]: return 0.5
        else: return 0
    elif node == 5:
        if evidences[2]: return 0.4
        else: return 0.1

pn = ProbabilityNetwork(6, edges, probs)

evidences = dict([[3, True], [4, True], [5, False]])

print(pn.logicSampling(evidences, 0))
print(pn.weightedLikelihood(evidences, 0))
normal
{ "blob_id": "24fa41f916b54345e4647354f972bd22e130decf", "index": 4016, "step-1": "<mask token>\n\n\nclass ProbabilityNetwork:\n\n def __init__(self, n, edges, probs):\n self.nodes = list(range(n))\n self.edges = edges\n self.probs = probs\n\n def parents(self, node):\n return [a for a, b in edges if b == node]\n\n def ancestralOrder(self):\n order = []\n while len(order) < len(self.nodes):\n for node in self.nodes:\n if node in order:\n continue\n if not any(edge[0] not in order and edge[1] == node for\n edge in self.edges):\n order.append(node)\n return order\n\n def logicSampling(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = self.ancestralOrder()\n hits = 0\n total = 0\n for it in range(niters):\n fail = False\n values = dict([[i, None] for i in self.nodes])\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n if node in evidences and evidences[node] != values[node]:\n fail = True\n break\n if fail:\n continue\n total += 1\n if values[targetNode]:\n hits += 1\n return hits / total\n\n def weightedLikelihood(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = [node for node in self.ancestralOrder() if node not in\n evidenceNodes]\n cumsumHit = 0\n cumsumTotal = 0\n hits = 0\n for it in range(niters):\n values = dict([[i, None] for i in ancestralOrder])\n for evNode in evidenceNodes:\n values[evNode] = evidences[evNode]\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n currProb = reduce(lambda x, y: x * y, [(self.probs(i, values) if\n values[i] else 1 - self.probs(i, values)) for i in\n evidenceNodes])\n if values[targetNode]:\n cumsumHit += currProb\n cumsumTotal += currProb\n return cumsumHit / cumsumTotal\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass ProbabilityNetwork:\n\n def __init__(self, n, edges, probs):\n self.nodes = list(range(n))\n self.edges = edges\n self.probs = probs\n\n def parents(self, node):\n return [a for a, b in edges if b == node]\n\n def ancestralOrder(self):\n order = []\n while len(order) < len(self.nodes):\n for node in self.nodes:\n if node in order:\n continue\n if not any(edge[0] not in order and edge[1] == node for\n edge in self.edges):\n order.append(node)\n return order\n\n def logicSampling(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = self.ancestralOrder()\n hits = 0\n total = 0\n for it in range(niters):\n fail = False\n values = dict([[i, None] for i in self.nodes])\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n if node in evidences and evidences[node] != values[node]:\n fail = True\n break\n if fail:\n continue\n total += 1\n if values[targetNode]:\n hits += 1\n return hits / total\n\n def weightedLikelihood(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = [node for node in self.ancestralOrder() if node not in\n evidenceNodes]\n cumsumHit = 0\n cumsumTotal = 0\n hits = 0\n for it in range(niters):\n values = dict([[i, None] for i in ancestralOrder])\n for evNode in evidenceNodes:\n values[evNode] = evidences[evNode]\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n currProb = reduce(lambda x, y: x 
* y, [(self.probs(i, values) if\n values[i] else 1 - self.probs(i, values)) for i in\n evidenceNodes])\n if values[targetNode]:\n cumsumHit += currProb\n cumsumTotal += currProb\n return cumsumHit / cumsumTotal\n\n\n<mask token>\n\n\ndef probs(node, evidences):\n if node == 0:\n return 0.3\n elif node == 1:\n if evidences[0]:\n return 0.9\n else:\n return 0.2\n elif node == 2:\n if evidences[0]:\n return 0.75\n else:\n return 0.25\n elif node == 3:\n if evidences[1]:\n return 0.6\n else:\n return 0.1\n elif node == 4:\n if evidences[1] and evidences[2]:\n return 0.8\n elif evidences[1] and not evidences[2]:\n return 0.6\n elif not evidences[1] and evidences[2]:\n return 0.5\n else:\n return 0\n elif node == 5:\n if evidences[2]:\n return 0.4\n else:\n return 0.1\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass ProbabilityNetwork:\n\n def __init__(self, n, edges, probs):\n self.nodes = list(range(n))\n self.edges = edges\n self.probs = probs\n\n def parents(self, node):\n return [a for a, b in edges if b == node]\n\n def ancestralOrder(self):\n order = []\n while len(order) < len(self.nodes):\n for node in self.nodes:\n if node in order:\n continue\n if not any(edge[0] not in order and edge[1] == node for\n edge in self.edges):\n order.append(node)\n return order\n\n def logicSampling(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = self.ancestralOrder()\n hits = 0\n total = 0\n for it in range(niters):\n fail = False\n values = dict([[i, None] for i in self.nodes])\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n if node in evidences and evidences[node] != values[node]:\n fail = True\n break\n if fail:\n continue\n total += 1\n if values[targetNode]:\n hits += 1\n return hits / total\n\n def weightedLikelihood(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = [node for node in self.ancestralOrder() if node not in\n evidenceNodes]\n cumsumHit = 0\n cumsumTotal = 0\n hits = 0\n for it in range(niters):\n values = dict([[i, None] for i in ancestralOrder])\n for evNode in evidenceNodes:\n values[evNode] = evidences[evNode]\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n currProb = reduce(lambda x, y: x * y, [(self.probs(i, values) if\n values[i] else 1 - self.probs(i, values)) for i in\n evidenceNodes])\n if values[targetNode]:\n cumsumHit += currProb\n cumsumTotal += currProb\n return cumsumHit / cumsumTotal\n\n\nedges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 4), (2, 5)]\n\n\ndef probs(node, evidences):\n if node == 0:\n return 0.3\n elif node == 1:\n if evidences[0]:\n return 0.9\n else:\n return 0.2\n elif node == 2:\n if evidences[0]:\n return 0.75\n else:\n return 0.25\n elif node == 3:\n if evidences[1]:\n return 0.6\n else:\n return 0.1\n elif node == 4:\n if evidences[1] and evidences[2]:\n return 0.8\n elif evidences[1] and not evidences[2]:\n return 0.6\n elif not evidences[1] and evidences[2]:\n return 0.5\n else:\n return 0\n elif node == 5:\n if evidences[2]:\n return 0.4\n else:\n return 0.1\n\n\npn = ProbabilityNetwork(6, edges, probs)\nevidences = dict([[3, True], [4, True], [5, False]])\nprint(pn.logicSampling(evidences, 0))\nprint(pn.weightedLikelihood(evidences, 0))\n", "step-4": "import numpy as np\nfrom functools import reduce\n\n\nclass ProbabilityNetwork:\n\n def __init__(self, n, edges, 
probs):\n self.nodes = list(range(n))\n self.edges = edges\n self.probs = probs\n\n def parents(self, node):\n return [a for a, b in edges if b == node]\n\n def ancestralOrder(self):\n order = []\n while len(order) < len(self.nodes):\n for node in self.nodes:\n if node in order:\n continue\n if not any(edge[0] not in order and edge[1] == node for\n edge in self.edges):\n order.append(node)\n return order\n\n def logicSampling(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = self.ancestralOrder()\n hits = 0\n total = 0\n for it in range(niters):\n fail = False\n values = dict([[i, None] for i in self.nodes])\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n if node in evidences and evidences[node] != values[node]:\n fail = True\n break\n if fail:\n continue\n total += 1\n if values[targetNode]:\n hits += 1\n return hits / total\n\n def weightedLikelihood(self, evidences, targetNode, niters=10000000):\n evidenceNodes = evidences.keys()\n ancestralOrder = [node for node in self.ancestralOrder() if node not in\n evidenceNodes]\n cumsumHit = 0\n cumsumTotal = 0\n hits = 0\n for it in range(niters):\n values = dict([[i, None] for i in ancestralOrder])\n for evNode in evidenceNodes:\n values[evNode] = evidences[evNode]\n for node in ancestralOrder:\n pNode = self.probs(node, values)\n nodeValue = np.random.random() < pNode\n values[node] = nodeValue\n currProb = reduce(lambda x, y: x * y, [(self.probs(i, values) if\n values[i] else 1 - self.probs(i, values)) for i in\n evidenceNodes])\n if values[targetNode]:\n cumsumHit += currProb\n cumsumTotal += currProb\n return cumsumHit / cumsumTotal\n\n\nedges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 4), (2, 5)]\n\n\ndef probs(node, evidences):\n if node == 0:\n return 0.3\n elif node == 1:\n if evidences[0]:\n return 0.9\n else:\n return 0.2\n elif node == 2:\n if evidences[0]:\n return 0.75\n else:\n return 0.25\n elif node == 3:\n if evidences[1]:\n return 0.6\n else:\n return 0.1\n elif node == 4:\n if evidences[1] and evidences[2]:\n return 0.8\n elif evidences[1] and not evidences[2]:\n return 0.6\n elif not evidences[1] and evidences[2]:\n return 0.5\n else:\n return 0\n elif node == 5:\n if evidences[2]:\n return 0.4\n else:\n return 0.1\n\n\npn = ProbabilityNetwork(6, edges, probs)\nevidences = dict([[3, True], [4, True], [5, False]])\nprint(pn.logicSampling(evidences, 0))\nprint(pn.weightedLikelihood(evidences, 0))\n", "step-5": "#YET TO COMMENT.\n\nimport numpy as np\nfrom functools import reduce\n\nclass ProbabilityNetwork:\n def __init__(self,n,edges,probs):\n self.nodes=list(range(n))\n self.edges=edges\n self.probs=probs\n\n def parents(self, node):\n return [a for a,b in edges if b==node]\n\n def ancestralOrder(self):\n order=[]\n while len(order)<len(self.nodes):\n for node in self.nodes:\n if node in order:\n continue\n if not any((edge[0] not in order) and (edge[1]==node) for edge in self.edges):\n order.append(node)\n return order\n\n def logicSampling(self, evidences, targetNode, niters=10000000):\n evidenceNodes=evidences.keys()\n ancestralOrder = self.ancestralOrder()\n hits=0\n total=0\n\n for it in range(niters):\n fail=False\n values=dict([ [i,None] for i in self.nodes]) #True: present. 
False: not present\n for node in ancestralOrder:\n pNode=self.probs(node, values)\n nodeValue=np.random.random()<pNode\n values[node]=nodeValue\n if node in evidences and evidences[node]!=values[node]:\n fail=True\n break\n\n if fail: continue\n\n #print(values)\n total+=1\n if values[targetNode]:\n hits+=1\n\n return hits/total\n\n def weightedLikelihood(self, evidences, targetNode, niters=10000000):\n evidenceNodes=evidences.keys()\n\n ancestralOrder = [node for node in self.ancestralOrder() if node not in evidenceNodes]\n cumsumHit=0\n cumsumTotal=0\n hits=0\n for it in range(niters):\n values=dict([ [i,None] for i in ancestralOrder]) #True: present. False: not present\n for evNode in evidenceNodes:\n values[evNode]=evidences[evNode]\n\n for node in ancestralOrder:\n pNode=self.probs(node, values)\n nodeValue=np.random.random()<pNode\n values[node]=nodeValue\n\n currProb=reduce(lambda x,y:x*y, [self.probs(i,values) if values[i] else 1-self.probs(i,values) for i in evidenceNodes ])\n if values[targetNode]:\n cumsumHit+=currProb\n\n cumsumTotal+=currProb\n\n return cumsumHit/cumsumTotal\n\n\n\nedges=[(0,1),(0,2),(1,3),(1,4),(2,4),(2,5)]\n\ndef probs(node,evidences):\n if node==0: return 0.3\n elif node==1:\n if evidences[0]: return 0.9\n else: return 0.2\n elif node==2:\n if evidences[0]: return 0.75\n else: return 0.25\n elif node==3:\n if evidences[1]: return 0.6\n else: return 0.1\n elif node==4:\n if evidences[1] and evidences[2]: return 0.8\n elif evidences[1] and not evidences[2]: return 0.6\n elif not evidences[1] and evidences[2]: return 0.5\n else: return 0\n elif node==5:\n if evidences[2]: return 0.4\n else: return 0.1\n\npn=ProbabilityNetwork(6, edges, probs)\n\nevidences=dict([[3,True],[4,True],[5,False]])\n\nprint(pn.logicSampling(evidences, 0))\nprint(pn.weightedLikelihood(evidences,0))\n\n\n\n", "step-ids": [ 6, 7, 9, 10, 11 ] }
[ 6, 7, 9, 10, 11 ]
from .hailjwt import JWTClient, get_domain, authenticated_users_only
__all__ = ['JWTClient', 'get_domain', 'authenticated_users_only']
normal
{ "blob_id": "39fb8d9f93be1e6c1ed2a425d14061737d643ab6", "index": 9330, "step-1": "<mask token>\n", "step-2": "<mask token>\n__all__ = ['JWTClient', 'get_domain', 'authenticated_users_only']\n", "step-3": "from .hailjwt import JWTClient, get_domain, authenticated_users_only\n__all__ = ['JWTClient', 'get_domain', 'authenticated_users_only']\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
import numpy as np
from django.contrib.auth import logout, login, authenticate
from django.contrib.auth.decorators import login_required
from django.core.mail import EmailMessage
from django.shortcuts import render, redirect
from django.template.loader import get_template

from dashboard.notebook.creditcard import credit_model
from dashboard.notebook.bank import bank_model
from dashboard.notebook.mobile_data import mobile_model
from dashboard.notebook.graphs import result
from dashboard.notebook.mobile_analytics import mobile_result
from dashboard.notebook.creditcard_analytics import creditcard_result
from .forms import ContactForm, UserLoginForm


# view for index page
def index(request):
    return render(request, 'index.html')


# view for about page
def about(request):
    return render(request, 'about.html')


### contact view
def contact(request):
    form_class = ContactForm

    # new logic!
    if request.method == 'POST':
        form = form_class(data=request.POST)

        if form.is_valid():
            contact_name = request.POST.get('contact_name', '')
            contact_email = request.POST.get('contact_email', '')
            form_content = request.POST.get('content', '')

            # Email the profile with the
            # contact information
            template = get_template('contact_template.txt')
            context = {
                'contact_name': contact_name,
                'contact_email': contact_email,
                'form_content': form_content,
            }
            content = template.render(context)

            email = EmailMessage(
                "New contact form submission",
                content,
                "FDS" + '',
                ['b200jst@gmail.com'],
                headers={'Reply-To': contact_email}
            )
            email.send()
            return redirect('/success')

    return render(request, 'contact.html', {
        'form': form_class,
    })


# success page
def success(request):
    return render(request, 'success.html')


# login page
def login_view(request):
    next = request.GET.get('next')
    form = UserLoginForm(request.POST or None)
    if form.is_valid():
        username = form.cleaned_data.get('username')
        password = form.cleaned_data.get('password')
        user = authenticate(username=username, password=password)
        login(request, user)
        if next:
            return redirect(next)
        return redirect("/")
    return render(request, 'login.html', {"form": form})


# logout view
@login_required(login_url='/login/')
def logout_view(request):
    logout(request)
    return render(request, "index.html")


# service view
@login_required(login_url='/login/')
def services(request):
    return render(request, 'services.html')


# bank fraud page
@login_required(login_url='/login/')
def bank(request):
    return render(request, 'bank.html')


# creditcard fraud page
@login_required(login_url='/login/')
def creditcard(request):
    return render(request, 'creditcard.html')


# mobile transaction
@login_required(login_url='/login/')
def mobilefraud(request):
    return render(request, 'mobile.html')


# banking services
@login_required(login_url='/login/')
def bankresult(request):
    # get the data and print prediction
    age = request.POST.get("age")

    job = request.POST.get("job")
    print(job)
    if (job == "Unemployed"):
        new_job = 1
    elif (job == "Management"):
        new_job = 2
    elif (job == "Services"):
        new_job = 3
    elif (job == "Blue-Collar"):
        new_job = 4
    elif (job == "Entrepreneur"):
        new_job = 5
    elif (job == "Admin"):
        new_job = 6
    elif (job == "Unknown"):
        new_job = 7
    elif (job == "Self-employed"):
        new_job = 8
    elif (job == "Student"):
        new_job = 9
    elif (job == "House maid"):
        new_job = 10
    elif (job == "Technician"):
        new_job = 11
    elif (job == "Retired"):
        new_job = 12
    print(new_job)

    marital = request.POST.get("marital")
    if (marital == "Single"):
        new_marital = 1
    elif (marital == "Divorced"):
        new_marital = 2
    elif (marital == "Married"):
        new_marital = 3
    print(new_marital)

    education = request.POST.get("education")
    if (education == "Unknown"):
        new_education = 1
    elif (education == "Primary"):
        new_education = 2
    elif (education == "Secondary"):
        new_education = 3
    elif (education == "Graduate"):
        new_education = 4
    print(new_education)

    balance = request.POST.get("balance")

    housing = request.POST.get("housing")
    if (housing == "Yes"):
        new_housing = 1
    elif (housing == "No"):
        new_housing = 2
    print(new_housing)

    loan = request.POST.get("loan")
    if (loan == "Yes"):
        new_loan = 1
    elif (loan == "No"):
        new_loan = 2
    print(new_loan)

    duration = int(request.POST.get("duration"))
    campaign = int(request.POST.get('campaign'))
    pdays = int(request.POST.get('pdays'))
    previous = int(request.POST.get('previous'))

    poutcome = request.POST.get("poutcome")
    # "Success"/"Other" are assumed option values: the source compared a
    # misspelt "Successs" and repeated the "Failure" branch
    if (poutcome == "Unknown"):
        new_poutcome = 3
    elif (poutcome == "Failure"):
        new_poutcome = 1
    elif (poutcome == "Success"):
        new_poutcome = 4
    elif (poutcome == "Other"):
        new_poutcome = 2
    print(new_poutcome)

    bank_data = np.array([age, new_job, new_marital, new_education, balance, new_housing,
                          new_loan, duration, campaign, pdays, previous, new_poutcome])
    clf = bank_model()
    c = clf.predict([bank_data])
    print(c)
    if c == [1]:
        # print("Not fraud")
        response = 'Not Fraud'
    else:
        # print("Fraud")
        response = 'Fraud'
    accuracy = 0.8962983425414365
    return render(request, 'bank/result.html', {"result": response, 'accuracy': accuracy})


# analytics
# def analysis(request):
#     return render(request, 'analysis.html', {'accuracy': accuracy})


# credit card services
@login_required(login_url='/login/')
def creditresult(request):
    if request.method == "POST":
        # get the data and print
        limit_balance = request.POST.get("limit_balance")

        sex = request.POST.get("sex")
        print(sex)
        if (sex == "Male"):
            new_sex = 1
        else:
            new_sex = 2
        print(new_sex)

        education = request.POST.get("education")
        if (education == "Primary"):
            new_education = 1
        elif (education == "Secondary"):
            new_education = 2
        elif (education == "Graduate"):
            new_education = 3
        print(new_education)

        marriage = request.POST.get("marriage")
        if (marriage == "Single"):
            new_marriage = 1
        elif (marriage == "Married"):
            new_marriage = 2
        elif (marriage == "Divorced"):
            new_marriage = 3
        print(new_marriage)

        age = request.POST.get("age")
        pay_1 = int(request.POST.get("pay_1"))
        pay_2 = int(request.POST.get("pay_2"))
        pay_3 = int(request.POST.get("pay_3"))
        pay_4 = int(request.POST.get("pay_4"))
        pay_5 = int(request.POST.get("pay_5"))
        pay_6 = int(request.POST.get("pay_6"))
        Bill_Amt_1 = int(request.POST.get("Bill_Amt_1"))
        Bill_Amt_2 = int(request.POST.get("Bill_Amt_2"))
        Bill_Amt_3 = int(request.POST.get("Bill_Amt_3"))
        Bill_Amt_4 = int(request.POST.get("Bill_Amt_4"))
        Bill_Amt_5 = int(request.POST.get("Bill_Amt_5"))
        Bill_Amt_6 = int(request.POST.get("Bill_Amt_6"))
        Pay_Amt_1 = int(request.POST.get("Pay_Amt_1"))
        Pay_Amt_2 = int(request.POST.get("Pay_Amt_2"))
        Pay_Amt_3 = int(request.POST.get("Pay_Amt_3"))
        Pay_Amt_4 = int(request.POST.get("Pay_Amt_4"))
        Pay_Amt_5 = int(request.POST.get("Pay_Amt_5"))
        Pay_Amt_6 = int(request.POST.get("Pay_Amt_6"))

        credit_data = np.array([limit_balance, new_sex, new_education, new_marriage, age,
                                pay_1, pay_2, pay_3, pay_4, pay_5, pay_6,
                                Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5, Bill_Amt_6,
                                Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4, Pay_Amt_5, Pay_Amt_6])
        print(credit_data)
        clf = credit_model()
        c = clf.predict([credit_data])
        print(c)
        if c == [0]:
            response = 'Not a Fraud'
        else:
            response = 'fraud'
        # print(c)
        return render(request, 'creditcard/result.html', {"result": response})
    else:
        return redirect('/creditcard', request)


# mobile fraud services
@login_required(login_url='/login/')
def mobileresult(request):
    # get the data and print
    step = request.POST.get("step")

    type = request.POST.get("type")
    if (type == "Payment"):
        new_type = 1
    elif (type == "Transfer"):
        new_type = 4
    elif (type == "Cash-out"):
        new_type = 5
    elif (type == "Debit"):
        new_type = 2
    print(new_type)

    amount = request.POST.get("amount")
    nameOrig = request.POST.get("nameOrig")
    oldbalanceOrg = request.POST.get("oldbalanceOrg")
    newbalanceOrig = request.POST.get("newbalanceOrig")
    nameDest = request.POST.get("nameDest")
    oldbalanceDest = request.POST.get("oldbalanceDest")
    newbalanceDest = request.POST.get("newbalanceDest")
    # isFraud = int(request.POST.get("isFraud"))
    isFlaggedFraud = 1

    mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg, newbalanceOrig,
                            nameDest, oldbalanceDest, newbalanceDest, isFlaggedFraud])
    # print(bank_data)
    clf = mobile_model()
    c = clf.predict([mobile_data])
    print(c)
    if c == [0]:
        # print("Not fraud")
        response = 'Not Fraud'
    else:
        # print("Fraud")
        response = 'Fraud'
    return render(request, 'mobile/result.html', {"result": response})


# analytics page
def analytics(request):
    return render(request, 'analytics.html', {'analytics': result,
                                              "mobile_analytics": mobile_result,
                                              "creditcard_analytics": creditcard_result})
normal
{ "blob_id": "26bb5dc2679a4375d0950667ed02369df10857a8", "index": 8410, "step-1": "<mask token>\n\n\ndef contact(request):\n form_class = ContactForm\n if request.method == 'POST':\n form = form_class(data=request.POST)\n if form.is_valid():\n contact_name = request.POST.get('contact_name', '')\n contact_email = request.POST.get('contact_email', '')\n form_content = request.POST.get('content', '')\n form_content = request.POST.get('content', '')\n template = get_template('contact_template.txt')\n context = {'contact_name': contact_name, 'contact_email':\n contact_email, 'form_content': form_content}\n content = template.render(context)\n email = EmailMessage('New contact form submission', content, \n 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To':\n contact_email})\n email.send()\n return redirect('/success')\n return render(request, 'contact.html', {'form': form_class})\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef services(request):\n return render(request, 'services.html')\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef creditcard(request):\n return render(request, 'creditcard.html')\n\n\n@login_required(login_url='/login/')\ndef mobilefraud(request):\n return render(request, 'mobile.html')\n\n\n@login_required(login_url='/login/')\ndef bankresult(request):\n age = request.POST.get('age')\n job = request.POST.get('job')\n print(job)\n if job == 'Unemployed':\n new_job = 1\n elif job == 'Management':\n new_job = 2\n elif job == 'Services':\n new_job = 3\n elif job == 'Blue-Collar':\n new_job = 4\n elif job == 'Entrepreneur':\n new_job = 5\n elif job == 'Admin':\n new_job = 6\n elif job == 'Unknown':\n new_job = 7\n elif job == 'Self-employed':\n new_job = 8\n elif job == 'Student':\n new_job = 9\n elif job == 'House maid':\n new_job = 10\n elif job == 'Technician':\n new_job = 11\n elif job == 'Retired':\n new_job = 12\n print(new_job)\n marital = request.POST.get('marital')\n if marital == 'Single':\n new_marital = 1\n elif marital == 'Divorced':\n new_marital = 2\n elif marital == 'Married':\n new_marital = 3\n print(new_marital)\n education = request.POST.get('education')\n if education == 'Unknown':\n new_education = 1\n elif education == 'Primary':\n new_education = 2\n elif education == 'Secondary':\n new_education = 3\n elif education == 'Graduate':\n new_education = 4\n print(new_education)\n balance = request.POST.get('balance')\n housing = request.POST.get('housing')\n if housing == 'Yes':\n new_housing = 1\n elif housing == 'No':\n new_housing = 2\n print(new_housing)\n loan = request.POST.get('loan')\n if loan == 'Yes':\n new_loan = 1\n elif loan == 'No':\n new_loan = 2\n print(new_loan)\n duration = int(request.POST.get('duration'))\n campaign = int(request.POST.get('campaign'))\n pdays = int(request.POST.get('pdays'))\n previous = int(request.POST.get('previous'))\n poutcome = request.POST.get('poutcome')\n if poutcome == 'Unknown':\n new_poutcome = 3\n elif poutcome == 'Failure':\n new_poutcome = 1\n elif poutcome == 'Successs':\n new_poutcome = 4\n elif poutcome == 'Failure':\n new_poutcome = 2\n print(new_poutcome)\n bank_data = np.array([age, new_job, new_marital, new_education, balance,\n new_housing, new_loan, duration, campaign, pdays, previous,\n new_poutcome])\n clf = bank_model()\n c = clf.predict([bank_data])\n print(c)\n if c == [1]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n accuracy = 0.8962983425414365\n return render(request, 'bank/result.html', {'result': response,\n 'accuracy': accuracy})\n\n\n<mask 
token>\n\n\n@login_required(login_url='/login/')\ndef mobileresult(request):\n step = request.POST.get('step')\n type = request.POST.get('type')\n if type == 'Payment':\n new_type = 1\n elif type == 'Transfer':\n new_type = 4\n elif type == 'Cash-out':\n new_type = 5\n elif type == 'Debit':\n new_type = 2\n print(new_type)\n amount = request.POST.get('amount')\n nameOrig = request.POST.get('nameOrig')\n oldbalanceOrg = request.POST.get('oldbalanceOrg')\n newbalanceOrig = request.POST.get('newbalanceOrig')\n nameDest = request.POST.get('nameDest')\n oldbalanceDest = request.POST.get('oldbalanceDest')\n newbalanceDest = request.POST.get('newbalanceDest')\n isFlaggedFraud = 1\n mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg,\n newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest,\n isFlaggedFraud])\n clf = mobile_model()\n c = clf.predict([mobile_data])\n print(c)\n if c == [0]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n return render(request, 'mobile/result.html', {'result': response})\n\n\ndef analytics(request):\n return render(request, 'analytics.html', {'analytics': result,\n 'mobile_analytics': mobile_result, 'creditcard_analytics':\n creditcard_result})\n", "step-2": "<mask token>\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\n<mask token>\n\n\ndef contact(request):\n form_class = ContactForm\n if request.method == 'POST':\n form = form_class(data=request.POST)\n if form.is_valid():\n contact_name = request.POST.get('contact_name', '')\n contact_email = request.POST.get('contact_email', '')\n form_content = request.POST.get('content', '')\n form_content = request.POST.get('content', '')\n template = get_template('contact_template.txt')\n context = {'contact_name': contact_name, 'contact_email':\n contact_email, 'form_content': form_content}\n content = template.render(context)\n email = EmailMessage('New contact form submission', content, \n 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To':\n contact_email})\n email.send()\n return redirect('/success')\n return render(request, 'contact.html', {'form': form_class})\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef services(request):\n return render(request, 'services.html')\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef creditcard(request):\n return render(request, 'creditcard.html')\n\n\n@login_required(login_url='/login/')\ndef mobilefraud(request):\n return render(request, 'mobile.html')\n\n\n@login_required(login_url='/login/')\ndef bankresult(request):\n age = request.POST.get('age')\n job = request.POST.get('job')\n print(job)\n if job == 'Unemployed':\n new_job = 1\n elif job == 'Management':\n new_job = 2\n elif job == 'Services':\n new_job = 3\n elif job == 'Blue-Collar':\n new_job = 4\n elif job == 'Entrepreneur':\n new_job = 5\n elif job == 'Admin':\n new_job = 6\n elif job == 'Unknown':\n new_job = 7\n elif job == 'Self-employed':\n new_job = 8\n elif job == 'Student':\n new_job = 9\n elif job == 'House maid':\n new_job = 10\n elif job == 'Technician':\n new_job = 11\n elif job == 'Retired':\n new_job = 12\n print(new_job)\n marital = request.POST.get('marital')\n if marital == 'Single':\n new_marital = 1\n elif marital == 'Divorced':\n new_marital = 2\n elif marital == 'Married':\n new_marital = 3\n print(new_marital)\n education = request.POST.get('education')\n if education == 'Unknown':\n new_education = 1\n elif education == 'Primary':\n new_education = 2\n elif education == 'Secondary':\n new_education = 3\n elif 
education == 'Graduate':\n new_education = 4\n print(new_education)\n balance = request.POST.get('balance')\n housing = request.POST.get('housing')\n if housing == 'Yes':\n new_housing = 1\n elif housing == 'No':\n new_housing = 2\n print(new_housing)\n loan = request.POST.get('loan')\n if loan == 'Yes':\n new_loan = 1\n elif loan == 'No':\n new_loan = 2\n print(new_loan)\n duration = int(request.POST.get('duration'))\n campaign = int(request.POST.get('campaign'))\n pdays = int(request.POST.get('pdays'))\n previous = int(request.POST.get('previous'))\n poutcome = request.POST.get('poutcome')\n if poutcome == 'Unknown':\n new_poutcome = 3\n elif poutcome == 'Failure':\n new_poutcome = 1\n elif poutcome == 'Successs':\n new_poutcome = 4\n elif poutcome == 'Failure':\n new_poutcome = 2\n print(new_poutcome)\n bank_data = np.array([age, new_job, new_marital, new_education, balance,\n new_housing, new_loan, duration, campaign, pdays, previous,\n new_poutcome])\n clf = bank_model()\n c = clf.predict([bank_data])\n print(c)\n if c == [1]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n accuracy = 0.8962983425414365\n return render(request, 'bank/result.html', {'result': response,\n 'accuracy': accuracy})\n\n\n@login_required(login_url='/login/')\ndef creditresult(request):\n if request.method == 'POST':\n limit_balance = request.POST.get('limit_balance')\n sex = request.POST.get('sex')\n print(sex)\n if sex == 'Male':\n new_sex = 1\n else:\n new_sex = 2\n print(new_sex)\n education = request.POST.get('education')\n if education == 'Primary':\n new_education = 1\n elif education == 'Secondary':\n new_education = 2\n elif education == 'Graduate':\n new_education = 3\n print(new_education)\n marriage = request.POST.get('marriage')\n if marriage == 'Single':\n new_marriage = 1\n elif marriage == 'Married':\n new_marriage = 2\n elif education == 'Divorced':\n new_marriage = 3\n print(new_marriage)\n age = request.POST.get('age')\n pay_1 = int(request.POST.get('pay_1'))\n pay_2 = int(request.POST.get('pay_2'))\n pay_3 = int(request.POST.get('pay_3'))\n pay_4 = int(request.POST.get('pay_4'))\n pay_5 = int(request.POST.get('pay_5'))\n pay_6 = int(request.POST.get('pay_6'))\n Bill_Amt_1 = int(request.POST.get('Bill_Amt_1'))\n Bill_Amt_2 = int(request.POST.get('Bill_Amt_2'))\n Bill_Amt_3 = int(request.POST.get('Bill_Amt_3'))\n Bill_Amt_4 = int(request.POST.get('Bill_Amt_4'))\n Bill_Amt_5 = int(request.POST.get('Bill_Amt_5'))\n Bill_Amt_6 = int(request.POST.get('Bill_Amt_6'))\n Pay_Amt_1 = int(request.POST.get('Pay_Amt_1'))\n Pay_Amt_2 = int(request.POST.get('Pay_Amt_2'))\n Pay_Amt_3 = int(request.POST.get('Pay_Amt_3'))\n Pay_Amt_4 = int(request.POST.get('Pay_Amt_4'))\n Pay_Amt_5 = int(request.POST.get('Pay_Amt_5'))\n Pay_Amt_6 = int(request.POST.get('Pay_Amt_6'))\n credit_data = np.array([limit_balance, new_sex, new_education,\n new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6,\n Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5,\n Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4,\n Pay_Amt_5, Pay_Amt_6])\n print(credit_data)\n clf = credit_model()\n c = clf.predict([credit_data])\n print(c)\n if c == [0]:\n response = 'Not a Fraud'\n else:\n response = 'fraud'\n return render(request, 'creditcard/result.html', {'result': response})\n else:\n return redirect('/creditcard', request)\n\n\n@login_required(login_url='/login/')\ndef mobileresult(request):\n step = request.POST.get('step')\n type = request.POST.get('type')\n if type == 'Payment':\n new_type = 1\n elif type == 
'Transfer':\n new_type = 4\n elif type == 'Cash-out':\n new_type = 5\n elif type == 'Debit':\n new_type = 2\n print(new_type)\n amount = request.POST.get('amount')\n nameOrig = request.POST.get('nameOrig')\n oldbalanceOrg = request.POST.get('oldbalanceOrg')\n newbalanceOrig = request.POST.get('newbalanceOrig')\n nameDest = request.POST.get('nameDest')\n oldbalanceDest = request.POST.get('oldbalanceDest')\n newbalanceDest = request.POST.get('newbalanceDest')\n isFlaggedFraud = 1\n mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg,\n newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest,\n isFlaggedFraud])\n clf = mobile_model()\n c = clf.predict([mobile_data])\n print(c)\n if c == [0]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n return render(request, 'mobile/result.html', {'result': response})\n\n\ndef analytics(request):\n return render(request, 'analytics.html', {'analytics': result,\n 'mobile_analytics': mobile_result, 'creditcard_analytics':\n creditcard_result})\n", "step-3": "<mask token>\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\n<mask token>\n\n\ndef contact(request):\n form_class = ContactForm\n if request.method == 'POST':\n form = form_class(data=request.POST)\n if form.is_valid():\n contact_name = request.POST.get('contact_name', '')\n contact_email = request.POST.get('contact_email', '')\n form_content = request.POST.get('content', '')\n form_content = request.POST.get('content', '')\n template = get_template('contact_template.txt')\n context = {'contact_name': contact_name, 'contact_email':\n contact_email, 'form_content': form_content}\n content = template.render(context)\n email = EmailMessage('New contact form submission', content, \n 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To':\n contact_email})\n email.send()\n return redirect('/success')\n return render(request, 'contact.html', {'form': form_class})\n\n\n<mask token>\n\n\ndef login_view(request):\n next = request.GET.get('next')\n form = UserLoginForm(request.POST or None)\n if form.is_valid():\n username = form.cleaned_data.get('username')\n password = form.cleaned_data.get('password')\n user = authenticate(username=username, password=password)\n login(request, user)\n if next:\n return redirect(next)\n return redirect('/')\n return render(request, 'login.html', {'form': form})\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef services(request):\n return render(request, 'services.html')\n\n\n@login_required(login_url='/login/')\ndef bank(request):\n return render(request, 'bank.html')\n\n\n@login_required(login_url='/login/')\ndef creditcard(request):\n return render(request, 'creditcard.html')\n\n\n@login_required(login_url='/login/')\ndef mobilefraud(request):\n return render(request, 'mobile.html')\n\n\n@login_required(login_url='/login/')\ndef bankresult(request):\n age = request.POST.get('age')\n job = request.POST.get('job')\n print(job)\n if job == 'Unemployed':\n new_job = 1\n elif job == 'Management':\n new_job = 2\n elif job == 'Services':\n new_job = 3\n elif job == 'Blue-Collar':\n new_job = 4\n elif job == 'Entrepreneur':\n new_job = 5\n elif job == 'Admin':\n new_job = 6\n elif job == 'Unknown':\n new_job = 7\n elif job == 'Self-employed':\n new_job = 8\n elif job == 'Student':\n new_job = 9\n elif job == 'House maid':\n new_job = 10\n elif job == 'Technician':\n new_job = 11\n elif job == 'Retired':\n new_job = 12\n print(new_job)\n marital = request.POST.get('marital')\n if marital == 'Single':\n new_marital = 1\n 
elif marital == 'Divorced':\n new_marital = 2\n elif marital == 'Married':\n new_marital = 3\n print(new_marital)\n education = request.POST.get('education')\n if education == 'Unknown':\n new_education = 1\n elif education == 'Primary':\n new_education = 2\n elif education == 'Secondary':\n new_education = 3\n elif education == 'Graduate':\n new_education = 4\n print(new_education)\n balance = request.POST.get('balance')\n housing = request.POST.get('housing')\n if housing == 'Yes':\n new_housing = 1\n elif housing == 'No':\n new_housing = 2\n print(new_housing)\n loan = request.POST.get('loan')\n if loan == 'Yes':\n new_loan = 1\n elif loan == 'No':\n new_loan = 2\n print(new_loan)\n duration = int(request.POST.get('duration'))\n campaign = int(request.POST.get('campaign'))\n pdays = int(request.POST.get('pdays'))\n previous = int(request.POST.get('previous'))\n poutcome = request.POST.get('poutcome')\n if poutcome == 'Unknown':\n new_poutcome = 3\n elif poutcome == 'Failure':\n new_poutcome = 1\n elif poutcome == 'Successs':\n new_poutcome = 4\n elif poutcome == 'Failure':\n new_poutcome = 2\n print(new_poutcome)\n bank_data = np.array([age, new_job, new_marital, new_education, balance,\n new_housing, new_loan, duration, campaign, pdays, previous,\n new_poutcome])\n clf = bank_model()\n c = clf.predict([bank_data])\n print(c)\n if c == [1]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n accuracy = 0.8962983425414365\n return render(request, 'bank/result.html', {'result': response,\n 'accuracy': accuracy})\n\n\n@login_required(login_url='/login/')\ndef creditresult(request):\n if request.method == 'POST':\n limit_balance = request.POST.get('limit_balance')\n sex = request.POST.get('sex')\n print(sex)\n if sex == 'Male':\n new_sex = 1\n else:\n new_sex = 2\n print(new_sex)\n education = request.POST.get('education')\n if education == 'Primary':\n new_education = 1\n elif education == 'Secondary':\n new_education = 2\n elif education == 'Graduate':\n new_education = 3\n print(new_education)\n marriage = request.POST.get('marriage')\n if marriage == 'Single':\n new_marriage = 1\n elif marriage == 'Married':\n new_marriage = 2\n elif education == 'Divorced':\n new_marriage = 3\n print(new_marriage)\n age = request.POST.get('age')\n pay_1 = int(request.POST.get('pay_1'))\n pay_2 = int(request.POST.get('pay_2'))\n pay_3 = int(request.POST.get('pay_3'))\n pay_4 = int(request.POST.get('pay_4'))\n pay_5 = int(request.POST.get('pay_5'))\n pay_6 = int(request.POST.get('pay_6'))\n Bill_Amt_1 = int(request.POST.get('Bill_Amt_1'))\n Bill_Amt_2 = int(request.POST.get('Bill_Amt_2'))\n Bill_Amt_3 = int(request.POST.get('Bill_Amt_3'))\n Bill_Amt_4 = int(request.POST.get('Bill_Amt_4'))\n Bill_Amt_5 = int(request.POST.get('Bill_Amt_5'))\n Bill_Amt_6 = int(request.POST.get('Bill_Amt_6'))\n Pay_Amt_1 = int(request.POST.get('Pay_Amt_1'))\n Pay_Amt_2 = int(request.POST.get('Pay_Amt_2'))\n Pay_Amt_3 = int(request.POST.get('Pay_Amt_3'))\n Pay_Amt_4 = int(request.POST.get('Pay_Amt_4'))\n Pay_Amt_5 = int(request.POST.get('Pay_Amt_5'))\n Pay_Amt_6 = int(request.POST.get('Pay_Amt_6'))\n credit_data = np.array([limit_balance, new_sex, new_education,\n new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6,\n Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5,\n Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4,\n Pay_Amt_5, Pay_Amt_6])\n print(credit_data)\n clf = credit_model()\n c = clf.predict([credit_data])\n print(c)\n if c == [0]:\n response = 'Not a Fraud'\n else:\n response = 
'fraud'\n return render(request, 'creditcard/result.html', {'result': response})\n else:\n return redirect('/creditcard', request)\n\n\n@login_required(login_url='/login/')\ndef mobileresult(request):\n step = request.POST.get('step')\n type = request.POST.get('type')\n if type == 'Payment':\n new_type = 1\n elif type == 'Transfer':\n new_type = 4\n elif type == 'Cash-out':\n new_type = 5\n elif type == 'Debit':\n new_type = 2\n print(new_type)\n amount = request.POST.get('amount')\n nameOrig = request.POST.get('nameOrig')\n oldbalanceOrg = request.POST.get('oldbalanceOrg')\n newbalanceOrig = request.POST.get('newbalanceOrig')\n nameDest = request.POST.get('nameDest')\n oldbalanceDest = request.POST.get('oldbalanceDest')\n newbalanceDest = request.POST.get('newbalanceDest')\n isFlaggedFraud = 1\n mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg,\n newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest,\n isFlaggedFraud])\n clf = mobile_model()\n c = clf.predict([mobile_data])\n print(c)\n if c == [0]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n return render(request, 'mobile/result.html', {'result': response})\n\n\ndef analytics(request):\n return render(request, 'analytics.html', {'analytics': result,\n 'mobile_analytics': mobile_result, 'creditcard_analytics':\n creditcard_result})\n", "step-4": "<mask token>\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\ndef about(request):\n return render(request, 'about.html')\n\n\ndef contact(request):\n form_class = ContactForm\n if request.method == 'POST':\n form = form_class(data=request.POST)\n if form.is_valid():\n contact_name = request.POST.get('contact_name', '')\n contact_email = request.POST.get('contact_email', '')\n form_content = request.POST.get('content', '')\n form_content = request.POST.get('content', '')\n template = get_template('contact_template.txt')\n context = {'contact_name': contact_name, 'contact_email':\n contact_email, 'form_content': form_content}\n content = template.render(context)\n email = EmailMessage('New contact form submission', content, \n 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To':\n contact_email})\n email.send()\n return redirect('/success')\n return render(request, 'contact.html', {'form': form_class})\n\n\ndef success(request):\n return render(request, 'success.html')\n\n\ndef login_view(request):\n next = request.GET.get('next')\n form = UserLoginForm(request.POST or None)\n if form.is_valid():\n username = form.cleaned_data.get('username')\n password = form.cleaned_data.get('password')\n user = authenticate(username=username, password=password)\n login(request, user)\n if next:\n return redirect(next)\n return redirect('/')\n return render(request, 'login.html', {'form': form})\n\n\n@login_required(login_url='/login/')\ndef logout_view(request):\n logout(request)\n return render(request, 'index.html')\n\n\n@login_required(login_url='/login/')\ndef services(request):\n return render(request, 'services.html')\n\n\n@login_required(login_url='/login/')\ndef bank(request):\n return render(request, 'bank.html')\n\n\n@login_required(login_url='/login/')\ndef creditcard(request):\n return render(request, 'creditcard.html')\n\n\n@login_required(login_url='/login/')\ndef mobilefraud(request):\n return render(request, 'mobile.html')\n\n\n@login_required(login_url='/login/')\ndef bankresult(request):\n age = request.POST.get('age')\n job = request.POST.get('job')\n print(job)\n if job == 'Unemployed':\n new_job = 1\n elif job == 'Management':\n 
new_job = 2\n elif job == 'Services':\n new_job = 3\n elif job == 'Blue-Collar':\n new_job = 4\n elif job == 'Entrepreneur':\n new_job = 5\n elif job == 'Admin':\n new_job = 6\n elif job == 'Unknown':\n new_job = 7\n elif job == 'Self-employed':\n new_job = 8\n elif job == 'Student':\n new_job = 9\n elif job == 'House maid':\n new_job = 10\n elif job == 'Technician':\n new_job = 11\n elif job == 'Retired':\n new_job = 12\n print(new_job)\n marital = request.POST.get('marital')\n if marital == 'Single':\n new_marital = 1\n elif marital == 'Divorced':\n new_marital = 2\n elif marital == 'Married':\n new_marital = 3\n print(new_marital)\n education = request.POST.get('education')\n if education == 'Unknown':\n new_education = 1\n elif education == 'Primary':\n new_education = 2\n elif education == 'Secondary':\n new_education = 3\n elif education == 'Graduate':\n new_education = 4\n print(new_education)\n balance = request.POST.get('balance')\n housing = request.POST.get('housing')\n if housing == 'Yes':\n new_housing = 1\n elif housing == 'No':\n new_housing = 2\n print(new_housing)\n loan = request.POST.get('loan')\n if loan == 'Yes':\n new_loan = 1\n elif loan == 'No':\n new_loan = 2\n print(new_loan)\n duration = int(request.POST.get('duration'))\n campaign = int(request.POST.get('campaign'))\n pdays = int(request.POST.get('pdays'))\n previous = int(request.POST.get('previous'))\n poutcome = request.POST.get('poutcome')\n if poutcome == 'Unknown':\n new_poutcome = 3\n elif poutcome == 'Failure':\n new_poutcome = 1\n elif poutcome == 'Successs':\n new_poutcome = 4\n elif poutcome == 'Failure':\n new_poutcome = 2\n print(new_poutcome)\n bank_data = np.array([age, new_job, new_marital, new_education, balance,\n new_housing, new_loan, duration, campaign, pdays, previous,\n new_poutcome])\n clf = bank_model()\n c = clf.predict([bank_data])\n print(c)\n if c == [1]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n accuracy = 0.8962983425414365\n return render(request, 'bank/result.html', {'result': response,\n 'accuracy': accuracy})\n\n\n@login_required(login_url='/login/')\ndef creditresult(request):\n if request.method == 'POST':\n limit_balance = request.POST.get('limit_balance')\n sex = request.POST.get('sex')\n print(sex)\n if sex == 'Male':\n new_sex = 1\n else:\n new_sex = 2\n print(new_sex)\n education = request.POST.get('education')\n if education == 'Primary':\n new_education = 1\n elif education == 'Secondary':\n new_education = 2\n elif education == 'Graduate':\n new_education = 3\n print(new_education)\n marriage = request.POST.get('marriage')\n if marriage == 'Single':\n new_marriage = 1\n elif marriage == 'Married':\n new_marriage = 2\n elif education == 'Divorced':\n new_marriage = 3\n print(new_marriage)\n age = request.POST.get('age')\n pay_1 = int(request.POST.get('pay_1'))\n pay_2 = int(request.POST.get('pay_2'))\n pay_3 = int(request.POST.get('pay_3'))\n pay_4 = int(request.POST.get('pay_4'))\n pay_5 = int(request.POST.get('pay_5'))\n pay_6 = int(request.POST.get('pay_6'))\n Bill_Amt_1 = int(request.POST.get('Bill_Amt_1'))\n Bill_Amt_2 = int(request.POST.get('Bill_Amt_2'))\n Bill_Amt_3 = int(request.POST.get('Bill_Amt_3'))\n Bill_Amt_4 = int(request.POST.get('Bill_Amt_4'))\n Bill_Amt_5 = int(request.POST.get('Bill_Amt_5'))\n Bill_Amt_6 = int(request.POST.get('Bill_Amt_6'))\n Pay_Amt_1 = int(request.POST.get('Pay_Amt_1'))\n Pay_Amt_2 = int(request.POST.get('Pay_Amt_2'))\n Pay_Amt_3 = int(request.POST.get('Pay_Amt_3'))\n Pay_Amt_4 = int(request.POST.get('Pay_Amt_4'))\n 
Pay_Amt_5 = int(request.POST.get('Pay_Amt_5'))\n Pay_Amt_6 = int(request.POST.get('Pay_Amt_6'))\n credit_data = np.array([limit_balance, new_sex, new_education,\n new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6,\n Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5,\n Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4,\n Pay_Amt_5, Pay_Amt_6])\n print(credit_data)\n clf = credit_model()\n c = clf.predict([credit_data])\n print(c)\n if c == [0]:\n response = 'Not a Fraud'\n else:\n response = 'fraud'\n return render(request, 'creditcard/result.html', {'result': response})\n else:\n return redirect('/creditcard', request)\n\n\n@login_required(login_url='/login/')\ndef mobileresult(request):\n step = request.POST.get('step')\n type = request.POST.get('type')\n if type == 'Payment':\n new_type = 1\n elif type == 'Transfer':\n new_type = 4\n elif type == 'Cash-out':\n new_type = 5\n elif type == 'Debit':\n new_type = 2\n print(new_type)\n amount = request.POST.get('amount')\n nameOrig = request.POST.get('nameOrig')\n oldbalanceOrg = request.POST.get('oldbalanceOrg')\n newbalanceOrig = request.POST.get('newbalanceOrig')\n nameDest = request.POST.get('nameDest')\n oldbalanceDest = request.POST.get('oldbalanceDest')\n newbalanceDest = request.POST.get('newbalanceDest')\n isFlaggedFraud = 1\n mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg,\n newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest,\n isFlaggedFraud])\n clf = mobile_model()\n c = clf.predict([mobile_data])\n print(c)\n if c == [0]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n return render(request, 'mobile/result.html', {'result': response})\n\n\ndef analytics(request):\n return render(request, 'analytics.html', {'analytics': result,\n 'mobile_analytics': mobile_result, 'creditcard_analytics':\n creditcard_result})\n", "step-5": "import numpy as np\nfrom django.contrib.auth import logout, login, authenticate\nfrom django.contrib.auth.decorators import login_required\nfrom django.core.mail import EmailMessage\nfrom django.shortcuts import render, redirect\nfrom django.template.loader import get_template\n\nfrom dashboard.notebook.creditcard import credit_model\nfrom dashboard.notebook.bank import bank_model\nfrom dashboard.notebook.mobile_data import mobile_model\n\nfrom dashboard.notebook.graphs import result\n\nfrom dashboard.notebook.mobile_analytics import mobile_result\n\nfrom dashboard.notebook.creditcard_analytics import creditcard_result\nfrom .forms import ContactForm, UserLoginForm\n\n\n# view for index page\ndef index(request):\n return render(request, 'index.html')\n# view for about page\ndef about(request):\n return render(request, 'about.html')\n\n### contact view\ndef contact(request):\n form_class = ContactForm\n\n # new logic!\n if request.method == 'POST':\n form = form_class(data=request.POST)\n\n if form.is_valid():\n contact_name = request.POST.get('contact_name', '')\n contact_email = request.POST.get('contact_email', '')\n form_content = request.POST.get('content', '')\n form_content = request.POST.get('content', '')\n\n # Email the profile with the\n # contact information\n template = get_template('contact_template.txt')\n context = {\n 'contact_name': contact_name,\n 'contact_email': contact_email,\n 'form_content': form_content,\n }\n\n content = template.render(context)\n email = EmailMessage(\n \"New contact form submission\",\n content,\n \"FDS\" + '',\n ['b200jst@gmail.com'],\n headers={'Reply-To': contact_email}\n )\n email.send()\n return 
redirect('/success')\n\n return render(request, 'contact.html', {\n 'form': form_class,\n })\n\n# success page\ndef success(request):\n return render(request, 'success.html')\n\n# login page\ndef login_view(request):\n next = request.GET.get('next')\n form = UserLoginForm(request.POST or None)\n if form.is_valid():\n username = form.cleaned_data.get('username')\n password = form.cleaned_data.get('password')\n user = authenticate(username=username, password=password)\n login(request, user)\n if next:\n return redirect(next)\n return redirect(\"/\")\n return render(request, 'login.html',{\"form\":form})\n\n# logout view\n@login_required(login_url='/login/')\ndef logout_view(request):\n logout(request)\n return render(request, \"index.html\")\n\n# service view\n@login_required(login_url='/login/')\ndef services(request):\n return render(request, 'services.html')\n\n# bank fraud page\n@login_required(login_url='/login/')\ndef bank(request):\n return render(request, 'bank.html')\n# creditcard fraud page\n@login_required(login_url='/login/')\ndef creditcard(request):\n return render(request, 'creditcard.html')\n# mobile transaction\n@login_required(login_url='/login/')\ndef mobilefraud(request):\n return render(request, 'mobile.html')\n\n#banking services\n@login_required(login_url='/login/')\ndef bankresult(request):\n # get the data and print prediction\n age = request.POST.get(\"age\")\n job = request.POST.get(\"job\")\n print(job)\n if (job == \"Unemployed\"):\n new_job = 1\n elif (job == \"Management\"):\n new_job = 2\n elif (job == \"Services\"):\n new_job = 3\n elif (job == \"Blue-Collar\"):\n new_job = 4\n elif (job == \"Entrepreneur\"):\n new_job = 5\n elif (job == \"Admin\"):\n new_job = 6\n elif (job == \"Unknown\"):\n new_job = 7\n elif (job == \"Self-employed\"):\n new_job = 8\n elif (job == \"Student\"):\n new_job = 9\n elif (job == \"House maid\"):\n new_job = 10\n elif (job == \"Technician\"):\n new_job = 11\n elif (job == \"Retired\"):\n new_job = 12\n print(new_job)\n marital = request.POST.get(\"marital\")\n if (marital == \"Single\"):\n new_marital = 1\n elif (marital == \"Divorced\"):\n new_marital = 2\n elif (marital == \"Married\"):\n new_marital = 3\n print(new_marital)\n education = request.POST.get(\"education\")\n if (education == \"Unknown\"):\n new_education = 1\n elif (education == \"Primary\"):\n new_education = 2\n elif (education == \"Secondary\"):\n new_education = 3\n elif (education == \"Graduate\"):\n new_education = 4\n print(new_education)\n balance = request.POST.get(\"balance\")\n housing = request.POST.get(\"housing\")\n if (housing == \"Yes\"):\n new_housing = 1\n elif (housing == \"No\"):\n new_housing = 2\n print(new_housing)\n loan = request.POST.get(\"loan\")\n if (loan == \"Yes\"):\n new_loan = 1\n elif (loan == \"No\"):\n new_loan = 2\n print(new_loan)\n duration = int(request.POST.get(\"duration\"))\n campaign = int(request.POST.get('campaign'))\n pdays = int(request.POST.get('pdays'))\n previous = int(request.POST.get('previous'))\n poutcome = (request.POST.get(\"poutcome\"))\n if (poutcome == \"Unknown\"):\n new_poutcome = 3\n elif (poutcome == \"Failure\"):\n new_poutcome = 1\n elif (poutcome == \"Successs\"):\n new_poutcome = 4\n elif (poutcome == \"Failure\"):\n new_poutcome = 2\n print(new_poutcome)\n bank_data = np.array([age,new_job,new_marital,new_education,balance,new_housing,new_loan,duration,campaign,pdays,previous,new_poutcome])\n clf = bank_model()\n c = clf.predict([bank_data])\n print(c)\n if c == [1]:\n # print(\"Not fraud\")\n 
response = 'Not Fraud'\n else:\n # print(\"Fraud\")\n response = 'Fraud'\n\n\n accuracy = 0.8962983425414365\n return render(request, 'bank/result.html', {\"result\": response, 'accuracy':accuracy})\n\n# analytics\n# def analysis(request):\n# return render(request, 'analysis.html', {'accuracy': accuracy})\n\n# credit card services\n@login_required(login_url='/login/')\ndef creditresult(request):\n if request.method == \"POST\":\n # get the data and print\n limit_balance = request.POST.get(\"limit_balance\")\n sex = request.POST.get(\"sex\")\n print(sex)\n if(sex==\"Male\"):\n new_sex = 1\n else:\n new_sex = 2\n print(new_sex)\n education = request.POST.get(\"education\")\n if (education == \"Primary\"):\n new_education = 1\n elif (education == \"Secondary\"):\n new_education = 2\n elif (education == \"Graduate\"):\n new_education = 3\n print(new_education)\n marriage = request.POST.get(\"marriage\")\n if (marriage == \"Single\"):\n new_marriage = 1\n elif (marriage == \"Married\"):\n new_marriage = 2\n elif (education == \"Divorced\"):\n new_marriage = 3\n print(new_marriage)\n age = request.POST.get(\"age\")\n pay_1 = int(request.POST.get(\"pay_1\"))\n pay_2 = int(request.POST.get(\"pay_2\"))\n pay_3 = int(request.POST.get(\"pay_3\"))\n pay_4 = int(request.POST.get(\"pay_4\"))\n pay_5 = int(request.POST.get(\"pay_5\"))\n pay_6 = int(request.POST.get(\"pay_6\"))\n Bill_Amt_1 = int(request.POST.get(\"Bill_Amt_1\"))\n Bill_Amt_2 = int(request.POST.get(\"Bill_Amt_2\"))\n Bill_Amt_3 = int(request.POST.get(\"Bill_Amt_3\"))\n Bill_Amt_4 = int(request.POST.get(\"Bill_Amt_4\"))\n Bill_Amt_5 = int(request.POST.get(\"Bill_Amt_5\"))\n Bill_Amt_6 = int(request.POST.get(\"Bill_Amt_6\"))\n Pay_Amt_1 = int(request.POST.get(\"Pay_Amt_1\"))\n Pay_Amt_2 = int(request.POST.get(\"Pay_Amt_2\"))\n Pay_Amt_3 = int(request.POST.get(\"Pay_Amt_3\"))\n Pay_Amt_4 = int(request.POST.get(\"Pay_Amt_4\"))\n Pay_Amt_5 = int(request.POST.get(\"Pay_Amt_5\"))\n Pay_Amt_6 = int(request.POST.get(\"Pay_Amt_6\"))\n credit_data = np.array([limit_balance, new_sex, new_education, new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6, Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5, Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4, Pay_Amt_5, Pay_Amt_6])\n print(credit_data)\n clf = credit_model()\n c = clf.predict([credit_data])\n print(c)\n if c == [0]:\n response = 'Not a Fraud'\n else:\n response = 'fraud'\n # print(c)\n return render(request, 'creditcard/result.html', {\"result\": response})\n else:\n return redirect('/creditcard',request)\n\n# mobile fraud services\n@login_required(login_url='/login/')\ndef mobileresult(request):\n # get the data and print\n step = request.POST.get(\"step\")\n type = request.POST.get(\"type\")\n if (type == \"Payment\"):\n new_type = 1\n elif (type == \"Transfer\"):\n new_type = 4\n elif (type == \"Cash-out\"):\n new_type = 5\n elif (type == \"Debit\"):\n new_type = 2\n print(new_type)\n amount = request.POST.get(\"amount\")\n nameOrig = request.POST.get(\"nameOrig\")\n oldbalanceOrg = request.POST.get(\"oldbalanceOrg\")\n newbalanceOrig = request.POST.get(\"newbalanceOrig\")\n nameDest = request.POST.get(\"nameDest\")\n oldbalanceDest = request.POST.get(\"oldbalanceDest\")\n newbalanceDest = request.POST.get(\"newbalanceDest\")\n # isFraud = int(request.POST.get(\"isFraud\")))\n isFlaggedFraud = 1\n mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg, newbalanceOrig, nameDest,oldbalanceDest, newbalanceDest, isFlaggedFraud])\n # print(bank_data)\n clf 
= mobile_model()\n c = clf.predict([mobile_data])\n print(c)\n if c == [0]:\n # print(\"Not fraud\")\n response = 'Not Fraud'\n else:\n # print(\"Fraud\")\n response = 'Fraud'\n return render(request, 'mobile/result.html', {\"result\": response})\n\n# analytics page\ndef analytics(request):\n return render(request, 'analytics.html', {'analytics':result, \"mobile_analytics\": mobile_result, \"creditcard_analytics\": creditcard_result})", "step-ids": [ 7, 9, 11, 14, 16 ] }
[ 7, 9, 11, 14, 16 ]
from nintendo.nex import backend, authentication, friends, matchmaking, common
from nintendo.account import AccountAPI
from nintendo.games import MK8, Friends
import struct

import logging
logging.basicConfig(level=logging.INFO)

#Device id can be retrieved with a call to MCP_GetDeviceId on the Wii U
#Serial number can be found on the back of the Wii U
DEVICE_ID = 12345678
SERIAL_NUMBER = "..."
SYSTEM_VERSION = 0x220
REGION = 4 #EUR
COUNTRY = "NL"

USERNAME = "..." #Nintendo network id
PASSWORD = "..." #Nintendo network password


#This function logs in on a game server
def backend_login(title, use_auth_info, use_login_data, settings=None):
	api.set_title(title.TITLE_ID_EUR, title.LATEST_VERSION)
	nex_token = api.get_nex_token(title.GAME_SERVER_ID)

	auth_info = None
	login_data = None
	if use_auth_info:
		auth_info = authentication.AuthenticationInfo()
		auth_info.token = nex_token.token
		auth_info.server_version = title.SERVER_VERSION
	if use_login_data:
		login_data = authentication.NintendoLoginData()
		login_data.token = nex_token.token
	
	client = backend.BackEndClient(settings)
	#Note: fixed typo here ("clietn" -> "client") so the script can run
	client.configure(title.ACCESS_KEY, title.NEX_VERSION)
	client.connect(nex_token.host, nex_token.port)
	client.login(
		nex_token.username, nex_token.password, auth_info, login_data
	)
	return client


api = AccountAPI()
api.set_device(DEVICE_ID, SERIAL_NUMBER, SYSTEM_VERSION, REGION, COUNTRY)
api.login(USERNAME, PASSWORD)

#Connect to both the Mario Kart 8 server and the Wii U friends server
friends_backend = backend_login(
	Friends, False, True, "friends.cfg"
)
game_backend = backend_login(MK8, True, False)

pid = game_backend.get_pid()

friends_client = friends.FriendsClient(friends_backend.secure_client)
matchmaker = matchmaking.MatchmakeExtensionClient(game_backend.secure_client)

#Create a matchmake session
matchmake_session = matchmaking.MatchmakeSession()
matchmake_session.player_min = 2
matchmake_session.player_max = 12
matchmake_session.participation_policy = 98
matchmake_session.game_mode = 3
matchmake_session.attribs[4] = 0x403 #DLCs enabled
matchmake_session.matchmake_system = matchmaking.MatchmakeSystem.FRIENDS

session_id = matchmaker.create_matchmake_session(
	matchmake_session, "", 1
).gid

#Tell friends we're playing MK8 and have created a room
application_data = b"\0\0\x20\x03\0\0\0\0\0\0\0\0\x18" + struct.pack("<I", pid) + b"\0\0\0"

presence = friends.NintendoPresenceV2()
presence.flags = 0x1EE
presence.is_online = True
presence.game_key.title_id = MK8.TITLE_ID_EUR
presence.game_key.title_version = MK8.LATEST_VERSION
presence.message = "I'm a Python client"
presence.unk2 = 2
presence.unk3 = 2
presence.game_server_id = MK8.GAME_SERVER_ID
presence.unk4 = 3
presence.pid = pid
presence.gathering_id = session_id
presence.application_data = application_data

friends_client.update_presence(presence)

input("Press enter to disconnect and exit\n")

#Tell friends we've gone offline
presence = friends.NintendoPresenceV2()
friends_client.update_presence(presence)

#Disconnect from servers
game_backend.close()
friends_backend.close()
normal
{ "blob_id": "43315abf9e096cdca89ed7f4de976d2706ff9c20", "index": 9234, "step-1": "<mask token>\n\n\ndef backend_login(title, use_auth_info, use_login_data, settings=None):\n api.set_title(title.TITLE_ID_EUR, title.LATEST_VERSION)\n nex_token = api.get_nex_token(title.GAME_SERVER_ID)\n auth_info = None\n login_data = None\n if use_auth_info:\n auth_info = authentication.AuthenticationInfo()\n auth_info.token = nex_token.token\n auth_info.server_version = title.SERVER_VERSION\n if use_login_data:\n login_data = authentication.NintendoLoginData()\n login_data.token = nex_token.token\n client = backend.BackEndClient(settings)\n clietn.configure(title.ACCESS_KEY, title.NEX_VERSION)\n client.connect(nex_token.host, nex_token.port)\n client.login(nex_token.username, nex_token.password, auth_info, login_data)\n return client\n\n\n<mask token>\n", "step-2": "<mask token>\nlogging.basicConfig(level=logging.INFO)\n<mask token>\n\n\ndef backend_login(title, use_auth_info, use_login_data, settings=None):\n api.set_title(title.TITLE_ID_EUR, title.LATEST_VERSION)\n nex_token = api.get_nex_token(title.GAME_SERVER_ID)\n auth_info = None\n login_data = None\n if use_auth_info:\n auth_info = authentication.AuthenticationInfo()\n auth_info.token = nex_token.token\n auth_info.server_version = title.SERVER_VERSION\n if use_login_data:\n login_data = authentication.NintendoLoginData()\n login_data.token = nex_token.token\n client = backend.BackEndClient(settings)\n clietn.configure(title.ACCESS_KEY, title.NEX_VERSION)\n client.connect(nex_token.host, nex_token.port)\n client.login(nex_token.username, nex_token.password, auth_info, login_data)\n return client\n\n\n<mask token>\napi.set_device(DEVICE_ID, SERIAL_NUMBER, SYSTEM_VERSION, REGION, COUNTRY)\napi.login(USERNAME, PASSWORD)\n<mask token>\nfriends_client.update_presence(presence)\ninput('Press enter to disconnect and exit\\n')\n<mask token>\nfriends_client.update_presence(presence)\ngame_backend.close()\nfriends_backend.close()\n", "step-3": "<mask token>\nlogging.basicConfig(level=logging.INFO)\nDEVICE_ID = 12345678\nSERIAL_NUMBER = '...'\nSYSTEM_VERSION = 544\nREGION = 4\nCOUNTRY = 'NL'\nUSERNAME = '...'\nPASSWORD = '...'\n\n\ndef backend_login(title, use_auth_info, use_login_data, settings=None):\n api.set_title(title.TITLE_ID_EUR, title.LATEST_VERSION)\n nex_token = api.get_nex_token(title.GAME_SERVER_ID)\n auth_info = None\n login_data = None\n if use_auth_info:\n auth_info = authentication.AuthenticationInfo()\n auth_info.token = nex_token.token\n auth_info.server_version = title.SERVER_VERSION\n if use_login_data:\n login_data = authentication.NintendoLoginData()\n login_data.token = nex_token.token\n client = backend.BackEndClient(settings)\n clietn.configure(title.ACCESS_KEY, title.NEX_VERSION)\n client.connect(nex_token.host, nex_token.port)\n client.login(nex_token.username, nex_token.password, auth_info, login_data)\n return client\n\n\napi = AccountAPI()\napi.set_device(DEVICE_ID, SERIAL_NUMBER, SYSTEM_VERSION, REGION, COUNTRY)\napi.login(USERNAME, PASSWORD)\nfriends_backend = backend_login(Friends, False, True, 'friends.cfg')\ngame_backend = backend_login(MK8, True, False)\npid = game_backend.get_pid()\nfriends_client = friends.FriendsClient(friends_backend.secure_client)\nmatchmaker = matchmaking.MatchmakeExtensionClient(game_backend.secure_client)\nmatchmake_session = matchmaking.MatchmakeSession()\nmatchmake_session.player_min = 2\nmatchmake_session.player_max = 12\nmatchmake_session.participation_policy = 98\nmatchmake_session.game_mode = 
3\nmatchmake_session.attribs[4] = 1027\nmatchmake_session.matchmake_system = matchmaking.MatchmakeSystem.FRIENDS\nsession_id = matchmaker.create_matchmake_session(matchmake_session, '', 1).gid\napplication_data = (b'\\x00\\x00 \\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x18' +\n struct.pack('<I', pid) + b'\\x00\\x00\\x00')\npresence = friends.NintendoPresenceV2()\npresence.flags = 494\npresence.is_online = True\npresence.game_key.title_id = MK8.TITLE_ID_EUR\npresence.game_key.title_version = MK8.LATEST_VERSION\npresence.message = \"I'm a Python client\"\npresence.unk2 = 2\npresence.unk3 = 2\npresence.game_server_id = MK8.GAME_SERVER_ID\npresence.unk4 = 3\npresence.pid = pid\npresence.gathering_id = session_id\npresence.application_data = application_data\nfriends_client.update_presence(presence)\ninput('Press enter to disconnect and exit\\n')\npresence = friends.NintendoPresenceV2()\nfriends_client.update_presence(presence)\ngame_backend.close()\nfriends_backend.close()\n", "step-4": "from nintendo.nex import backend, authentication, friends, matchmaking, common\nfrom nintendo.account import AccountAPI\nfrom nintendo.games import MK8, Friends\nimport struct\nimport logging\nlogging.basicConfig(level=logging.INFO)\nDEVICE_ID = 12345678\nSERIAL_NUMBER = '...'\nSYSTEM_VERSION = 544\nREGION = 4\nCOUNTRY = 'NL'\nUSERNAME = '...'\nPASSWORD = '...'\n\n\ndef backend_login(title, use_auth_info, use_login_data, settings=None):\n api.set_title(title.TITLE_ID_EUR, title.LATEST_VERSION)\n nex_token = api.get_nex_token(title.GAME_SERVER_ID)\n auth_info = None\n login_data = None\n if use_auth_info:\n auth_info = authentication.AuthenticationInfo()\n auth_info.token = nex_token.token\n auth_info.server_version = title.SERVER_VERSION\n if use_login_data:\n login_data = authentication.NintendoLoginData()\n login_data.token = nex_token.token\n client = backend.BackEndClient(settings)\n clietn.configure(title.ACCESS_KEY, title.NEX_VERSION)\n client.connect(nex_token.host, nex_token.port)\n client.login(nex_token.username, nex_token.password, auth_info, login_data)\n return client\n\n\napi = AccountAPI()\napi.set_device(DEVICE_ID, SERIAL_NUMBER, SYSTEM_VERSION, REGION, COUNTRY)\napi.login(USERNAME, PASSWORD)\nfriends_backend = backend_login(Friends, False, True, 'friends.cfg')\ngame_backend = backend_login(MK8, True, False)\npid = game_backend.get_pid()\nfriends_client = friends.FriendsClient(friends_backend.secure_client)\nmatchmaker = matchmaking.MatchmakeExtensionClient(game_backend.secure_client)\nmatchmake_session = matchmaking.MatchmakeSession()\nmatchmake_session.player_min = 2\nmatchmake_session.player_max = 12\nmatchmake_session.participation_policy = 98\nmatchmake_session.game_mode = 3\nmatchmake_session.attribs[4] = 1027\nmatchmake_session.matchmake_system = matchmaking.MatchmakeSystem.FRIENDS\nsession_id = matchmaker.create_matchmake_session(matchmake_session, '', 1).gid\napplication_data = (b'\\x00\\x00 \\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x18' +\n struct.pack('<I', pid) + b'\\x00\\x00\\x00')\npresence = friends.NintendoPresenceV2()\npresence.flags = 494\npresence.is_online = True\npresence.game_key.title_id = MK8.TITLE_ID_EUR\npresence.game_key.title_version = MK8.LATEST_VERSION\npresence.message = \"I'm a Python client\"\npresence.unk2 = 2\npresence.unk3 = 2\npresence.game_server_id = MK8.GAME_SERVER_ID\npresence.unk4 = 3\npresence.pid = pid\npresence.gathering_id = session_id\npresence.application_data = application_data\nfriends_client.update_presence(presence)\ninput('Press 
enter to disconnect and exit\\n')\npresence = friends.NintendoPresenceV2()\nfriends_client.update_presence(presence)\ngame_backend.close()\nfriends_backend.close()\n", "step-5": "\nfrom nintendo.nex import backend, authentication, friends, matchmaking, common\nfrom nintendo.account import AccountAPI\nfrom nintendo.games import MK8, Friends\nimport struct\n\nimport logging\nlogging.basicConfig(level=logging.INFO)\n\n#Device id can be retrieved with a call to MCP_GetDeviceId on the Wii U\n#Serial number can be found on the back of the Wii U\nDEVICE_ID = 12345678\nSERIAL_NUMBER = \"...\"\nSYSTEM_VERSION = 0x220\nREGION = 4 #EUR\nCOUNTRY = \"NL\"\n\nUSERNAME = \"...\" #Nintendo network id\nPASSWORD = \"...\" #Nintendo network password\n\n\n#This function logs in on a game server\ndef backend_login(title, use_auth_info, use_login_data, settings=None):\n\tapi.set_title(title.TITLE_ID_EUR, title.LATEST_VERSION)\n\tnex_token = api.get_nex_token(title.GAME_SERVER_ID)\n\n\tauth_info = None\n\tlogin_data = None\n\tif use_auth_info:\n\t\tauth_info = authentication.AuthenticationInfo()\n\t\tauth_info.token = nex_token.token\n\t\tauth_info.server_version = title.SERVER_VERSION\n\tif use_login_data:\n\t\tlogin_data = authentication.NintendoLoginData()\n\t\tlogin_data.token = nex_token.token\n\t\n\tclient = backend.BackEndClient(settings)\n\tclietn.configure(title.ACCESS_KEY, title.NEX_VERSION)\n\tclient.connect(nex_token.host, nex_token.port)\n\tclient.login(\n\t\tnex_token.username, nex_token.password, auth_info, login_data\n\t)\n\treturn client\n\n\napi = AccountAPI()\napi.set_device(DEVICE_ID, SERIAL_NUMBER, SYSTEM_VERSION, REGION, COUNTRY)\napi.login(USERNAME, PASSWORD)\n\n#Connect to both the Mario Kart 8 server and the Wii U friends server\nfriends_backend = backend_login(\n\tFriends, False, True, \"friends.cfg\"\n)\ngame_backend = backend_login(MK8, True, False)\n\npid = game_backend.get_pid()\n\nfriends_client = friends.FriendsClient(friends_backend.secure_client)\nmatchmaker = matchmaking.MatchmakeExtensionClient(game_backend.secure_client)\n\n#Create a matchmake session\nmatchmake_session = matchmaking.MatchmakeSession()\nmatchmake_session.player_min = 2\nmatchmake_session.player_max = 12\nmatchmake_session.participation_policy = 98\nmatchmake_session.game_mode = 3\nmatchmake_session.attribs[4] = 0x403 #DLCs enabled\nmatchmake_session.matchmake_system = matchmaking.MatchmakeSystem.FRIENDS\n\nsession_id = matchmaker.create_matchmake_session(\n\tmatchmake_session, \"\", 1\n).gid\n\n#Tell friends we're playing MK8 and have created a room\napplication_data = b\"\\0\\0\\x20\\x03\\0\\0\\0\\0\\0\\0\\0\\0\\x18\" + struct.pack(\"<I\", pid) + b\"\\0\\0\\0\"\n\npresence = friends.NintendoPresenceV2()\npresence.flags = 0x1EE\npresence.is_online = True\npresence.game_key.title_id = MK8.TITLE_ID_EUR\npresence.game_key.title_version = MK8.LATEST_VERSION\npresence.message = \"I'm a Python client\"\npresence.unk2 = 2\npresence.unk3 = 2\npresence.game_server_id = MK8.GAME_SERVER_ID\npresence.unk4 = 3\npresence.pid = pid\npresence.gathering_id = session_id\npresence.application_data = application_data\n\nfriends_client.update_presence(presence)\n\ninput(\"Press enter to disconnect and exit\\n\")\n\n#Tell friends we've gone offline\npresence = friends.NintendoPresenceV2()\nfriends_client.update_presence(presence)\n\n#Disconnect from servers\ngame_backend.close()\nfriends_backend.close()\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> def boxplot(values): """Calculate percentiles needed for a boxplot.""" percentiles = percentile(values, [0, 25, 50, 75, 100]) result = {'min_val': percentiles[0], 'q1_val': percentiles[1], 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val': percentiles[4]} return result def scrub_category_val(category_val): """Make sure that category val is a string with positive length.""" if not isinstance(category_val, str): category_val = str(category_val) if category_val.lower() == 'nan': category_val = 'NaN' if not category_val: category_val = 'NaN' return category_val def collate_samples(tool_name, fields, samples): """Group a set of ToolResult fields from a set of samples by sample name.""" sample_dict = {} for sample in samples: sample_name = sample['name'] sample_dict[sample_name] = {} tool_result = sample[tool_name] for field in fields: sample_dict[sample_name][field] = tool_result[field] return sample_dict def categories_from_metadata(samples, min_size=2): """ Create dict of categories and their values from sample metadata. Parameters ---------- samples : list List of sample models. min_size: int Minimum number of values required for a given metadata item to be included in returned categories. Returns ------- dict Dictionary of form {<category_name>: [category_value[, category_value]]} """ categories = {} all_metadata = [sample['metadata'] for sample in samples] for metadata in all_metadata: properties = [prop for prop in metadata.keys()] for prop in properties: if prop not in categories: categories[prop] = set([]) category_val = metadata[prop] category_val = scrub_category_val(category_val) categories[prop].add(category_val) categories = {category_name: list(category_values) for category_name, category_values in categories.items() if len(category_values) >= min_size} return categories <|reserved_special_token_1|> <|reserved_special_token_0|> def get_primary_module(package): """Extract AnalysisModule primary module from package.""" def test_submodule(submodule): """Test a submodule to see if it is an AnalysisModule module.""" is_correct_subclass = issubclass(submodule, AnalysisModule) is_correct_module = package.__name__ in submodule.__module__ return is_correct_subclass and is_correct_module submodules = inspect.getmembers(package, inspect.isclass) module = next(submodule for _, submodule in submodules if test_submodule(submodule)) return module def scrub_object(obj): """Remove protected fields from object (dict or list).""" if isinstance(obj, list): return [scrub_object(item) for item in obj] if isinstance(obj, dict): clean_dict = {key: scrub_object(value) for key, value in obj.items( ) if not key.startswith('_')} return clean_dict return obj <|reserved_special_token_0|> def boxplot(values): """Calculate percentiles needed for a boxplot.""" percentiles = percentile(values, [0, 25, 50, 75, 100]) result = {'min_val': percentiles[0], 'q1_val': percentiles[1], 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val': percentiles[4]} return result def scrub_category_val(category_val): """Make sure that category val is a string with positive length.""" if not isinstance(category_val, str): category_val = str(category_val) if category_val.lower() == 'nan': category_val = 'NaN' if not category_val: category_val = 'NaN' return category_val def collate_samples(tool_name, fields, samples): """Group a set of ToolResult fields from a set of samples by sample name.""" sample_dict = {} for sample in samples: sample_name = sample['name'] 
sample_dict[sample_name] = {} tool_result = sample[tool_name] for field in fields: sample_dict[sample_name][field] = tool_result[field] return sample_dict def categories_from_metadata(samples, min_size=2): """ Create dict of categories and their values from sample metadata. Parameters ---------- samples : list List of sample models. min_size: int Minimum number of values required for a given metadata item to be included in returned categories. Returns ------- dict Dictionary of form {<category_name>: [category_value[, category_value]]} """ categories = {} all_metadata = [sample['metadata'] for sample in samples] for metadata in all_metadata: properties = [prop for prop in metadata.keys()] for prop in properties: if prop not in categories: categories[prop] = set([]) category_val = metadata[prop] category_val = scrub_category_val(category_val) categories[prop].add(category_val) categories = {category_name: list(category_values) for category_name, category_values in categories.items() if len(category_values) >= min_size} return categories <|reserved_special_token_1|> <|reserved_special_token_0|> def get_primary_module(package): """Extract AnalysisModule primary module from package.""" def test_submodule(submodule): """Test a submodule to see if it is an AnalysisModule module.""" is_correct_subclass = issubclass(submodule, AnalysisModule) is_correct_module = package.__name__ in submodule.__module__ return is_correct_subclass and is_correct_module submodules = inspect.getmembers(package, inspect.isclass) module = next(submodule for _, submodule in submodules if test_submodule(submodule)) return module def scrub_object(obj): """Remove protected fields from object (dict or list).""" if isinstance(obj, list): return [scrub_object(item) for item in obj] if isinstance(obj, dict): clean_dict = {key: scrub_object(value) for key, value in obj.items( ) if not key.startswith('_')} return clean_dict return obj def jsonify(mongo_doc): """Convert Mongo document to JSON for serialization.""" if isinstance(mongo_doc, (QuerySet, list)): return [jsonify(element) for element in mongo_doc] result_dict = mongo_doc.to_mongo().to_dict() clean_dict = scrub_object(result_dict) return clean_dict def boxplot(values): """Calculate percentiles needed for a boxplot.""" percentiles = percentile(values, [0, 25, 50, 75, 100]) result = {'min_val': percentiles[0], 'q1_val': percentiles[1], 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val': percentiles[4]} return result def scrub_category_val(category_val): """Make sure that category val is a string with positive length.""" if not isinstance(category_val, str): category_val = str(category_val) if category_val.lower() == 'nan': category_val = 'NaN' if not category_val: category_val = 'NaN' return category_val def collate_samples(tool_name, fields, samples): """Group a set of ToolResult fields from a set of samples by sample name.""" sample_dict = {} for sample in samples: sample_name = sample['name'] sample_dict[sample_name] = {} tool_result = sample[tool_name] for field in fields: sample_dict[sample_name][field] = tool_result[field] return sample_dict def categories_from_metadata(samples, min_size=2): """ Create dict of categories and their values from sample metadata. Parameters ---------- samples : list List of sample models. min_size: int Minimum number of values required for a given metadata item to be included in returned categories. 
Returns ------- dict Dictionary of form {<category_name>: [category_value[, category_value]]} """ categories = {} all_metadata = [sample['metadata'] for sample in samples] for metadata in all_metadata: properties = [prop for prop in metadata.keys()] for prop in properties: if prop not in categories: categories[prop] = set([]) category_val = metadata[prop] category_val = scrub_category_val(category_val) categories[prop].add(category_val) categories = {category_name: list(category_values) for category_name, category_values in categories.items() if len(category_values) >= min_size} return categories <|reserved_special_token_1|> <|reserved_special_token_0|> import inspect from mongoengine import QuerySet from numpy import percentile from .modules import AnalysisModule def get_primary_module(package): """Extract AnalysisModule primary module from package.""" def test_submodule(submodule): """Test a submodule to see if it is an AnalysisModule module.""" is_correct_subclass = issubclass(submodule, AnalysisModule) is_correct_module = package.__name__ in submodule.__module__ return is_correct_subclass and is_correct_module submodules = inspect.getmembers(package, inspect.isclass) module = next(submodule for _, submodule in submodules if test_submodule(submodule)) return module def scrub_object(obj): """Remove protected fields from object (dict or list).""" if isinstance(obj, list): return [scrub_object(item) for item in obj] if isinstance(obj, dict): clean_dict = {key: scrub_object(value) for key, value in obj.items( ) if not key.startswith('_')} return clean_dict return obj def jsonify(mongo_doc): """Convert Mongo document to JSON for serialization.""" if isinstance(mongo_doc, (QuerySet, list)): return [jsonify(element) for element in mongo_doc] result_dict = mongo_doc.to_mongo().to_dict() clean_dict = scrub_object(result_dict) return clean_dict def boxplot(values): """Calculate percentiles needed for a boxplot.""" percentiles = percentile(values, [0, 25, 50, 75, 100]) result = {'min_val': percentiles[0], 'q1_val': percentiles[1], 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val': percentiles[4]} return result def scrub_category_val(category_val): """Make sure that category val is a string with positive length.""" if not isinstance(category_val, str): category_val = str(category_val) if category_val.lower() == 'nan': category_val = 'NaN' if not category_val: category_val = 'NaN' return category_val def collate_samples(tool_name, fields, samples): """Group a set of ToolResult fields from a set of samples by sample name.""" sample_dict = {} for sample in samples: sample_name = sample['name'] sample_dict[sample_name] = {} tool_result = sample[tool_name] for field in fields: sample_dict[sample_name][field] = tool_result[field] return sample_dict def categories_from_metadata(samples, min_size=2): """ Create dict of categories and their values from sample metadata. Parameters ---------- samples : list List of sample models. min_size: int Minimum number of values required for a given metadata item to be included in returned categories. 
Returns ------- dict Dictionary of form {<category_name>: [category_value[, category_value]]} """ categories = {} all_metadata = [sample['metadata'] for sample in samples] for metadata in all_metadata: properties = [prop for prop in metadata.keys()] for prop in properties: if prop not in categories: categories[prop] = set([]) category_val = metadata[prop] category_val = scrub_category_val(category_val) categories[prop].add(category_val) categories = {category_name: list(category_values) for category_name, category_values in categories.items() if len(category_values) >= min_size} return categories <|reserved_special_token_1|> """Utilities for AnalysisModules.""" import inspect from mongoengine import QuerySet from numpy import percentile from .modules import AnalysisModule def get_primary_module(package): """Extract AnalysisModule primary module from package.""" def test_submodule(submodule): """Test a submodule to see if it is an AnalysisModule module.""" is_correct_subclass = issubclass(submodule, AnalysisModule) # Ensure submodule is defined within the package we are inspecting (and not 'base') is_correct_module = package.__name__ in submodule.__module__ return is_correct_subclass and is_correct_module submodules = inspect.getmembers(package, inspect.isclass) module = next(submodule for _, submodule in submodules if test_submodule(submodule)) return module def scrub_object(obj): """Remove protected fields from object (dict or list).""" if isinstance(obj, list): return [scrub_object(item) for item in obj] if isinstance(obj, dict): clean_dict = {key: scrub_object(value) for key, value in obj.items() if not key.startswith('_')} return clean_dict return obj def jsonify(mongo_doc): """Convert Mongo document to JSON for serialization.""" if isinstance(mongo_doc, (QuerySet, list,)): return [jsonify(element) for element in mongo_doc] result_dict = mongo_doc.to_mongo().to_dict() clean_dict = scrub_object(result_dict) return clean_dict def boxplot(values): """Calculate percentiles needed for a boxplot.""" percentiles = percentile(values, [0, 25, 50, 75, 100]) result = {'min_val': percentiles[0], 'q1_val': percentiles[1], 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val': percentiles[4]} return result def scrub_category_val(category_val): """Make sure that category val is a string with positive length.""" if not isinstance(category_val, str): category_val = str(category_val) if category_val.lower() == 'nan': category_val = 'NaN' if not category_val: category_val = 'NaN' return category_val def collate_samples(tool_name, fields, samples): """Group a set of ToolResult fields from a set of samples by sample name.""" sample_dict = {} for sample in samples: sample_name = sample['name'] sample_dict[sample_name] = {} tool_result = sample[tool_name] for field in fields: sample_dict[sample_name][field] = tool_result[field] return sample_dict def categories_from_metadata(samples, min_size=2): """ Create dict of categories and their values from sample metadata. Parameters ---------- samples : list List of sample models. min_size: int Minimum number of values required for a given metadata item to be included in returned categories. 
Returns ------- dict Dictionary of form {<category_name>: [category_value[, category_value]]} """ categories = {} # Gather categories and values all_metadata = [sample['metadata'] for sample in samples] for metadata in all_metadata: properties = [prop for prop in metadata.keys()] for prop in properties: if prop not in categories: categories[prop] = set([]) category_val = metadata[prop] category_val = scrub_category_val(category_val) categories[prop].add(category_val) # Filter for minimum number of values categories = {category_name: list(category_values) for category_name, category_values in categories.items() if len(category_values) >= min_size} return categories
flexible
{ "blob_id": "3472dc0c9d00c10ab0690c052e70fbf6a4bdb13d", "index": 7889, "step-1": "<mask token>\n\n\ndef boxplot(values):\n \"\"\"Calculate percentiles needed for a boxplot.\"\"\"\n percentiles = percentile(values, [0, 25, 50, 75, 100])\n result = {'min_val': percentiles[0], 'q1_val': percentiles[1],\n 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val':\n percentiles[4]}\n return result\n\n\ndef scrub_category_val(category_val):\n \"\"\"Make sure that category val is a string with positive length.\"\"\"\n if not isinstance(category_val, str):\n category_val = str(category_val)\n if category_val.lower() == 'nan':\n category_val = 'NaN'\n if not category_val:\n category_val = 'NaN'\n return category_val\n\n\ndef collate_samples(tool_name, fields, samples):\n \"\"\"Group a set of ToolResult fields from a set of samples by sample name.\"\"\"\n sample_dict = {}\n for sample in samples:\n sample_name = sample['name']\n sample_dict[sample_name] = {}\n tool_result = sample[tool_name]\n for field in fields:\n sample_dict[sample_name][field] = tool_result[field]\n return sample_dict\n\n\ndef categories_from_metadata(samples, min_size=2):\n \"\"\"\n Create dict of categories and their values from sample metadata.\n\n Parameters\n ----------\n samples : list\n List of sample models.\n min_size: int\n Minimum number of values required for a given metadata item to\n be included in returned categories.\n\n Returns\n -------\n dict\n Dictionary of form {<category_name>: [category_value[, category_value]]}\n\n \"\"\"\n categories = {}\n all_metadata = [sample['metadata'] for sample in samples]\n for metadata in all_metadata:\n properties = [prop for prop in metadata.keys()]\n for prop in properties:\n if prop not in categories:\n categories[prop] = set([])\n category_val = metadata[prop]\n category_val = scrub_category_val(category_val)\n categories[prop].add(category_val)\n categories = {category_name: list(category_values) for category_name,\n category_values in categories.items() if len(category_values) >=\n min_size}\n return categories\n", "step-2": "<mask token>\n\n\ndef get_primary_module(package):\n \"\"\"Extract AnalysisModule primary module from package.\"\"\"\n\n def test_submodule(submodule):\n \"\"\"Test a submodule to see if it is an AnalysisModule module.\"\"\"\n is_correct_subclass = issubclass(submodule, AnalysisModule)\n is_correct_module = package.__name__ in submodule.__module__\n return is_correct_subclass and is_correct_module\n submodules = inspect.getmembers(package, inspect.isclass)\n module = next(submodule for _, submodule in submodules if\n test_submodule(submodule))\n return module\n\n\ndef scrub_object(obj):\n \"\"\"Remove protected fields from object (dict or list).\"\"\"\n if isinstance(obj, list):\n return [scrub_object(item) for item in obj]\n if isinstance(obj, dict):\n clean_dict = {key: scrub_object(value) for key, value in obj.items(\n ) if not key.startswith('_')}\n return clean_dict\n return obj\n\n\n<mask token>\n\n\ndef boxplot(values):\n \"\"\"Calculate percentiles needed for a boxplot.\"\"\"\n percentiles = percentile(values, [0, 25, 50, 75, 100])\n result = {'min_val': percentiles[0], 'q1_val': percentiles[1],\n 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val':\n percentiles[4]}\n return result\n\n\ndef scrub_category_val(category_val):\n \"\"\"Make sure that category val is a string with positive length.\"\"\"\n if not isinstance(category_val, str):\n category_val = str(category_val)\n if category_val.lower() == 'nan':\n category_val 
= 'NaN'\n if not category_val:\n category_val = 'NaN'\n return category_val\n\n\ndef collate_samples(tool_name, fields, samples):\n \"\"\"Group a set of ToolResult fields from a set of samples by sample name.\"\"\"\n sample_dict = {}\n for sample in samples:\n sample_name = sample['name']\n sample_dict[sample_name] = {}\n tool_result = sample[tool_name]\n for field in fields:\n sample_dict[sample_name][field] = tool_result[field]\n return sample_dict\n\n\ndef categories_from_metadata(samples, min_size=2):\n \"\"\"\n Create dict of categories and their values from sample metadata.\n\n Parameters\n ----------\n samples : list\n List of sample models.\n min_size: int\n Minimum number of values required for a given metadata item to\n be included in returned categories.\n\n Returns\n -------\n dict\n Dictionary of form {<category_name>: [category_value[, category_value]]}\n\n \"\"\"\n categories = {}\n all_metadata = [sample['metadata'] for sample in samples]\n for metadata in all_metadata:\n properties = [prop for prop in metadata.keys()]\n for prop in properties:\n if prop not in categories:\n categories[prop] = set([])\n category_val = metadata[prop]\n category_val = scrub_category_val(category_val)\n categories[prop].add(category_val)\n categories = {category_name: list(category_values) for category_name,\n category_values in categories.items() if len(category_values) >=\n min_size}\n return categories\n", "step-3": "<mask token>\n\n\ndef get_primary_module(package):\n \"\"\"Extract AnalysisModule primary module from package.\"\"\"\n\n def test_submodule(submodule):\n \"\"\"Test a submodule to see if it is an AnalysisModule module.\"\"\"\n is_correct_subclass = issubclass(submodule, AnalysisModule)\n is_correct_module = package.__name__ in submodule.__module__\n return is_correct_subclass and is_correct_module\n submodules = inspect.getmembers(package, inspect.isclass)\n module = next(submodule for _, submodule in submodules if\n test_submodule(submodule))\n return module\n\n\ndef scrub_object(obj):\n \"\"\"Remove protected fields from object (dict or list).\"\"\"\n if isinstance(obj, list):\n return [scrub_object(item) for item in obj]\n if isinstance(obj, dict):\n clean_dict = {key: scrub_object(value) for key, value in obj.items(\n ) if not key.startswith('_')}\n return clean_dict\n return obj\n\n\ndef jsonify(mongo_doc):\n \"\"\"Convert Mongo document to JSON for serialization.\"\"\"\n if isinstance(mongo_doc, (QuerySet, list)):\n return [jsonify(element) for element in mongo_doc]\n result_dict = mongo_doc.to_mongo().to_dict()\n clean_dict = scrub_object(result_dict)\n return clean_dict\n\n\ndef boxplot(values):\n \"\"\"Calculate percentiles needed for a boxplot.\"\"\"\n percentiles = percentile(values, [0, 25, 50, 75, 100])\n result = {'min_val': percentiles[0], 'q1_val': percentiles[1],\n 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val':\n percentiles[4]}\n return result\n\n\ndef scrub_category_val(category_val):\n \"\"\"Make sure that category val is a string with positive length.\"\"\"\n if not isinstance(category_val, str):\n category_val = str(category_val)\n if category_val.lower() == 'nan':\n category_val = 'NaN'\n if not category_val:\n category_val = 'NaN'\n return category_val\n\n\ndef collate_samples(tool_name, fields, samples):\n \"\"\"Group a set of ToolResult fields from a set of samples by sample name.\"\"\"\n sample_dict = {}\n for sample in samples:\n sample_name = sample['name']\n sample_dict[sample_name] = {}\n tool_result = sample[tool_name]\n for 
field in fields:\n sample_dict[sample_name][field] = tool_result[field]\n return sample_dict\n\n\ndef categories_from_metadata(samples, min_size=2):\n \"\"\"\n Create dict of categories and their values from sample metadata.\n\n Parameters\n ----------\n samples : list\n List of sample models.\n min_size: int\n Minimum number of values required for a given metadata item to\n be included in returned categories.\n\n Returns\n -------\n dict\n Dictionary of form {<category_name>: [category_value[, category_value]]}\n\n \"\"\"\n categories = {}\n all_metadata = [sample['metadata'] for sample in samples]\n for metadata in all_metadata:\n properties = [prop for prop in metadata.keys()]\n for prop in properties:\n if prop not in categories:\n categories[prop] = set([])\n category_val = metadata[prop]\n category_val = scrub_category_val(category_val)\n categories[prop].add(category_val)\n categories = {category_name: list(category_values) for category_name,\n category_values in categories.items() if len(category_values) >=\n min_size}\n return categories\n", "step-4": "<mask token>\nimport inspect\nfrom mongoengine import QuerySet\nfrom numpy import percentile\nfrom .modules import AnalysisModule\n\n\ndef get_primary_module(package):\n \"\"\"Extract AnalysisModule primary module from package.\"\"\"\n\n def test_submodule(submodule):\n \"\"\"Test a submodule to see if it is an AnalysisModule module.\"\"\"\n is_correct_subclass = issubclass(submodule, AnalysisModule)\n is_correct_module = package.__name__ in submodule.__module__\n return is_correct_subclass and is_correct_module\n submodules = inspect.getmembers(package, inspect.isclass)\n module = next(submodule for _, submodule in submodules if\n test_submodule(submodule))\n return module\n\n\ndef scrub_object(obj):\n \"\"\"Remove protected fields from object (dict or list).\"\"\"\n if isinstance(obj, list):\n return [scrub_object(item) for item in obj]\n if isinstance(obj, dict):\n clean_dict = {key: scrub_object(value) for key, value in obj.items(\n ) if not key.startswith('_')}\n return clean_dict\n return obj\n\n\ndef jsonify(mongo_doc):\n \"\"\"Convert Mongo document to JSON for serialization.\"\"\"\n if isinstance(mongo_doc, (QuerySet, list)):\n return [jsonify(element) for element in mongo_doc]\n result_dict = mongo_doc.to_mongo().to_dict()\n clean_dict = scrub_object(result_dict)\n return clean_dict\n\n\ndef boxplot(values):\n \"\"\"Calculate percentiles needed for a boxplot.\"\"\"\n percentiles = percentile(values, [0, 25, 50, 75, 100])\n result = {'min_val': percentiles[0], 'q1_val': percentiles[1],\n 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val':\n percentiles[4]}\n return result\n\n\ndef scrub_category_val(category_val):\n \"\"\"Make sure that category val is a string with positive length.\"\"\"\n if not isinstance(category_val, str):\n category_val = str(category_val)\n if category_val.lower() == 'nan':\n category_val = 'NaN'\n if not category_val:\n category_val = 'NaN'\n return category_val\n\n\ndef collate_samples(tool_name, fields, samples):\n \"\"\"Group a set of ToolResult fields from a set of samples by sample name.\"\"\"\n sample_dict = {}\n for sample in samples:\n sample_name = sample['name']\n sample_dict[sample_name] = {}\n tool_result = sample[tool_name]\n for field in fields:\n sample_dict[sample_name][field] = tool_result[field]\n return sample_dict\n\n\ndef categories_from_metadata(samples, min_size=2):\n \"\"\"\n Create dict of categories and their values from sample metadata.\n\n Parameters\n 
----------\n samples : list\n List of sample models.\n min_size: int\n Minimum number of values required for a given metadata item to\n be included in returned categories.\n\n Returns\n -------\n dict\n Dictionary of form {<category_name>: [category_value[, category_value]]}\n\n \"\"\"\n categories = {}\n all_metadata = [sample['metadata'] for sample in samples]\n for metadata in all_metadata:\n properties = [prop for prop in metadata.keys()]\n for prop in properties:\n if prop not in categories:\n categories[prop] = set([])\n category_val = metadata[prop]\n category_val = scrub_category_val(category_val)\n categories[prop].add(category_val)\n categories = {category_name: list(category_values) for category_name,\n category_values in categories.items() if len(category_values) >=\n min_size}\n return categories\n", "step-5": "\"\"\"Utilities for AnalysisModules.\"\"\"\n\nimport inspect\n\nfrom mongoengine import QuerySet\nfrom numpy import percentile\n\nfrom .modules import AnalysisModule\n\n\ndef get_primary_module(package):\n \"\"\"Extract AnalysisModule primary module from package.\"\"\"\n def test_submodule(submodule):\n \"\"\"Test a submodule to see if it is an AnalysisModule module.\"\"\"\n is_correct_subclass = issubclass(submodule, AnalysisModule)\n # Ensure submodule is defined within the package we are inspecting (and not 'base')\n is_correct_module = package.__name__ in submodule.__module__\n return is_correct_subclass and is_correct_module\n\n submodules = inspect.getmembers(package, inspect.isclass)\n module = next(submodule for _, submodule in submodules\n if test_submodule(submodule))\n return module\n\n\ndef scrub_object(obj):\n \"\"\"Remove protected fields from object (dict or list).\"\"\"\n if isinstance(obj, list):\n return [scrub_object(item) for item in obj]\n if isinstance(obj, dict):\n clean_dict = {key: scrub_object(value)\n for key, value in obj.items()\n if not key.startswith('_')}\n return clean_dict\n return obj\n\n\ndef jsonify(mongo_doc):\n \"\"\"Convert Mongo document to JSON for serialization.\"\"\"\n if isinstance(mongo_doc, (QuerySet, list,)):\n return [jsonify(element) for element in mongo_doc]\n result_dict = mongo_doc.to_mongo().to_dict()\n clean_dict = scrub_object(result_dict)\n return clean_dict\n\n\ndef boxplot(values):\n \"\"\"Calculate percentiles needed for a boxplot.\"\"\"\n percentiles = percentile(values, [0, 25, 50, 75, 100])\n result = {'min_val': percentiles[0],\n 'q1_val': percentiles[1],\n 'mean_val': percentiles[2],\n 'q3_val': percentiles[3],\n 'max_val': percentiles[4]}\n return result\n\n\ndef scrub_category_val(category_val):\n \"\"\"Make sure that category val is a string with positive length.\"\"\"\n if not isinstance(category_val, str):\n category_val = str(category_val)\n if category_val.lower() == 'nan':\n category_val = 'NaN'\n if not category_val:\n category_val = 'NaN'\n return category_val\n\n\ndef collate_samples(tool_name, fields, samples):\n \"\"\"Group a set of ToolResult fields from a set of samples by sample name.\"\"\"\n sample_dict = {}\n for sample in samples:\n sample_name = sample['name']\n sample_dict[sample_name] = {}\n tool_result = sample[tool_name]\n for field in fields:\n sample_dict[sample_name][field] = tool_result[field]\n\n return sample_dict\n\n\ndef categories_from_metadata(samples, min_size=2):\n \"\"\"\n Create dict of categories and their values from sample metadata.\n\n Parameters\n ----------\n samples : list\n List of sample models.\n min_size: int\n Minimum number of values required for a given 
metadata item to\n be included in returned categories.\n\n Returns\n -------\n dict\n Dictionary of form {<category_name>: [category_value[, category_value]]}\n\n \"\"\"\n categories = {}\n\n # Gather categories and values\n all_metadata = [sample['metadata'] for sample in samples]\n for metadata in all_metadata:\n properties = [prop for prop in metadata.keys()]\n for prop in properties:\n if prop not in categories:\n categories[prop] = set([])\n category_val = metadata[prop]\n category_val = scrub_category_val(category_val)\n categories[prop].add(category_val)\n\n # Filter for minimum number of values\n categories = {category_name: list(category_values)\n for category_name, category_values in categories.items()\n if len(category_values) >= min_size}\n\n return categories\n", "step-ids": [ 4, 6, 7, 8, 9 ] }
[ 4, 6, 7, 8, 9 ]
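The metadata utilities in the record above are easiest to read against a tiny input. The sketch below is hypothetical: the sample dicts and values are invented for illustration, and it assumes the functions from that record are importable as written.

# Hypothetical usage sketch for the metadata utilities above (sample data invented).
samples = [
    {'name': 's1', 'metadata': {'site': 'A', 'depth': 10}},
    {'name': 's2', 'metadata': {'site': 'B', 'depth': 20}},
    {'name': 's3', 'metadata': {'site': 'A', 'depth': 30}},
]

# Keeps only properties with at least `min_size` distinct (stringified) values,
# e.g. {'site': ['A', 'B'], 'depth': ['10', '20', '30']} (list order may vary).
categories = categories_from_metadata(samples, min_size=2)

# Five-number summary of a list of values:
# {'min_val': 10.0, 'q1_val': 15.0, 'mean_val': 20.0, 'q3_val': 25.0, 'max_val': 30.0}
stats = boxplot([10, 20, 30])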
<|reserved_special_token_0|> <|reserved_special_token_1|> TOTAL = 1306336 ONE = {'0': 1473, '1': 5936, '2': 3681, '3': 2996, '4': 2480, '5': 2494, '6': 1324, '7': 1474, '8': 1754, '9': 1740, 'a': 79714, 'b': 83472, 'c': 78015, 'd': 61702, 'e': 42190, 'f': 68530, 'g': 48942, 'h': 63661, 'i': 34947, 'j': 24312, 'k': 26724, 'l': 66351, 'm': 77245, 'n': 36942, 'o': 40744, 'p': 68978, 'q': 6750, 'r': 49135, 's': 116034, 't': 87440, 'u': 19423, 'v': 22356, 'w': 50718, 'x': 6079, 'y': 13089, 'z': 7491} TWO = {'0-': 19, '00': 145, '01': 143, '02': 212, '03': 90, '04': 61, '05': 241, '06': 31, '07': 151, '08': 104, '09': 99, '0a': 8, '0b': 8, '0c': 16, '0d': 18, '0e': 8, '0f': 7, '0g': 5, '0h': 4, '0i': 9, '0j': 1, '0k': 4, '0l': 2, '0m': 8, '0n': 10, '0o': 6, '0p': 10, '0r': 10, '0s': 10, '0t': 6, '0u': 5, '0v': 5, '0w': 5, '0x': 4, '0y': 3, '0z': 5, '1-': 177, '10': 983, '11': 537, '12': 767, '13': 327, '14': 270, '15': 257, '16': 276, '17': 318, '18': 505, '19': 280, '1a': 61, '1b': 58, '1c': 84, '1d': 52, '1e': 33, '1f': 32, '1g': 38, '1h': 44, '1i': 25, '1j': 13, '1k': 32, '1l': 33, '1m': 59, '1n': 39, '1o': 37, '1p': 68, '1q': 7, '1r': 21, '1s': 336, '1t': 54, '1u': 15, '1v': 14, '1w': 53, '1x': 7, '1y': 14, '1z': 10, '2-': 30, '20': 889, '21': 406, '22': 228, '23': 172, '24': 480, '25': 177, '26': 126, '27': 96, '28': 108, '29': 73, '2a': 50, '2b': 94, '2c': 59, '2d': 61, '2e': 29, '2f': 29, '2g': 47, '2h': 24, '2i': 22, '2j': 13, '2k': 27, '2l': 35, '2m': 62, '2n': 53, '2o': 22, '2p': 48, '2q': 7, '2r': 14, '2s': 53, '2t': 43, '2u': 20, '2v': 7, '2w': 43, '2x': 21, '2y': 7, '2z': 6, '3-': 53, '30': 292, '31': 224, '32': 188, '33': 179, '34': 91, '35': 153, '36': 367, '37': 101, '38': 122, '39': 118, '3a': 50, '3b': 45, '3c': 37, '3d': 350, '3e': 17, '3f': 26, '3g': 125, '3h': 27, '3i': 11, '3j': 9, '3k': 19, '3l': 25, '3m': 45, '3n': 17, '3o': 15, '3p': 32, '3q': 12, '3r': 72, '3s': 53, '3t': 28, '3u': 3, '3v': 13, '3w': 36, '3x': 17, '3y': 14, '3z': 10, '4-': 76, '40': 357, '41': 259, '42': 170, '43': 88, '44': 126, '45': 102, '46': 67, '47': 56, '48': 97, '49': 62, '4a': 41, '4b': 49, '4c': 55, '4d': 53, '4e': 51, '4f': 43, '4g': 52, '4h': 44, '4i': 19, '4j': 13, '4k': 22, '4l': 48, '4m': 62, '4n': 22, '4o': 21, '4p': 60, '4q': 11, '4r': 26, '4s': 94, '4t': 44, '4u': 40, '4v': 17, '4w': 45, '4x': 58, '4y': 24, '4z': 6, '5-': 30, '50': 323, '51': 574, '52': 361, '53': 79, '54': 155, '55': 141, '56': 109, '57': 66, '58': 85, '59': 87, '5a': 32, '5b': 17, '5c': 21, '5d': 39, '5e': 7, '5f': 17, '5g': 21, '5h': 10, '5i': 54, '5j': 4, '5k': 16, '5l': 19, '5m': 22, '5n': 8, '5o': 12, '5p': 27, '5q': 4, '5r': 9, '5s': 55, '5t': 38, '5u': 17, '5v': 5, '5w': 5, '5x': 9, '5y': 10, '5z': 6, '6-': 42, '60': 173, '61': 182, '62': 63, '63': 56, '64': 51, '65': 125, '66': 134, '67': 62, '68': 58, '69': 105, '6a': 12, '6b': 7, '6c': 11, '6d': 61, '6e': 6, '6f': 15, '6g': 7, '6h': 11, '6i': 6, '6j': 1, '6k': 7, '6l': 8, '6m': 16, '6n': 1, '6o': 5, '6p': 12, '6q': 6, '6r': 15, '6s': 28, '6t': 12, '6u': 2, '6v': 2, '6w': 8, '6x': 5, '6y': 9, '7-': 34, '70': 200, '71': 205, '72': 81, '73': 58, '74': 53, '75': 59, '76': 69, '77': 191, '78': 92, '79': 48, '7a': 33, '7b': 18, '7c': 28, '7d': 31, '7e': 13, '7f': 15, '7g': 11, '7h': 15, '7i': 7, '7j': 8, '7k': 16, '7l': 19, '7m': 15, '7n': 5, '7o': 13, '7p': 10, '7q': 2, '7r': 4, '7s': 33, '7t': 37, '7u': 2, '7v': 3, '7w': 13, '7x': 13, '7y': 12, '7z': 8, '8-': 61, '80': 336, '81': 180, '82': 61, '83': 62, '84': 99, '85': 85, '86': 138, '87': 85, '88': 339, '89': 
78, '8a': 11, '8b': 16, '8c': 9, '8d': 10, '8e': 6, '8f': 10, '8g': 18, '8h': 7, '8i': 12, '8j': 4, '8k': 6, '8l': 6, '8m': 11, '8n': 2, '8o': 8, '8p': 15, '8q': 7, '8r': 10, '8s': 18, '8t': 24, '8u': 3, '8v': 2, '8w': 4, '8x': 1, '8y': 6, '8z': 4, '9-': 45, '90': 173, '91': 275, '92': 149, '93': 59, '94': 76, '95': 82, '96': 76, '97': 123, '98': 74, '99': 270, '9a': 19, '9b': 9, '9c': 22, '9d': 17, '9e': 10, '9f': 5, '9g': 16, '9h': 6, '9i': 20, '9j': 10, '9k': 9, '9l': 13, '9m': 19, '9n': 8, '9o': 8, '9p': 29, '9q': 3, '9r': 11, '9s': 22, '9t': 26, '9u': 3, '9v': 8, '9w': 11, '9x': 21, '9y': 8, '9z': 5, 'a-': 307, 'a0': 6, 'a1': 172, 'a2': 58, 'a3': 25, 'a4': 16, 'a5': 9, 'a6': 8, 'a7': 20, 'a8': 12, 'a9': 15, 'aa': 778, 'ab': 3124, 'ac': 4416, 'ad': 5316, 'ae': 537, 'af': 1343, 'ag': 4563, 'ah': 760, 'ai': 5617, 'aj': 331, 'ak': 715, 'al': 9102, 'am': 4388, 'an': 8462, 'ao': 351, 'ap': 2155, 'aq': 426, 'ar': 9178, 'as': 6419, 'at': 4007, 'au': 2485, 'av': 1218, 'aw': 1869, 'ax': 403, 'ay': 457, 'az': 646, 'b-': 148, 'b0': 7, 'b1': 19, 'b2': 94, 'b3': 12, 'b4': 24, 'b5': 9, 'b6': 4, 'b7': 5, 'b8': 2, 'b9': 6, 'ba': 15356, 'bb': 477, 'bc': 323, 'bd': 266, 'be': 14064, 'bf': 200, 'bg': 189, 'bh': 311, 'bi': 11911, 'bj': 604, 'bk': 178, 'bl': 5297, 'bm': 306, 'bn': 218, 'bo': 11986, 'bp': 229, 'bq': 103, 'br': 5001, 'bs': 317, 'bt': 266, 'bu': 12643, 'bv': 126, 'bw': 147, 'bx': 91, 'by': 2394, 'bz': 139, 'c-': 120, 'c0': 13, 'c1': 21, 'c2': 78, 'c3': 34, 'c4': 30, 'c5': 6, 'c6': 4, 'c7': 10, 'c8': 4, 'c9': 8, 'ca': 18400, 'cb': 368, 'cc': 678, 'cd': 484, 'ce': 3579, 'cf': 300, 'cg': 253, 'ch': 11318, 'ci': 2463, 'cj': 218, 'ck': 165, 'cl': 5881, 'cm': 371, 'cn': 895, 'co': 15790, 'cp': 497, 'cq': 239, 'cr': 5502, 'cs': 1710, 'ct': 364, 'cu': 6370, 'cv': 200, 'cw': 201, 'cx': 142, 'cy': 1053, 'cz': 246, 'd-': 147, 'd0': 7, 'd1': 14, 'd2': 37, 'd3': 29, 'd4': 10, 'd5': 5, 'd6': 4, 'd7': 8, 'd8': 6, 'd9': 7, 'da': 9910, 'db': 288, 'dc': 410, 'dd': 303, 'de': 11362, 'df': 288, 'dg': 307, 'dh': 280, 'di': 10934, 'dj': 682, 'dk': 157, 'dl': 393, 'dm': 361, 'dn': 476, 'do': 10944, 'dp': 211, 'dq': 106, 'dr': 6965, 'ds': 393, 'dt': 262, 'du': 4853, 'dv': 376, 'dw': 211, 'dx': 157, 'dy': 630, 'dz': 169, 'e-': 1066, 'e0': 9, 'e1': 12, 'e2': 26, 'e3': 27, 'e4': 11, 'e5': 2, 'e6': 6, 'e7': 6, 'e8': 37, 'e9': 5, 'ea': 6784, 'eb': 771, 'ec': 1530, 'ed': 2674, 'ee': 257, 'ef': 482, 'eg': 508, 'eh': 276, 'ei': 552, 'ej': 151, 'ek': 329, 'el': 3657, 'em': 1785, 'en': 4879, 'eo': 205, 'ep': 613, 'eq': 553, 'er': 2767, 'es': 1797, 'et': 766, 'eu': 1095, 'ev': 2975, 'ew': 218, 'ex': 2521, 'ey': 2066, 'ez': 772, 'f-': 75, 'f0': 6, 'f1': 54, 'f2': 25, 'f3': 5, 'f4': 9, 'f5': 12, 'f6': 2, 'f7': 4, 'f8': 10, 'f9': 2, 'fa': 12917, 'fb': 165, 'fc': 272, 'fd': 154, 'fe': 8514, 'ff': 195, 'fg': 107, 'fh': 175, 'fi': 14464, 'fj': 192, 'fk': 90, 'fl': 7482, 'fm': 210, 'fn': 114, 'fo': 8864, 'fp': 132, 'fq': 77, 'fr': 7566, 'fs': 413, 'ft': 252, 'fu': 5259, 'fv': 88, 'fw': 129, 'fx': 197, 'fy': 155, 'fz': 143, 'g-': 138, 'g0': 21, 'g1': 38, 'g2': 26, 'g3': 34, 'g4': 26, 'g5': 11, 'g6': 5, 'g7': 5, 'g8': 15, 'g9': 5, 'ga': 8708, 'gb': 232, 'gc': 262, 'gd': 339, 'ge': 5489, 'gf': 176, 'gg': 245, 'gh': 399, 'gi': 3752, 'gj': 108, 'gk': 138, 'gl': 2606, 'gm': 387, 'gn': 217, 'go': 9782, 'gp': 455, 'gq': 78, 'gr': 8101, 'gs': 381, 'gt': 252, 'gu': 5335, 'gv': 138, 'gw': 202, 'gx': 176, 'gy': 265, 'gz': 395, 'h-': 120, 'h0': 6, 'h1': 14, 'h2': 149, 'h3': 15, 'h4': 28, 'h5': 6, 'h6': 3, 'h7': 2, 'h8': 8, 'h9': 2, 'ha': 16216, 
'hb': 351, 'hc': 228, 'hd': 442, 'he': 12087, 'hf': 180, 'hg': 158, 'hh': 243, 'hi': 10582, 'hj': 147, 'hk': 331, 'hl': 215, 'hm': 214, 'hn': 317, 'ho': 14380, 'hp': 207, 'hq': 147, 'hr': 318, 'hs': 380, 'ht': 314, 'hu': 4226, 'hv': 119, 'hw': 147, 'hx': 150, 'hy': 943, 'hz': 266, 'i-': 527, 'i0': 9, 'i1': 8, 'i2': 34, 'i3': 14, 'i4': 17, 'i5': 4, 'i6': 6, 'i7': 12, 'i8': 8, 'i9': 11, 'ia': 606, 'ib': 659, 'ic': 2175, 'id': 1981, 'ie': 282, 'if': 1514, 'ig': 488, 'ih': 370, 'ii': 226, 'ij': 122, 'ik': 351, 'il': 2287, 'im': 2155, 'in': 10117, 'io': 344, 'ip': 818, 'iq': 154, 'ir': 1037, 'is': 2803, 'it': 4514, 'iu': 135, 'iv': 328, 'iw': 391, 'ix': 87, 'iy': 123, 'iz': 230, 'j-': 143, 'j0': 7, 'j1': 2, 'j2': 20, 'j3': 8, 'j4': 10, 'j5': 1, 'j6': 1, 'j7': 2, 'j8': 3, 'j9': 2, 'ja': 3167, 'jb': 251, 'jc': 336, 'jd': 290, 'je': 2239, 'jf': 152, 'jg': 136, 'jh': 228, 'ji': 1541, 'jj': 266, 'jk': 191, 'jl': 249, 'jm': 340, 'jn': 230, 'jo': 7930, 'jp': 278, 'jq': 82, 'jr': 261, 'js': 448, 'jt': 174, 'ju': 4460, 'jv': 125, 'jw': 191, 'jx': 200, 'jy': 202, 'jz': 146, 'k-': 110, 'k0': 7, 'k1': 29, 'k2': 30, 'k3': 14, 'k4': 5, 'k5': 7, 'k6': 5, 'k7': 7, 'k8': 4, 'k9': 32, 'ka': 3724, 'kb': 212, 'kc': 275, 'kd': 182, 'ke': 6054, 'kf': 130, 'kg': 137, 'kh': 420, 'ki': 6316, 'kj': 144, 'kk': 167, 'kl': 487, 'km': 248, 'kn': 2612, 'ko': 1868, 'kp': 181, 'kq': 59, 'kr': 785, 'ks': 300, 'kt': 192, 'ku': 1013, 'kv': 116, 'kw': 230, 'kx': 88, 'ky': 444, 'kz': 90, 'l-': 45, 'l0': 12, 'l1': 8, 'l2': 60, 'l3': 18, 'l4': 6, 'l5': 2, 'l6': 2, 'l7': 14, 'l8': 5, 'l9': 3, 'la': 14155, 'lb': 350, 'lc': 258, 'ld': 192, 'le': 13390, 'lf': 172, 'lg': 196, 'lh': 179, 'li': 13775, 'lj': 185, 'lk': 82, 'll': 276, 'lm': 185, 'ln': 163, 'lo': 17441, 'lp': 192, 'lq': 75, 'lr': 137, 'ls': 265, 'lt': 197, 'lu': 2947, 'lv': 210, 'lw': 133, 'lx': 124, 'ly': 761, 'lz': 136, 'm-': 162, 'm0': 11, 'm1': 26, 'm2': 47, 'm3': 32, 'm4': 31, 'm5': 8, 'm6': 7, 'm7': 6, 'm8': 11, 'm9': 4, 'ma': 21517, 'mb': 321, 'mc': 856, 'md': 322, 'me': 12983, 'mf': 160, 'mg': 199, 'mh': 180, 'mi': 10847, 'mj': 190, 'mk': 192, 'ml': 403, 'mm': 572, 'mn': 242, 'mo': 11994, 'mp': 473, 'mq': 80, 'mr': 773, 'ms': 631, 'mt': 424, 'mu': 3947, 'mv': 212, 'mw': 145, 'mx': 138, 'my': 8975, 'mz': 124, 'n-': 85, 'n0': 16, 'n1': 16, 'n2': 209, 'n3': 6, 'n4': 10, 'n5': 3, 'n6': 4, 'n7': 6, 'n8': 14, 'n9': 2, 'na': 5028, 'nb': 379, 'nc': 347, 'nd': 186, 'ne': 10656, 'nf': 213, 'ng': 232, 'nh': 270, 'ni': 3672, 'nj': 370, 'nk': 136, 'nl': 174, 'nm': 214, 'nn': 176, 'no': 10377, 'np': 174, 'nq': 65, 'nr': 161, 'ns': 248, 'nt': 270, 'nu': 1902, 'nv': 175, 'nw': 247, 'nx': 119, 'ny': 630, 'nz': 150, 'o-': 95, 'o0': 2, 'o1': 5, 'o2': 28, 'o3': 13, 'o4': 2, 'o5': 5, 'o6': 2, 'o7': 2, 'o8': 4, 'o9': 1, 'oa': 322, 'ob': 1267, 'oc': 959, 'od': 2152, 'oe': 162, 'of': 3428, 'og': 208, 'oh': 1587, 'oi': 1494, 'oj': 147, 'ok': 724, 'ol': 2309, 'om': 1532, 'on': 7256, 'oo': 234, 'op': 1673, 'oq': 58, 'or': 3282, 'os': 555, 'ot': 634, 'ou': 4435, 'ov': 944, 'ow': 4377, 'ox': 218, 'oy': 187, 'oz': 441, 'p-': 86, 'p0': 12, 'p1': 19, 'p2': 43, 'p3': 24, 'p4': 15, 'p5': 8, 'p6': 2, 'p7': 2, 'p8': 5, 'p9': 5, 'pa': 15758, 'pb': 196, 'pc': 606, 'pd': 263, 'pe': 6763, 'pf': 189, 'pg': 207, 'ph': 2496, 'pi': 7203, 'pj': 148, 'pk': 162, 'pl': 7080, 'pm': 258, 'pn': 171, 'po': 11038, 'pp': 369, 'pq': 91, 'pr': 7474, 'ps': 686, 'pt': 286, 'pu': 6557, 'pv': 156, 'pw': 142, 'px': 98, 'py': 246, 'pz': 114, 'q-': 38, 'q0': 3, 'q1': 8, 'q2': 8, 'q3': 6, 'q4': 4, 'q5': 3, 'q6': 3, 'q8': 44, 'q9': 
4, 'qa': 220, 'qb': 92, 'qc': 118, 'qd': 194, 'qe': 117, 'qf': 81, 'qg': 84, 'qh': 109, 'qi': 391, 'qj': 79, 'qk': 73, 'ql': 125, 'qm': 82, 'qn': 92, 'qo': 114, 'qp': 102, 'qq': 248, 'qr': 83, 'qs': 131, 'qt': 89, 'qu': 3434, 'qv': 65, 'qw': 148, 'qx': 97, 'qy': 109, 'qz': 152, 'r-': 89, 'r0': 7, 'r1': 10, 'r2': 26, 'r3': 20, 'r4': 17, 'r5': 2, 'r6': 2, 'r7': 10, 'r8': 1, 'r9': 4, 'ra': 9842, 'rb': 211, 'rc': 334, 'rd': 204, 're': 13653, 'rf': 215, 'rg': 139, 'rh': 365, 'ri': 7079, 'rj': 156, 'rk': 119, 'rl': 150, 'rm': 254, 'rn': 173, 'ro': 9275, 'rp': 216, 'rq': 46, 'rr': 143, 'rs': 333, 'rt': 270, 'ru': 4797, 'rv': 182, 'rw': 142, 'rx': 199, 'ry': 348, 'rz': 102, 's-': 122, 's0': 6, 's1': 26, 's2': 33, 's3': 27, 's4': 19, 's5': 10, 's6': 12, 's7': 19, 's8': 12, 's9': 6, 'sa': 17038, 'sb': 328, 'sc': 3980, 'sd': 603, 'se': 18133, 'sf': 356, 'sg': 309, 'sh': 12388, 'si': 10761, 'sj': 286, 'sk': 1551, 'sl': 2639, 'sm': 2210, 'sn': 818, 'so': 11313, 'sp': 6560, 'sq': 323, 'sr': 385, 'ss': 497, 'st': 11992, 'su': 9496, 'sv': 251, 'sw': 1490, 'sx': 289, 'sy': 1044, 'sz': 702, 't-': 149, 't0': 4, 't1': 14, 't2': 16, 't3': 20, 't4': 8, 't5': 8, 't6': 6, 't7': 9, 't8': 34, 't9': 12, 'ta': 10163, 'tb': 227, 'tc': 353, 'td': 239, 'te': 12576, 'tf': 165, 'tg': 186, 'th': 20347, 'ti': 8367, 'tj': 321, 'tk': 178, 'tl': 273, 'tm': 289, 'tn': 269, 'to': 11512, 'tp': 227, 'tq': 90, 'tr': 12301, 'ts': 469, 'tt': 280, 'tu': 3572, 'tv': 534, 'tw': 2193, 'tx': 266, 'ty': 1588, 'tz': 175, 'u-': 119, 'u0': 3, 'u1': 15, 'u2': 21, 'u3': 9, 'u4': 3, 'u5': 3, 'u6': 3, 'u7': 2, 'u8': 11, 'u9': 5, 'ua': 258, 'ub': 352, 'uc': 350, 'ud': 179, 'ue': 138, 'uf': 188, 'ug': 891, 'uh': 194, 'ui': 127, 'uj': 66, 'uk': 422, 'ul': 536, 'um': 326, 'un': 3354, 'uo': 106, 'up': 2718, 'uq': 59, 'ur': 923, 'us': 6826, 'ut': 474, 'uu': 108, 'uv': 172, 'uw': 165, 'ux': 93, 'uy': 107, 'uz': 97, 'v-': 87, 'v0': 3, 'v1': 8, 'v2': 11, 'v3': 20, 'v4': 8, 'v5': 4, 'v6': 14, 'v7': 2, 'v8': 9, 'v9': 2, 'va': 5048, 'vb': 168, 'vc': 200, 'vd': 140, 've': 2797, 'vf': 111, 'vg': 120, 'vh': 121, 'vi': 8878, 'vj': 91, 'vk': 117, 'vl': 172, 'vm': 152, 'vn': 149, 'vo': 2303, 'vp': 172, 'vq': 76, 'vr': 232, 'vs': 192, 'vt': 174, 'vu': 214, 'vv': 139, 'vw': 105, 'vx': 101, 'vy': 119, 'vz': 97, 'w-': 49, 'w0': 9, 'w1': 15, 'w2': 10, 'w3': 49, 'w4': 11, 'w5': 5, 'w6': 2, 'w7': 5, 'w8': 16, 'w9': 7, 'wa': 13399, 'wb': 160, 'wc': 178, 'wd': 138, 'we': 10270, 'wf': 145, 'wg': 135, 'wh': 7676, 'wi': 8165, 'wj': 154, 'wk': 103, 'wl': 169, 'wm': 219, 'wn': 137, 'wo': 4635, 'wp': 199, 'wq': 78, 'wr': 578, 'ws': 246, 'wt': 170, 'wu': 306, 'wv': 136, 'ww': 2494, 'wx': 193, 'wy': 274, 'wz': 183, 'x-': 157, 'x0': 8, 'x1': 15, 'x2': 61, 'x3': 15, 'x4': 2, 'x5': 8, 'x6': 12, 'x7': 3, 'x8': 5, 'x9': 3, 'xa': 329, 'xb': 191, 'xc': 220, 'xd': 145, 'xe': 282, 'xf': 158, 'xg': 136, 'xh': 144, 'xi': 794, 'xj': 208, 'xk': 79, 'xl': 180, 'xm': 285, 'xn': 107, 'xo': 163, 'xp': 292, 'xq': 82, 'xr': 126, 'xs': 195, 'xt': 344, 'xu': 208, 'xv': 78, 'xw': 84, 'xx': 552, 'xy': 237, 'xz': 171, 'y-': 45, 'y0': 5, 'y1': 6, 'y2': 13, 'y3': 2, 'y5': 5, 'y6': 4, 'y7': 4, 'y8': 3, 'y9': 4, 'ya': 1485, 'yb': 102, 'yc': 195, 'yd': 137, 'ye': 2320, 'yf': 106, 'yg': 116, 'yh': 159, 'yi': 568, 'yj': 136, 'yk': 131, 'yl': 168, 'ym': 174, 'yn': 217, 'yo': 4580, 'yp': 214, 'yq': 89, 'yr': 98, 'ys': 233, 'yt': 248, 'yu': 779, 'yv': 101, 'yw': 155, 'yx': 142, 'yy': 176, 'yz': 169, 'z-': 40, 'z0': 4, 'z1': 6, 'z2': 6, 'z3': 5, 'z4': 1, 'z5': 4, 'z7': 2, 'z8': 1, 'z9': 3, 'za': 920, 'zb': 144, 
'zc': 129, 'zd': 141, 'ze': 1083, 'zf': 95, 'zg': 277, 'zh': 651, 'zi': 676, 'zj': 315, 'zk': 102, 'zl': 176, 'zm': 120, 'zn': 117, 'zo': 741, 'zp': 107, 'zq': 131, 'zr': 148, 'zs': 167, 'zt': 102, 'zu': 336, 'zv': 70, 'zw': 126, 'zx': 108, 'zy': 171, 'zz': 266} <|reserved_special_token_1|> TOTAL = 1306336 ONE = { '0': 1473, '1': 5936, '2': 3681, '3': 2996, '4': 2480, '5': 2494, '6': 1324, '7': 1474, '8': 1754, '9': 1740, 'a': 79714, 'b': 83472, 'c': 78015, 'd': 61702, 'e': 42190, 'f': 68530, 'g': 48942, 'h': 63661, 'i': 34947, 'j': 24312, 'k': 26724, 'l': 66351, 'm': 77245, 'n': 36942, 'o': 40744, 'p': 68978, 'q': 6750, 'r': 49135, 's': 116034, 't': 87440, 'u': 19423, 'v': 22356, 'w': 50718, 'x': 6079, 'y': 13089, 'z': 7491, } TWO = { '0-': 19, '00': 145, '01': 143, '02': 212, '03': 90, '04': 61, '05': 241, '06': 31, '07': 151, '08': 104, '09': 99, '0a': 8, '0b': 8, '0c': 16, '0d': 18, '0e': 8, '0f': 7, '0g': 5, '0h': 4, '0i': 9, '0j': 1, '0k': 4, '0l': 2, '0m': 8, '0n': 10, '0o': 6, '0p': 10, '0r': 10, '0s': 10, '0t': 6, '0u': 5, '0v': 5, '0w': 5, '0x': 4, '0y': 3, '0z': 5, '1-': 177, '10': 983, '11': 537, '12': 767, '13': 327, '14': 270, '15': 257, '16': 276, '17': 318, '18': 505, '19': 280, '1a': 61, '1b': 58, '1c': 84, '1d': 52, '1e': 33, '1f': 32, '1g': 38, '1h': 44, '1i': 25, '1j': 13, '1k': 32, '1l': 33, '1m': 59, '1n': 39, '1o': 37, '1p': 68, '1q': 7, '1r': 21, '1s': 336, '1t': 54, '1u': 15, '1v': 14, '1w': 53, '1x': 7, '1y': 14, '1z': 10, '2-': 30, '20': 889, '21': 406, '22': 228, '23': 172, '24': 480, '25': 177, '26': 126, '27': 96, '28': 108, '29': 73, '2a': 50, '2b': 94, '2c': 59, '2d': 61, '2e': 29, '2f': 29, '2g': 47, '2h': 24, '2i': 22, '2j': 13, '2k': 27, '2l': 35, '2m': 62, '2n': 53, '2o': 22, '2p': 48, '2q': 7, '2r': 14, '2s': 53, '2t': 43, '2u': 20, '2v': 7, '2w': 43, '2x': 21, '2y': 7, '2z': 6, '3-': 53, '30': 292, '31': 224, '32': 188, '33': 179, '34': 91, '35': 153, '36': 367, '37': 101, '38': 122, '39': 118, '3a': 50, '3b': 45, '3c': 37, '3d': 350, '3e': 17, '3f': 26, '3g': 125, '3h': 27, '3i': 11, '3j': 9, '3k': 19, '3l': 25, '3m': 45, '3n': 17, '3o': 15, '3p': 32, '3q': 12, '3r': 72, '3s': 53, '3t': 28, '3u': 3, '3v': 13, '3w': 36, '3x': 17, '3y': 14, '3z': 10, '4-': 76, '40': 357, '41': 259, '42': 170, '43': 88, '44': 126, '45': 102, '46': 67, '47': 56, '48': 97, '49': 62, '4a': 41, '4b': 49, '4c': 55, '4d': 53, '4e': 51, '4f': 43, '4g': 52, '4h': 44, '4i': 19, '4j': 13, '4k': 22, '4l': 48, '4m': 62, '4n': 22, '4o': 21, '4p': 60, '4q': 11, '4r': 26, '4s': 94, '4t': 44, '4u': 40, '4v': 17, '4w': 45, '4x': 58, '4y': 24, '4z': 6, '5-': 30, '50': 323, '51': 574, '52': 361, '53': 79, '54': 155, '55': 141, '56': 109, '57': 66, '58': 85, '59': 87, '5a': 32, '5b': 17, '5c': 21, '5d': 39, '5e': 7, '5f': 17, '5g': 21, '5h': 10, '5i': 54, '5j': 4, '5k': 16, '5l': 19, '5m': 22, '5n': 8, '5o': 12, '5p': 27, '5q': 4, '5r': 9, '5s': 55, '5t': 38, '5u': 17, '5v': 5, '5w': 5, '5x': 9, '5y': 10, '5z': 6, '6-': 42, '60': 173, '61': 182, '62': 63, '63': 56, '64': 51, '65': 125, '66': 134, '67': 62, '68': 58, '69': 105, '6a': 12, '6b': 7, '6c': 11, '6d': 61, '6e': 6, '6f': 15, '6g': 7, '6h': 11, '6i': 6, '6j': 1, '6k': 7, '6l': 8, '6m': 16, '6n': 1, '6o': 5, '6p': 12, '6q': 6, '6r': 15, '6s': 28, '6t': 12, '6u': 2, '6v': 2, '6w': 8, '6x': 5, '6y': 9, '7-': 34, '70': 200, '71': 205, '72': 81, '73': 58, '74': 53, '75': 59, '76': 69, '77': 191, '78': 92, '79': 48, '7a': 33, '7b': 18, '7c': 28, '7d': 31, '7e': 13, '7f': 15, '7g': 11, '7h': 15, '7i': 7, '7j': 8, '7k': 16, '7l': 19, '7m': 
15, '7n': 5, '7o': 13, '7p': 10, '7q': 2, '7r': 4, '7s': 33, '7t': 37, '7u': 2, '7v': 3, '7w': 13, '7x': 13, '7y': 12, '7z': 8, '8-': 61, '80': 336, '81': 180, '82': 61, '83': 62, '84': 99, '85': 85, '86': 138, '87': 85, '88': 339, '89': 78, '8a': 11, '8b': 16, '8c': 9, '8d': 10, '8e': 6, '8f': 10, '8g': 18, '8h': 7, '8i': 12, '8j': 4, '8k': 6, '8l': 6, '8m': 11, '8n': 2, '8o': 8, '8p': 15, '8q': 7, '8r': 10, '8s': 18, '8t': 24, '8u': 3, '8v': 2, '8w': 4, '8x': 1, '8y': 6, '8z': 4, '9-': 45, '90': 173, '91': 275, '92': 149, '93': 59, '94': 76, '95': 82, '96': 76, '97': 123, '98': 74, '99': 270, '9a': 19, '9b': 9, '9c': 22, '9d': 17, '9e': 10, '9f': 5, '9g': 16, '9h': 6, '9i': 20, '9j': 10, '9k': 9, '9l': 13, '9m': 19, '9n': 8, '9o': 8, '9p': 29, '9q': 3, '9r': 11, '9s': 22, '9t': 26, '9u': 3, '9v': 8, '9w': 11, '9x': 21, '9y': 8, '9z': 5, 'a-': 307, 'a0': 6, 'a1': 172, 'a2': 58, 'a3': 25, 'a4': 16, 'a5': 9, 'a6': 8, 'a7': 20, 'a8': 12, 'a9': 15, 'aa': 778, 'ab': 3124, 'ac': 4416, 'ad': 5316, 'ae': 537, 'af': 1343, 'ag': 4563, 'ah': 760, 'ai': 5617, 'aj': 331, 'ak': 715, 'al': 9102, 'am': 4388, 'an': 8462, 'ao': 351, 'ap': 2155, 'aq': 426, 'ar': 9178, 'as': 6419, 'at': 4007, 'au': 2485, 'av': 1218, 'aw': 1869, 'ax': 403, 'ay': 457, 'az': 646, 'b-': 148, 'b0': 7, 'b1': 19, 'b2': 94, 'b3': 12, 'b4': 24, 'b5': 9, 'b6': 4, 'b7': 5, 'b8': 2, 'b9': 6, 'ba': 15356, 'bb': 477, 'bc': 323, 'bd': 266, 'be': 14064, 'bf': 200, 'bg': 189, 'bh': 311, 'bi': 11911, 'bj': 604, 'bk': 178, 'bl': 5297, 'bm': 306, 'bn': 218, 'bo': 11986, 'bp': 229, 'bq': 103, 'br': 5001, 'bs': 317, 'bt': 266, 'bu': 12643, 'bv': 126, 'bw': 147, 'bx': 91, 'by': 2394, 'bz': 139, 'c-': 120, 'c0': 13, 'c1': 21, 'c2': 78, 'c3': 34, 'c4': 30, 'c5': 6, 'c6': 4, 'c7': 10, 'c8': 4, 'c9': 8, 'ca': 18400, 'cb': 368, 'cc': 678, 'cd': 484, 'ce': 3579, 'cf': 300, 'cg': 253, 'ch': 11318, 'ci': 2463, 'cj': 218, 'ck': 165, 'cl': 5881, 'cm': 371, 'cn': 895, 'co': 15790, 'cp': 497, 'cq': 239, 'cr': 5502, 'cs': 1710, 'ct': 364, 'cu': 6370, 'cv': 200, 'cw': 201, 'cx': 142, 'cy': 1053, 'cz': 246, 'd-': 147, 'd0': 7, 'd1': 14, 'd2': 37, 'd3': 29, 'd4': 10, 'd5': 5, 'd6': 4, 'd7': 8, 'd8': 6, 'd9': 7, 'da': 9910, 'db': 288, 'dc': 410, 'dd': 303, 'de': 11362, 'df': 288, 'dg': 307, 'dh': 280, 'di': 10934, 'dj': 682, 'dk': 157, 'dl': 393, 'dm': 361, 'dn': 476, 'do': 10944, 'dp': 211, 'dq': 106, 'dr': 6965, 'ds': 393, 'dt': 262, 'du': 4853, 'dv': 376, 'dw': 211, 'dx': 157, 'dy': 630, 'dz': 169, 'e-': 1066, 'e0': 9, 'e1': 12, 'e2': 26, 'e3': 27, 'e4': 11, 'e5': 2, 'e6': 6, 'e7': 6, 'e8': 37, 'e9': 5, 'ea': 6784, 'eb': 771, 'ec': 1530, 'ed': 2674, 'ee': 257, 'ef': 482, 'eg': 508, 'eh': 276, 'ei': 552, 'ej': 151, 'ek': 329, 'el': 3657, 'em': 1785, 'en': 4879, 'eo': 205, 'ep': 613, 'eq': 553, 'er': 2767, 'es': 1797, 'et': 766, 'eu': 1095, 'ev': 2975, 'ew': 218, 'ex': 2521, 'ey': 2066, 'ez': 772, 'f-': 75, 'f0': 6, 'f1': 54, 'f2': 25, 'f3': 5, 'f4': 9, 'f5': 12, 'f6': 2, 'f7': 4, 'f8': 10, 'f9': 2, 'fa': 12917, 'fb': 165, 'fc': 272, 'fd': 154, 'fe': 8514, 'ff': 195, 'fg': 107, 'fh': 175, 'fi': 14464, 'fj': 192, 'fk': 90, 'fl': 7482, 'fm': 210, 'fn': 114, 'fo': 8864, 'fp': 132, 'fq': 77, 'fr': 7566, 'fs': 413, 'ft': 252, 'fu': 5259, 'fv': 88, 'fw': 129, 'fx': 197, 'fy': 155, 'fz': 143, 'g-': 138, 'g0': 21, 'g1': 38, 'g2': 26, 'g3': 34, 'g4': 26, 'g5': 11, 'g6': 5, 'g7': 5, 'g8': 15, 'g9': 5, 'ga': 8708, 'gb': 232, 'gc': 262, 'gd': 339, 'ge': 5489, 'gf': 176, 'gg': 245, 'gh': 399, 'gi': 3752, 'gj': 108, 'gk': 138, 'gl': 2606, 'gm': 387, 'gn': 217, 'go': 9782, 
'gp': 455, 'gq': 78, 'gr': 8101, 'gs': 381, 'gt': 252, 'gu': 5335, 'gv': 138, 'gw': 202, 'gx': 176, 'gy': 265, 'gz': 395, 'h-': 120, 'h0': 6, 'h1': 14, 'h2': 149, 'h3': 15, 'h4': 28, 'h5': 6, 'h6': 3, 'h7': 2, 'h8': 8, 'h9': 2, 'ha': 16216, 'hb': 351, 'hc': 228, 'hd': 442, 'he': 12087, 'hf': 180, 'hg': 158, 'hh': 243, 'hi': 10582, 'hj': 147, 'hk': 331, 'hl': 215, 'hm': 214, 'hn': 317, 'ho': 14380, 'hp': 207, 'hq': 147, 'hr': 318, 'hs': 380, 'ht': 314, 'hu': 4226, 'hv': 119, 'hw': 147, 'hx': 150, 'hy': 943, 'hz': 266, 'i-': 527, 'i0': 9, 'i1': 8, 'i2': 34, 'i3': 14, 'i4': 17, 'i5': 4, 'i6': 6, 'i7': 12, 'i8': 8, 'i9': 11, 'ia': 606, 'ib': 659, 'ic': 2175, 'id': 1981, 'ie': 282, 'if': 1514, 'ig': 488, 'ih': 370, 'ii': 226, 'ij': 122, 'ik': 351, 'il': 2287, 'im': 2155, 'in': 10117, 'io': 344, 'ip': 818, 'iq': 154, 'ir': 1037, 'is': 2803, 'it': 4514, 'iu': 135, 'iv': 328, 'iw': 391, 'ix': 87, 'iy': 123, 'iz': 230, 'j-': 143, 'j0': 7, 'j1': 2, 'j2': 20, 'j3': 8, 'j4': 10, 'j5': 1, 'j6': 1, 'j7': 2, 'j8': 3, 'j9': 2, 'ja': 3167, 'jb': 251, 'jc': 336, 'jd': 290, 'je': 2239, 'jf': 152, 'jg': 136, 'jh': 228, 'ji': 1541, 'jj': 266, 'jk': 191, 'jl': 249, 'jm': 340, 'jn': 230, 'jo': 7930, 'jp': 278, 'jq': 82, 'jr': 261, 'js': 448, 'jt': 174, 'ju': 4460, 'jv': 125, 'jw': 191, 'jx': 200, 'jy': 202, 'jz': 146, 'k-': 110, 'k0': 7, 'k1': 29, 'k2': 30, 'k3': 14, 'k4': 5, 'k5': 7, 'k6': 5, 'k7': 7, 'k8': 4, 'k9': 32, 'ka': 3724, 'kb': 212, 'kc': 275, 'kd': 182, 'ke': 6054, 'kf': 130, 'kg': 137, 'kh': 420, 'ki': 6316, 'kj': 144, 'kk': 167, 'kl': 487, 'km': 248, 'kn': 2612, 'ko': 1868, 'kp': 181, 'kq': 59, 'kr': 785, 'ks': 300, 'kt': 192, 'ku': 1013, 'kv': 116, 'kw': 230, 'kx': 88, 'ky': 444, 'kz': 90, 'l-': 45, 'l0': 12, 'l1': 8, 'l2': 60, 'l3': 18, 'l4': 6, 'l5': 2, 'l6': 2, 'l7': 14, 'l8': 5, 'l9': 3, 'la': 14155, 'lb': 350, 'lc': 258, 'ld': 192, 'le': 13390, 'lf': 172, 'lg': 196, 'lh': 179, 'li': 13775, 'lj': 185, 'lk': 82, 'll': 276, 'lm': 185, 'ln': 163, 'lo': 17441, 'lp': 192, 'lq': 75, 'lr': 137, 'ls': 265, 'lt': 197, 'lu': 2947, 'lv': 210, 'lw': 133, 'lx': 124, 'ly': 761, 'lz': 136, 'm-': 162, 'm0': 11, 'm1': 26, 'm2': 47, 'm3': 32, 'm4': 31, 'm5': 8, 'm6': 7, 'm7': 6, 'm8': 11, 'm9': 4, 'ma': 21517, 'mb': 321, 'mc': 856, 'md': 322, 'me': 12983, 'mf': 160, 'mg': 199, 'mh': 180, 'mi': 10847, 'mj': 190, 'mk': 192, 'ml': 403, 'mm': 572, 'mn': 242, 'mo': 11994, 'mp': 473, 'mq': 80, 'mr': 773, 'ms': 631, 'mt': 424, 'mu': 3947, 'mv': 212, 'mw': 145, 'mx': 138, 'my': 8975, 'mz': 124, 'n-': 85, 'n0': 16, 'n1': 16, 'n2': 209, 'n3': 6, 'n4': 10, 'n5': 3, 'n6': 4, 'n7': 6, 'n8': 14, 'n9': 2, 'na': 5028, 'nb': 379, 'nc': 347, 'nd': 186, 'ne': 10656, 'nf': 213, 'ng': 232, 'nh': 270, 'ni': 3672, 'nj': 370, 'nk': 136, 'nl': 174, 'nm': 214, 'nn': 176, 'no': 10377, 'np': 174, 'nq': 65, 'nr': 161, 'ns': 248, 'nt': 270, 'nu': 1902, 'nv': 175, 'nw': 247, 'nx': 119, 'ny': 630, 'nz': 150, 'o-': 95, 'o0': 2, 'o1': 5, 'o2': 28, 'o3': 13, 'o4': 2, 'o5': 5, 'o6': 2, 'o7': 2, 'o8': 4, 'o9': 1, 'oa': 322, 'ob': 1267, 'oc': 959, 'od': 2152, 'oe': 162, 'of': 3428, 'og': 208, 'oh': 1587, 'oi': 1494, 'oj': 147, 'ok': 724, 'ol': 2309, 'om': 1532, 'on': 7256, 'oo': 234, 'op': 1673, 'oq': 58, 'or': 3282, 'os': 555, 'ot': 634, 'ou': 4435, 'ov': 944, 'ow': 4377, 'ox': 218, 'oy': 187, 'oz': 441, 'p-': 86, 'p0': 12, 'p1': 19, 'p2': 43, 'p3': 24, 'p4': 15, 'p5': 8, 'p6': 2, 'p7': 2, 'p8': 5, 'p9': 5, 'pa': 15758, 'pb': 196, 'pc': 606, 'pd': 263, 'pe': 6763, 'pf': 189, 'pg': 207, 'ph': 2496, 'pi': 7203, 'pj': 148, 'pk': 162, 'pl': 7080, 
'pm': 258, 'pn': 171, 'po': 11038, 'pp': 369, 'pq': 91, 'pr': 7474, 'ps': 686, 'pt': 286, 'pu': 6557, 'pv': 156, 'pw': 142, 'px': 98, 'py': 246, 'pz': 114, 'q-': 38, 'q0': 3, 'q1': 8, 'q2': 8, 'q3': 6, 'q4': 4, 'q5': 3, 'q6': 3, 'q8': 44, 'q9': 4, 'qa': 220, 'qb': 92, 'qc': 118, 'qd': 194, 'qe': 117, 'qf': 81, 'qg': 84, 'qh': 109, 'qi': 391, 'qj': 79, 'qk': 73, 'ql': 125, 'qm': 82, 'qn': 92, 'qo': 114, 'qp': 102, 'qq': 248, 'qr': 83, 'qs': 131, 'qt': 89, 'qu': 3434, 'qv': 65, 'qw': 148, 'qx': 97, 'qy': 109, 'qz': 152, 'r-': 89, 'r0': 7, 'r1': 10, 'r2': 26, 'r3': 20, 'r4': 17, 'r5': 2, 'r6': 2, 'r7': 10, 'r8': 1, 'r9': 4, 'ra': 9842, 'rb': 211, 'rc': 334, 'rd': 204, 're': 13653, 'rf': 215, 'rg': 139, 'rh': 365, 'ri': 7079, 'rj': 156, 'rk': 119, 'rl': 150, 'rm': 254, 'rn': 173, 'ro': 9275, 'rp': 216, 'rq': 46, 'rr': 143, 'rs': 333, 'rt': 270, 'ru': 4797, 'rv': 182, 'rw': 142, 'rx': 199, 'ry': 348, 'rz': 102, 's-': 122, 's0': 6, 's1': 26, 's2': 33, 's3': 27, 's4': 19, 's5': 10, 's6': 12, 's7': 19, 's8': 12, 's9': 6, 'sa': 17038, 'sb': 328, 'sc': 3980, 'sd': 603, 'se': 18133, 'sf': 356, 'sg': 309, 'sh': 12388, 'si': 10761, 'sj': 286, 'sk': 1551, 'sl': 2639, 'sm': 2210, 'sn': 818, 'so': 11313, 'sp': 6560, 'sq': 323, 'sr': 385, 'ss': 497, 'st': 11992, 'su': 9496, 'sv': 251, 'sw': 1490, 'sx': 289, 'sy': 1044, 'sz': 702, 't-': 149, 't0': 4, 't1': 14, 't2': 16, 't3': 20, 't4': 8, 't5': 8, 't6': 6, 't7': 9, 't8': 34, 't9': 12, 'ta': 10163, 'tb': 227, 'tc': 353, 'td': 239, 'te': 12576, 'tf': 165, 'tg': 186, 'th': 20347, 'ti': 8367, 'tj': 321, 'tk': 178, 'tl': 273, 'tm': 289, 'tn': 269, 'to': 11512, 'tp': 227, 'tq': 90, 'tr': 12301, 'ts': 469, 'tt': 280, 'tu': 3572, 'tv': 534, 'tw': 2193, 'tx': 266, 'ty': 1588, 'tz': 175, 'u-': 119, 'u0': 3, 'u1': 15, 'u2': 21, 'u3': 9, 'u4': 3, 'u5': 3, 'u6': 3, 'u7': 2, 'u8': 11, 'u9': 5, 'ua': 258, 'ub': 352, 'uc': 350, 'ud': 179, 'ue': 138, 'uf': 188, 'ug': 891, 'uh': 194, 'ui': 127, 'uj': 66, 'uk': 422, 'ul': 536, 'um': 326, 'un': 3354, 'uo': 106, 'up': 2718, 'uq': 59, 'ur': 923, 'us': 6826, 'ut': 474, 'uu': 108, 'uv': 172, 'uw': 165, 'ux': 93, 'uy': 107, 'uz': 97, 'v-': 87, 'v0': 3, 'v1': 8, 'v2': 11, 'v3': 20, 'v4': 8, 'v5': 4, 'v6': 14, 'v7': 2, 'v8': 9, 'v9': 2, 'va': 5048, 'vb': 168, 'vc': 200, 'vd': 140, 've': 2797, 'vf': 111, 'vg': 120, 'vh': 121, 'vi': 8878, 'vj': 91, 'vk': 117, 'vl': 172, 'vm': 152, 'vn': 149, 'vo': 2303, 'vp': 172, 'vq': 76, 'vr': 232, 'vs': 192, 'vt': 174, 'vu': 214, 'vv': 139, 'vw': 105, 'vx': 101, 'vy': 119, 'vz': 97, 'w-': 49, 'w0': 9, 'w1': 15, 'w2': 10, 'w3': 49, 'w4': 11, 'w5': 5, 'w6': 2, 'w7': 5, 'w8': 16, 'w9': 7, 'wa': 13399, 'wb': 160, 'wc': 178, 'wd': 138, 'we': 10270, 'wf': 145, 'wg': 135, 'wh': 7676, 'wi': 8165, 'wj': 154, 'wk': 103, 'wl': 169, 'wm': 219, 'wn': 137, 'wo': 4635, 'wp': 199, 'wq': 78, 'wr': 578, 'ws': 246, 'wt': 170, 'wu': 306, 'wv': 136, 'ww': 2494, 'wx': 193, 'wy': 274, 'wz': 183, 'x-': 157, 'x0': 8, 'x1': 15, 'x2': 61, 'x3': 15, 'x4': 2, 'x5': 8, 'x6': 12, 'x7': 3, 'x8': 5, 'x9': 3, 'xa': 329, 'xb': 191, 'xc': 220, 'xd': 145, 'xe': 282, 'xf': 158, 'xg': 136, 'xh': 144, 'xi': 794, 'xj': 208, 'xk': 79, 'xl': 180, 'xm': 285, 'xn': 107, 'xo': 163, 'xp': 292, 'xq': 82, 'xr': 126, 'xs': 195, 'xt': 344, 'xu': 208, 'xv': 78, 'xw': 84, 'xx': 552, 'xy': 237, 'xz': 171, 'y-': 45, 'y0': 5, 'y1': 6, 'y2': 13, 'y3': 2, 'y5': 5, 'y6': 4, 'y7': 4, 'y8': 3, 'y9': 4, 'ya': 1485, 'yb': 102, 'yc': 195, 'yd': 137, 'ye': 2320, 'yf': 106, 'yg': 116, 'yh': 159, 'yi': 568, 'yj': 136, 'yk': 131, 'yl': 168, 'ym': 174, 'yn': 217, 
'yo': 4580, 'yp': 214, 'yq': 89, 'yr': 98, 'ys': 233, 'yt': 248, 'yu': 779, 'yv': 101, 'yw': 155, 'yx': 142, 'yy': 176, 'yz': 169, 'z-': 40, 'z0': 4, 'z1': 6, 'z2': 6, 'z3': 5, 'z4': 1, 'z5': 4, 'z7': 2, 'z8': 1, 'z9': 3, 'za': 920, 'zb': 144, 'zc': 129, 'zd': 141, 'ze': 1083, 'zf': 95, 'zg': 277, 'zh': 651, 'zi': 676, 'zj': 315, 'zk': 102, 'zl': 176, 'zm': 120, 'zn': 117, 'zo': 741, 'zp': 107, 'zq': 131, 'zr': 148, 'zs': 167, 'zt': 102, 'zu': 336, 'zv': 70, 'zw': 126, 'zx': 108, 'zy': 171, 'zz': 266, }
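The ONE/TWO/TOTAL tables above are raw character and bigram counts, and the blob itself gives no usage. The sketch below is only one plausible reading, treating them as a smoothed bigram model for scoring how English-like a string is; the scoring function and the smoothing constant are assumptions, not part of the original code.

# Hypothetical sketch: score a string with the bigram counts above
# (add-one style smoothing; both the function and the constant are assumed).
import math

def bigram_logprob(word, one=ONE, two=TWO):
    """Sum of log P(next char | current char) over the string."""
    score = 0.0
    for a, b in zip(word, word[1:]):
        score += math.log((two.get(a + b, 0) + 1) / (one.get(a, 0) + len(one)))
    return score

# Higher (less negative) scores look more English-like under these counts,
# e.g. bigram_logprob("master") > bigram_logprob("qzxwvk").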
flexible
{ "blob_id": "f254f93193a7cb7ed2e55e4481ed85821cafcd7b", "index": 4339, "step-1": "<mask token>\n", "step-2": "TOTAL = 1306336\nONE = {'0': 1473, '1': 5936, '2': 3681, '3': 2996, '4': 2480, '5': 2494,\n '6': 1324, '7': 1474, '8': 1754, '9': 1740, 'a': 79714, 'b': 83472, 'c':\n 78015, 'd': 61702, 'e': 42190, 'f': 68530, 'g': 48942, 'h': 63661, 'i':\n 34947, 'j': 24312, 'k': 26724, 'l': 66351, 'm': 77245, 'n': 36942, 'o':\n 40744, 'p': 68978, 'q': 6750, 'r': 49135, 's': 116034, 't': 87440, 'u':\n 19423, 'v': 22356, 'w': 50718, 'x': 6079, 'y': 13089, 'z': 7491}\nTWO = {'0-': 19, '00': 145, '01': 143, '02': 212, '03': 90, '04': 61, '05':\n 241, '06': 31, '07': 151, '08': 104, '09': 99, '0a': 8, '0b': 8, '0c': \n 16, '0d': 18, '0e': 8, '0f': 7, '0g': 5, '0h': 4, '0i': 9, '0j': 1,\n '0k': 4, '0l': 2, '0m': 8, '0n': 10, '0o': 6, '0p': 10, '0r': 10, '0s':\n 10, '0t': 6, '0u': 5, '0v': 5, '0w': 5, '0x': 4, '0y': 3, '0z': 5, '1-':\n 177, '10': 983, '11': 537, '12': 767, '13': 327, '14': 270, '15': 257,\n '16': 276, '17': 318, '18': 505, '19': 280, '1a': 61, '1b': 58, '1c': \n 84, '1d': 52, '1e': 33, '1f': 32, '1g': 38, '1h': 44, '1i': 25, '1j': \n 13, '1k': 32, '1l': 33, '1m': 59, '1n': 39, '1o': 37, '1p': 68, '1q': 7,\n '1r': 21, '1s': 336, '1t': 54, '1u': 15, '1v': 14, '1w': 53, '1x': 7,\n '1y': 14, '1z': 10, '2-': 30, '20': 889, '21': 406, '22': 228, '23': \n 172, '24': 480, '25': 177, '26': 126, '27': 96, '28': 108, '29': 73,\n '2a': 50, '2b': 94, '2c': 59, '2d': 61, '2e': 29, '2f': 29, '2g': 47,\n '2h': 24, '2i': 22, '2j': 13, '2k': 27, '2l': 35, '2m': 62, '2n': 53,\n '2o': 22, '2p': 48, '2q': 7, '2r': 14, '2s': 53, '2t': 43, '2u': 20,\n '2v': 7, '2w': 43, '2x': 21, '2y': 7, '2z': 6, '3-': 53, '30': 292,\n '31': 224, '32': 188, '33': 179, '34': 91, '35': 153, '36': 367, '37': \n 101, '38': 122, '39': 118, '3a': 50, '3b': 45, '3c': 37, '3d': 350,\n '3e': 17, '3f': 26, '3g': 125, '3h': 27, '3i': 11, '3j': 9, '3k': 19,\n '3l': 25, '3m': 45, '3n': 17, '3o': 15, '3p': 32, '3q': 12, '3r': 72,\n '3s': 53, '3t': 28, '3u': 3, '3v': 13, '3w': 36, '3x': 17, '3y': 14,\n '3z': 10, '4-': 76, '40': 357, '41': 259, '42': 170, '43': 88, '44': \n 126, '45': 102, '46': 67, '47': 56, '48': 97, '49': 62, '4a': 41, '4b':\n 49, '4c': 55, '4d': 53, '4e': 51, '4f': 43, '4g': 52, '4h': 44, '4i': \n 19, '4j': 13, '4k': 22, '4l': 48, '4m': 62, '4n': 22, '4o': 21, '4p': \n 60, '4q': 11, '4r': 26, '4s': 94, '4t': 44, '4u': 40, '4v': 17, '4w': \n 45, '4x': 58, '4y': 24, '4z': 6, '5-': 30, '50': 323, '51': 574, '52': \n 361, '53': 79, '54': 155, '55': 141, '56': 109, '57': 66, '58': 85,\n '59': 87, '5a': 32, '5b': 17, '5c': 21, '5d': 39, '5e': 7, '5f': 17,\n '5g': 21, '5h': 10, '5i': 54, '5j': 4, '5k': 16, '5l': 19, '5m': 22,\n '5n': 8, '5o': 12, '5p': 27, '5q': 4, '5r': 9, '5s': 55, '5t': 38, '5u':\n 17, '5v': 5, '5w': 5, '5x': 9, '5y': 10, '5z': 6, '6-': 42, '60': 173,\n '61': 182, '62': 63, '63': 56, '64': 51, '65': 125, '66': 134, '67': 62,\n '68': 58, '69': 105, '6a': 12, '6b': 7, '6c': 11, '6d': 61, '6e': 6,\n '6f': 15, '6g': 7, '6h': 11, '6i': 6, '6j': 1, '6k': 7, '6l': 8, '6m': \n 16, '6n': 1, '6o': 5, '6p': 12, '6q': 6, '6r': 15, '6s': 28, '6t': 12,\n '6u': 2, '6v': 2, '6w': 8, '6x': 5, '6y': 9, '7-': 34, '70': 200, '71':\n 205, '72': 81, '73': 58, '74': 53, '75': 59, '76': 69, '77': 191, '78':\n 92, '79': 48, '7a': 33, '7b': 18, '7c': 28, '7d': 31, '7e': 13, '7f': \n 15, '7g': 11, '7h': 15, '7i': 7, '7j': 8, '7k': 16, '7l': 19, '7m': 15,\n '7n': 5, '7o': 13, '7p': 10, '7q': 2, '7r': 4, '7s': 33, '7t': 37, '7u':\n 2, 
'7v': 3, '7w': 13, '7x': 13, '7y': 12, '7z': 8, '8-': 61, '80': 336,\n '81': 180, '82': 61, '83': 62, '84': 99, '85': 85, '86': 138, '87': 85,\n '88': 339, '89': 78, '8a': 11, '8b': 16, '8c': 9, '8d': 10, '8e': 6,\n '8f': 10, '8g': 18, '8h': 7, '8i': 12, '8j': 4, '8k': 6, '8l': 6, '8m':\n 11, '8n': 2, '8o': 8, '8p': 15, '8q': 7, '8r': 10, '8s': 18, '8t': 24,\n '8u': 3, '8v': 2, '8w': 4, '8x': 1, '8y': 6, '8z': 4, '9-': 45, '90': \n 173, '91': 275, '92': 149, '93': 59, '94': 76, '95': 82, '96': 76, '97':\n 123, '98': 74, '99': 270, '9a': 19, '9b': 9, '9c': 22, '9d': 17, '9e': \n 10, '9f': 5, '9g': 16, '9h': 6, '9i': 20, '9j': 10, '9k': 9, '9l': 13,\n '9m': 19, '9n': 8, '9o': 8, '9p': 29, '9q': 3, '9r': 11, '9s': 22, '9t':\n 26, '9u': 3, '9v': 8, '9w': 11, '9x': 21, '9y': 8, '9z': 5, 'a-': 307,\n 'a0': 6, 'a1': 172, 'a2': 58, 'a3': 25, 'a4': 16, 'a5': 9, 'a6': 8,\n 'a7': 20, 'a8': 12, 'a9': 15, 'aa': 778, 'ab': 3124, 'ac': 4416, 'ad': \n 5316, 'ae': 537, 'af': 1343, 'ag': 4563, 'ah': 760, 'ai': 5617, 'aj': \n 331, 'ak': 715, 'al': 9102, 'am': 4388, 'an': 8462, 'ao': 351, 'ap': \n 2155, 'aq': 426, 'ar': 9178, 'as': 6419, 'at': 4007, 'au': 2485, 'av': \n 1218, 'aw': 1869, 'ax': 403, 'ay': 457, 'az': 646, 'b-': 148, 'b0': 7,\n 'b1': 19, 'b2': 94, 'b3': 12, 'b4': 24, 'b5': 9, 'b6': 4, 'b7': 5, 'b8':\n 2, 'b9': 6, 'ba': 15356, 'bb': 477, 'bc': 323, 'bd': 266, 'be': 14064,\n 'bf': 200, 'bg': 189, 'bh': 311, 'bi': 11911, 'bj': 604, 'bk': 178,\n 'bl': 5297, 'bm': 306, 'bn': 218, 'bo': 11986, 'bp': 229, 'bq': 103,\n 'br': 5001, 'bs': 317, 'bt': 266, 'bu': 12643, 'bv': 126, 'bw': 147,\n 'bx': 91, 'by': 2394, 'bz': 139, 'c-': 120, 'c0': 13, 'c1': 21, 'c2': \n 78, 'c3': 34, 'c4': 30, 'c5': 6, 'c6': 4, 'c7': 10, 'c8': 4, 'c9': 8,\n 'ca': 18400, 'cb': 368, 'cc': 678, 'cd': 484, 'ce': 3579, 'cf': 300,\n 'cg': 253, 'ch': 11318, 'ci': 2463, 'cj': 218, 'ck': 165, 'cl': 5881,\n 'cm': 371, 'cn': 895, 'co': 15790, 'cp': 497, 'cq': 239, 'cr': 5502,\n 'cs': 1710, 'ct': 364, 'cu': 6370, 'cv': 200, 'cw': 201, 'cx': 142,\n 'cy': 1053, 'cz': 246, 'd-': 147, 'd0': 7, 'd1': 14, 'd2': 37, 'd3': 29,\n 'd4': 10, 'd5': 5, 'd6': 4, 'd7': 8, 'd8': 6, 'd9': 7, 'da': 9910, 'db':\n 288, 'dc': 410, 'dd': 303, 'de': 11362, 'df': 288, 'dg': 307, 'dh': 280,\n 'di': 10934, 'dj': 682, 'dk': 157, 'dl': 393, 'dm': 361, 'dn': 476,\n 'do': 10944, 'dp': 211, 'dq': 106, 'dr': 6965, 'ds': 393, 'dt': 262,\n 'du': 4853, 'dv': 376, 'dw': 211, 'dx': 157, 'dy': 630, 'dz': 169, 'e-':\n 1066, 'e0': 9, 'e1': 12, 'e2': 26, 'e3': 27, 'e4': 11, 'e5': 2, 'e6': 6,\n 'e7': 6, 'e8': 37, 'e9': 5, 'ea': 6784, 'eb': 771, 'ec': 1530, 'ed': \n 2674, 'ee': 257, 'ef': 482, 'eg': 508, 'eh': 276, 'ei': 552, 'ej': 151,\n 'ek': 329, 'el': 3657, 'em': 1785, 'en': 4879, 'eo': 205, 'ep': 613,\n 'eq': 553, 'er': 2767, 'es': 1797, 'et': 766, 'eu': 1095, 'ev': 2975,\n 'ew': 218, 'ex': 2521, 'ey': 2066, 'ez': 772, 'f-': 75, 'f0': 6, 'f1': \n 54, 'f2': 25, 'f3': 5, 'f4': 9, 'f5': 12, 'f6': 2, 'f7': 4, 'f8': 10,\n 'f9': 2, 'fa': 12917, 'fb': 165, 'fc': 272, 'fd': 154, 'fe': 8514, 'ff':\n 195, 'fg': 107, 'fh': 175, 'fi': 14464, 'fj': 192, 'fk': 90, 'fl': 7482,\n 'fm': 210, 'fn': 114, 'fo': 8864, 'fp': 132, 'fq': 77, 'fr': 7566, 'fs':\n 413, 'ft': 252, 'fu': 5259, 'fv': 88, 'fw': 129, 'fx': 197, 'fy': 155,\n 'fz': 143, 'g-': 138, 'g0': 21, 'g1': 38, 'g2': 26, 'g3': 34, 'g4': 26,\n 'g5': 11, 'g6': 5, 'g7': 5, 'g8': 15, 'g9': 5, 'ga': 8708, 'gb': 232,\n 'gc': 262, 'gd': 339, 'ge': 5489, 'gf': 176, 'gg': 245, 'gh': 399, 'gi':\n 3752, 'gj': 108, 'gk': 138, 'gl': 2606, 'gm': 387, 
'gn': 217, 'go': \n 9782, 'gp': 455, 'gq': 78, 'gr': 8101, 'gs': 381, 'gt': 252, 'gu': 5335,\n 'gv': 138, 'gw': 202, 'gx': 176, 'gy': 265, 'gz': 395, 'h-': 120, 'h0':\n 6, 'h1': 14, 'h2': 149, 'h3': 15, 'h4': 28, 'h5': 6, 'h6': 3, 'h7': 2,\n 'h8': 8, 'h9': 2, 'ha': 16216, 'hb': 351, 'hc': 228, 'hd': 442, 'he': \n 12087, 'hf': 180, 'hg': 158, 'hh': 243, 'hi': 10582, 'hj': 147, 'hk': \n 331, 'hl': 215, 'hm': 214, 'hn': 317, 'ho': 14380, 'hp': 207, 'hq': 147,\n 'hr': 318, 'hs': 380, 'ht': 314, 'hu': 4226, 'hv': 119, 'hw': 147, 'hx':\n 150, 'hy': 943, 'hz': 266, 'i-': 527, 'i0': 9, 'i1': 8, 'i2': 34, 'i3':\n 14, 'i4': 17, 'i5': 4, 'i6': 6, 'i7': 12, 'i8': 8, 'i9': 11, 'ia': 606,\n 'ib': 659, 'ic': 2175, 'id': 1981, 'ie': 282, 'if': 1514, 'ig': 488,\n 'ih': 370, 'ii': 226, 'ij': 122, 'ik': 351, 'il': 2287, 'im': 2155,\n 'in': 10117, 'io': 344, 'ip': 818, 'iq': 154, 'ir': 1037, 'is': 2803,\n 'it': 4514, 'iu': 135, 'iv': 328, 'iw': 391, 'ix': 87, 'iy': 123, 'iz':\n 230, 'j-': 143, 'j0': 7, 'j1': 2, 'j2': 20, 'j3': 8, 'j4': 10, 'j5': 1,\n 'j6': 1, 'j7': 2, 'j8': 3, 'j9': 2, 'ja': 3167, 'jb': 251, 'jc': 336,\n 'jd': 290, 'je': 2239, 'jf': 152, 'jg': 136, 'jh': 228, 'ji': 1541,\n 'jj': 266, 'jk': 191, 'jl': 249, 'jm': 340, 'jn': 230, 'jo': 7930, 'jp':\n 278, 'jq': 82, 'jr': 261, 'js': 448, 'jt': 174, 'ju': 4460, 'jv': 125,\n 'jw': 191, 'jx': 200, 'jy': 202, 'jz': 146, 'k-': 110, 'k0': 7, 'k1': \n 29, 'k2': 30, 'k3': 14, 'k4': 5, 'k5': 7, 'k6': 5, 'k7': 7, 'k8': 4,\n 'k9': 32, 'ka': 3724, 'kb': 212, 'kc': 275, 'kd': 182, 'ke': 6054, 'kf':\n 130, 'kg': 137, 'kh': 420, 'ki': 6316, 'kj': 144, 'kk': 167, 'kl': 487,\n 'km': 248, 'kn': 2612, 'ko': 1868, 'kp': 181, 'kq': 59, 'kr': 785, 'ks':\n 300, 'kt': 192, 'ku': 1013, 'kv': 116, 'kw': 230, 'kx': 88, 'ky': 444,\n 'kz': 90, 'l-': 45, 'l0': 12, 'l1': 8, 'l2': 60, 'l3': 18, 'l4': 6,\n 'l5': 2, 'l6': 2, 'l7': 14, 'l8': 5, 'l9': 3, 'la': 14155, 'lb': 350,\n 'lc': 258, 'ld': 192, 'le': 13390, 'lf': 172, 'lg': 196, 'lh': 179,\n 'li': 13775, 'lj': 185, 'lk': 82, 'll': 276, 'lm': 185, 'ln': 163, 'lo':\n 17441, 'lp': 192, 'lq': 75, 'lr': 137, 'ls': 265, 'lt': 197, 'lu': 2947,\n 'lv': 210, 'lw': 133, 'lx': 124, 'ly': 761, 'lz': 136, 'm-': 162, 'm0':\n 11, 'm1': 26, 'm2': 47, 'm3': 32, 'm4': 31, 'm5': 8, 'm6': 7, 'm7': 6,\n 'm8': 11, 'm9': 4, 'ma': 21517, 'mb': 321, 'mc': 856, 'md': 322, 'me': \n 12983, 'mf': 160, 'mg': 199, 'mh': 180, 'mi': 10847, 'mj': 190, 'mk': \n 192, 'ml': 403, 'mm': 572, 'mn': 242, 'mo': 11994, 'mp': 473, 'mq': 80,\n 'mr': 773, 'ms': 631, 'mt': 424, 'mu': 3947, 'mv': 212, 'mw': 145, 'mx':\n 138, 'my': 8975, 'mz': 124, 'n-': 85, 'n0': 16, 'n1': 16, 'n2': 209,\n 'n3': 6, 'n4': 10, 'n5': 3, 'n6': 4, 'n7': 6, 'n8': 14, 'n9': 2, 'na': \n 5028, 'nb': 379, 'nc': 347, 'nd': 186, 'ne': 10656, 'nf': 213, 'ng': \n 232, 'nh': 270, 'ni': 3672, 'nj': 370, 'nk': 136, 'nl': 174, 'nm': 214,\n 'nn': 176, 'no': 10377, 'np': 174, 'nq': 65, 'nr': 161, 'ns': 248, 'nt':\n 270, 'nu': 1902, 'nv': 175, 'nw': 247, 'nx': 119, 'ny': 630, 'nz': 150,\n 'o-': 95, 'o0': 2, 'o1': 5, 'o2': 28, 'o3': 13, 'o4': 2, 'o5': 5, 'o6':\n 2, 'o7': 2, 'o8': 4, 'o9': 1, 'oa': 322, 'ob': 1267, 'oc': 959, 'od': \n 2152, 'oe': 162, 'of': 3428, 'og': 208, 'oh': 1587, 'oi': 1494, 'oj': \n 147, 'ok': 724, 'ol': 2309, 'om': 1532, 'on': 7256, 'oo': 234, 'op': \n 1673, 'oq': 58, 'or': 3282, 'os': 555, 'ot': 634, 'ou': 4435, 'ov': 944,\n 'ow': 4377, 'ox': 218, 'oy': 187, 'oz': 441, 'p-': 86, 'p0': 12, 'p1': \n 19, 'p2': 43, 'p3': 24, 'p4': 15, 'p5': 8, 'p6': 2, 'p7': 2, 'p8': 5,\n 'p9': 5, 'pa': 
15758, 'pb': 196, 'pc': 606, 'pd': 263, 'pe': 6763, 'pf':\n 189, 'pg': 207, 'ph': 2496, 'pi': 7203, 'pj': 148, 'pk': 162, 'pl': \n 7080, 'pm': 258, 'pn': 171, 'po': 11038, 'pp': 369, 'pq': 91, 'pr': \n 7474, 'ps': 686, 'pt': 286, 'pu': 6557, 'pv': 156, 'pw': 142, 'px': 98,\n 'py': 246, 'pz': 114, 'q-': 38, 'q0': 3, 'q1': 8, 'q2': 8, 'q3': 6,\n 'q4': 4, 'q5': 3, 'q6': 3, 'q8': 44, 'q9': 4, 'qa': 220, 'qb': 92, 'qc':\n 118, 'qd': 194, 'qe': 117, 'qf': 81, 'qg': 84, 'qh': 109, 'qi': 391,\n 'qj': 79, 'qk': 73, 'ql': 125, 'qm': 82, 'qn': 92, 'qo': 114, 'qp': 102,\n 'qq': 248, 'qr': 83, 'qs': 131, 'qt': 89, 'qu': 3434, 'qv': 65, 'qw': \n 148, 'qx': 97, 'qy': 109, 'qz': 152, 'r-': 89, 'r0': 7, 'r1': 10, 'r2':\n 26, 'r3': 20, 'r4': 17, 'r5': 2, 'r6': 2, 'r7': 10, 'r8': 1, 'r9': 4,\n 'ra': 9842, 'rb': 211, 'rc': 334, 'rd': 204, 're': 13653, 'rf': 215,\n 'rg': 139, 'rh': 365, 'ri': 7079, 'rj': 156, 'rk': 119, 'rl': 150, 'rm':\n 254, 'rn': 173, 'ro': 9275, 'rp': 216, 'rq': 46, 'rr': 143, 'rs': 333,\n 'rt': 270, 'ru': 4797, 'rv': 182, 'rw': 142, 'rx': 199, 'ry': 348, 'rz':\n 102, 's-': 122, 's0': 6, 's1': 26, 's2': 33, 's3': 27, 's4': 19, 's5': \n 10, 's6': 12, 's7': 19, 's8': 12, 's9': 6, 'sa': 17038, 'sb': 328, 'sc':\n 3980, 'sd': 603, 'se': 18133, 'sf': 356, 'sg': 309, 'sh': 12388, 'si': \n 10761, 'sj': 286, 'sk': 1551, 'sl': 2639, 'sm': 2210, 'sn': 818, 'so': \n 11313, 'sp': 6560, 'sq': 323, 'sr': 385, 'ss': 497, 'st': 11992, 'su': \n 9496, 'sv': 251, 'sw': 1490, 'sx': 289, 'sy': 1044, 'sz': 702, 't-': \n 149, 't0': 4, 't1': 14, 't2': 16, 't3': 20, 't4': 8, 't5': 8, 't6': 6,\n 't7': 9, 't8': 34, 't9': 12, 'ta': 10163, 'tb': 227, 'tc': 353, 'td': \n 239, 'te': 12576, 'tf': 165, 'tg': 186, 'th': 20347, 'ti': 8367, 'tj': \n 321, 'tk': 178, 'tl': 273, 'tm': 289, 'tn': 269, 'to': 11512, 'tp': 227,\n 'tq': 90, 'tr': 12301, 'ts': 469, 'tt': 280, 'tu': 3572, 'tv': 534,\n 'tw': 2193, 'tx': 266, 'ty': 1588, 'tz': 175, 'u-': 119, 'u0': 3, 'u1':\n 15, 'u2': 21, 'u3': 9, 'u4': 3, 'u5': 3, 'u6': 3, 'u7': 2, 'u8': 11,\n 'u9': 5, 'ua': 258, 'ub': 352, 'uc': 350, 'ud': 179, 'ue': 138, 'uf': \n 188, 'ug': 891, 'uh': 194, 'ui': 127, 'uj': 66, 'uk': 422, 'ul': 536,\n 'um': 326, 'un': 3354, 'uo': 106, 'up': 2718, 'uq': 59, 'ur': 923, 'us':\n 6826, 'ut': 474, 'uu': 108, 'uv': 172, 'uw': 165, 'ux': 93, 'uy': 107,\n 'uz': 97, 'v-': 87, 'v0': 3, 'v1': 8, 'v2': 11, 'v3': 20, 'v4': 8, 'v5':\n 4, 'v6': 14, 'v7': 2, 'v8': 9, 'v9': 2, 'va': 5048, 'vb': 168, 'vc': \n 200, 'vd': 140, 've': 2797, 'vf': 111, 'vg': 120, 'vh': 121, 'vi': 8878,\n 'vj': 91, 'vk': 117, 'vl': 172, 'vm': 152, 'vn': 149, 'vo': 2303, 'vp':\n 172, 'vq': 76, 'vr': 232, 'vs': 192, 'vt': 174, 'vu': 214, 'vv': 139,\n 'vw': 105, 'vx': 101, 'vy': 119, 'vz': 97, 'w-': 49, 'w0': 9, 'w1': 15,\n 'w2': 10, 'w3': 49, 'w4': 11, 'w5': 5, 'w6': 2, 'w7': 5, 'w8': 16, 'w9':\n 7, 'wa': 13399, 'wb': 160, 'wc': 178, 'wd': 138, 'we': 10270, 'wf': 145,\n 'wg': 135, 'wh': 7676, 'wi': 8165, 'wj': 154, 'wk': 103, 'wl': 169,\n 'wm': 219, 'wn': 137, 'wo': 4635, 'wp': 199, 'wq': 78, 'wr': 578, 'ws':\n 246, 'wt': 170, 'wu': 306, 'wv': 136, 'ww': 2494, 'wx': 193, 'wy': 274,\n 'wz': 183, 'x-': 157, 'x0': 8, 'x1': 15, 'x2': 61, 'x3': 15, 'x4': 2,\n 'x5': 8, 'x6': 12, 'x7': 3, 'x8': 5, 'x9': 3, 'xa': 329, 'xb': 191,\n 'xc': 220, 'xd': 145, 'xe': 282, 'xf': 158, 'xg': 136, 'xh': 144, 'xi':\n 794, 'xj': 208, 'xk': 79, 'xl': 180, 'xm': 285, 'xn': 107, 'xo': 163,\n 'xp': 292, 'xq': 82, 'xr': 126, 'xs': 195, 'xt': 344, 'xu': 208, 'xv': \n 78, 'xw': 84, 'xx': 552, 'xy': 237, 'xz': 171, 'y-': 45, 
'y0': 5, 'y1':\n 6, 'y2': 13, 'y3': 2, 'y5': 5, 'y6': 4, 'y7': 4, 'y8': 3, 'y9': 4, 'ya':\n 1485, 'yb': 102, 'yc': 195, 'yd': 137, 'ye': 2320, 'yf': 106, 'yg': 116,\n 'yh': 159, 'yi': 568, 'yj': 136, 'yk': 131, 'yl': 168, 'ym': 174, 'yn':\n 217, 'yo': 4580, 'yp': 214, 'yq': 89, 'yr': 98, 'ys': 233, 'yt': 248,\n 'yu': 779, 'yv': 101, 'yw': 155, 'yx': 142, 'yy': 176, 'yz': 169, 'z-':\n 40, 'z0': 4, 'z1': 6, 'z2': 6, 'z3': 5, 'z4': 1, 'z5': 4, 'z7': 2, 'z8':\n 1, 'z9': 3, 'za': 920, 'zb': 144, 'zc': 129, 'zd': 141, 'ze': 1083,\n 'zf': 95, 'zg': 277, 'zh': 651, 'zi': 676, 'zj': 315, 'zk': 102, 'zl': \n 176, 'zm': 120, 'zn': 117, 'zo': 741, 'zp': 107, 'zq': 131, 'zr': 148,\n 'zs': 167, 'zt': 102, 'zu': 336, 'zv': 70, 'zw': 126, 'zx': 108, 'zy': \n 171, 'zz': 266}\n", "step-3": "TOTAL = 1306336\n\nONE = {\n'0': 1473,\n'1': 5936,\n'2': 3681,\n'3': 2996,\n'4': 2480,\n'5': 2494,\n'6': 1324,\n'7': 1474,\n'8': 1754,\n'9': 1740,\n'a': 79714,\n'b': 83472,\n'c': 78015,\n'd': 61702,\n'e': 42190,\n'f': 68530,\n'g': 48942,\n'h': 63661,\n'i': 34947,\n'j': 24312,\n'k': 26724,\n'l': 66351,\n'm': 77245,\n'n': 36942,\n'o': 40744,\n'p': 68978,\n'q': 6750,\n'r': 49135,\n's': 116034,\n't': 87440,\n'u': 19423,\n'v': 22356,\n'w': 50718,\n'x': 6079,\n'y': 13089,\n'z': 7491,\n}\n\nTWO = {\n'0-': 19,\n'00': 145,\n'01': 143,\n'02': 212,\n'03': 90,\n'04': 61,\n'05': 241,\n'06': 31,\n'07': 151,\n'08': 104,\n'09': 99,\n'0a': 8,\n'0b': 8,\n'0c': 16,\n'0d': 18,\n'0e': 8,\n'0f': 7,\n'0g': 5,\n'0h': 4,\n'0i': 9,\n'0j': 1,\n'0k': 4,\n'0l': 2,\n'0m': 8,\n'0n': 10,\n'0o': 6,\n'0p': 10,\n'0r': 10,\n'0s': 10,\n'0t': 6,\n'0u': 5,\n'0v': 5,\n'0w': 5,\n'0x': 4,\n'0y': 3,\n'0z': 5,\n'1-': 177,\n'10': 983,\n'11': 537,\n'12': 767,\n'13': 327,\n'14': 270,\n'15': 257,\n'16': 276,\n'17': 318,\n'18': 505,\n'19': 280,\n'1a': 61,\n'1b': 58,\n'1c': 84,\n'1d': 52,\n'1e': 33,\n'1f': 32,\n'1g': 38,\n'1h': 44,\n'1i': 25,\n'1j': 13,\n'1k': 32,\n'1l': 33,\n'1m': 59,\n'1n': 39,\n'1o': 37,\n'1p': 68,\n'1q': 7,\n'1r': 21,\n'1s': 336,\n'1t': 54,\n'1u': 15,\n'1v': 14,\n'1w': 53,\n'1x': 7,\n'1y': 14,\n'1z': 10,\n'2-': 30,\n'20': 889,\n'21': 406,\n'22': 228,\n'23': 172,\n'24': 480,\n'25': 177,\n'26': 126,\n'27': 96,\n'28': 108,\n'29': 73,\n'2a': 50,\n'2b': 94,\n'2c': 59,\n'2d': 61,\n'2e': 29,\n'2f': 29,\n'2g': 47,\n'2h': 24,\n'2i': 22,\n'2j': 13,\n'2k': 27,\n'2l': 35,\n'2m': 62,\n'2n': 53,\n'2o': 22,\n'2p': 48,\n'2q': 7,\n'2r': 14,\n'2s': 53,\n'2t': 43,\n'2u': 20,\n'2v': 7,\n'2w': 43,\n'2x': 21,\n'2y': 7,\n'2z': 6,\n'3-': 53,\n'30': 292,\n'31': 224,\n'32': 188,\n'33': 179,\n'34': 91,\n'35': 153,\n'36': 367,\n'37': 101,\n'38': 122,\n'39': 118,\n'3a': 50,\n'3b': 45,\n'3c': 37,\n'3d': 350,\n'3e': 17,\n'3f': 26,\n'3g': 125,\n'3h': 27,\n'3i': 11,\n'3j': 9,\n'3k': 19,\n'3l': 25,\n'3m': 45,\n'3n': 17,\n'3o': 15,\n'3p': 32,\n'3q': 12,\n'3r': 72,\n'3s': 53,\n'3t': 28,\n'3u': 3,\n'3v': 13,\n'3w': 36,\n'3x': 17,\n'3y': 14,\n'3z': 10,\n'4-': 76,\n'40': 357,\n'41': 259,\n'42': 170,\n'43': 88,\n'44': 126,\n'45': 102,\n'46': 67,\n'47': 56,\n'48': 97,\n'49': 62,\n'4a': 41,\n'4b': 49,\n'4c': 55,\n'4d': 53,\n'4e': 51,\n'4f': 43,\n'4g': 52,\n'4h': 44,\n'4i': 19,\n'4j': 13,\n'4k': 22,\n'4l': 48,\n'4m': 62,\n'4n': 22,\n'4o': 21,\n'4p': 60,\n'4q': 11,\n'4r': 26,\n'4s': 94,\n'4t': 44,\n'4u': 40,\n'4v': 17,\n'4w': 45,\n'4x': 58,\n'4y': 24,\n'4z': 6,\n'5-': 30,\n'50': 323,\n'51': 574,\n'52': 361,\n'53': 79,\n'54': 155,\n'55': 141,\n'56': 109,\n'57': 66,\n'58': 85,\n'59': 87,\n'5a': 32,\n'5b': 17,\n'5c': 21,\n'5d': 39,\n'5e': 7,\n'5f': 17,\n'5g': 21,\n'5h': 10,\n'5i': 54,\n'5j': 
4,\n'5k': 16,\n'5l': 19,\n'5m': 22,\n'5n': 8,\n'5o': 12,\n'5p': 27,\n'5q': 4,\n'5r': 9,\n'5s': 55,\n'5t': 38,\n'5u': 17,\n'5v': 5,\n'5w': 5,\n'5x': 9,\n'5y': 10,\n'5z': 6,\n'6-': 42,\n'60': 173,\n'61': 182,\n'62': 63,\n'63': 56,\n'64': 51,\n'65': 125,\n'66': 134,\n'67': 62,\n'68': 58,\n'69': 105,\n'6a': 12,\n'6b': 7,\n'6c': 11,\n'6d': 61,\n'6e': 6,\n'6f': 15,\n'6g': 7,\n'6h': 11,\n'6i': 6,\n'6j': 1,\n'6k': 7,\n'6l': 8,\n'6m': 16,\n'6n': 1,\n'6o': 5,\n'6p': 12,\n'6q': 6,\n'6r': 15,\n'6s': 28,\n'6t': 12,\n'6u': 2,\n'6v': 2,\n'6w': 8,\n'6x': 5,\n'6y': 9,\n'7-': 34,\n'70': 200,\n'71': 205,\n'72': 81,\n'73': 58,\n'74': 53,\n'75': 59,\n'76': 69,\n'77': 191,\n'78': 92,\n'79': 48,\n'7a': 33,\n'7b': 18,\n'7c': 28,\n'7d': 31,\n'7e': 13,\n'7f': 15,\n'7g': 11,\n'7h': 15,\n'7i': 7,\n'7j': 8,\n'7k': 16,\n'7l': 19,\n'7m': 15,\n'7n': 5,\n'7o': 13,\n'7p': 10,\n'7q': 2,\n'7r': 4,\n'7s': 33,\n'7t': 37,\n'7u': 2,\n'7v': 3,\n'7w': 13,\n'7x': 13,\n'7y': 12,\n'7z': 8,\n'8-': 61,\n'80': 336,\n'81': 180,\n'82': 61,\n'83': 62,\n'84': 99,\n'85': 85,\n'86': 138,\n'87': 85,\n'88': 339,\n'89': 78,\n'8a': 11,\n'8b': 16,\n'8c': 9,\n'8d': 10,\n'8e': 6,\n'8f': 10,\n'8g': 18,\n'8h': 7,\n'8i': 12,\n'8j': 4,\n'8k': 6,\n'8l': 6,\n'8m': 11,\n'8n': 2,\n'8o': 8,\n'8p': 15,\n'8q': 7,\n'8r': 10,\n'8s': 18,\n'8t': 24,\n'8u': 3,\n'8v': 2,\n'8w': 4,\n'8x': 1,\n'8y': 6,\n'8z': 4,\n'9-': 45,\n'90': 173,\n'91': 275,\n'92': 149,\n'93': 59,\n'94': 76,\n'95': 82,\n'96': 76,\n'97': 123,\n'98': 74,\n'99': 270,\n'9a': 19,\n'9b': 9,\n'9c': 22,\n'9d': 17,\n'9e': 10,\n'9f': 5,\n'9g': 16,\n'9h': 6,\n'9i': 20,\n'9j': 10,\n'9k': 9,\n'9l': 13,\n'9m': 19,\n'9n': 8,\n'9o': 8,\n'9p': 29,\n'9q': 3,\n'9r': 11,\n'9s': 22,\n'9t': 26,\n'9u': 3,\n'9v': 8,\n'9w': 11,\n'9x': 21,\n'9y': 8,\n'9z': 5,\n'a-': 307,\n'a0': 6,\n'a1': 172,\n'a2': 58,\n'a3': 25,\n'a4': 16,\n'a5': 9,\n'a6': 8,\n'a7': 20,\n'a8': 12,\n'a9': 15,\n'aa': 778,\n'ab': 3124,\n'ac': 4416,\n'ad': 5316,\n'ae': 537,\n'af': 1343,\n'ag': 4563,\n'ah': 760,\n'ai': 5617,\n'aj': 331,\n'ak': 715,\n'al': 9102,\n'am': 4388,\n'an': 8462,\n'ao': 351,\n'ap': 2155,\n'aq': 426,\n'ar': 9178,\n'as': 6419,\n'at': 4007,\n'au': 2485,\n'av': 1218,\n'aw': 1869,\n'ax': 403,\n'ay': 457,\n'az': 646,\n'b-': 148,\n'b0': 7,\n'b1': 19,\n'b2': 94,\n'b3': 12,\n'b4': 24,\n'b5': 9,\n'b6': 4,\n'b7': 5,\n'b8': 2,\n'b9': 6,\n'ba': 15356,\n'bb': 477,\n'bc': 323,\n'bd': 266,\n'be': 14064,\n'bf': 200,\n'bg': 189,\n'bh': 311,\n'bi': 11911,\n'bj': 604,\n'bk': 178,\n'bl': 5297,\n'bm': 306,\n'bn': 218,\n'bo': 11986,\n'bp': 229,\n'bq': 103,\n'br': 5001,\n'bs': 317,\n'bt': 266,\n'bu': 12643,\n'bv': 126,\n'bw': 147,\n'bx': 91,\n'by': 2394,\n'bz': 139,\n'c-': 120,\n'c0': 13,\n'c1': 21,\n'c2': 78,\n'c3': 34,\n'c4': 30,\n'c5': 6,\n'c6': 4,\n'c7': 10,\n'c8': 4,\n'c9': 8,\n'ca': 18400,\n'cb': 368,\n'cc': 678,\n'cd': 484,\n'ce': 3579,\n'cf': 300,\n'cg': 253,\n'ch': 11318,\n'ci': 2463,\n'cj': 218,\n'ck': 165,\n'cl': 5881,\n'cm': 371,\n'cn': 895,\n'co': 15790,\n'cp': 497,\n'cq': 239,\n'cr': 5502,\n'cs': 1710,\n'ct': 364,\n'cu': 6370,\n'cv': 200,\n'cw': 201,\n'cx': 142,\n'cy': 1053,\n'cz': 246,\n'd-': 147,\n'd0': 7,\n'd1': 14,\n'd2': 37,\n'd3': 29,\n'd4': 10,\n'd5': 5,\n'd6': 4,\n'd7': 8,\n'd8': 6,\n'd9': 7,\n'da': 9910,\n'db': 288,\n'dc': 410,\n'dd': 303,\n'de': 11362,\n'df': 288,\n'dg': 307,\n'dh': 280,\n'di': 10934,\n'dj': 682,\n'dk': 157,\n'dl': 393,\n'dm': 361,\n'dn': 476,\n'do': 10944,\n'dp': 211,\n'dq': 106,\n'dr': 6965,\n'ds': 393,\n'dt': 262,\n'du': 4853,\n'dv': 376,\n'dw': 211,\n'dx': 157,\n'dy': 630,\n'dz': 169,\n'e-': 1066,\n'e0': 9,\n'e1': 
12,\n'e2': 26,\n'e3': 27,\n'e4': 11,\n'e5': 2,\n'e6': 6,\n'e7': 6,\n'e8': 37,\n'e9': 5,\n'ea': 6784,\n'eb': 771,\n'ec': 1530,\n'ed': 2674,\n'ee': 257,\n'ef': 482,\n'eg': 508,\n'eh': 276,\n'ei': 552,\n'ej': 151,\n'ek': 329,\n'el': 3657,\n'em': 1785,\n'en': 4879,\n'eo': 205,\n'ep': 613,\n'eq': 553,\n'er': 2767,\n'es': 1797,\n'et': 766,\n'eu': 1095,\n'ev': 2975,\n'ew': 218,\n'ex': 2521,\n'ey': 2066,\n'ez': 772,\n'f-': 75,\n'f0': 6,\n'f1': 54,\n'f2': 25,\n'f3': 5,\n'f4': 9,\n'f5': 12,\n'f6': 2,\n'f7': 4,\n'f8': 10,\n'f9': 2,\n'fa': 12917,\n'fb': 165,\n'fc': 272,\n'fd': 154,\n'fe': 8514,\n'ff': 195,\n'fg': 107,\n'fh': 175,\n'fi': 14464,\n'fj': 192,\n'fk': 90,\n'fl': 7482,\n'fm': 210,\n'fn': 114,\n'fo': 8864,\n'fp': 132,\n'fq': 77,\n'fr': 7566,\n'fs': 413,\n'ft': 252,\n'fu': 5259,\n'fv': 88,\n'fw': 129,\n'fx': 197,\n'fy': 155,\n'fz': 143,\n'g-': 138,\n'g0': 21,\n'g1': 38,\n'g2': 26,\n'g3': 34,\n'g4': 26,\n'g5': 11,\n'g6': 5,\n'g7': 5,\n'g8': 15,\n'g9': 5,\n'ga': 8708,\n'gb': 232,\n'gc': 262,\n'gd': 339,\n'ge': 5489,\n'gf': 176,\n'gg': 245,\n'gh': 399,\n'gi': 3752,\n'gj': 108,\n'gk': 138,\n'gl': 2606,\n'gm': 387,\n'gn': 217,\n'go': 9782,\n'gp': 455,\n'gq': 78,\n'gr': 8101,\n'gs': 381,\n'gt': 252,\n'gu': 5335,\n'gv': 138,\n'gw': 202,\n'gx': 176,\n'gy': 265,\n'gz': 395,\n'h-': 120,\n'h0': 6,\n'h1': 14,\n'h2': 149,\n'h3': 15,\n'h4': 28,\n'h5': 6,\n'h6': 3,\n'h7': 2,\n'h8': 8,\n'h9': 2,\n'ha': 16216,\n'hb': 351,\n'hc': 228,\n'hd': 442,\n'he': 12087,\n'hf': 180,\n'hg': 158,\n'hh': 243,\n'hi': 10582,\n'hj': 147,\n'hk': 331,\n'hl': 215,\n'hm': 214,\n'hn': 317,\n'ho': 14380,\n'hp': 207,\n'hq': 147,\n'hr': 318,\n'hs': 380,\n'ht': 314,\n'hu': 4226,\n'hv': 119,\n'hw': 147,\n'hx': 150,\n'hy': 943,\n'hz': 266,\n'i-': 527,\n'i0': 9,\n'i1': 8,\n'i2': 34,\n'i3': 14,\n'i4': 17,\n'i5': 4,\n'i6': 6,\n'i7': 12,\n'i8': 8,\n'i9': 11,\n'ia': 606,\n'ib': 659,\n'ic': 2175,\n'id': 1981,\n'ie': 282,\n'if': 1514,\n'ig': 488,\n'ih': 370,\n'ii': 226,\n'ij': 122,\n'ik': 351,\n'il': 2287,\n'im': 2155,\n'in': 10117,\n'io': 344,\n'ip': 818,\n'iq': 154,\n'ir': 1037,\n'is': 2803,\n'it': 4514,\n'iu': 135,\n'iv': 328,\n'iw': 391,\n'ix': 87,\n'iy': 123,\n'iz': 230,\n'j-': 143,\n'j0': 7,\n'j1': 2,\n'j2': 20,\n'j3': 8,\n'j4': 10,\n'j5': 1,\n'j6': 1,\n'j7': 2,\n'j8': 3,\n'j9': 2,\n'ja': 3167,\n'jb': 251,\n'jc': 336,\n'jd': 290,\n'je': 2239,\n'jf': 152,\n'jg': 136,\n'jh': 228,\n'ji': 1541,\n'jj': 266,\n'jk': 191,\n'jl': 249,\n'jm': 340,\n'jn': 230,\n'jo': 7930,\n'jp': 278,\n'jq': 82,\n'jr': 261,\n'js': 448,\n'jt': 174,\n'ju': 4460,\n'jv': 125,\n'jw': 191,\n'jx': 200,\n'jy': 202,\n'jz': 146,\n'k-': 110,\n'k0': 7,\n'k1': 29,\n'k2': 30,\n'k3': 14,\n'k4': 5,\n'k5': 7,\n'k6': 5,\n'k7': 7,\n'k8': 4,\n'k9': 32,\n'ka': 3724,\n'kb': 212,\n'kc': 275,\n'kd': 182,\n'ke': 6054,\n'kf': 130,\n'kg': 137,\n'kh': 420,\n'ki': 6316,\n'kj': 144,\n'kk': 167,\n'kl': 487,\n'km': 248,\n'kn': 2612,\n'ko': 1868,\n'kp': 181,\n'kq': 59,\n'kr': 785,\n'ks': 300,\n'kt': 192,\n'ku': 1013,\n'kv': 116,\n'kw': 230,\n'kx': 88,\n'ky': 444,\n'kz': 90,\n'l-': 45,\n'l0': 12,\n'l1': 8,\n'l2': 60,\n'l3': 18,\n'l4': 6,\n'l5': 2,\n'l6': 2,\n'l7': 14,\n'l8': 5,\n'l9': 3,\n'la': 14155,\n'lb': 350,\n'lc': 258,\n'ld': 192,\n'le': 13390,\n'lf': 172,\n'lg': 196,\n'lh': 179,\n'li': 13775,\n'lj': 185,\n'lk': 82,\n'll': 276,\n'lm': 185,\n'ln': 163,\n'lo': 17441,\n'lp': 192,\n'lq': 75,\n'lr': 137,\n'ls': 265,\n'lt': 197,\n'lu': 2947,\n'lv': 210,\n'lw': 133,\n'lx': 124,\n'ly': 761,\n'lz': 136,\n'm-': 162,\n'm0': 11,\n'm1': 26,\n'm2': 47,\n'm3': 32,\n'm4': 31,\n'm5': 8,\n'm6': 7,\n'm7': 
6,\n'm8': 11,\n'm9': 4,\n'ma': 21517,\n'mb': 321,\n'mc': 856,\n'md': 322,\n'me': 12983,\n'mf': 160,\n'mg': 199,\n'mh': 180,\n'mi': 10847,\n'mj': 190,\n'mk': 192,\n'ml': 403,\n'mm': 572,\n'mn': 242,\n'mo': 11994,\n'mp': 473,\n'mq': 80,\n'mr': 773,\n'ms': 631,\n'mt': 424,\n'mu': 3947,\n'mv': 212,\n'mw': 145,\n'mx': 138,\n'my': 8975,\n'mz': 124,\n'n-': 85,\n'n0': 16,\n'n1': 16,\n'n2': 209,\n'n3': 6,\n'n4': 10,\n'n5': 3,\n'n6': 4,\n'n7': 6,\n'n8': 14,\n'n9': 2,\n'na': 5028,\n'nb': 379,\n'nc': 347,\n'nd': 186,\n'ne': 10656,\n'nf': 213,\n'ng': 232,\n'nh': 270,\n'ni': 3672,\n'nj': 370,\n'nk': 136,\n'nl': 174,\n'nm': 214,\n'nn': 176,\n'no': 10377,\n'np': 174,\n'nq': 65,\n'nr': 161,\n'ns': 248,\n'nt': 270,\n'nu': 1902,\n'nv': 175,\n'nw': 247,\n'nx': 119,\n'ny': 630,\n'nz': 150,\n'o-': 95,\n'o0': 2,\n'o1': 5,\n'o2': 28,\n'o3': 13,\n'o4': 2,\n'o5': 5,\n'o6': 2,\n'o7': 2,\n'o8': 4,\n'o9': 1,\n'oa': 322,\n'ob': 1267,\n'oc': 959,\n'od': 2152,\n'oe': 162,\n'of': 3428,\n'og': 208,\n'oh': 1587,\n'oi': 1494,\n'oj': 147,\n'ok': 724,\n'ol': 2309,\n'om': 1532,\n'on': 7256,\n'oo': 234,\n'op': 1673,\n'oq': 58,\n'or': 3282,\n'os': 555,\n'ot': 634,\n'ou': 4435,\n'ov': 944,\n'ow': 4377,\n'ox': 218,\n'oy': 187,\n'oz': 441,\n'p-': 86,\n'p0': 12,\n'p1': 19,\n'p2': 43,\n'p3': 24,\n'p4': 15,\n'p5': 8,\n'p6': 2,\n'p7': 2,\n'p8': 5,\n'p9': 5,\n'pa': 15758,\n'pb': 196,\n'pc': 606,\n'pd': 263,\n'pe': 6763,\n'pf': 189,\n'pg': 207,\n'ph': 2496,\n'pi': 7203,\n'pj': 148,\n'pk': 162,\n'pl': 7080,\n'pm': 258,\n'pn': 171,\n'po': 11038,\n'pp': 369,\n'pq': 91,\n'pr': 7474,\n'ps': 686,\n'pt': 286,\n'pu': 6557,\n'pv': 156,\n'pw': 142,\n'px': 98,\n'py': 246,\n'pz': 114,\n'q-': 38,\n'q0': 3,\n'q1': 8,\n'q2': 8,\n'q3': 6,\n'q4': 4,\n'q5': 3,\n'q6': 3,\n'q8': 44,\n'q9': 4,\n'qa': 220,\n'qb': 92,\n'qc': 118,\n'qd': 194,\n'qe': 117,\n'qf': 81,\n'qg': 84,\n'qh': 109,\n'qi': 391,\n'qj': 79,\n'qk': 73,\n'ql': 125,\n'qm': 82,\n'qn': 92,\n'qo': 114,\n'qp': 102,\n'qq': 248,\n'qr': 83,\n'qs': 131,\n'qt': 89,\n'qu': 3434,\n'qv': 65,\n'qw': 148,\n'qx': 97,\n'qy': 109,\n'qz': 152,\n'r-': 89,\n'r0': 7,\n'r1': 10,\n'r2': 26,\n'r3': 20,\n'r4': 17,\n'r5': 2,\n'r6': 2,\n'r7': 10,\n'r8': 1,\n'r9': 4,\n'ra': 9842,\n'rb': 211,\n'rc': 334,\n'rd': 204,\n're': 13653,\n'rf': 215,\n'rg': 139,\n'rh': 365,\n'ri': 7079,\n'rj': 156,\n'rk': 119,\n'rl': 150,\n'rm': 254,\n'rn': 173,\n'ro': 9275,\n'rp': 216,\n'rq': 46,\n'rr': 143,\n'rs': 333,\n'rt': 270,\n'ru': 4797,\n'rv': 182,\n'rw': 142,\n'rx': 199,\n'ry': 348,\n'rz': 102,\n's-': 122,\n's0': 6,\n's1': 26,\n's2': 33,\n's3': 27,\n's4': 19,\n's5': 10,\n's6': 12,\n's7': 19,\n's8': 12,\n's9': 6,\n'sa': 17038,\n'sb': 328,\n'sc': 3980,\n'sd': 603,\n'se': 18133,\n'sf': 356,\n'sg': 309,\n'sh': 12388,\n'si': 10761,\n'sj': 286,\n'sk': 1551,\n'sl': 2639,\n'sm': 2210,\n'sn': 818,\n'so': 11313,\n'sp': 6560,\n'sq': 323,\n'sr': 385,\n'ss': 497,\n'st': 11992,\n'su': 9496,\n'sv': 251,\n'sw': 1490,\n'sx': 289,\n'sy': 1044,\n'sz': 702,\n't-': 149,\n't0': 4,\n't1': 14,\n't2': 16,\n't3': 20,\n't4': 8,\n't5': 8,\n't6': 6,\n't7': 9,\n't8': 34,\n't9': 12,\n'ta': 10163,\n'tb': 227,\n'tc': 353,\n'td': 239,\n'te': 12576,\n'tf': 165,\n'tg': 186,\n'th': 20347,\n'ti': 8367,\n'tj': 321,\n'tk': 178,\n'tl': 273,\n'tm': 289,\n'tn': 269,\n'to': 11512,\n'tp': 227,\n'tq': 90,\n'tr': 12301,\n'ts': 469,\n'tt': 280,\n'tu': 3572,\n'tv': 534,\n'tw': 2193,\n'tx': 266,\n'ty': 1588,\n'tz': 175,\n'u-': 119,\n'u0': 3,\n'u1': 15,\n'u2': 21,\n'u3': 9,\n'u4': 3,\n'u5': 3,\n'u6': 3,\n'u7': 2,\n'u8': 11,\n'u9': 5,\n'ua': 258,\n'ub': 352,\n'uc': 350,\n'ud': 179,\n'ue': 
138,\n'uf': 188,\n'ug': 891,\n'uh': 194,\n'ui': 127,\n'uj': 66,\n'uk': 422,\n'ul': 536,\n'um': 326,\n'un': 3354,\n'uo': 106,\n'up': 2718,\n'uq': 59,\n'ur': 923,\n'us': 6826,\n'ut': 474,\n'uu': 108,\n'uv': 172,\n'uw': 165,\n'ux': 93,\n'uy': 107,\n'uz': 97,\n'v-': 87,\n'v0': 3,\n'v1': 8,\n'v2': 11,\n'v3': 20,\n'v4': 8,\n'v5': 4,\n'v6': 14,\n'v7': 2,\n'v8': 9,\n'v9': 2,\n'va': 5048,\n'vb': 168,\n'vc': 200,\n'vd': 140,\n've': 2797,\n'vf': 111,\n'vg': 120,\n'vh': 121,\n'vi': 8878,\n'vj': 91,\n'vk': 117,\n'vl': 172,\n'vm': 152,\n'vn': 149,\n'vo': 2303,\n'vp': 172,\n'vq': 76,\n'vr': 232,\n'vs': 192,\n'vt': 174,\n'vu': 214,\n'vv': 139,\n'vw': 105,\n'vx': 101,\n'vy': 119,\n'vz': 97,\n'w-': 49,\n'w0': 9,\n'w1': 15,\n'w2': 10,\n'w3': 49,\n'w4': 11,\n'w5': 5,\n'w6': 2,\n'w7': 5,\n'w8': 16,\n'w9': 7,\n'wa': 13399,\n'wb': 160,\n'wc': 178,\n'wd': 138,\n'we': 10270,\n'wf': 145,\n'wg': 135,\n'wh': 7676,\n'wi': 8165,\n'wj': 154,\n'wk': 103,\n'wl': 169,\n'wm': 219,\n'wn': 137,\n'wo': 4635,\n'wp': 199,\n'wq': 78,\n'wr': 578,\n'ws': 246,\n'wt': 170,\n'wu': 306,\n'wv': 136,\n'ww': 2494,\n'wx': 193,\n'wy': 274,\n'wz': 183,\n'x-': 157,\n'x0': 8,\n'x1': 15,\n'x2': 61,\n'x3': 15,\n'x4': 2,\n'x5': 8,\n'x6': 12,\n'x7': 3,\n'x8': 5,\n'x9': 3,\n'xa': 329,\n'xb': 191,\n'xc': 220,\n'xd': 145,\n'xe': 282,\n'xf': 158,\n'xg': 136,\n'xh': 144,\n'xi': 794,\n'xj': 208,\n'xk': 79,\n'xl': 180,\n'xm': 285,\n'xn': 107,\n'xo': 163,\n'xp': 292,\n'xq': 82,\n'xr': 126,\n'xs': 195,\n'xt': 344,\n'xu': 208,\n'xv': 78,\n'xw': 84,\n'xx': 552,\n'xy': 237,\n'xz': 171,\n'y-': 45,\n'y0': 5,\n'y1': 6,\n'y2': 13,\n'y3': 2,\n'y5': 5,\n'y6': 4,\n'y7': 4,\n'y8': 3,\n'y9': 4,\n'ya': 1485,\n'yb': 102,\n'yc': 195,\n'yd': 137,\n'ye': 2320,\n'yf': 106,\n'yg': 116,\n'yh': 159,\n'yi': 568,\n'yj': 136,\n'yk': 131,\n'yl': 168,\n'ym': 174,\n'yn': 217,\n'yo': 4580,\n'yp': 214,\n'yq': 89,\n'yr': 98,\n'ys': 233,\n'yt': 248,\n'yu': 779,\n'yv': 101,\n'yw': 155,\n'yx': 142,\n'yy': 176,\n'yz': 169,\n'z-': 40,\n'z0': 4,\n'z1': 6,\n'z2': 6,\n'z3': 5,\n'z4': 1,\n'z5': 4,\n'z7': 2,\n'z8': 1,\n'z9': 3,\n'za': 920,\n'zb': 144,\n'zc': 129,\n'zd': 141,\n'ze': 1083,\n'zf': 95,\n'zg': 277,\n'zh': 651,\n'zi': 676,\n'zj': 315,\n'zk': 102,\n'zl': 176,\n'zm': 120,\n'zn': 117,\n'zo': 741,\n'zp': 107,\n'zq': 131,\n'zr': 148,\n'zs': 167,\n'zt': 102,\n'zu': 336,\n'zv': 70,\n'zw': 126,\n'zx': 108,\n'zy': 171,\n'zz': 266,\n}\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
# -------------------------------------------

# Created by: jasper
# Date: 11/24/19

# --------------------------------------------

from os import path, mkdir


class IOHandler:

    def __init__(self, directory, fName, data_instance):
        """Save the setup of a class instance or load a class instance from a saved setup


        Parameters
        ----------
        directory : str
            path of the directory the files are saved to or read from
        fName : str
            Name of the project. File endings will be set automatically
        data_instance : object
            class instance to perform actions on
        """
        self.fName = fName
        self.data_instance = data_instance
        self.directory = directory

    def dump_data(self):
        """Save the data contained in data_instance, checking whether the
        directories already exist and asking whether to create them if not."""
        while not path.isdir(self.directory):
            print(
                "# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]".format(
                    self.directory))
            select = input()
            if select == "2":
                self.directory = input("Enter new directory: \n")
            else:
                mkdir(self.directory)
                print("# Directory " + self.directory + " created")

        self.fullpath = self.directory + "/" + self.fName

        self.data_instance.dump_data(self.fullpath)

    def dump_data_to_txt(self):
        """Same directory handling as dump_data, but writes a plain-text dump."""
        while not path.isdir(self.directory):
            print(
                "# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]".format(
                    self.directory))
            select = input()
            if select == "2":
                self.directory = input("Enter new directory: \n")
            else:
                mkdir(self.directory)
                print("# Directory " + self.directory + " created")

        self.fullpath = self.directory + "/" + self.fName

        self.data_instance.dump_to_txt(self.fullpath)

    def read_data(self):
        """Read data into the specified data_instance. If the file does not
        exist, you will be notified."""

        try:
            # include the path separator so reads use the same path dump_data writes to
            self.data_instance.read_data(self.directory + "/" + self.fName)
        except FileNotFoundError as file_error:
            print(
                "# The file {} belonging to {} does not exist.".format(
                    file_error.filename, self.fName))
normal
{ "blob_id": "267276eab470b5216a2102f3e7616f7aecadcfe9", "index": 9428, "step-1": "<mask token>\n\n\nclass IOHandler:\n <mask token>\n\n def dump_data(self):\n \"\"\"save the data contained in data_instance, checking whether the\n directories already exist and asking whether to create them if not. \"\"\"\n while not path.isdir(self.directory):\n print(\n '# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]'\n .format(self.directory))\n select = input()\n if select == '2':\n self.directory = input('Enter new directory: \\n')\n else:\n mkdir(self.directory)\n print('# Directory ' + self.directory + ' created')\n self.fullpath = self.directory + '/' + self.fName\n self.data_instance.dump_data(self.fullpath)\n\n def dump_data_to_txt(self):\n while not path.isdir(self.directory):\n print(\n '# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]'\n .format(self.directory))\n select = input()\n if select == '2':\n self.directory = input('Enter new directory: \\n')\n else:\n mkdir(self.directory)\n print('# Directory ' + self.directory + ' created')\n self.fullpath = self.directory + '/' + self.fName\n self.data_instance.dump_to_txt(self.fullpath)\n <mask token>\n", "step-2": "<mask token>\n\n\nclass IOHandler:\n <mask token>\n\n def dump_data(self):\n \"\"\"save the data contained in data_instance, checking whether the\n directories already exist and asking whether to create them if not. \"\"\"\n while not path.isdir(self.directory):\n print(\n '# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]'\n .format(self.directory))\n select = input()\n if select == '2':\n self.directory = input('Enter new directory: \\n')\n else:\n mkdir(self.directory)\n print('# Directory ' + self.directory + ' created')\n self.fullpath = self.directory + '/' + self.fName\n self.data_instance.dump_data(self.fullpath)\n\n def dump_data_to_txt(self):\n while not path.isdir(self.directory):\n print(\n '# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]'\n .format(self.directory))\n select = input()\n if select == '2':\n self.directory = input('Enter new directory: \\n')\n else:\n mkdir(self.directory)\n print('# Directory ' + self.directory + ' created')\n self.fullpath = self.directory + '/' + self.fName\n self.data_instance.dump_to_txt(self.fullpath)\n\n def read_data(self):\n \"\"\"Read data into the specified data_instance. If the read process\n hits a not existing file, it will be notified to you\"\"\"\n try:\n self.data_instance.read_data(self.directory + self.fName)\n except FileNotFoundError as file_error:\n print('# The file {} belonging to {} do not exist.'.format(\n file_error.filename, self.fName))\n", "step-3": "<mask token>\n\n\nclass IOHandler:\n\n def __init__(self, directory, fName, data_instance):\n \"\"\"Save the setup of a class instance or load a class instance from a saved setup\n\n\n Parameters\n ----------\n directory : str\n path of the directory the files are saved to or read from\n fName : str\n Name of the project. File endings will be set automaticaly\n data_instance : object\n class instance to perform actions on\n \"\"\"\n self.fName = fName\n self.data_instance = data_instance\n self.directory = directory\n\n def dump_data(self):\n \"\"\"save the data contained in data_instance, checking whether the\n directories already exist and asking whether to create them if not. 
\"\"\"\n while not path.isdir(self.directory):\n print(\n '# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]'\n .format(self.directory))\n select = input()\n if select == '2':\n self.directory = input('Enter new directory: \\n')\n else:\n mkdir(self.directory)\n print('# Directory ' + self.directory + ' created')\n self.fullpath = self.directory + '/' + self.fName\n self.data_instance.dump_data(self.fullpath)\n\n def dump_data_to_txt(self):\n while not path.isdir(self.directory):\n print(\n '# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]'\n .format(self.directory))\n select = input()\n if select == '2':\n self.directory = input('Enter new directory: \\n')\n else:\n mkdir(self.directory)\n print('# Directory ' + self.directory + ' created')\n self.fullpath = self.directory + '/' + self.fName\n self.data_instance.dump_to_txt(self.fullpath)\n\n def read_data(self):\n \"\"\"Read data into the specified data_instance. If the read process\n hits a not existing file, it will be notified to you\"\"\"\n try:\n self.data_instance.read_data(self.directory + self.fName)\n except FileNotFoundError as file_error:\n print('# The file {} belonging to {} do not exist.'.format(\n file_error.filename, self.fName))\n", "step-4": "from os import path, mkdir\n\n\nclass IOHandler:\n\n def __init__(self, directory, fName, data_instance):\n \"\"\"Save the setup of a class instance or load a class instance from a saved setup\n\n\n Parameters\n ----------\n directory : str\n path of the directory the files are saved to or read from\n fName : str\n Name of the project. File endings will be set automaticaly\n data_instance : object\n class instance to perform actions on\n \"\"\"\n self.fName = fName\n self.data_instance = data_instance\n self.directory = directory\n\n def dump_data(self):\n \"\"\"save the data contained in data_instance, checking whether the\n directories already exist and asking whether to create them if not. \"\"\"\n while not path.isdir(self.directory):\n print(\n '# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]'\n .format(self.directory))\n select = input()\n if select == '2':\n self.directory = input('Enter new directory: \\n')\n else:\n mkdir(self.directory)\n print('# Directory ' + self.directory + ' created')\n self.fullpath = self.directory + '/' + self.fName\n self.data_instance.dump_data(self.fullpath)\n\n def dump_data_to_txt(self):\n while not path.isdir(self.directory):\n print(\n '# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]'\n .format(self.directory))\n select = input()\n if select == '2':\n self.directory = input('Enter new directory: \\n')\n else:\n mkdir(self.directory)\n print('# Directory ' + self.directory + ' created')\n self.fullpath = self.directory + '/' + self.fName\n self.data_instance.dump_to_txt(self.fullpath)\n\n def read_data(self):\n \"\"\"Read data into the specified data_instance. 
If the read process\n hits a not existing file, it will be notified to you\"\"\"\n try:\n self.data_instance.read_data(self.directory + self.fName)\n except FileNotFoundError as file_error:\n print('# The file {} belonging to {} do not exist.'.format(\n file_error.filename, self.fName))\n", "step-5": "# -------------------------------------------\n\n# Created by: jasper\n# Date: 11/24/19\n\n# --------------------------------------------\n\nfrom os import path, mkdir\n\n\nclass IOHandler:\n\n def __init__(self, directory, fName, data_instance):\n \"\"\"Save the setup of a class instance or load a class instance from a saved setup\n\n\n Parameters\n ----------\n directory : str\n path of the directory the files are saved to or read from\n fName : str\n Name of the project. File endings will be set automaticaly\n data_instance : object\n class instance to perform actions on\n \"\"\"\n self.fName = fName\n self.data_instance = data_instance\n self.directory = directory\n\n def dump_data(self):\n \"\"\"save the data contained in data_instance, checking whether the\n directories already exist and asking whether to create them if not. \"\"\"\n while not path.isdir(self.directory):\n print(\n \"# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]\".format(\n self.directory))\n select = input()\n if select == \"2\":\n self.directory = input(\"Enter new directory: \\n\")\n else:\n mkdir(self.directory)\n print(\"# Directory \" + self.directory + \" created\")\n\n self.fullpath = self.directory + \"/\" + self.fName\n\n self.data_instance.dump_data(self.fullpath)\n\n def dump_data_to_txt(self):\n while not path.isdir(self.directory):\n print(\n \"# The directory {} does not exist. Do you want to create it (1, default) or specify another? (2) [1/2]\".format(\n self.directory))\n select = input()\n if select == \"2\":\n self.directory = input(\"Enter new directory: \\n\")\n else:\n mkdir(self.directory)\n print(\"# Directory \" + self.directory + \" created\")\n\n self.fullpath = self.directory + \"/\" + self.fName\n\n self.data_instance.dump_to_txt(self.fullpath)\n\n def read_data(self):\n \"\"\"Read data into the specified data_instance. If the read process\n hits a not existing file, it will be notified to you\"\"\"\n\n try:\n self.data_instance.read_data(self.directory + self.fName)\n except FileNotFoundError as file_error:\n print(\n \"# The file {} belonging to {} do not exist.\".format(\n file_error.filename, self.fName))\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
#! /usr/bin/env python

import requests
from getpass import getpass
import csv

# Set up the variables

with open("ACI PostMan Variable Values.csv", encoding='utf-8-sig') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        print(row)

print("Let's configure the subnets on the Old BD")
print("First, let's log in")
print('What is the IP address of the APIC?')
apic = input()
user = getpass('What is your username?')
password = getpass('What is your password?')
print("What's the name of the tenant?")
tenant = input()
print('What is the name of the app profile?')
app_profile = input()
print('What is the name of the old BD?')
old_bd = input()
print('What is the name of the network?')
subnet_network = input()
print('What is the network IP?')
subnet_ip = input()
print('What is the netmask?')
subnet_mask = input()
print('What is the name of the EPG?')
epg = input()


# Set session persistence for all the API calls

s = requests.session()

# First call to authenticate to the APIC

url = "https://%s/api/aaaLogin.json" % (apic)

payload = "{\r\n\t\"aaaUser\":{\r\n\t\t\"attributes\":{\r\n\t\t\t\"name\": \"%s\",\r\n\t\t\t\"pwd\":\"%s\"\r\n\t\t}\r\n\t}\r\n}" % (user, password)
headers = {
    'Content-Type': 'application/json'
}

response = s.request("POST", url, headers=headers, data=payload, verify=False)

print(response.text.encode('utf8'))

# Create subnets under the old BD

url = "https://%s/api/node/mo/uni/tn-%s/BD-%s/subnet-[%s.%s/%s].json" % (apic, tenant, old_bd, subnet_network, subnet_ip, subnet_mask)

payload = "{\"fvSubnet\":{\"attributes\":{\"dn\":\"uni/tn-%s/BD-%s/subnet-[%s.%s/%s]\",\"ip\":\"%s.%s/%s\",\"scope\":\"public\",\"rn\":\"subnet-[%s.%s/%s]\",\"status\":\"created\"},\"children\":[]}}\r\n" % (tenant, old_bd, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask)
headers = {
    'Content-Type': 'application/json'
}

response = s.request("POST", url, headers=headers, data=payload, verify=False)

print(response.text.encode('utf8'))

# Create EPGs for the demo

url = "https://%s/api/node/mo/uni/tn-%s/ap-%s/epg-%s.json" % (apic, tenant, app_profile, epg)

payload = "{\"fvAEPg\":{\"attributes\":{\"dn\":\"uni/tn-%s/ap-%s/epg-%s\",\"name\":\"%s\",\"rn\":\"%s\",\"status\":\"created\"},\"children\":[{\"fvRsBd\":{\"attributes\":{\"tnFvBDName\":\"%s\",\"status\":\"created,modified\"},\"children\":[]}}]}}\r\n" % (tenant, app_profile, epg, epg, epg, old_bd)
headers = {
    'Content-Type': 'application/json'
}

response = s.request("POST", url, headers=headers, data=payload, verify=False)

print(response.text.encode('utf8'))
normal
{ "blob_id": "bdc9856bfc61127d6bca31658b1faf3da09f5b86", "index": 161, "step-1": "<mask token>\n", "step-2": "<mask token>\nwith open('ACI PostMan Variable Values.csv', encoding='utf-8-sig') as csvfile:\n reader = csv.DictReader(csvfile)\n for row in reader:\n print(row)\nprint(\"Let's configure the subnets on the Old BD\")\nprint(\"First Let's log in\")\nprint('What is the ip address of the APIC?')\n<mask token>\nprint('whats the name of the tenant?')\n<mask token>\nprint('what is the name of the app profile?')\n<mask token>\nprint('what is the name of the old BD?')\n<mask token>\nprint('what is the name of the network?')\n<mask token>\nprint('what is the name of the network IP?')\n<mask token>\nprint('what is the name of the netmask?')\n<mask token>\nprint('what is the name of the epg?')\n<mask token>\nprint(response.text.encode('utf8'))\n<mask token>\nprint(response.text.encode('utf8'))\n<mask token>\nprint(response.text.encode('utf8'))\n", "step-3": "<mask token>\nwith open('ACI PostMan Variable Values.csv', encoding='utf-8-sig') as csvfile:\n reader = csv.DictReader(csvfile)\n for row in reader:\n print(row)\nprint(\"Let's configure the subnets on the Old BD\")\nprint(\"First Let's log in\")\nprint('What is the ip address of the APIC?')\napic = input()\nuser = getpass('What is you username?')\npassword = getpass('What is your password?')\nprint('whats the name of the tenant?')\ntenant = input()\nprint('what is the name of the app profile?')\napp_profile = input()\nprint('what is the name of the old BD?')\nold_bd = input()\nprint('what is the name of the network?')\nsubnet_network = input()\nprint('what is the name of the network IP?')\nsubnet_ip = input()\nprint('what is the name of the netmask?')\nsubnet_mask = input()\nprint('what is the name of the epg?')\nepg = input()\ns = requests.session()\nurl = 'https://%s/api/aaaLogin.json' % apic\npayload = (\n '{\\r\\n\\t\"aaaUser\":{\\r\\n\\t\\t\"attributes\":{\\r\\n\\t\\t\\t\"name\": \"%s\",\\r\\n\\t\\t\\t\"pwd\":\"%s\"\\r\\n\\t\\t}\\r\\n\\t}\\r\\n}'\n % (user, password))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\nurl = 'https://%s/api/node/mo/uni/tn-%s/BD-%s/subnet-[%s.%s/%s].json' % (apic,\n tenant, old_bd, subnet_network, subnet_ip, subnet_mask)\npayload = (\n '{\"fvSubnet\":{\"attributes\":{\"dn\":\"uni/tn-%s/BD-%s/subnet-[%s.%s/%s]\",\"ip\":\"%s.%s/%s\",\"scope\":\"public\",\"rn\":\"subnet-[%s.%s/%s]\",\"status\":\"created\"},\"children\":[]}}\\r\\n'\n % (tenant, old_bd, subnet_network, subnet_ip, subnet_mask,\n subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip,\n subnet_mask))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\nurl = 'https://%s/api/node/mo/uni/tn-%s/ap-%s/epg-%s.json' % (apic, tenant,\n app_profile, epg)\npayload = (\n '{\"fvAEPg\":{\"attributes\":{\"dn\":\"uni/tn-%s/ap-%s/epg-%s\",\"name\":\"%s\",\"rn\":\"%s\",\"status\":\"created\"},\"children\":[{\"fvRsBd\":{\"attributes\":{\"tnFvBDName\":\"%s\",\"status\":\"created,modified\"},\"children\":[]}}]}}\\r\\n'\n % (tenant, app_profile, epg, epg, epg, old_bd))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\n", "step-4": "import requests\nfrom getpass import getpass\nimport csv\nwith open('ACI PostMan 
Variable Values.csv', encoding='utf-8-sig') as csvfile:\n reader = csv.DictReader(csvfile)\n for row in reader:\n print(row)\nprint(\"Let's configure the subnets on the Old BD\")\nprint(\"First Let's log in\")\nprint('What is the ip address of the APIC?')\napic = input()\nuser = getpass('What is you username?')\npassword = getpass('What is your password?')\nprint('whats the name of the tenant?')\ntenant = input()\nprint('what is the name of the app profile?')\napp_profile = input()\nprint('what is the name of the old BD?')\nold_bd = input()\nprint('what is the name of the network?')\nsubnet_network = input()\nprint('what is the name of the network IP?')\nsubnet_ip = input()\nprint('what is the name of the netmask?')\nsubnet_mask = input()\nprint('what is the name of the epg?')\nepg = input()\ns = requests.session()\nurl = 'https://%s/api/aaaLogin.json' % apic\npayload = (\n '{\\r\\n\\t\"aaaUser\":{\\r\\n\\t\\t\"attributes\":{\\r\\n\\t\\t\\t\"name\": \"%s\",\\r\\n\\t\\t\\t\"pwd\":\"%s\"\\r\\n\\t\\t}\\r\\n\\t}\\r\\n}'\n % (user, password))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\nurl = 'https://%s/api/node/mo/uni/tn-%s/BD-%s/subnet-[%s.%s/%s].json' % (apic,\n tenant, old_bd, subnet_network, subnet_ip, subnet_mask)\npayload = (\n '{\"fvSubnet\":{\"attributes\":{\"dn\":\"uni/tn-%s/BD-%s/subnet-[%s.%s/%s]\",\"ip\":\"%s.%s/%s\",\"scope\":\"public\",\"rn\":\"subnet-[%s.%s/%s]\",\"status\":\"created\"},\"children\":[]}}\\r\\n'\n % (tenant, old_bd, subnet_network, subnet_ip, subnet_mask,\n subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip,\n subnet_mask))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\nurl = 'https://%s/api/node/mo/uni/tn-%s/ap-%s/epg-%s.json' % (apic, tenant,\n app_profile, epg)\npayload = (\n '{\"fvAEPg\":{\"attributes\":{\"dn\":\"uni/tn-%s/ap-%s/epg-%s\",\"name\":\"%s\",\"rn\":\"%s\",\"status\":\"created\"},\"children\":[{\"fvRsBd\":{\"attributes\":{\"tnFvBDName\":\"%s\",\"status\":\"created,modified\"},\"children\":[]}}]}}\\r\\n'\n % (tenant, app_profile, epg, epg, epg, old_bd))\nheaders = {'Content-Type': 'application/json'}\nresponse = s.request('POST', url, headers=headers, data=payload, verify=False)\nprint(response.text.encode('utf8'))\n", "step-5": "#! 
/user/bin/env python\r\n\r\nimport requests\r\nfrom getpass import getpass\r\nimport csv\r\n\r\n# Set up the variables\r\n\r\nwith open(\"ACI PostMan Variable Values.csv\", encoding='utf-8-sig') as csvfile:\r\n reader = csv.DictReader(csvfile)\r\n for row in reader: \r\n print(row)\r\n\r\nprint(\"Let's configure the subnets on the Old BD\")\r\nprint(\"First Let's log in\")\r\nprint('What is the ip address of the APIC?')\r\napic = input()\r\nuser = getpass('What is you username?')\r\npassword = getpass('What is your password?')\r\nprint('whats the name of the tenant?')\r\ntenant = input()\r\nprint('what is the name of the app profile?')\r\napp_profile = input()\r\nprint('what is the name of the old BD?')\r\nold_bd = input()\r\nprint('what is the name of the network?')\r\nsubnet_network = input()\r\nprint('what is the name of the network IP?')\r\nsubnet_ip = input()\r\nprint('what is the name of the netmask?')\r\nsubnet_mask = input()\r\nprint('what is the name of the epg?')\r\nepg = input()\r\n\r\n\r\n# set session persistance for all the API calls\r\n\r\ns = requests.session()\r\n\r\n# first call to authenticate into the apic\r\n\r\nurl = \"https://%s/api/aaaLogin.json\" % (apic)\r\n\r\npayload = \"{\\r\\n\\t\\\"aaaUser\\\":{\\r\\n\\t\\t\\\"attributes\\\":{\\r\\n\\t\\t\\t\\\"name\\\": \\\"%s\\\",\\r\\n\\t\\t\\t\\\"pwd\\\":\\\"%s\\\"\\r\\n\\t\\t}\\r\\n\\t}\\r\\n}\" % (user, password)\r\nheaders = {\r\n 'Content-Type': 'application/json'\r\n}\r\n\r\nresponse = s.request(\"POST\", url, headers=headers, data = payload, verify = False)\r\n\r\nprint(response.text.encode('utf8'))\r\n\r\n# Create Subnets under Old BD\r\n\r\nurl = \"https://%s/api/node/mo/uni/tn-%s/BD-%s/subnet-[%s.%s/%s].json\" % (apic, tenant, old_bd, subnet_network, subnet_ip, subnet_mask)\r\n\r\npayload = \"{\\\"fvSubnet\\\":{\\\"attributes\\\":{\\\"dn\\\":\\\"uni/tn-%s/BD-%s/subnet-[%s.%s/%s]\\\",\\\"ip\\\":\\\"%s.%s/%s\\\",\\\"scope\\\":\\\"public\\\",\\\"rn\\\":\\\"subnet-[%s.%s/%s]\\\",\\\"status\\\":\\\"created\\\"},\\\"children\\\":[]}}\\r\\n\" % (tenant, old_bd, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask, subnet_network, subnet_ip, subnet_mask)\r\nheaders = {\r\n 'Content-Type': 'application/json'\r\n}\r\n\r\nresponse = s.request(\"POST\", url, headers=headers, data = payload, verify = False)\r\n\r\nprint(response.text.encode('utf8'))\r\n\r\n# create EPG's for demo\r\n\r\nurl = \"https://%s/api/node/mo/uni/tn-%s/ap-%s/epg-%s.json\" % (apic, tenant, app_profile, epg)\r\n\r\npayload = \"{\\\"fvAEPg\\\":{\\\"attributes\\\":{\\\"dn\\\":\\\"uni/tn-%s/ap-%s/epg-%s\\\",\\\"name\\\":\\\"%s\\\",\\\"rn\\\":\\\"%s\\\",\\\"status\\\":\\\"created\\\"},\\\"children\\\":[{\\\"fvRsBd\\\":{\\\"attributes\\\":{\\\"tnFvBDName\\\":\\\"%s\\\",\\\"status\\\":\\\"created,modified\\\"},\\\"children\\\":[]}}]}}\\r\\n\" % (tenant, app_profile, epg, epg, epg, old_bd)\r\nheaders = {\r\n 'Content-Type': 'application/json'\r\n}\r\n\r\nresponse = s.request(\"POST\", url, headers=headers, data = payload, verify = False)\r\n\r\nprint(response.text.encode('utf8'))\r\n\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import requests
import logging
import json


class Handler(object):
    def __init__(self):
        """
        This class is used to handle interaction with the coffee interface.
        """
        super(Handler, self).__init__()
        logging.warning('Initializing coffeeHandler....')

        # get an active token and prepare a session for sending requests
        self.coffee_session = requests.session()

    def get_rsp_from_url(self, url, params=None, method='get', data=None):
        logging.warning('when using method {}, header is:\n {} \n data is: \n{}.\n'.
                        format(method, self.coffee_session.headers, data))
        rsp = None

        if 'get' == method:
            rsp = self.coffee_session.get(url, params=params, timeout=10)
        elif 'put' == method:
            rsp = self.coffee_session.put(url, data=json.dumps(data))
        elif 'post' == method:
            rsp = self.coffee_session.post(url, data=json.dumps(data))
        elif 'delete' == method:
            rsp = self.coffee_session.delete(url, data=json.dumps(data))
        else:
            assert 0, 'We only support get/post/put/delete for now!!!'

        logging.warning('\n\n#####\nget rsp from url: \n{} is :\n##### \n{}\n#####\n\ntext is: \n{}\n#####\n'.
                        format(url, repr(rsp), repr(rsp.text)))
        return rsp

    def check_rsp(self, origin_rsp, expected_rsp, check_format=False, check_partial_rsp=False, check_length=False,
                  check_format_ignore_list_length=False, check_format_null_str=False):

        if check_format:
            logging.warning('Now start to check format for origin_rsp and expected_rsp!')
            self._check_format(origin_rsp, expected_rsp, check_format_ignore_list_length, check_format_null_str)
        if check_partial_rsp:
            self._check_partial_rsp(expected_rsp, origin_rsp)
        if check_length is not False:
            # check_length is a dict mapping keys in origin_rsp to their expected lengths
            for key, expected_length in check_length.iteritems():
                current_length = len(origin_rsp[key])
                assert expected_length == current_length, \
                    'We expect the length of \'{}\' in origin_rsp to be {}, but now it is {}'.format(
                        key, expected_length, current_length)
        if not any([check_format, check_partial_rsp, check_length]):
            # no special check requested: compare the two responses as sorted structures
            sorted_expected_rsp = self._order_json(expected_rsp)
            sorted_origin_rsp = self._order_json(origin_rsp)
            logging.warning('\nWe expect to see \n\n{}, \n\nand we get \n\n{}.'.format(sorted_expected_rsp,
                                                                                      sorted_origin_rsp))
            assert sorted_expected_rsp == sorted_origin_rsp, \
                'We don\'t get the expected response, please check the log'

        logging.warning('\033[0;32m check_rsp done!!! PASS\033[0m')

    def _check_format(self, origin_rsp, expected_rsp, check_format_ignore_list_length, check_format_null_str):

        logging.warning(u'now compare origin rsp: \n{}'.format(origin_rsp))
        logging.warning(u'\nAnd expected_rsp: \n{}'.format(expected_rsp))

        if isinstance(origin_rsp, dict) and isinstance(expected_rsp, dict):
            assert len(origin_rsp) == len(
                expected_rsp), 'Length of dict is not right! Please check the length.\norigin_rsp: ' \
                               '\n{}\nexpected_rsp: \n{}'.format(origin_rsp, expected_rsp)
            for key, value in origin_rsp.iteritems():
                # use membership rather than get() so falsy values do not trigger a false failure
                assert key in expected_rsp, \
                    'In expected_rsp, there is no key: {} while there is in origin_rsp'.format(str(key))
                logging.warning(u'Check value for the same key: [{}] in origin_rsp and expected_rsp'.format(key))
                self._check_format(value, expected_rsp.get(key),
                                   check_format_ignore_list_length, check_format_null_str)
        elif isinstance(origin_rsp, list) and isinstance(expected_rsp, list):
            if expected_rsp:
                if len(origin_rsp) != len(expected_rsp):
                    # only warn when the lengths actually differ
                    logging.warning('Length of list is not right! Please check the length.\norigin_rsp: '
                                    '\n{}\nexpected_rsp: \n{}'.format(origin_rsp, expected_rsp))
                if check_format_ignore_list_length:
                    for index in xrange(len(expected_rsp)):
                        self._check_format(origin_rsp[index], expected_rsp[index],
                                           check_format_ignore_list_length, check_format_null_str)
                else:
                    assert len(origin_rsp) == len(
                        expected_rsp), 'Length of list is not right! Please check the length.'

                    for index in xrange(len(origin_rsp)):
                        self._check_format(origin_rsp[index], expected_rsp[index],
                                           check_format_ignore_list_length, check_format_null_str)
            else:
                return True
        elif isinstance(origin_rsp, int) and isinstance(expected_rsp, int):
            return True
        elif isinstance(origin_rsp, float) and isinstance(expected_rsp, float):
            return True
        elif (isinstance(origin_rsp, str) or isinstance(origin_rsp, unicode)) and (
                isinstance(expected_rsp, str) or isinstance(expected_rsp, unicode)):
            return True
        elif check_format_null_str:
            # a null in origin_rsp is allowed to stand in for an expected str or int
            if origin_rsp is None and isinstance(expected_rsp, str):
                return True
            if origin_rsp is None and isinstance(expected_rsp, int):
                return True
        else:
            logging.warning(
                'Check format fail!!!! We get different value here!!\norigin_rsp: \n{}\nbut we expect to see in '
                'expected_rsp: \n{}'.format(origin_rsp, expected_rsp))
            assert 0, 'Check format fail!!!! We get different value here!!'

    def _order_json(self, json_string):
        """
        Return an ordered list for compare.
        :param json_string: string in json format
        :return: an ordered list
        """

        if isinstance(json_string, dict):
            return sorted((k, self._order_json(v)) for k, v in json_string.items())
        if isinstance(json_string, list):
            return sorted(self._order_json(x) for x in json_string)
        else:
            return json_string

    def _check_partial_rsp(self, exp, ori):
        """
        Check part of the rsp rather than the whole rsp.
        :param exp: expected rsp
        :param ori: origin rsp
        :return: None
        """
        logging.warning('Start to check if expected_rsp: {} is part of origin_rsp: {}'.format(exp, ori))
        # so far, a leaf node can be a string or a list, which must match exactly

        if isinstance(exp, dict):
            for k, v in exp.iteritems():
                if ori.get(k):
                    self._check_partial_rsp(exp[k], ori[k])
                else:
                    assert 0, 'key \'{}\' does not exist in original response.'.format(k)
        elif isinstance(exp, list):
            for index in xrange(len(exp)):
                if isinstance(exp[index], dict):
                    self._assert_dict_contain(exp[index], ori[index])
                elif isinstance(exp[index], list):
                    self._check_partial_rsp(exp[index], ori[index])
                else:
                    assert exp[index] in ori, 'exp: {} is not in ori: {}'.format(exp[index], ori)
        else:
            assert exp == ori, 'exp: {} does not equal ori: {}'.format(exp, ori)

    @staticmethod
    def _assert_dict_contain(subset_dict, whole_dict):
        logging.warning('subset_dict is {}, whole_dict is {}'.format(subset_dict, whole_dict))
        for key in subset_dict:
            # membership check, so keys with falsy values still count as present
            if key in whole_dict:
                continue
            else:
                assert 0, '{} should be subset of {}, but now it is not!!'.format(subset_dict, whole_dict)
normal
{ "blob_id": "00228facd19c72bebd9afbbe52597e390233d41e", "index": 5822, "step-1": "<mask token>\n\n\nclass Handler(object):\n <mask token>\n\n def get_rsp_from_url(self, url, params=None, method='get', data=None):\n logging.warning(\n 'when using method {}, header is:\\n {} \\n data is: \\n{}.\\n'.\n format(method, self.coffee_session.headers, data))\n rsp = None\n if 'get' == method:\n rsp = self.coffee_session.get(url, params=params, timeout=10)\n elif 'put' == method:\n rsp = self.coffee_session.put(url, data=json.dumps(data))\n elif 'post' == method:\n rsp = self.coffee_session.post(url, data=json.dumps(data))\n elif 'delete' == method:\n rsp = self.coffee_session.delete(url, data=json.dumps(data))\n else:\n assert 0, 'We only support get/post/put/delete for now!!!'\n logging.warning(\n \"\"\"\n\n#####\nget rsp from url: \n{} is :\n##### \n{}\n#####\n\ntext is: \n{}\n#####\n\"\"\"\n .format(url, repr(rsp), repr(rsp.text)))\n return rsp\n <mask token>\n <mask token>\n <mask token>\n\n def _check_partial_rsp(self, exp, ori):\n \"\"\"\n Check partial rsp but not the while rsp.\n :param exp: expected rsp\n :param ori: origin rsp\n :return: None\n \"\"\"\n logging.warning(\n 'Start to check if expected_rsp: {} is part of origin_rsp: {}'.\n format(exp, ori))\n if isinstance(exp, dict):\n for k, v in exp.iteritems():\n if ori.get(k):\n self._check_partial_rsp(exp[k], ori[k])\n else:\n assert 0, \"key '{}' does not exist in original response.\".format(\n k)\n elif isinstance(exp, list):\n for index in xrange(len(exp)):\n if isinstance(exp[index], dict):\n self._assert_dict_contain(exp[index], ori[index])\n elif isinstance(exp[index], list):\n self._check_partial_rsp(exp[index], ori[index])\n else:\n assert exp[index\n ] in ori, 'exp: {} does not in ori: {}'.format(exp[\n index], ori)\n else:\n assert exp == ori, 'exp: {} does not equal to ori: {}'.format(exp,\n ori)\n\n @staticmethod\n def _assert_dict_contain(subset_dict, whole_dict):\n logging.warning('subset_dict is {}, whole_dict is {}'.format(\n subset_dict, whole_dict))\n for key in subset_dict:\n if whole_dict.get(key):\n continue\n else:\n assert 0, '{} should be subset of {}, but now it is not!!'.format(\n subset_dict, whole_dict)\n", "step-2": "<mask token>\n\n\nclass Handler(object):\n\n def __init__(self):\n \"\"\"\n This class is used to handle interaction towards coffee interface.\n \"\"\"\n super(Handler, self).__init__()\n logging.warning('Initializing coffeeHandler....')\n self.coffee_session = requests.session()\n\n def get_rsp_from_url(self, url, params=None, method='get', data=None):\n logging.warning(\n 'when using method {}, header is:\\n {} \\n data is: \\n{}.\\n'.\n format(method, self.coffee_session.headers, data))\n rsp = None\n if 'get' == method:\n rsp = self.coffee_session.get(url, params=params, timeout=10)\n elif 'put' == method:\n rsp = self.coffee_session.put(url, data=json.dumps(data))\n elif 'post' == method:\n rsp = self.coffee_session.post(url, data=json.dumps(data))\n elif 'delete' == method:\n rsp = self.coffee_session.delete(url, data=json.dumps(data))\n else:\n assert 0, 'We only support get/post/put/delete for now!!!'\n logging.warning(\n \"\"\"\n\n#####\nget rsp from url: \n{} is :\n##### \n{}\n#####\n\ntext is: \n{}\n#####\n\"\"\"\n .format(url, repr(rsp), repr(rsp.text)))\n return rsp\n <mask token>\n\n def _check_format(self, origin_rsp, expected_rsp,\n check_format_ignore_list_length, check_format_null_str):\n logging.warning(u'now compare origin rsp: \\n{}'.format(origin_rsp))\n 
logging.warning(u'\\nAnd expected_rsp: \\n{}'.format(expected_rsp))\n if isinstance(origin_rsp, dict) and isinstance(expected_rsp, dict):\n assert len(origin_rsp) == len(expected_rsp\n ), \"\"\"Length of dict is not right! Please check the length.\norigin_rsp: \n{}\nexpected_rsp: \n{}\"\"\".format(\n origin_rsp, expected_rsp)\n for key, value in origin_rsp.iteritems():\n assert expected_rsp.get(key\n ), 'In expected_rsp, there is no key: {} while there is in origin_rsp'.format(\n str(key))\n logging.warning(\n u'Check value for the same key: [{}] in origin_rsp and expected_rsp'\n .format(key))\n self._check_format(value, expected_rsp.get(key),\n check_format_ignore_list_length, check_format_null_str)\n elif isinstance(origin_rsp, list) and isinstance(expected_rsp, list):\n if expected_rsp:\n logging.warning(\n \"\"\"Length of list is not right! Please check the length.\norigin_rsp: \n{}\nexpected_rsp: \n{}\"\"\"\n .format(origin_rsp, expected_rsp))\n if check_format_ignore_list_length:\n for index in xrange(len(expected_rsp)):\n self._check_format(origin_rsp[index], expected_rsp[\n index], check_format_ignore_list_length,\n check_format_null_str)\n else:\n assert len(origin_rsp) == len(expected_rsp\n ), 'Length of list is not right! Please check the length.'\n for index in xrange(len(origin_rsp)):\n self._check_format(origin_rsp[index], expected_rsp[\n index], check_format_ignore_list_length,\n check_format_null_str)\n else:\n return True\n elif isinstance(origin_rsp, int) and isinstance(expected_rsp, int):\n return True\n elif isinstance(origin_rsp, float) and isinstance(expected_rsp, float):\n return True\n elif (isinstance(origin_rsp, str) or isinstance(origin_rsp, unicode)\n ) and (isinstance(expected_rsp, str) or isinstance(expected_rsp,\n unicode)):\n return True\n elif check_format_null_str:\n if origin_rsp is None and isinstance(expected_rsp, str):\n return True\n if origin_rsp is None and isinstance(expected_rsp, int):\n return True\n else:\n logging.warning(\n \"\"\"Check format fail!!!! We get different value here!!\norigin_rsp: \n{}\nbut we expect to see in expected_rsp: \n{}\"\"\"\n .format(origin_rsp, expected_rsp))\n assert 0, 'Check format fail!!!! 
We get different value here!!'\n <mask token>\n\n def _check_partial_rsp(self, exp, ori):\n \"\"\"\n Check partial rsp but not the while rsp.\n :param exp: expected rsp\n :param ori: origin rsp\n :return: None\n \"\"\"\n logging.warning(\n 'Start to check if expected_rsp: {} is part of origin_rsp: {}'.\n format(exp, ori))\n if isinstance(exp, dict):\n for k, v in exp.iteritems():\n if ori.get(k):\n self._check_partial_rsp(exp[k], ori[k])\n else:\n assert 0, \"key '{}' does not exist in original response.\".format(\n k)\n elif isinstance(exp, list):\n for index in xrange(len(exp)):\n if isinstance(exp[index], dict):\n self._assert_dict_contain(exp[index], ori[index])\n elif isinstance(exp[index], list):\n self._check_partial_rsp(exp[index], ori[index])\n else:\n assert exp[index\n ] in ori, 'exp: {} does not in ori: {}'.format(exp[\n index], ori)\n else:\n assert exp == ori, 'exp: {} does not equal to ori: {}'.format(exp,\n ori)\n\n @staticmethod\n def _assert_dict_contain(subset_dict, whole_dict):\n logging.warning('subset_dict is {}, whole_dict is {}'.format(\n subset_dict, whole_dict))\n for key in subset_dict:\n if whole_dict.get(key):\n continue\n else:\n assert 0, '{} should be subset of {}, but now it is not!!'.format(\n subset_dict, whole_dict)\n", "step-3": "<mask token>\n\n\nclass Handler(object):\n\n def __init__(self):\n \"\"\"\n This class is used to handle interaction towards coffee interface.\n \"\"\"\n super(Handler, self).__init__()\n logging.warning('Initializing coffeeHandler....')\n self.coffee_session = requests.session()\n\n def get_rsp_from_url(self, url, params=None, method='get', data=None):\n logging.warning(\n 'when using method {}, header is:\\n {} \\n data is: \\n{}.\\n'.\n format(method, self.coffee_session.headers, data))\n rsp = None\n if 'get' == method:\n rsp = self.coffee_session.get(url, params=params, timeout=10)\n elif 'put' == method:\n rsp = self.coffee_session.put(url, data=json.dumps(data))\n elif 'post' == method:\n rsp = self.coffee_session.post(url, data=json.dumps(data))\n elif 'delete' == method:\n rsp = self.coffee_session.delete(url, data=json.dumps(data))\n else:\n assert 0, 'We only support get/post/put/delete for now!!!'\n logging.warning(\n \"\"\"\n\n#####\nget rsp from url: \n{} is :\n##### \n{}\n#####\n\ntext is: \n{}\n#####\n\"\"\"\n .format(url, repr(rsp), repr(rsp.text)))\n return rsp\n\n def check_rsp(self, origin_rsp, expected_rsp, check_format=False,\n check_partial_rsp=False, check_length=False,\n check_format_ignore_list_length=False, check_format_null_str=False):\n if check_format:\n logging.warning(\n 'Now start to check format for origin_rsp and expected_rsp!')\n self._check_format(origin_rsp, expected_rsp,\n check_format_ignore_list_length, check_format_null_str)\n if check_partial_rsp:\n self._check_partial_rsp(expected_rsp, origin_rsp)\n if check_length is not False:\n for key, expected_length in check_length.iteritems():\n current_length = len(origin_rsp[key])\n assert expected_length == current_length, \"We expect to see length of '{}' in origin_rsp is {}, but now it is {}\".format(\n key, expected_length, current_length)\n if not any([check_format, check_partial_rsp, check_length]):\n sorted_expected_rsp = self._order_json(expected_rsp)\n sorted_origin_rsp = self._order_json(origin_rsp)\n logging.warning('\\nWe expect to see \\n\\n{}, \\n\\nand we get \\n\\n{}.'\n .format(sorted_expected_rsp, sorted_origin_rsp))\n assert sorted_expected_rsp == sorted_origin_rsp, \"We don't get the expected,please check the log\"\n 
logging.warning('\\x1b[0;32m check_rsp done!!! PASS\\x1b[0m')\n\n def _check_format(self, origin_rsp, expected_rsp,\n check_format_ignore_list_length, check_format_null_str):\n logging.warning(u'now compare origin rsp: \\n{}'.format(origin_rsp))\n logging.warning(u'\\nAnd expected_rsp: \\n{}'.format(expected_rsp))\n if isinstance(origin_rsp, dict) and isinstance(expected_rsp, dict):\n assert len(origin_rsp) == len(expected_rsp\n ), \"\"\"Length of dict is not right! Please check the length.\norigin_rsp: \n{}\nexpected_rsp: \n{}\"\"\".format(\n origin_rsp, expected_rsp)\n for key, value in origin_rsp.iteritems():\n assert expected_rsp.get(key\n ), 'In expected_rsp, there is no key: {} while there is in origin_rsp'.format(\n str(key))\n logging.warning(\n u'Check value for the same key: [{}] in origin_rsp and expected_rsp'\n .format(key))\n self._check_format(value, expected_rsp.get(key),\n check_format_ignore_list_length, check_format_null_str)\n elif isinstance(origin_rsp, list) and isinstance(expected_rsp, list):\n if expected_rsp:\n logging.warning(\n \"\"\"Length of list is not right! Please check the length.\norigin_rsp: \n{}\nexpected_rsp: \n{}\"\"\"\n .format(origin_rsp, expected_rsp))\n if check_format_ignore_list_length:\n for index in xrange(len(expected_rsp)):\n self._check_format(origin_rsp[index], expected_rsp[\n index], check_format_ignore_list_length,\n check_format_null_str)\n else:\n assert len(origin_rsp) == len(expected_rsp\n ), 'Length of list is not right! Please check the length.'\n for index in xrange(len(origin_rsp)):\n self._check_format(origin_rsp[index], expected_rsp[\n index], check_format_ignore_list_length,\n check_format_null_str)\n else:\n return True\n elif isinstance(origin_rsp, int) and isinstance(expected_rsp, int):\n return True\n elif isinstance(origin_rsp, float) and isinstance(expected_rsp, float):\n return True\n elif (isinstance(origin_rsp, str) or isinstance(origin_rsp, unicode)\n ) and (isinstance(expected_rsp, str) or isinstance(expected_rsp,\n unicode)):\n return True\n elif check_format_null_str:\n if origin_rsp is None and isinstance(expected_rsp, str):\n return True\n if origin_rsp is None and isinstance(expected_rsp, int):\n return True\n else:\n logging.warning(\n \"\"\"Check format fail!!!! We get different value here!!\norigin_rsp: \n{}\nbut we expect to see in expected_rsp: \n{}\"\"\"\n .format(origin_rsp, expected_rsp))\n assert 0, 'Check format fail!!!! 
We get different value here!!'\n\n def _order_json(self, json_string):\n \"\"\"\n Return an ordered list for compare.\n :param json_string: string in json format\n :return: an ordered list\n \"\"\"\n if isinstance(json_string, dict):\n return sorted((k, self._order_json(v)) for k, v in json_string.\n items())\n if isinstance(json_string, list):\n return sorted(self._order_json(x) for x in json_string)\n else:\n return json_string\n\n def _check_partial_rsp(self, exp, ori):\n \"\"\"\n Check partial rsp but not the while rsp.\n :param exp: expected rsp\n :param ori: origin rsp\n :return: None\n \"\"\"\n logging.warning(\n 'Start to check if expected_rsp: {} is part of origin_rsp: {}'.\n format(exp, ori))\n if isinstance(exp, dict):\n for k, v in exp.iteritems():\n if ori.get(k):\n self._check_partial_rsp(exp[k], ori[k])\n else:\n assert 0, \"key '{}' does not exist in original response.\".format(\n k)\n elif isinstance(exp, list):\n for index in xrange(len(exp)):\n if isinstance(exp[index], dict):\n self._assert_dict_contain(exp[index], ori[index])\n elif isinstance(exp[index], list):\n self._check_partial_rsp(exp[index], ori[index])\n else:\n assert exp[index\n ] in ori, 'exp: {} does not in ori: {}'.format(exp[\n index], ori)\n else:\n assert exp == ori, 'exp: {} does not equal to ori: {}'.format(exp,\n ori)\n\n @staticmethod\n def _assert_dict_contain(subset_dict, whole_dict):\n logging.warning('subset_dict is {}, whole_dict is {}'.format(\n subset_dict, whole_dict))\n for key in subset_dict:\n if whole_dict.get(key):\n continue\n else:\n assert 0, '{} should be subset of {}, but now it is not!!'.format(\n subset_dict, whole_dict)\n", "step-4": "import requests\nimport logging\nimport json\n\n\nclass Handler(object):\n\n def __init__(self):\n \"\"\"\n This class is used to handle interaction towards coffee interface.\n \"\"\"\n super(Handler, self).__init__()\n logging.warning('Initializing coffeeHandler....')\n self.coffee_session = requests.session()\n\n def get_rsp_from_url(self, url, params=None, method='get', data=None):\n logging.warning(\n 'when using method {}, header is:\\n {} \\n data is: \\n{}.\\n'.\n format(method, self.coffee_session.headers, data))\n rsp = None\n if 'get' == method:\n rsp = self.coffee_session.get(url, params=params, timeout=10)\n elif 'put' == method:\n rsp = self.coffee_session.put(url, data=json.dumps(data))\n elif 'post' == method:\n rsp = self.coffee_session.post(url, data=json.dumps(data))\n elif 'delete' == method:\n rsp = self.coffee_session.delete(url, data=json.dumps(data))\n else:\n assert 0, 'We only support get/post/put/delete for now!!!'\n logging.warning(\n \"\"\"\n\n#####\nget rsp from url: \n{} is :\n##### \n{}\n#####\n\ntext is: \n{}\n#####\n\"\"\"\n .format(url, repr(rsp), repr(rsp.text)))\n return rsp\n\n def check_rsp(self, origin_rsp, expected_rsp, check_format=False,\n check_partial_rsp=False, check_length=False,\n check_format_ignore_list_length=False, check_format_null_str=False):\n if check_format:\n logging.warning(\n 'Now start to check format for origin_rsp and expected_rsp!')\n self._check_format(origin_rsp, expected_rsp,\n check_format_ignore_list_length, check_format_null_str)\n if check_partial_rsp:\n self._check_partial_rsp(expected_rsp, origin_rsp)\n if check_length is not False:\n for key, expected_length in check_length.iteritems():\n current_length = len(origin_rsp[key])\n assert expected_length == current_length, \"We expect to see length of '{}' in origin_rsp is {}, but now it is {}\".format(\n key, expected_length, 
current_length)\n if not any([check_format, check_partial_rsp, check_length]):\n sorted_expected_rsp = self._order_json(expected_rsp)\n sorted_origin_rsp = self._order_json(origin_rsp)\n logging.warning('\\nWe expect to see \\n\\n{}, \\n\\nand we get \\n\\n{}.'\n .format(sorted_expected_rsp, sorted_origin_rsp))\n assert sorted_expected_rsp == sorted_origin_rsp, \"We don't get the expected,please check the log\"\n logging.warning('\\x1b[0;32m check_rsp done!!! PASS\\x1b[0m')\n\n def _check_format(self, origin_rsp, expected_rsp,\n check_format_ignore_list_length, check_format_null_str):\n logging.warning(u'now compare origin rsp: \\n{}'.format(origin_rsp))\n logging.warning(u'\\nAnd expected_rsp: \\n{}'.format(expected_rsp))\n if isinstance(origin_rsp, dict) and isinstance(expected_rsp, dict):\n assert len(origin_rsp) == len(expected_rsp\n ), \"\"\"Length of dict is not right! Please check the length.\norigin_rsp: \n{}\nexpected_rsp: \n{}\"\"\".format(\n origin_rsp, expected_rsp)\n for key, value in origin_rsp.iteritems():\n assert expected_rsp.get(key\n ), 'In expected_rsp, there is no key: {} while there is in origin_rsp'.format(\n str(key))\n logging.warning(\n u'Check value for the same key: [{}] in origin_rsp and expected_rsp'\n .format(key))\n self._check_format(value, expected_rsp.get(key),\n check_format_ignore_list_length, check_format_null_str)\n elif isinstance(origin_rsp, list) and isinstance(expected_rsp, list):\n if expected_rsp:\n logging.warning(\n \"\"\"Length of list is not right! Please check the length.\norigin_rsp: \n{}\nexpected_rsp: \n{}\"\"\"\n .format(origin_rsp, expected_rsp))\n if check_format_ignore_list_length:\n for index in xrange(len(expected_rsp)):\n self._check_format(origin_rsp[index], expected_rsp[\n index], check_format_ignore_list_length,\n check_format_null_str)\n else:\n assert len(origin_rsp) == len(expected_rsp\n ), 'Length of list is not right! Please check the length.'\n for index in xrange(len(origin_rsp)):\n self._check_format(origin_rsp[index], expected_rsp[\n index], check_format_ignore_list_length,\n check_format_null_str)\n else:\n return True\n elif isinstance(origin_rsp, int) and isinstance(expected_rsp, int):\n return True\n elif isinstance(origin_rsp, float) and isinstance(expected_rsp, float):\n return True\n elif (isinstance(origin_rsp, str) or isinstance(origin_rsp, unicode)\n ) and (isinstance(expected_rsp, str) or isinstance(expected_rsp,\n unicode)):\n return True\n elif check_format_null_str:\n if origin_rsp is None and isinstance(expected_rsp, str):\n return True\n if origin_rsp is None and isinstance(expected_rsp, int):\n return True\n else:\n logging.warning(\n \"\"\"Check format fail!!!! We get different value here!!\norigin_rsp: \n{}\nbut we expect to see in expected_rsp: \n{}\"\"\"\n .format(origin_rsp, expected_rsp))\n assert 0, 'Check format fail!!!! 
We get different value here!!'\n\n def _order_json(self, json_string):\n \"\"\"\n Return an ordered list for compare.\n :param json_string: string in json format\n :return: an ordered list\n \"\"\"\n if isinstance(json_string, dict):\n return sorted((k, self._order_json(v)) for k, v in json_string.\n items())\n if isinstance(json_string, list):\n return sorted(self._order_json(x) for x in json_string)\n else:\n return json_string\n\n def _check_partial_rsp(self, exp, ori):\n \"\"\"\n Check partial rsp but not the while rsp.\n :param exp: expected rsp\n :param ori: origin rsp\n :return: None\n \"\"\"\n logging.warning(\n 'Start to check if expected_rsp: {} is part of origin_rsp: {}'.\n format(exp, ori))\n if isinstance(exp, dict):\n for k, v in exp.iteritems():\n if ori.get(k):\n self._check_partial_rsp(exp[k], ori[k])\n else:\n assert 0, \"key '{}' does not exist in original response.\".format(\n k)\n elif isinstance(exp, list):\n for index in xrange(len(exp)):\n if isinstance(exp[index], dict):\n self._assert_dict_contain(exp[index], ori[index])\n elif isinstance(exp[index], list):\n self._check_partial_rsp(exp[index], ori[index])\n else:\n assert exp[index\n ] in ori, 'exp: {} does not in ori: {}'.format(exp[\n index], ori)\n else:\n assert exp == ori, 'exp: {} does not equal to ori: {}'.format(exp,\n ori)\n\n @staticmethod\n def _assert_dict_contain(subset_dict, whole_dict):\n logging.warning('subset_dict is {}, whole_dict is {}'.format(\n subset_dict, whole_dict))\n for key in subset_dict:\n if whole_dict.get(key):\n continue\n else:\n assert 0, '{} should be subset of {}, but now it is not!!'.format(\n subset_dict, whole_dict)\n", "step-5": "import requests\nimport logging\nimport json\n\n\nclass Handler(object):\n def __init__(self):\n \"\"\"\n This class is used to handle interaction towards coffee interface.\n \"\"\"\n super(Handler, self).__init__()\n logging.warning('Initializing coffeeHandler....')\n\n # get an active token and get prepared for sending request\n self.coffee_session = requests.session()\n\n def get_rsp_from_url(self, url, params=None, method='get', data=None):\n logging.warning('when using method {}, header is:\\n {} \\n data is: \\n{}.\\n'.\n format(method, self.coffee_session.headers, data))\n rsp = None\n\n if 'get' == method:\n rsp = self.coffee_session.get(url, params=params, timeout=10)\n elif 'put' == method:\n rsp = self.coffee_session.put(url, data=json.dumps(data))\n elif 'post' == method:\n rsp = self.coffee_session.post(url, data=json.dumps(data))\n elif 'delete' == method:\n rsp = self.coffee_session.delete(url, data=json.dumps(data))\n else:\n assert 0, 'We only support get/post/put/delete for now!!!'\n\n logging.warning('\\n\\n#####\\nget rsp from url: \\n{} is :\\n##### \\n{}\\n#####\\n\\ntext is: \\n{}\\n#####\\n'.\n format(url, repr(rsp), repr(rsp.text)))\n return rsp\n\n def check_rsp(self, origin_rsp, expected_rsp, check_format=False, check_partial_rsp=False, check_length=False,\n check_format_ignore_list_length=False, check_format_null_str=False):\n\n if check_format:\n logging.warning('Now start to check format for origin_rsp and expected_rsp!')\n\n self._check_format(origin_rsp, expected_rsp, check_format_ignore_list_length, check_format_null_str)\n if check_partial_rsp:\n self._check_partial_rsp(expected_rsp, origin_rsp)\n if check_length is not False:\n for key, expected_length in check_length.iteritems():\n current_length = len(origin_rsp[key])\n assert expected_length == current_length, \\\n 'We expect to see length of \\'{}\\' in 
origin_rsp is {}, but now it is {}'.format(\n key, expected_length, current_length)\n if not any([check_format, check_partial_rsp, check_length]):\n sorted_expected_rsp = self._order_json(expected_rsp)\n sorted_origin_rsp = self._order_json(origin_rsp)\n logging.warning('\\nWe expect to see \\n\\n{}, \\n\\nand we get \\n\\n{}.'.format(sorted_expected_rsp,\n sorted_origin_rsp))\n assert sorted_expected_rsp == sorted_origin_rsp, \\\n 'We don\\'t get the expected,please check the log'\n\n logging.warning('\\033[0;32m check_rsp done!!! PASS\\033[0m')\n\n def _check_format(self, origin_rsp, expected_rsp, check_format_ignore_list_length, check_format_null_str):\n\n logging.warning(u'now compare origin rsp: \\n{}'.format(origin_rsp))\n logging.warning(u'\\nAnd expected_rsp: \\n{}'.format(expected_rsp))\n\n if isinstance(origin_rsp, dict) and isinstance(expected_rsp, dict):\n assert len(origin_rsp) == len(\n expected_rsp), 'Length of dict is not right! Please check the length.\\norigin_rsp: ' \\\n '\\n{}\\nexpected_rsp: \\n{}'.format(origin_rsp, expected_rsp)\n for key, value in origin_rsp.iteritems():\n assert expected_rsp.get(\n key), 'In expected_rsp, there is no key: {} while there is in origin_rsp'.format(str(key))\n logging.warning(u'Check value for the same key: [{}] in origin_rsp and expected_rsp'.format(key))\n self._check_format(value, expected_rsp.get(key),\n check_format_ignore_list_length, check_format_null_str)\n elif isinstance(origin_rsp, list) and isinstance(expected_rsp, list):\n if expected_rsp:\n logging.warning('Length of list is not right! Please check the length.\\norigin_rsp: \\n{}\\nexpected_rsp:'\n ' \\n{}'.format(origin_rsp, expected_rsp))\n if check_format_ignore_list_length:\n for index in xrange(len(expected_rsp)):\n self._check_format(origin_rsp[index], expected_rsp[index],\n check_format_ignore_list_length, check_format_null_str)\n else:\n assert len(origin_rsp) == len(\n expected_rsp), 'Length of list is not right! Please check the length.'\n\n for index in xrange(len(origin_rsp)):\n self._check_format(origin_rsp[index], expected_rsp[index],\n check_format_ignore_list_length, check_format_null_str)\n else:\n return True\n elif isinstance(origin_rsp, int) and isinstance(expected_rsp, int):\n return True\n elif isinstance(origin_rsp, float) and isinstance(expected_rsp, float):\n return True\n elif (isinstance(origin_rsp, str) or isinstance(origin_rsp, unicode)) and (\n isinstance(expected_rsp, str) or isinstance(expected_rsp, unicode)):\n return True\n elif check_format_null_str:\n if origin_rsp is None and isinstance(expected_rsp, str):\n return True\n if origin_rsp is None and isinstance(expected_rsp, int):\n return True\n else:\n logging.warning(\n 'Check format fail!!!! We get different value here!!\\norigin_rsp: \\n{}\\nbut we expect to see in '\n 'expected_rsp: \\n{}'.format(origin_rsp, expected_rsp))\n assert 0, 'Check format fail!!!! 
We get different value here!!'\n\n def _order_json(self, json_string):\n \"\"\"\n Return an ordered list for compare.\n :param json_string: string in json format\n :return: an ordered list\n \"\"\"\n\n if isinstance(json_string, dict):\n return sorted((k, self._order_json(v)) for k, v in json_string.items())\n if isinstance(json_string, list):\n return sorted(self._order_json(x) for x in json_string)\n else:\n return json_string\n\n def _check_partial_rsp(self, exp, ori):\n \"\"\"\n Check partial rsp but not the while rsp.\n :param exp: expected rsp\n :param ori: origin rsp\n :return: None\n \"\"\"\n logging.warning('Start to check if expected_rsp: {} is part of origin_rsp: {}'.format(exp, ori))\n # so far, leaf node could be string or list which must be exactly the same\n\n if isinstance(exp, dict):\n for k, v in exp.iteritems():\n if ori.get(k):\n self._check_partial_rsp(exp[k], ori[k])\n else:\n assert 0, 'key \\'{}\\' does not exist in original response.'.format(k)\n elif isinstance(exp, list):\n for index in xrange(len(exp)):\n if isinstance(exp[index], dict):\n self._assert_dict_contain(exp[index], ori[index])\n elif isinstance(exp[index], list):\n self._check_partial_rsp(exp[index], ori[index])\n else:\n assert exp[index] in ori, 'exp: {} does not in ori: {}'.format(exp[index], ori)\n else:\n assert exp == ori, 'exp: {} does not equal to ori: {}'.format(exp, ori)\n\n @staticmethod\n def _assert_dict_contain(subset_dict, whole_dict):\n logging.warning('subset_dict is {}, whole_dict is {}'.format(subset_dict, whole_dict))\n for key in subset_dict:\n if whole_dict.get(key):\n continue\n else:\n assert 0, '{} should be subset of {}, but now it is not!!'.format(subset_dict, whole_dict)\n", "step-ids": [ 4, 6, 8, 9, 10 ] }
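# A minimal, self-contained Python 3 sketch of the order-insensitive JSON
# comparison idea used by _order_json in the record above. The function name
# and sample data are illustrative, not part of the original module; like the
# original, it assumes leaf values within a list are mutually sortable.
def order_json(value):
    """Canonicalize nested JSON into a sortable, order-insensitive form."""
    if isinstance(value, dict):
        # Sort dict items by key after canonicalizing each value.
        return sorted((k, order_json(v)) for k, v in value.items())
    if isinstance(value, list):
        # Sort list elements so ordering differences do not matter.
        return sorted(order_json(x) for x in value)
    return value


# Two responses that differ only in list order compare as equal.
assert order_json({"a": [2, 1]}) == order_json({"a": [1, 2]})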
from collections import OrderedDict

import tcod.event

from components import Entity, PaperDoll, Brain
from components.enums import Intention
from engine import GameScene
from scenes.list_menu_scene import MenuAction, ListMenuScene
from systems.utilities import set_intention, retract_intention


def run(scene: GameScene):
    handle_show_equip_screen(scene)


def handle_show_equip_screen(scene: GameScene):
    # Find every brain that has asked to open the equip screen this tick.
    brains = [b for b in scene.cm.get(Brain) if b.intention is Intention.SHOW_EQUIP_SCREEN]
    for brain in brains:
        entity = brain.entity
        menu_actions = OrderedDict()
        # Bind entity per iteration (entity=entity); a bare closure would
        # capture the loop variable late and always act on the last brain.
        equip_action = MenuAction('e', 'equip', lambda slot, entity=entity: set_intention(scene, entity, slot, Intention.EQUIP_SLOT))
        menu_actions[tcod.event.K_e] = equip_action
        equipment_scene = ListMenuScene(
            "Equipment",
            get_slots_query(scene, entity),
            row_builder=row_builder,
            default_action=equip_action,
            id_extractor=lambda e: e[1],
            menu_actions=menu_actions,
            parent_scene=scene
        )
        scene.controller.push_scene(equipment_scene)
        retract_intention(scene, entity)


def get_slots_query(scene: GameScene, entity: int):
    """Return a query that resolves to entity's equipment slots and their equipped items."""

    def query():
        paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)
        equipment = paper_doll.get_equipment()
        # Each row is (slot_name, Entity component), or None in place of the
        # Entity when the slot is empty.
        return [
            (k, scene.cm.get_one(Entity, v))
            for k, v in equipment.items()
        ]
    return query


def row_builder(entity):
    part_name = entity[0]
    equipped = entity[1].name if entity[1] else ''
    return [part_name, equipped]
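# A standalone sketch (illustrative names, no game-engine imports) of the
# closure pitfall noted in handle_show_equip_screen above: callbacks created
# in a loop share the loop variable unless it is bound per iteration via a
# default argument.
def make_callbacks_late(ids):
    # Every lambda closes over the same 'i'; all of them report the final value.
    return [lambda: i for i in ids]


def make_callbacks_bound(ids):
    # 'i=i' freezes the current value at definition time, one per callback.
    return [lambda i=i: i for i in ids]


assert [f() for f in make_callbacks_late([1, 2, 3])] == [3, 3, 3]
assert [f() for f in make_callbacks_bound([1, 2, 3])] == [1, 2, 3]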
normal
{ "blob_id": "f1547e0893ce9c4661b546e49f3fc998745390d9", "index": 4397, "step-1": "<mask token>\n\n\ndef get_slots_query(scene: GameScene, entity: int):\n \"\"\"Return a query that resolves to entity's equipment slots and their equipped items.\"\"\"\n\n def query():\n paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)\n equipment = paper_doll.get_equipment()\n return [(k, scene.cm.get_one(Entity, v)) for k, v in equipment.items()]\n return query\n\n\ndef row_builder(entity):\n part_name = entity[0]\n equipped = entity[1].name if entity[1] else ''\n return [part_name, equipped]\n", "step-2": "<mask token>\n\n\ndef handle_show_equip_screen(scene: GameScene):\n brains = [b for b in scene.cm.get(Brain) if b.intention is Intention.\n SHOW_EQUIP_SCREEN]\n for brain in brains:\n entity = brain.entity\n menu_actions = OrderedDict()\n equip_action = MenuAction('e', 'equip', lambda slot: set_intention(\n scene, entity, slot, Intention.EQUIP_SLOT))\n menu_actions[tcod.event.K_e] = equip_action\n equipment_scene = ListMenuScene('Equipment', get_slots_query(scene,\n entity), row_builder=row_builder, default_action=equip_action,\n menu_actions=menu_actions, id_extractor=lambda e: e[1],\n parent_scene=scene)\n scene.controller.push_scene(equipment_scene)\n retract_intention(scene, entity)\n\n\ndef get_slots_query(scene: GameScene, entity: int):\n \"\"\"Return a query that resolves to entity's equipment slots and their equipped items.\"\"\"\n\n def query():\n paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)\n equipment = paper_doll.get_equipment()\n return [(k, scene.cm.get_one(Entity, v)) for k, v in equipment.items()]\n return query\n\n\ndef row_builder(entity):\n part_name = entity[0]\n equipped = entity[1].name if entity[1] else ''\n return [part_name, equipped]\n", "step-3": "<mask token>\n\n\ndef run(scene: GameScene):\n handle_show_equip_screen(scene)\n\n\ndef handle_show_equip_screen(scene: GameScene):\n brains = [b for b in scene.cm.get(Brain) if b.intention is Intention.\n SHOW_EQUIP_SCREEN]\n for brain in brains:\n entity = brain.entity\n menu_actions = OrderedDict()\n equip_action = MenuAction('e', 'equip', lambda slot: set_intention(\n scene, entity, slot, Intention.EQUIP_SLOT))\n menu_actions[tcod.event.K_e] = equip_action\n equipment_scene = ListMenuScene('Equipment', get_slots_query(scene,\n entity), row_builder=row_builder, default_action=equip_action,\n menu_actions=menu_actions, id_extractor=lambda e: e[1],\n parent_scene=scene)\n scene.controller.push_scene(equipment_scene)\n retract_intention(scene, entity)\n\n\ndef get_slots_query(scene: GameScene, entity: int):\n \"\"\"Return a query that resolves to entity's equipment slots and their equipped items.\"\"\"\n\n def query():\n paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)\n equipment = paper_doll.get_equipment()\n return [(k, scene.cm.get_one(Entity, v)) for k, v in equipment.items()]\n return query\n\n\ndef row_builder(entity):\n part_name = entity[0]\n equipped = entity[1].name if entity[1] else ''\n return [part_name, equipped]\n", "step-4": "from collections import OrderedDict\nimport tcod.event\nfrom components import Entity, PaperDoll, Brain\nfrom components.enums import Intention\nfrom engine import GameScene\nfrom scenes.list_menu_scene import MenuAction, ListMenuScene\nfrom systems.utilities import set_intention, retract_intention\n\n\ndef run(scene: GameScene):\n handle_show_equip_screen(scene)\n\n\ndef handle_show_equip_screen(scene: GameScene):\n brains = [b for b in scene.cm.get(Brain) if 
b.intention is Intention.\n SHOW_EQUIP_SCREEN]\n for brain in brains:\n entity = brain.entity\n menu_actions = OrderedDict()\n equip_action = MenuAction('e', 'equip', lambda slot: set_intention(\n scene, entity, slot, Intention.EQUIP_SLOT))\n menu_actions[tcod.event.K_e] = equip_action\n equipment_scene = ListMenuScene('Equipment', get_slots_query(scene,\n entity), row_builder=row_builder, default_action=equip_action,\n menu_actions=menu_actions, id_extractor=lambda e: e[1],\n parent_scene=scene)\n scene.controller.push_scene(equipment_scene)\n retract_intention(scene, entity)\n\n\ndef get_slots_query(scene: GameScene, entity: int):\n \"\"\"Return a query that resolves to entity's equipment slots and their equipped items.\"\"\"\n\n def query():\n paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)\n equipment = paper_doll.get_equipment()\n return [(k, scene.cm.get_one(Entity, v)) for k, v in equipment.items()]\n return query\n\n\ndef row_builder(entity):\n part_name = entity[0]\n equipped = entity[1].name if entity[1] else ''\n return [part_name, equipped]\n", "step-5": "\n\nfrom collections import OrderedDict\n\nimport tcod.event\n\nfrom components import Entity, PaperDoll, Brain\nfrom components.enums import Intention\nfrom engine import GameScene\nfrom scenes.list_menu_scene import MenuAction, ListMenuScene\nfrom systems.utilities import set_intention, retract_intention\n\n\ndef run(scene: GameScene):\n handle_show_equip_screen(scene)\n\n\ndef handle_show_equip_screen(scene: GameScene):\n brains = [b for b in scene.cm.get(Brain) if b.intention is Intention.SHOW_EQUIP_SCREEN]\n for brain in brains:\n entity = brain.entity\n menu_actions = OrderedDict()\n equip_action = MenuAction('e', 'equip', lambda slot: set_intention(scene, entity, slot, Intention.EQUIP_SLOT))\n menu_actions[tcod.event.K_e] = equip_action\n equipment_scene = ListMenuScene(\n \"Equipment\",\n get_slots_query(scene, entity),\n row_builder=row_builder,\n default_action=equip_action,\n menu_actions=menu_actions,\n id_extractor=lambda e: e[1],\n parent_scene=scene\n )\n scene.controller.push_scene(equipment_scene)\n retract_intention(scene, entity)\n\n\ndef get_slots_query(scene: GameScene, entity: int):\n \"\"\"Return a query that resolves to entity's equipment slots and their equipped items.\"\"\"\n\n def query():\n\n paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)\n equipment = paper_doll.get_equipment()\n\n return [\n (k, scene.cm.get_one(Entity, v))\n for k, v in equipment.items()\n ]\n return query\n\n\ndef row_builder(entity):\n part_name = entity[0]\n equipped = entity[1].name if entity[1] else ''\n return [part_name, equipped]\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
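# A quick, self-contained check (stub data, not engine objects) of the
# row_builder contract from the record above: each row is built from a
# (slot_name, entity) pair, where entity may be None for an empty slot.
# StubEntity is an illustrative stand-in for the real Entity component.
from collections import namedtuple

StubEntity = namedtuple('StubEntity', 'name')


def row_builder(entity):
    part_name = entity[0]
    equipped = entity[1].name if entity[1] else ''
    return [part_name, equipped]


assert row_builder(('head', StubEntity('iron helm'))) == ['head', 'iron helm']
assert row_builder(('left hand', None)) == ['left hand', '']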