code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
"""This module provides the definition of the exceptions that can be raised from the database module."""
class DatabaseError(Exception):
    """Signals that a requested database operation could not be completed."""
class InvalidDictError(Exception):
    """Signals that an object could not be built from the supplied dict."""
|
normal
|
{
"blob_id": "94130b4962ecff2ea087ab34cf50a084254bf980",
"index": 8948,
"step-1": "<mask token>\n\n\nclass InvalidDictError(Exception):\n <mask token>\n pass\n",
"step-2": "<mask token>\n\n\nclass InvalidDictError(Exception):\n \"\"\"Raised when the object can not be created from the provided dict.\"\"\"\n pass\n",
"step-3": "<mask token>\n\n\nclass DatabaseError(Exception):\n <mask token>\n pass\n\n\nclass InvalidDictError(Exception):\n \"\"\"Raised when the object can not be created from the provided dict.\"\"\"\n pass\n",
"step-4": "<mask token>\n\n\nclass DatabaseError(Exception):\n \"\"\"Raised when the requested database operation can not be completed.\"\"\"\n pass\n\n\nclass InvalidDictError(Exception):\n \"\"\"Raised when the object can not be created from the provided dict.\"\"\"\n pass\n",
"step-5": "\"\"\"This module provides the definition of the exceptions that can be raised from the database module.\"\"\"\n\nclass DatabaseError(Exception):\n \"\"\"Raised when the requested database operation can not be completed.\"\"\"\n pass\n\nclass InvalidDictError(Exception):\n \"\"\"Raised when the object can not be created from the provided dict.\"\"\"\n pass",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class ObjectCenter(object):
def __init__(self, args):
"""Initialize variables."""
self.args = args
def load_classes(self, path):
with open(path, 'r') as names_file:
names = names_file.read().split('\n')
return list(filter(None, names))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def filter_objects(self, frame, predictions, object_x=None, object_y=
None, center_x=None, center_y=None):
"""Apply object detection."""
predictions = self._filter_(frame, predictions)
if predictions is not None and len(predictions) > 0:
if predictions[0][0] is not None and len(predictions) == 3:
bbox, label, conf = predictions[0][0]
H, W = frame.shape[:2]
center_x.value = W // 2
center_y.value = H // 2
object_location = self.update(predictions, frame, (center_x
.value, center_y.value))
(object_x.value, object_y.value), predictions = object_location
if self.args.no_show:
return None
else:
inferred_image = draw_bbox(frame, bbox, label, conf,
write_conf=True)
return inferred_image
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ObjectCenter(object):
def __init__(self, args):
"""Initialize variables."""
self.args = args
def load_classes(self, path):
with open(path, 'r') as names_file:
names = names_file.read().split('\n')
return list(filter(None, names))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def update(self, predictions, frame, frameCenter):
"""Asynchronous update of detection results to return object center."""
if len(predictions) > 0:
x, y, w, h = predictions[0][0]
objectX = int(x + w / 2.0)
objectY = int(y + h / 2.0)
return (objectX, objectY), predictions
else:
return frameCenter, None
def filter_objects(self, frame, predictions, object_x=None, object_y=
None, center_x=None, center_y=None):
"""Apply object detection."""
predictions = self._filter_(frame, predictions)
if predictions is not None and len(predictions) > 0:
if predictions[0][0] is not None and len(predictions) == 3:
bbox, label, conf = predictions[0][0]
H, W = frame.shape[:2]
center_x.value = W // 2
center_y.value = H // 2
object_location = self.update(predictions, frame, (center_x
.value, center_y.value))
(object_x.value, object_y.value), predictions = object_location
if self.args.no_show:
return None
else:
inferred_image = draw_bbox(frame, bbox, label, conf,
write_conf=True)
return inferred_image
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ObjectCenter(object):
def __init__(self, args):
"""Initialize variables."""
self.args = args
def load_classes(self, path):
with open(path, 'r') as names_file:
names = names_file.read().split('\n')
return list(filter(None, names))
def _filter_(self, frame, predictions):
"""Apply object detection."""
if not self.args.no_filter_object_category:
names = self.load_classes(self.args.names)
object_category = names.index(self.args.object_category)
predictions = self.filter_inference_results(predictions,
object_category=object_category)
return predictions
<|reserved_special_token_0|>
def update(self, predictions, frame, frameCenter):
"""Asynchronous update of detection results to return object center."""
if len(predictions) > 0:
x, y, w, h = predictions[0][0]
objectX = int(x + w / 2.0)
objectY = int(y + h / 2.0)
return (objectX, objectY), predictions
else:
return frameCenter, None
def filter_objects(self, frame, predictions, object_x=None, object_y=
None, center_x=None, center_y=None):
"""Apply object detection."""
predictions = self._filter_(frame, predictions)
if predictions is not None and len(predictions) > 0:
if predictions[0][0] is not None and len(predictions) == 3:
bbox, label, conf = predictions[0][0]
H, W = frame.shape[:2]
center_x.value = W // 2
center_y.value = H // 2
object_location = self.update(predictions, frame, (center_x
.value, center_y.value))
(object_x.value, object_y.value), predictions = object_location
if self.args.no_show:
return None
else:
inferred_image = draw_bbox(frame, bbox, label, conf,
write_conf=True)
return inferred_image
<|reserved_special_token_1|>
from cvlib.object_detection import draw_bbox
class ObjectCenter(object):
def __init__(self, args):
"""Initialize variables."""
self.args = args
def load_classes(self, path):
with open(path, 'r') as names_file:
names = names_file.read().split('\n')
return list(filter(None, names))
def _filter_(self, frame, predictions):
"""Apply object detection."""
if not self.args.no_filter_object_category:
names = self.load_classes(self.args.names)
object_category = names.index(self.args.object_category)
predictions = self.filter_inference_results(predictions,
object_category=object_category)
return predictions
def filter_inference_results(self, predictions, object_category='person'):
"""Return bounding box of biggest object of selected category."""
if predictions is not None and len(predictions) == 3:
bboxes, labels, confs = predictions
category_bboxes = [(bbox, label, conf) for bbox, label, conf in
zip(bboxes, labels, confs) if (label == object_category).any()]
if len(category_bboxes) > 0:
biggest_bbox = None
biggest_label = None
biggest_conf = None
most_pixels = 0
for bbox, label, conf in category_bboxes:
x, y, w, h = bbox
n_pixels = w * h
if n_pixels > most_pixels:
most_pixels = n_pixels
biggest_bbox = bbox
biggest_label = label
biggest_conf = conf
category_bboxes = [biggest_bbox], [biggest_label], [
biggest_conf]
predictions = category_bboxes
return predictions
def update(self, predictions, frame, frameCenter):
"""Asynchronous update of detection results to return object center."""
if len(predictions) > 0:
x, y, w, h = predictions[0][0]
objectX = int(x + w / 2.0)
objectY = int(y + h / 2.0)
return (objectX, objectY), predictions
else:
return frameCenter, None
def filter_objects(self, frame, predictions, object_x=None, object_y=
None, center_x=None, center_y=None):
"""Apply object detection."""
predictions = self._filter_(frame, predictions)
if predictions is not None and len(predictions) > 0:
if predictions[0][0] is not None and len(predictions) == 3:
bbox, label, conf = predictions[0][0]
H, W = frame.shape[:2]
center_x.value = W // 2
center_y.value = H // 2
object_location = self.update(predictions, frame, (center_x
.value, center_y.value))
(object_x.value, object_y.value), predictions = object_location
if self.args.no_show:
return None
else:
inferred_image = draw_bbox(frame, bbox, label, conf,
write_conf=True)
return inferred_image
<|reserved_special_token_1|>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: software/jetson/fastmot/utils/sot.py
# By: Samuel Duclos
# For: Myself
# Description: This file returns detection results from an image.
from cvlib.object_detection import draw_bbox
class ObjectCenter(object):
    """Select the biggest detected object of one category and locate its center.

    Works with cvlib-style detection results, i.e. a 3-tuple of
    (bboxes, labels, confidences).
    """

    def __init__(self, args):
        """Keep the parsed command-line arguments for later use."""
        self.args = args

    def load_classes(self, path):
        """Read class names from *path* (one per line), dropping blank lines."""
        with open(path, 'r') as names_file:
            return [name for name in names_file.read().split('\n') if name]

    def _filter_(self, frame, predictions):
        """Restrict *predictions* to the configured category unless disabled."""
        if self.args.no_filter_object_category:
            return predictions
        names = self.load_classes(self.args.names)
        category_index = names.index(self.args.object_category)
        return self.filter_inference_results(predictions,
                                             object_category=category_index)

    def filter_inference_results(self, predictions, object_category='person'):
        """Return bounding box of the biggest object of the selected category."""
        if predictions is None or len(predictions) != 3:
            return predictions
        bboxes, labels, confs = predictions
        # NOTE(review): `(label == object_category).any()` implies labels are
        # array-like (e.g. numpy) rather than plain strings — confirm upstream.
        selected = [(bbox, label, conf)
                    for bbox, label, conf in zip(bboxes, labels, confs)
                    if (label == object_category).any()]
        if selected:
            best_bbox = best_label = best_conf = None
            best_area = 0
            for bbox, label, conf in selected:
                # assumes bbox is (x, y, w, h) — TODO confirm; cvlib usually
                # reports corner coordinates instead.
                x, y, w, h = bbox
                area = w * h
                if area > best_area:
                    best_area = area
                    best_bbox, best_label, best_conf = bbox, label, conf
            selected = [best_bbox], [best_label], [best_conf]
        return selected

    def update(self, predictions, frame, frameCenter):
        """Return the first object's center, or *frameCenter* when empty."""
        if len(predictions) > 0:
            # assumes bbox is (x, y, w, h) — TODO confirm with the detector.
            x, y, w, h = predictions[0][0]
            return (int(x + w / 2.0), int(y + h / 2.0)), predictions
        return frameCenter, None

    def filter_objects(self, frame, predictions, object_x=None, object_y=None,
                       center_x=None, center_y=None):
        """Filter detections, publish object/frame centers, and draw the box.

        Returns the annotated frame, or None when no usable detection is
        present or displaying is disabled (`args.no_show`).
        """
        predictions = self._filter_(frame, predictions)
        if predictions is None or len(predictions) == 0:
            return None
        if predictions[0][0] is None or len(predictions) != 3:
            return None
        # NOTE(review): predictions is (bboxes, labels, confs), so
        # predictions[0][0] is a single bbox; unpacking it into three names
        # looks suspect — verify the expected layout with the caller.
        bbox, label, conf = predictions[0][0]
        # Keep the frame center handy: tracking tries to hold the object there.
        H, W = frame.shape[:2]
        center_x.value = W // 2
        center_y.value = H // 2
        object_location = self.update(predictions, frame,
                                      (center_x.value, center_y.value))
        (object_x.value, object_y.value), predictions = object_location
        if self.args.no_show:
            return None
        return draw_bbox(frame, bbox, label, conf, write_conf=True)
|
flexible
|
{
"blob_id": "8f14bbab8b2a4bc0758c6b48feb20f8b0e3e348b",
"index": 5460,
"step-1": "<mask token>\n\n\nclass ObjectCenter(object):\n\n def __init__(self, args):\n \"\"\"Initialize variables.\"\"\"\n self.args = args\n\n def load_classes(self, path):\n with open(path, 'r') as names_file:\n names = names_file.read().split('\\n')\n return list(filter(None, names))\n <mask token>\n <mask token>\n <mask token>\n\n def filter_objects(self, frame, predictions, object_x=None, object_y=\n None, center_x=None, center_y=None):\n \"\"\"Apply object detection.\"\"\"\n predictions = self._filter_(frame, predictions)\n if predictions is not None and len(predictions) > 0:\n if predictions[0][0] is not None and len(predictions) == 3:\n bbox, label, conf = predictions[0][0]\n H, W = frame.shape[:2]\n center_x.value = W // 2\n center_y.value = H // 2\n object_location = self.update(predictions, frame, (center_x\n .value, center_y.value))\n (object_x.value, object_y.value), predictions = object_location\n if self.args.no_show:\n return None\n else:\n inferred_image = draw_bbox(frame, bbox, label, conf,\n write_conf=True)\n return inferred_image\n",
"step-2": "<mask token>\n\n\nclass ObjectCenter(object):\n\n def __init__(self, args):\n \"\"\"Initialize variables.\"\"\"\n self.args = args\n\n def load_classes(self, path):\n with open(path, 'r') as names_file:\n names = names_file.read().split('\\n')\n return list(filter(None, names))\n <mask token>\n <mask token>\n\n def update(self, predictions, frame, frameCenter):\n \"\"\"Asynchronous update of detection results to return object center.\"\"\"\n if len(predictions) > 0:\n x, y, w, h = predictions[0][0]\n objectX = int(x + w / 2.0)\n objectY = int(y + h / 2.0)\n return (objectX, objectY), predictions\n else:\n return frameCenter, None\n\n def filter_objects(self, frame, predictions, object_x=None, object_y=\n None, center_x=None, center_y=None):\n \"\"\"Apply object detection.\"\"\"\n predictions = self._filter_(frame, predictions)\n if predictions is not None and len(predictions) > 0:\n if predictions[0][0] is not None and len(predictions) == 3:\n bbox, label, conf = predictions[0][0]\n H, W = frame.shape[:2]\n center_x.value = W // 2\n center_y.value = H // 2\n object_location = self.update(predictions, frame, (center_x\n .value, center_y.value))\n (object_x.value, object_y.value), predictions = object_location\n if self.args.no_show:\n return None\n else:\n inferred_image = draw_bbox(frame, bbox, label, conf,\n write_conf=True)\n return inferred_image\n",
"step-3": "<mask token>\n\n\nclass ObjectCenter(object):\n\n def __init__(self, args):\n \"\"\"Initialize variables.\"\"\"\n self.args = args\n\n def load_classes(self, path):\n with open(path, 'r') as names_file:\n names = names_file.read().split('\\n')\n return list(filter(None, names))\n\n def _filter_(self, frame, predictions):\n \"\"\"Apply object detection.\"\"\"\n if not self.args.no_filter_object_category:\n names = self.load_classes(self.args.names)\n object_category = names.index(self.args.object_category)\n predictions = self.filter_inference_results(predictions,\n object_category=object_category)\n return predictions\n <mask token>\n\n def update(self, predictions, frame, frameCenter):\n \"\"\"Asynchronous update of detection results to return object center.\"\"\"\n if len(predictions) > 0:\n x, y, w, h = predictions[0][0]\n objectX = int(x + w / 2.0)\n objectY = int(y + h / 2.0)\n return (objectX, objectY), predictions\n else:\n return frameCenter, None\n\n def filter_objects(self, frame, predictions, object_x=None, object_y=\n None, center_x=None, center_y=None):\n \"\"\"Apply object detection.\"\"\"\n predictions = self._filter_(frame, predictions)\n if predictions is not None and len(predictions) > 0:\n if predictions[0][0] is not None and len(predictions) == 3:\n bbox, label, conf = predictions[0][0]\n H, W = frame.shape[:2]\n center_x.value = W // 2\n center_y.value = H // 2\n object_location = self.update(predictions, frame, (center_x\n .value, center_y.value))\n (object_x.value, object_y.value), predictions = object_location\n if self.args.no_show:\n return None\n else:\n inferred_image = draw_bbox(frame, bbox, label, conf,\n write_conf=True)\n return inferred_image\n",
"step-4": "from cvlib.object_detection import draw_bbox\n\n\nclass ObjectCenter(object):\n\n def __init__(self, args):\n \"\"\"Initialize variables.\"\"\"\n self.args = args\n\n def load_classes(self, path):\n with open(path, 'r') as names_file:\n names = names_file.read().split('\\n')\n return list(filter(None, names))\n\n def _filter_(self, frame, predictions):\n \"\"\"Apply object detection.\"\"\"\n if not self.args.no_filter_object_category:\n names = self.load_classes(self.args.names)\n object_category = names.index(self.args.object_category)\n predictions = self.filter_inference_results(predictions,\n object_category=object_category)\n return predictions\n\n def filter_inference_results(self, predictions, object_category='person'):\n \"\"\"Return bounding box of biggest object of selected category.\"\"\"\n if predictions is not None and len(predictions) == 3:\n bboxes, labels, confs = predictions\n category_bboxes = [(bbox, label, conf) for bbox, label, conf in\n zip(bboxes, labels, confs) if (label == object_category).any()]\n if len(category_bboxes) > 0:\n biggest_bbox = None\n biggest_label = None\n biggest_conf = None\n most_pixels = 0\n for bbox, label, conf in category_bboxes:\n x, y, w, h = bbox\n n_pixels = w * h\n if n_pixels > most_pixels:\n most_pixels = n_pixels\n biggest_bbox = bbox\n biggest_label = label\n biggest_conf = conf\n category_bboxes = [biggest_bbox], [biggest_label], [\n biggest_conf]\n predictions = category_bboxes\n return predictions\n\n def update(self, predictions, frame, frameCenter):\n \"\"\"Asynchronous update of detection results to return object center.\"\"\"\n if len(predictions) > 0:\n x, y, w, h = predictions[0][0]\n objectX = int(x + w / 2.0)\n objectY = int(y + h / 2.0)\n return (objectX, objectY), predictions\n else:\n return frameCenter, None\n\n def filter_objects(self, frame, predictions, object_x=None, object_y=\n None, center_x=None, center_y=None):\n \"\"\"Apply object detection.\"\"\"\n predictions = 
self._filter_(frame, predictions)\n if predictions is not None and len(predictions) > 0:\n if predictions[0][0] is not None and len(predictions) == 3:\n bbox, label, conf = predictions[0][0]\n H, W = frame.shape[:2]\n center_x.value = W // 2\n center_y.value = H // 2\n object_location = self.update(predictions, frame, (center_x\n .value, center_y.value))\n (object_x.value, object_y.value), predictions = object_location\n if self.args.no_show:\n return None\n else:\n inferred_image = draw_bbox(frame, bbox, label, conf,\n write_conf=True)\n return inferred_image\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\n# File: software/jetson/fastmot/utils/sot.py\n# By: Samuel Duclos\n# For: Myself\n# Description: This file returns detection results from an image.\n\nfrom cvlib.object_detection import draw_bbox\n\nclass ObjectCenter(object):\n def __init__(self, args):\n \"\"\"Initialize variables.\"\"\"\n self.args = args\n\n def load_classes(self, path):\n with open(path, 'r') as names_file:\n names = names_file.read().split('\\n')\n return list(filter(None, names))\n\n def _filter_(self, frame, predictions):\n \"\"\"Apply object detection.\"\"\"\n\n if not self.args.no_filter_object_category:\n names = self.load_classes(self.args.names)\n object_category = names.index(self.args.object_category)\n predictions = self.filter_inference_results(predictions, \n object_category=object_category)\n\n return predictions\n\n def filter_inference_results(self, predictions, object_category='person'):\n \"\"\"Return bounding box of biggest object of selected category.\"\"\"\n if predictions is not None and len(predictions) == 3:\n bboxes, labels, confs = predictions\n\n # Only return bounding boxes for the selected object category.\n category_bboxes = [(bbox, \n label, \n conf) for (bbox, \n label, \n conf) in zip(bboxes, \n labels, \n confs) if (label == object_category).any()]\n\n if len(category_bboxes) > 0:\n # Choose biggest object of selected category.\n biggest_bbox = None\n biggest_label = None\n biggest_conf = None\n most_pixels = 0\n\n for (bbox, label, conf) in category_bboxes:\n (x, y, w, h) = bbox\n n_pixels = w * h\n\n if n_pixels > most_pixels:\n most_pixels = n_pixels\n biggest_bbox = bbox\n biggest_label = label\n biggest_conf = conf\n\n category_bboxes = ([biggest_bbox], [biggest_label], [biggest_conf])\n\n predictions = category_bboxes\n\n return predictions\n\n def update(self, predictions, frame, frameCenter):\n \"\"\"Asynchronous update of detection results to return object center.\"\"\"\n if 
len(predictions) > 0:\n (x, y, w, h) = predictions[0][0]\n objectX = int(x + (w / 2.0))\n objectY = int(y + (h / 2.0))\n return ((objectX, objectY), predictions)\n\n else:\n return (frameCenter, None)\n\n def filter_objects(self, frame, predictions, object_x=None, object_y=None, center_x=None, center_y=None):\n \"\"\"Apply object detection.\"\"\"\n\n predictions = self._filter_(frame, predictions)\n\n if predictions is not None and len(predictions) > 0:\n if predictions[0][0] is not None and len(predictions) == 3:\n bbox, label, conf = predictions[0][0]\n\n # Calculate the center of the frame since we will be trying to keep the object there.\n (H, W) = frame.shape[:2]\n center_x.value = W // 2\n center_y.value = H // 2\n\n object_location = self.update(predictions, frame, (center_x.value, center_y.value))\n ((object_x.value, object_y.value), predictions) = object_location\n\n if self.args.no_show:\n return None\n\n else:\n # Draw bounding box over detected objects.\n inferred_image = draw_bbox(frame, bbox, label, conf, write_conf=True)\n return inferred_image\n",
"step-ids": [
4,
5,
6,
8,
9
]
}
|
[
4,
5,
6,
8,
9
] |
def five_x_cubed_plus_1(x):
return 5 * x ** 3 + 1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def five_x_cubed_plus_1(x):
return 5 * x ** 3 + 1
def pair_off(ary):
result = []
for i in range(0, int(len(ary) / 2 * 2), 2):
result.append([ary[i], ary[i + 1]])
if int(len(ary) % 2) == 1:
result.append([ary[-1]])
return result
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def five_x_cubed_plus_1(x):
return 5 * x ** 3 + 1
def pair_off(ary):
result = []
for i in range(0, int(len(ary) / 2 * 2), 2):
result.append([ary[i], ary[i + 1]])
if int(len(ary) % 2) == 1:
result.append([ary[-1]])
return result
def mystery_code(input_string):
result = ''
for c in input_string:
next_char = c
if str.isalpha(c):
if c.upper() < 'H':
if c.islower():
next_char = chr(ord(c) + 19).upper()
else:
next_char = chr(ord(c) + 19).lower()
elif c.islower():
next_char = chr(ord(c) - 7).upper()
else:
next_char = chr(ord(c) - 7).lower()
result = result + next_char
return result
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def five_x_cubed_plus_1(x):
return 5 * x ** 3 + 1
def pair_off(ary):
result = []
for i in range(0, int(len(ary) / 2 * 2), 2):
result.append([ary[i], ary[i + 1]])
if int(len(ary) % 2) == 1:
result.append([ary[-1]])
return result
def mystery_code(input_string):
result = ''
for c in input_string:
next_char = c
if str.isalpha(c):
if c.upper() < 'H':
if c.islower():
next_char = chr(ord(c) + 19).upper()
else:
next_char = chr(ord(c) + 19).lower()
elif c.islower():
next_char = chr(ord(c) - 7).upper()
else:
next_char = chr(ord(c) - 7).lower()
result = result + next_char
return result
def past_tense(words):
result = []
irregular_dict = {'have': 'had', 'be': 'was', 'eat': 'ate', 'go': 'went'}
for word in words:
word = str.lower(word)
if word in irregular_dict.keys():
result.append(irregular_dict[word])
elif word[-1] is 'e':
result.append(word + 'd')
elif word[-1] is 'y' and word[-2] not in 'aeiou':
result.append(word[:-1] + 'ied')
elif word[-2] in 'aeiou' and word[-1] not in 'aeiouwy' and word[-3
] not in 'aeiou':
result.append(word + word[-1] + 'ed')
else:
result.append(word + 'ed')
return result
<|reserved_special_token_1|>
# CSE 415 Winter 2019
# Assignment 1
# Jichun Li 1531264
# Part A
# 1
def five_x_cubed_plus_1(x):
    """Evaluate the cubic polynomial 5*x**3 + 1 at *x*."""
    return 1 + 5 * x ** 3
#2
def pair_off(ary):
    """Group consecutive elements of *ary* into two-element lists.

    A trailing unpaired element becomes a one-element list.
    """
    return [list(ary[i:i + 2]) for i in range(0, len(ary), 2)]
#3
def mystery_code(input_string):
    """Encode *input_string* by shifting letters and swapping their case.

    Letters whose uppercase form sorts before 'H' are shifted +19 code
    points; the rest are shifted -7. The case of every letter is flipped;
    non-alphabetic characters pass through unchanged.
    """
    def transform(c):
        if not c.isalpha():
            return c
        offset = 19 if c.upper() < 'H' else -7
        shifted = chr(ord(c) + offset)
        return shifted.upper() if c.islower() else shifted.lower()
    return ''.join(transform(c) for c in input_string)
#4
def past_tense(words):
    """Convert each word in *words* to its past tense.

    Handles a small table of irregular verbs, the silent-e rule
    ('smile' -> 'smiled'), consonant + 'y' -> 'ied' ('try' -> 'tried'),
    and final-consonant doubling for CVC endings ('stop' -> 'stopped').
    Every word is lowercased before conversion.

    Fixes over the original: string equality now uses `==` instead of `is`
    (identity only worked via CPython's interning of 1-char strings), and
    length guards prevent IndexError on words shorter than the negative
    index probes (e.g. '', 'ax').
    """
    irregular = {'have': 'had', 'be': 'was', 'eat': 'ate', 'go': 'went'}
    result = []
    for word in words:
        word = word.lower()
        if word in irregular:
            result.append(irregular[word])
        elif word.endswith('e'):
            result.append(word + 'd')
        elif len(word) >= 2 and word[-1] == 'y' and word[-2] not in 'aeiou':
            result.append(word[:-1] + 'ied')
        elif (len(word) >= 3 and word[-2] in 'aeiou'
              and word[-1] not in 'aeiouwy' and word[-3] not in 'aeiou'):
            # CVC ending: double the final consonant before 'ed'.
            result.append(word + word[-1] + 'ed')
        else:
            result.append(word + 'ed')
    return result
|
flexible
|
{
"blob_id": "681788ffe7672458e8d334316aa87936746352b1",
"index": 4054,
"step-1": "def five_x_cubed_plus_1(x):\n return 5 * x ** 3 + 1\n\n\n<mask token>\n",
"step-2": "def five_x_cubed_plus_1(x):\n return 5 * x ** 3 + 1\n\n\ndef pair_off(ary):\n result = []\n for i in range(0, int(len(ary) / 2 * 2), 2):\n result.append([ary[i], ary[i + 1]])\n if int(len(ary) % 2) == 1:\n result.append([ary[-1]])\n return result\n\n\n<mask token>\n",
"step-3": "def five_x_cubed_plus_1(x):\n return 5 * x ** 3 + 1\n\n\ndef pair_off(ary):\n result = []\n for i in range(0, int(len(ary) / 2 * 2), 2):\n result.append([ary[i], ary[i + 1]])\n if int(len(ary) % 2) == 1:\n result.append([ary[-1]])\n return result\n\n\ndef mystery_code(input_string):\n result = ''\n for c in input_string:\n next_char = c\n if str.isalpha(c):\n if c.upper() < 'H':\n if c.islower():\n next_char = chr(ord(c) + 19).upper()\n else:\n next_char = chr(ord(c) + 19).lower()\n elif c.islower():\n next_char = chr(ord(c) - 7).upper()\n else:\n next_char = chr(ord(c) - 7).lower()\n result = result + next_char\n return result\n\n\n<mask token>\n",
"step-4": "def five_x_cubed_plus_1(x):\n return 5 * x ** 3 + 1\n\n\ndef pair_off(ary):\n result = []\n for i in range(0, int(len(ary) / 2 * 2), 2):\n result.append([ary[i], ary[i + 1]])\n if int(len(ary) % 2) == 1:\n result.append([ary[-1]])\n return result\n\n\ndef mystery_code(input_string):\n result = ''\n for c in input_string:\n next_char = c\n if str.isalpha(c):\n if c.upper() < 'H':\n if c.islower():\n next_char = chr(ord(c) + 19).upper()\n else:\n next_char = chr(ord(c) + 19).lower()\n elif c.islower():\n next_char = chr(ord(c) - 7).upper()\n else:\n next_char = chr(ord(c) - 7).lower()\n result = result + next_char\n return result\n\n\ndef past_tense(words):\n result = []\n irregular_dict = {'have': 'had', 'be': 'was', 'eat': 'ate', 'go': 'went'}\n for word in words:\n word = str.lower(word)\n if word in irregular_dict.keys():\n result.append(irregular_dict[word])\n elif word[-1] is 'e':\n result.append(word + 'd')\n elif word[-1] is 'y' and word[-2] not in 'aeiou':\n result.append(word[:-1] + 'ied')\n elif word[-2] in 'aeiou' and word[-1] not in 'aeiouwy' and word[-3\n ] not in 'aeiou':\n result.append(word + word[-1] + 'ed')\n else:\n result.append(word + 'ed')\n return result\n",
"step-5": "# CSE 415 Winter 2019\n# Assignment 1\n# Jichun Li 1531264\n\n# Part A\n# 1\ndef five_x_cubed_plus_1(x):\n\treturn 5 * (x ** 3) + 1\n\n#2\ndef pair_off(ary):\n result = []\n \n for i in range(0, int(len(ary) / 2 * 2), 2):\n result.append([ary[i], ary[i + 1]])\n if (int (len(ary) % 2) == 1):\n result.append([ary[-1]])\n return result\n\n#3\ndef mystery_code(input_string):\n\tresult = ''\n\tfor c in input_string:\n\t\tnext_char = c\n\t\tif str.isalpha(c):\n\t\t\tif c.upper() < 'H':\n\t\t\t\tif c.islower():\n\t\t\t\t\tnext_char = chr(ord(c) + 19).upper()\n\t\t\t\telse:\n\t\t\t\t\tnext_char = chr(ord(c) + 19).lower()\n\t\t\telse:\n\t\t\t\tif c.islower():\n\t\t\t\t\tnext_char = chr(ord(c) - 7).upper()\n\t\t\t\telse:\n\t\t\t\t\tnext_char = chr(ord(c) - 7).lower()\n\t\tresult = result + next_char\n\treturn result\n\n#4\ndef past_tense(words):\n\tresult = []\n\tirregular_dict = {'have':'had',\n\t\t\t 'be':'was',\n\t\t\t 'eat':'ate',\n\t\t\t 'go':'went'}\n\tfor word in words:\n\t\tword = str.lower(word)\n\t\tif word in irregular_dict.keys():\n\t\t\tresult.append(irregular_dict[word])\n\t\telif word[-1] is 'e':\n\t\t\tresult.append(word + 'd')\n\t\telif word[-1] is 'y' and word[-2] not in 'aeiou':\n\t\t\tresult.append(word[:-1] + 'ied')\n\t\telif word[-2] in 'aeiou' and word[-1] not in 'aeiouwy' and word[-3] not in 'aeiou':\n\t\t\tresult.append(word + word[-1] + 'ed')\n\t\telse:\n\t\t\tresult.append(word + 'ed')\n\treturn result\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from collections import deque
from etaprogress.eta import ETA
def test_linear_slope_1():
    # Four samples advancing 1 unit/second -> rate 1.0, finished at t=100.
    tracker = ETA(100)
    tracker._timing_data = deque([(10, 10), (20, 20), (30, 30), (40, 40)])
    tracker._calculate()
    assert tracker.eta_epoch == 100
    assert tracker.rate == 1.0
    assert tracker.rate_unstable == 1.0
def test_linear_slope_2():
    # Four samples advancing 2 units/second -> rate 2.0, finished at t=50.
    tracker = ETA(100)
    tracker._timing_data = deque([(10, 20), (20, 40), (30, 60), (40, 80)])
    tracker._calculate()
    assert tracker.eta_epoch == 50
    assert tracker.rate == 2.0
    assert tracker.rate_unstable == 2.0
def test_linear_transform():
    """Near 100% the fitted line is vertically shifted toward the last sample.

    Wolfram Alpha cross-check (x = timestamp, y = numerator, denominator 120):
    linear fit {1.2, 22},{2.4, 58},{3.1, 102},{4.4, 118}.
    The shift keeps a ~99% progress bar from reporting an ETA in the past.
    """
    tracker = ETA(120)
    tracker._timing_data = deque([(1.2, 22), (2.4, 58), (3.1, 102), (4.4, 118)])
    tracker._calculate()
    assert 4.4 < tracker.eta_epoch < 4.6
    assert 30 < tracker.rate < 35
    assert 12 < tracker.rate_unstable < 13
def test_linear_transform_undefined():
eta = ETA()
eta._timing_data = deque([(1.2, 22), (2.4, 58), (3.1, 102), (4.4, 118)])
getattr(eta, '_calculate')()
assert eta.eta_epoch is None
assert 30 < eta.rate < 35
assert 12 < eta.rate_unstable < 13
|
normal
|
{
"blob_id": "810017cd5814fc20ebcdbdf26a32ea1bcfc88625",
"index": 2164,
"step-1": "<mask token>\n\n\ndef test_linear_slope_2():\n eta = ETA(100)\n eta._timing_data = deque([(10, 20), (20, 40), (30, 60), (40, 80)])\n getattr(eta, '_calculate')()\n assert 50 == eta.eta_epoch\n assert 2.0 == eta.rate\n assert 2.0 == eta.rate_unstable\n\n\ndef test_linear_transform():\n \"\"\"Wolfram Alpha:\n x is the timestamp. y is the numerator. 120 is the denominator.\n linear fit {1.2, 22},{2.4, 58},{3.1, 102},{4.4, 118}\n\n The closer we get to 100%, the more vertical shift/transform is applied to the line.\n As we near the end we want the line to get closer to the last point on the graph.\n This avoids having 99% with an ETA in the past.\n \"\"\"\n eta = ETA(120)\n eta._timing_data = deque([(1.2, 22), (2.4, 58), (3.1, 102), (4.4, 118)])\n getattr(eta, '_calculate')()\n assert 4.4 < eta.eta_epoch < 4.6\n assert 30 < eta.rate < 35\n assert 12 < eta.rate_unstable < 13\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_linear_slope_1():\n eta = ETA(100)\n eta._timing_data = deque([(10, 10), (20, 20), (30, 30), (40, 40)])\n getattr(eta, '_calculate')()\n assert 100 == eta.eta_epoch\n assert 1.0 == eta.rate\n assert 1.0 == eta.rate_unstable\n\n\ndef test_linear_slope_2():\n eta = ETA(100)\n eta._timing_data = deque([(10, 20), (20, 40), (30, 60), (40, 80)])\n getattr(eta, '_calculate')()\n assert 50 == eta.eta_epoch\n assert 2.0 == eta.rate\n assert 2.0 == eta.rate_unstable\n\n\ndef test_linear_transform():\n \"\"\"Wolfram Alpha:\n x is the timestamp. y is the numerator. 120 is the denominator.\n linear fit {1.2, 22},{2.4, 58},{3.1, 102},{4.4, 118}\n\n The closer we get to 100%, the more vertical shift/transform is applied to the line.\n As we near the end we want the line to get closer to the last point on the graph.\n This avoids having 99% with an ETA in the past.\n \"\"\"\n eta = ETA(120)\n eta._timing_data = deque([(1.2, 22), (2.4, 58), (3.1, 102), (4.4, 118)])\n getattr(eta, '_calculate')()\n assert 4.4 < eta.eta_epoch < 4.6\n assert 30 < eta.rate < 35\n assert 12 < eta.rate_unstable < 13\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_linear_slope_1():\n eta = ETA(100)\n eta._timing_data = deque([(10, 10), (20, 20), (30, 30), (40, 40)])\n getattr(eta, '_calculate')()\n assert 100 == eta.eta_epoch\n assert 1.0 == eta.rate\n assert 1.0 == eta.rate_unstable\n\n\ndef test_linear_slope_2():\n eta = ETA(100)\n eta._timing_data = deque([(10, 20), (20, 40), (30, 60), (40, 80)])\n getattr(eta, '_calculate')()\n assert 50 == eta.eta_epoch\n assert 2.0 == eta.rate\n assert 2.0 == eta.rate_unstable\n\n\ndef test_linear_transform():\n \"\"\"Wolfram Alpha:\n x is the timestamp. y is the numerator. 120 is the denominator.\n linear fit {1.2, 22},{2.4, 58},{3.1, 102},{4.4, 118}\n\n The closer we get to 100%, the more vertical shift/transform is applied to the line.\n As we near the end we want the line to get closer to the last point on the graph.\n This avoids having 99% with an ETA in the past.\n \"\"\"\n eta = ETA(120)\n eta._timing_data = deque([(1.2, 22), (2.4, 58), (3.1, 102), (4.4, 118)])\n getattr(eta, '_calculate')()\n assert 4.4 < eta.eta_epoch < 4.6\n assert 30 < eta.rate < 35\n assert 12 < eta.rate_unstable < 13\n\n\ndef test_linear_transform_undefined():\n eta = ETA()\n eta._timing_data = deque([(1.2, 22), (2.4, 58), (3.1, 102), (4.4, 118)])\n getattr(eta, '_calculate')()\n assert eta.eta_epoch is None\n assert 30 < eta.rate < 35\n assert 12 < eta.rate_unstable < 13\n",
"step-4": "from collections import deque\nfrom etaprogress.eta import ETA\n\n\ndef test_linear_slope_1():\n eta = ETA(100)\n eta._timing_data = deque([(10, 10), (20, 20), (30, 30), (40, 40)])\n getattr(eta, '_calculate')()\n assert 100 == eta.eta_epoch\n assert 1.0 == eta.rate\n assert 1.0 == eta.rate_unstable\n\n\ndef test_linear_slope_2():\n eta = ETA(100)\n eta._timing_data = deque([(10, 20), (20, 40), (30, 60), (40, 80)])\n getattr(eta, '_calculate')()\n assert 50 == eta.eta_epoch\n assert 2.0 == eta.rate\n assert 2.0 == eta.rate_unstable\n\n\ndef test_linear_transform():\n \"\"\"Wolfram Alpha:\n x is the timestamp. y is the numerator. 120 is the denominator.\n linear fit {1.2, 22},{2.4, 58},{3.1, 102},{4.4, 118}\n\n The closer we get to 100%, the more vertical shift/transform is applied to the line.\n As we near the end we want the line to get closer to the last point on the graph.\n This avoids having 99% with an ETA in the past.\n \"\"\"\n eta = ETA(120)\n eta._timing_data = deque([(1.2, 22), (2.4, 58), (3.1, 102), (4.4, 118)])\n getattr(eta, '_calculate')()\n assert 4.4 < eta.eta_epoch < 4.6\n assert 30 < eta.rate < 35\n assert 12 < eta.rate_unstable < 13\n\n\ndef test_linear_transform_undefined():\n eta = ETA()\n eta._timing_data = deque([(1.2, 22), (2.4, 58), (3.1, 102), (4.4, 118)])\n getattr(eta, '_calculate')()\n assert eta.eta_epoch is None\n assert 30 < eta.rate < 35\n assert 12 < eta.rate_unstable < 13\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class Substr(ConvertAbstract):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Substr(ConvertAbstract):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def apply(self, data):
result = ''
if self._isBlank(data):
return [result]
s_idx = 0
e_idx = 0
if len(self.arg_list) >= 2:
s_idx = int(self.arg_list[0])
e_idx = int(self.arg_list[1])
else:
return [result]
if s_idx > len(data):
s_idx = 0
if e_idx > len(data):
e_idx = len(data)
if e_idx == 0:
result = data[s_idx:]
else:
result = data[s_idx:e_idx]
return [result]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Substr(ConvertAbstract):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def apply(self, data):
result = ''
if self._isBlank(data):
return [result]
s_idx = 0
e_idx = 0
if len(self.arg_list) >= 2:
s_idx = int(self.arg_list[0])
e_idx = int(self.arg_list[1])
else:
return [result]
if s_idx > len(data):
s_idx = 0
if e_idx > len(data):
e_idx = len(data)
if e_idx == 0:
result = data[s_idx:]
else:
result = data[s_idx:e_idx]
return [result]
if __name__ == '__main__':
_str = 'Korea'
print(Substr(arg_list=[0, 1]).apply(_str))
<|reserved_special_token_1|>
from mlps.core.data.cnvrtr.ConvertAbstract import ConvertAbstract
class Substr(ConvertAbstract):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def apply(self, data):
result = ''
if self._isBlank(data):
return [result]
s_idx = 0
e_idx = 0
if len(self.arg_list) >= 2:
s_idx = int(self.arg_list[0])
e_idx = int(self.arg_list[1])
else:
return [result]
if s_idx > len(data):
s_idx = 0
if e_idx > len(data):
e_idx = len(data)
if e_idx == 0:
result = data[s_idx:]
else:
result = data[s_idx:e_idx]
return [result]
if __name__ == '__main__':
_str = 'Korea'
print(Substr(arg_list=[0, 1]).apply(_str))
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Author : Seungyeon Jo
# e-mail : syjo@seculayer.co.kr
# Powered by Seculayer © 2018 AI-Core Team
from mlps.core.data.cnvrtr.ConvertAbstract import ConvertAbstract
class Substr(ConvertAbstract):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def apply(self, data):
result = ''
# check blank
if self._isBlank(data) :
return [result]
s_idx = 0
e_idx = 0
if len(self.arg_list) >= 2 :
s_idx = int(self.arg_list[0])
e_idx = int(self.arg_list[1])
else:
return [result]
if s_idx > len(data):
s_idx = 0
if e_idx > len(data):
e_idx = len(data)
if e_idx == 0:
result = data[s_idx:]
else:
result = data[s_idx:e_idx]
return [result]
if __name__ == "__main__":
_str = "Korea"
print(Substr(arg_list=[0, 1]).apply(_str))
|
flexible
|
{
"blob_id": "f704742b9e023a1c3386fed293032fd8196b875e",
"index": 7344,
"step-1": "<mask token>\n\n\nclass Substr(ConvertAbstract):\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Substr(ConvertAbstract):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n def apply(self, data):\n result = ''\n if self._isBlank(data):\n return [result]\n s_idx = 0\n e_idx = 0\n if len(self.arg_list) >= 2:\n s_idx = int(self.arg_list[0])\n e_idx = int(self.arg_list[1])\n else:\n return [result]\n if s_idx > len(data):\n s_idx = 0\n if e_idx > len(data):\n e_idx = len(data)\n if e_idx == 0:\n result = data[s_idx:]\n else:\n result = data[s_idx:e_idx]\n return [result]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Substr(ConvertAbstract):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n def apply(self, data):\n result = ''\n if self._isBlank(data):\n return [result]\n s_idx = 0\n e_idx = 0\n if len(self.arg_list) >= 2:\n s_idx = int(self.arg_list[0])\n e_idx = int(self.arg_list[1])\n else:\n return [result]\n if s_idx > len(data):\n s_idx = 0\n if e_idx > len(data):\n e_idx = len(data)\n if e_idx == 0:\n result = data[s_idx:]\n else:\n result = data[s_idx:e_idx]\n return [result]\n\n\nif __name__ == '__main__':\n _str = 'Korea'\n print(Substr(arg_list=[0, 1]).apply(_str))\n",
"step-4": "from mlps.core.data.cnvrtr.ConvertAbstract import ConvertAbstract\n\n\nclass Substr(ConvertAbstract):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n def apply(self, data):\n result = ''\n if self._isBlank(data):\n return [result]\n s_idx = 0\n e_idx = 0\n if len(self.arg_list) >= 2:\n s_idx = int(self.arg_list[0])\n e_idx = int(self.arg_list[1])\n else:\n return [result]\n if s_idx > len(data):\n s_idx = 0\n if e_idx > len(data):\n e_idx = len(data)\n if e_idx == 0:\n result = data[s_idx:]\n else:\n result = data[s_idx:e_idx]\n return [result]\n\n\nif __name__ == '__main__':\n _str = 'Korea'\n print(Substr(arg_list=[0, 1]).apply(_str))\n",
"step-5": "# -*- coding: utf-8 -*-\n# Author : Seungyeon Jo\n# e-mail : syjo@seculayer.co.kr\n# Powered by Seculayer © 2018 AI-Core Team\n\nfrom mlps.core.data.cnvrtr.ConvertAbstract import ConvertAbstract\n\n\nclass Substr(ConvertAbstract):\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n \n def apply(self, data):\n result = ''\n \n # check blank\n if self._isBlank(data) :\n return [result]\n \n s_idx = 0\n e_idx = 0\n if len(self.arg_list) >= 2 :\n s_idx = int(self.arg_list[0])\n e_idx = int(self.arg_list[1])\n else:\n return [result]\n \n if s_idx > len(data):\n s_idx = 0\n if e_idx > len(data):\n e_idx = len(data)\n \n if e_idx == 0:\n result = data[s_idx:]\n else:\n result = data[s_idx:e_idx]\n \n return [result]\n\n\nif __name__ == \"__main__\":\n _str = \"Korea\"\n print(Substr(arg_list=[0, 1]).apply(_str))\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from utils.gradient_strategy.dct_generator import DCTGenerator
from utils.gradient_strategy.random_generator import RandomGenerator
from utils.gradient_strategy.upsample_generator import UpSampleGenerator
from utils.gradient_strategy.centerconv_generator import CenterConvGenerator
from utils.attack_setting import *
from utils.construct_model_data import construct_model_and_data
from utils.generate_model import ImageModel
from utils.generate_video import video
from utils.load_data import ImageData, split_data
from utils.show_or_save import *
from utils.gradient_strategy.centerconv_generator import CenterConvGenerator
|
flexible
|
{
"blob_id": "399097ef7cfdc061b307c3cc29615c9f50b1e6bf",
"index": 5511,
"step-1": "<mask token>\n",
"step-2": "from utils.gradient_strategy.dct_generator import DCTGenerator\nfrom utils.gradient_strategy.random_generator import RandomGenerator\nfrom utils.gradient_strategy.upsample_generator import UpSampleGenerator\nfrom utils.gradient_strategy.centerconv_generator import CenterConvGenerator\nfrom utils.attack_setting import *\nfrom utils.construct_model_data import construct_model_and_data\nfrom utils.generate_model import ImageModel\nfrom utils.generate_video import video\nfrom utils.load_data import ImageData, split_data\nfrom utils.show_or_save import *\nfrom utils.gradient_strategy.centerconv_generator import CenterConvGenerator\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/',methods=["GET","POST"])
def inicio():
nombre = "jose"
return render_template("inicio.html",nombre=nombre)
app.run(debug=True)
|
normal
|
{
"blob_id": "caa28bd64141c8d2f3212b5e4e77129d81d24c71",
"index": 2290,
"step-1": "<mask token>\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef inicio():\n nombre = 'jose'\n return render_template('inicio.html', nombre=nombre)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef inicio():\n nombre = 'jose'\n return render_template('inicio.html', nombre=nombre)\n\n\napp.run(debug=True)\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef inicio():\n nombre = 'jose'\n return render_template('inicio.html', nombre=nombre)\n\n\napp.run(debug=True)\n",
"step-4": "from flask import Flask, render_template\napp = Flask(__name__)\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef inicio():\n nombre = 'jose'\n return render_template('inicio.html', nombre=nombre)\n\n\napp.run(debug=True)\n",
"step-5": "from flask import Flask, render_template\napp = Flask(__name__)\t\n\n@app.route('/',methods=[\"GET\",\"POST\"])\ndef inicio():\n\tnombre = \"jose\"\n\treturn render_template(\"inicio.html\",nombre=nombre)\n\napp.run(debug=True)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
final.to_csv('../../Downloads/final_con_final.tsv', sep='\t', index=False)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
df1 = pd.read_csv('../final/your_no.tsv', '\t')
df2 = pd.read_csv('../../Downloads/me.csv', '\t')
final = pd.concat([df1, df2])
final.to_csv('../../Downloads/final_con_final.tsv', sep='\t', index=False)
<|reserved_special_token_1|>
import pandas as pd
df1 = pd.read_csv('../final/your_no.tsv', '\t')
df2 = pd.read_csv('../../Downloads/me.csv', '\t')
final = pd.concat([df1, df2])
final.to_csv('../../Downloads/final_con_final.tsv', sep='\t', index=False)
<|reserved_special_token_1|>
import pandas as pd
df1 = pd.read_csv("../final/your_no.tsv", '\t')
df2 = pd.read_csv("../../Downloads/me.csv", '\t')
final = pd.concat([df1, df2])
final.to_csv('../../Downloads/final_con_final.tsv', sep='\t', index=False)
|
flexible
|
{
"blob_id": "cd5945631a9dd505bf67089bab8c5a37ad375129",
"index": 410,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfinal.to_csv('../../Downloads/final_con_final.tsv', sep='\\t', index=False)\n",
"step-3": "<mask token>\ndf1 = pd.read_csv('../final/your_no.tsv', '\\t')\ndf2 = pd.read_csv('../../Downloads/me.csv', '\\t')\nfinal = pd.concat([df1, df2])\nfinal.to_csv('../../Downloads/final_con_final.tsv', sep='\\t', index=False)\n",
"step-4": "import pandas as pd\ndf1 = pd.read_csv('../final/your_no.tsv', '\\t')\ndf2 = pd.read_csv('../../Downloads/me.csv', '\\t')\nfinal = pd.concat([df1, df2])\nfinal.to_csv('../../Downloads/final_con_final.tsv', sep='\\t', index=False)\n",
"step-5": "import pandas as pd \ndf1 = pd.read_csv(\"../final/your_no.tsv\", '\\t')\ndf2 = pd.read_csv(\"../../Downloads/me.csv\", '\\t')\nfinal = pd.concat([df1, df2])\nfinal.to_csv('../../Downloads/final_con_final.tsv', sep='\\t', index=False)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print('Choose from the following options: ')
<|reserved_special_token_0|>
print(one, '\n', two, '\n', three, '\n', four, '\n', five)
<|reserved_special_token_0|>
if value == 1:
modem_on = input('\nIs your modem on? (Enter Y or N): ')
if modem_on == 'Y':
router_on = input('\nIs your router on? (Enter Y or N): ')
if router_on == 'Y':
redlight = input(
'\nDoes your router emit a red light? (Enter Y or N): ')
if redlight == 'Y':
print(
'Unplug your router and wait thirty seconds. Then plug your router into the nearest outlet to restart your router. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
comp_wifi_on = input(
'\nAre both your computer and wifi on? (Enter Y or N): ')
if comp_wifi_on == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
"If your computer is not on, please turn it on by pressing the power button. Also make sure your computer's wifi is on. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!"
)
else:
print(
'Plug your router into the nearest outlet to turn on your router. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
'Plug your modem into the nearest outlet to turn on your modem. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 2:
cable_on = input('\nIs your cable box on? (Enter Y or N): ')
if cable_on == 'Y':
tv_on = input('\nIs your TV on? (Enter Y or N): ')
if tv_on == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
'Plug your TV into the nearest outlet and press the power button on your remote to turn on your TV. If you still do not recieve a cable signal, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
'Plug your cable box into the nearest outlet to turn on your cable box. If you still do not recieve a cable signal, please restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 3:
phones_on = input('\nAre your phones on? (Enter Y or N): ')
if phone_on == 'Y':
landline_plugged = input(
"""
Is there a landline wire plugged into each phone or the wireless phone terminal? (Enter Y or N): """
)
if landline_plugged == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
'Plug a landline wire into each phone or phone terminal. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
'Plug your phones into the nearest outlet to turn them on. If you are using wireless phones, please make sure you change them before attempting to use them. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 4:
late_payment = input(
'\nWere you late on your last payment? (Enter Y or N): ')
if late_payment == 'Y':
print(
'If you were late on your last payment, you will be charged an additional 5% interest fee. Therefore, your bill may be more than usual. If you would like to contest your charge, please call 555-555-5555 for additional support with this matter. Thank you for your patience. Note, this program will now terminate. Goodbye!'
)
else:
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
if value == 5:
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
if value < 1 or value > 5:
print(
'You entered an invalid menu choice. It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
<|reserved_special_token_1|>
print('Choose from the following options: ')
one = ' 1. My internet is not working.'
two = '2. My cable is not working.'
three = '3. My phones are not working.'
four = '4. My bill is wrong.'
five = '5. I want to upgrade my plan.'
print(one, '\n', two, '\n', three, '\n', four, '\n', five)
value = int(input('(Enter a value 1 - 5): '))
if value == 1:
modem_on = input('\nIs your modem on? (Enter Y or N): ')
if modem_on == 'Y':
router_on = input('\nIs your router on? (Enter Y or N): ')
if router_on == 'Y':
redlight = input(
'\nDoes your router emit a red light? (Enter Y or N): ')
if redlight == 'Y':
print(
'Unplug your router and wait thirty seconds. Then plug your router into the nearest outlet to restart your router. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
comp_wifi_on = input(
'\nAre both your computer and wifi on? (Enter Y or N): ')
if comp_wifi_on == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
"If your computer is not on, please turn it on by pressing the power button. Also make sure your computer's wifi is on. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!"
)
else:
print(
'Plug your router into the nearest outlet to turn on your router. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
'Plug your modem into the nearest outlet to turn on your modem. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 2:
cable_on = input('\nIs your cable box on? (Enter Y or N): ')
if cable_on == 'Y':
tv_on = input('\nIs your TV on? (Enter Y or N): ')
if tv_on == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
'Plug your TV into the nearest outlet and press the power button on your remote to turn on your TV. If you still do not recieve a cable signal, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
'Plug your cable box into the nearest outlet to turn on your cable box. If you still do not recieve a cable signal, please restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 3:
phones_on = input('\nAre your phones on? (Enter Y or N): ')
if phone_on == 'Y':
landline_plugged = input(
"""
Is there a landline wire plugged into each phone or the wireless phone terminal? (Enter Y or N): """
)
if landline_plugged == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
'Plug a landline wire into each phone or phone terminal. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
'Plug your phones into the nearest outlet to turn them on. If you are using wireless phones, please make sure you change them before attempting to use them. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 4:
late_payment = input(
'\nWere you late on your last payment? (Enter Y or N): ')
if late_payment == 'Y':
print(
'If you were late on your last payment, you will be charged an additional 5% interest fee. Therefore, your bill may be more than usual. If you would like to contest your charge, please call 555-555-5555 for additional support with this matter. Thank you for your patience. Note, this program will now terminate. Goodbye!'
)
else:
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
if value == 5:
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
if value < 1 or value > 5:
print(
'You entered an invalid menu choice. It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
<|reserved_special_token_1|>
# "Time Warner Python" Salma Hashem netid: sh5640
#Design costumer service application by asking users series of questions, and based on the customers' answers to the questions, provide them with instructions.
#Ask the user to choose from the following options
print("Choose from the following options: ")
#assign each menu option to a number
one= " 1. My internet is not working."
two= "2. My cable is not working."
three= "3. My phones are not working."
four= "4. My bill is wrong."
five= "5. I want to upgrade my plan."
#Print the options each on its own line and ask the user to input a number and convert into an integer
print(one, "\n", two, "\n", three, "\n", four, "\n", five)
value= int(input("(Enter a value 1 - 5): "))
#assign variables to user inputs using if else statements for scenario one and print output based on user inputs
if value==1:
modem_on=input("\nIs your modem on? (Enter Y or N): ")
if modem_on=="Y":
router_on=input("\nIs your router on? (Enter Y or N): ")
if router_on=="Y":
redlight= input("\nDoes your router emit a red light? (Enter Y or N): ")
if redlight=="Y":
print("Unplug your router and wait thirty seconds. Then plug your router into the nearest outlet to restart your router. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!")
else:
comp_wifi_on=input("\nAre both your computer and wifi on? (Enter Y or N): ")
if comp_wifi_on=="Y":
print("It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
else:
print("If your computer is not on, please turn it on by pressing the power button. Also make sure your computer's wifi is on. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!")
else:
print("Plug your router into the nearest outlet to turn on your router. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!")
else:
print("Plug your modem into the nearest outlet to turn on your modem. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!")
#assign variables to user inputs using if statements for scenario two and print output based on user inputs
if value==2:
cable_on=input("\nIs your cable box on? (Enter Y or N): ")
if cable_on=="Y":
tv_on=input("\nIs your TV on? (Enter Y or N): ")
if tv_on=="Y":
print("It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
else:
print("Plug your TV into the nearest outlet and press the power button on your remote to turn on your TV. If you still do not recieve a cable signal, restart this program. Note, this program will now terminate. Goodbye!")
else:
print("Plug your cable box into the nearest outlet to turn on your cable box. If you still do not recieve a cable signal, please restart this program. Note, this program will now terminate. Goodbye!")
#assign variables to user inputs using if statements for scenario three and print output based on user inputs
if value==3:
phones_on=input("\nAre your phones on? (Enter Y or N): ")
if phone_on=="Y":
landline_plugged=input("\nIs there a landline wire plugged into each phone or the wireless phone terminal? (Enter Y or N): ")
if landline_plugged=="Y":
print("It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
else:
print("Plug a landline wire into each phone or phone terminal. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!")
else:
print("Plug your phones into the nearest outlet to turn them on. If you are using wireless phones, please make sure you change them before attempting to use them. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!")
#assign variables to user inputs using if statements for scenario four and print output based on user inputs
# Scenario 4: the customer's bill is wrong.
if value == 4:
    late_payment = input("\nWere you late on your last payment? (Enter Y or N): ")
    if late_payment != "Y":
        print("It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
    else:
        print("If you were late on your last payment, you will be charged an additional 5% interest fee. Therefore, your bill may be more than usual. If you would like to contest your charge, please call 555-555-5555 for additional support with this matter. Thank you for your patience. Note, this program will now terminate. Goodbye!")
# Scenario 5: plan upgrades are handled by a human agent.
if value == 5:
    print("It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
# Any menu choice outside 1-5 is invalid.
if not 1 <= value <= 5:
    print("You entered an invalid menu choice. It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
|
flexible
|
{
"blob_id": "736b84bbcf1d5954b491068be4060edeade2c1c5",
"index": 2205,
"step-1": "<mask token>\n",
"step-2": "print('Choose from the following options: ')\n<mask token>\nprint(one, '\\n', two, '\\n', three, '\\n', four, '\\n', five)\n<mask token>\nif value == 1:\n modem_on = input('\\nIs your modem on? (Enter Y or N): ')\n if modem_on == 'Y':\n router_on = input('\\nIs your router on? (Enter Y or N): ')\n if router_on == 'Y':\n redlight = input(\n '\\nDoes your router emit a red light? (Enter Y or N): ')\n if redlight == 'Y':\n print(\n 'Unplug your router and wait thirty seconds. Then plug your router into the nearest outlet to restart your router. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!'\n )\n else:\n comp_wifi_on = input(\n '\\nAre both your computer and wifi on? (Enter Y or N): ')\n if comp_wifi_on == 'Y':\n print(\n 'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\n else:\n print(\n \"If your computer is not on, please turn it on by pressing the power button. Also make sure your computer's wifi is on. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!\"\n )\n else:\n print(\n 'Plug your router into the nearest outlet to turn on your router. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'\n )\n else:\n print(\n 'Plug your modem into the nearest outlet to turn on your modem. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'\n )\nif value == 2:\n cable_on = input('\\nIs your cable box on? (Enter Y or N): ')\n if cable_on == 'Y':\n tv_on = input('\\nIs your TV on? (Enter Y or N): ')\n if tv_on == 'Y':\n print(\n 'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. 
Thank you for your patience.'\n )\n else:\n print(\n 'Plug your TV into the nearest outlet and press the power button on your remote to turn on your TV. If you still do not recieve a cable signal, restart this program. Note, this program will now terminate. Goodbye!'\n )\n else:\n print(\n 'Plug your cable box into the nearest outlet to turn on your cable box. If you still do not recieve a cable signal, please restart this program. Note, this program will now terminate. Goodbye!'\n )\nif value == 3:\n phones_on = input('\\nAre your phones on? (Enter Y or N): ')\n if phone_on == 'Y':\n landline_plugged = input(\n \"\"\"\nIs there a landline wire plugged into each phone or the wireless phone terminal? (Enter Y or N): \"\"\"\n )\n if landline_plugged == 'Y':\n print(\n 'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\n else:\n print(\n 'Plug a landline wire into each phone or phone terminal. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'\n )\n else:\n print(\n 'Plug your phones into the nearest outlet to turn them on. If you are using wireless phones, please make sure you change them before attempting to use them. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'\n )\nif value == 4:\n late_payment = input(\n '\\nWere you late on your last payment? (Enter Y or N): ')\n if late_payment == 'Y':\n print(\n 'If you were late on your last payment, you will be charged an additional 5% interest fee. Therefore, your bill may be more than usual. If you would like to contest your charge, please call 555-555-5555 for additional support with this matter. Thank you for your patience. Note, this program will now terminate. Goodbye!'\n )\n else:\n print(\n 'It looks like you may need additional support. 
Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\nif value == 5:\n print(\n 'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\nif value < 1 or value > 5:\n print(\n 'You entered an invalid menu choice. It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\n",
"step-3": "print('Choose from the following options: ')\none = ' 1. My internet is not working.'\ntwo = '2. My cable is not working.'\nthree = '3. My phones are not working.'\nfour = '4. My bill is wrong.'\nfive = '5. I want to upgrade my plan.'\nprint(one, '\\n', two, '\\n', three, '\\n', four, '\\n', five)\nvalue = int(input('(Enter a value 1 - 5): '))\nif value == 1:\n modem_on = input('\\nIs your modem on? (Enter Y or N): ')\n if modem_on == 'Y':\n router_on = input('\\nIs your router on? (Enter Y or N): ')\n if router_on == 'Y':\n redlight = input(\n '\\nDoes your router emit a red light? (Enter Y or N): ')\n if redlight == 'Y':\n print(\n 'Unplug your router and wait thirty seconds. Then plug your router into the nearest outlet to restart your router. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!'\n )\n else:\n comp_wifi_on = input(\n '\\nAre both your computer and wifi on? (Enter Y or N): ')\n if comp_wifi_on == 'Y':\n print(\n 'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\n else:\n print(\n \"If your computer is not on, please turn it on by pressing the power button. Also make sure your computer's wifi is on. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!\"\n )\n else:\n print(\n 'Plug your router into the nearest outlet to turn on your router. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'\n )\n else:\n print(\n 'Plug your modem into the nearest outlet to turn on your modem. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'\n )\nif value == 2:\n cable_on = input('\\nIs your cable box on? (Enter Y or N): ')\n if cable_on == 'Y':\n tv_on = input('\\nIs your TV on? 
(Enter Y or N): ')\n if tv_on == 'Y':\n print(\n 'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\n else:\n print(\n 'Plug your TV into the nearest outlet and press the power button on your remote to turn on your TV. If you still do not recieve a cable signal, restart this program. Note, this program will now terminate. Goodbye!'\n )\n else:\n print(\n 'Plug your cable box into the nearest outlet to turn on your cable box. If you still do not recieve a cable signal, please restart this program. Note, this program will now terminate. Goodbye!'\n )\nif value == 3:\n phones_on = input('\\nAre your phones on? (Enter Y or N): ')\n if phone_on == 'Y':\n landline_plugged = input(\n \"\"\"\nIs there a landline wire plugged into each phone or the wireless phone terminal? (Enter Y or N): \"\"\"\n )\n if landline_plugged == 'Y':\n print(\n 'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\n else:\n print(\n 'Plug a landline wire into each phone or phone terminal. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'\n )\n else:\n print(\n 'Plug your phones into the nearest outlet to turn them on. If you are using wireless phones, please make sure you change them before attempting to use them. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'\n )\nif value == 4:\n late_payment = input(\n '\\nWere you late on your last payment? (Enter Y or N): ')\n if late_payment == 'Y':\n print(\n 'If you were late on your last payment, you will be charged an additional 5% interest fee. Therefore, your bill may be more than usual. If you would like to contest your charge, please call 555-555-5555 for additional support with this matter. Thank you for your patience. 
Note, this program will now terminate. Goodbye!'\n )\n else:\n print(\n 'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\nif value == 5:\n print(\n 'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\nif value < 1 or value > 5:\n print(\n 'You entered an invalid menu choice. It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'\n )\n",
"step-4": "# \"Time Warner Python\" Salma Hashem netid: sh5640\n#Design costumer service application by asking users series of questions, and based on the customers' answers to the questions, provide them with instructions. \n#Ask the user to choose from the following options \nprint(\"Choose from the following options: \")\n#assign each menu option to a number\none= \" 1. My internet is not working.\"\ntwo= \"2. My cable is not working.\"\nthree= \"3. My phones are not working.\"\nfour= \"4. My bill is wrong.\"\nfive= \"5. I want to upgrade my plan.\"\n#Print the options each on its own line and ask the user to input a number and convert into an integer\nprint(one, \"\\n\", two, \"\\n\", three, \"\\n\", four, \"\\n\", five)\nvalue= int(input(\"(Enter a value 1 - 5): \"))\n#assign variables to user inputs using if else statements for scenario one and print output based on user inputs \n\n\nif value==1:\n modem_on=input(\"\\nIs your modem on? (Enter Y or N): \")\n if modem_on==\"Y\":\n router_on=input(\"\\nIs your router on? (Enter Y or N): \")\n if router_on==\"Y\":\n redlight= input(\"\\nDoes your router emit a red light? (Enter Y or N): \")\n if redlight==\"Y\":\n print(\"Unplug your router and wait thirty seconds. Then plug your router into the nearest outlet to restart your router. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!\")\n else:\n comp_wifi_on=input(\"\\nAre both your computer and wifi on? (Enter Y or N): \")\n if comp_wifi_on==\"Y\":\n print(\"It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.\")\n else:\n print(\"If your computer is not on, please turn it on by pressing the power button. Also make sure your computer's wifi is on. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. 
Goodbye!\")\n else:\n print(\"Plug your router into the nearest outlet to turn on your router. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!\")\n \n else:\n print(\"Plug your modem into the nearest outlet to turn on your modem. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!\")\n#assign variables to user inputs using if statements for scenario two and print output based on user inputs \nif value==2:\n cable_on=input(\"\\nIs your cable box on? (Enter Y or N): \")\n if cable_on==\"Y\":\n tv_on=input(\"\\nIs your TV on? (Enter Y or N): \")\n if tv_on==\"Y\":\n print(\"It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.\")\n else:\n print(\"Plug your TV into the nearest outlet and press the power button on your remote to turn on your TV. If you still do not recieve a cable signal, restart this program. Note, this program will now terminate. Goodbye!\")\n else:\n print(\"Plug your cable box into the nearest outlet to turn on your cable box. If you still do not recieve a cable signal, please restart this program. Note, this program will now terminate. Goodbye!\")\n#assign variables to user inputs using if statements for scenario three and print output based on user inputs \nif value==3:\n phones_on=input(\"\\nAre your phones on? (Enter Y or N): \")\n if phone_on==\"Y\":\n landline_plugged=input(\"\\nIs there a landline wire plugged into each phone or the wireless phone terminal? (Enter Y or N): \")\n if landline_plugged==\"Y\":\n print(\"It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.\")\n else:\n print(\"Plug a landline wire into each phone or phone terminal. If you still cannot use your phones, please restart this program. 
Note, this program will now terminate. Goodbye!\")\n else:\n print(\"Plug your phones into the nearest outlet to turn them on. If you are using wireless phones, please make sure you change them before attempting to use them. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!\")\n#assign variables to user inputs using if statements for scenario four and print output based on user inputs\nif value==4:\n late_payment= input(\"\\nWere you late on your last payment? (Enter Y or N): \")\n if late_payment==\"Y\":\n print(\"If you were late on your last payment, you will be charged an additional 5% interest fee. Therefore, your bill may be more than usual. If you would like to contest your charge, please call 555-555-5555 for additional support with this matter. Thank you for your patience. Note, this program will now terminate. Goodbye!\")\n else:\n print(\"It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.\")\n#scenario 5--evaluate input and print output based on user input\nif value==5:\n print(\"It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.\")\n#create if statements to evaluate invalid user inputs\nif value<1 or value>5:\n print(\"You entered an invalid menu choice. It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.\")\n \n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Node:
<|reserved_special_token_0|>
def __getName(self):
return self.__name
def __getTotalCores(self):
return self.__totalCores
def __setTotalCores(self, total):
assert total >= 0 and total >= self.__usedCores
self.__totalCores = total
def __getUsedCores(self):
return self.__usedCores
<|reserved_special_token_0|>
def __getFreeCores(self):
return self.__totalCores - self.__usedCores
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def allocate(self, cores):
allocated = min(cores, self.free)
self.__usedCores += allocated
if self.resources is not None:
self.resources.nodeCoresAllocated(allocated)
return allocated
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Resources:
def __init__(self, nodes=None):
self.__nodes = nodes
if self.__nodes is None:
self.__nodes = []
for node in self.__nodes:
node.resources = self
self.__totalCores = 0
self.__usedCores = 0
self.__computeCores()
def __computeCores(self):
total, used = 0, 0
for node in self.__nodes:
total += node.total
used += node.used
self.__totalCores = total
self.__usedCores = used
def __getNodes(self):
return self.__nodes
def __getTotalCores(self):
return self.__totalCores
def __getUsedCores(self):
return self.__usedCores
def __getFreeCores(self):
return self.__totalCores - self.__usedCores
"""
Function called by the node when some cores has been allocated.
This function should track number of used cores in Resources statistics.
Args:
cores (int): number of allocated cores
"""
def nodeCoresAllocated(self, cores):
self.__usedCores += cores
"""
Function called by the node when some cores has been released.
This function should track number of used cores in Resources statistics.
Args:
cores (int): number of released cores
"""
def nodeCoresReleased(self, cores):
self.__usedCores -= cores
"""
Relase allocated resources.
Args:
alloc (Allocation): allocation to release
Raises:
InvalidResourceSpec: when number of cores to release on a node is greater
than number of used cores.
"""
def releaseAllocation(self, alloc):
for node in alloc.nodeAllocations:
node.node.release(node.cores)
def __str__(self):
header = '%d (%d used) cores on %d nodes\n' % (self.__totalCores,
self.__usedCores, len(self.__nodes))
return header + '\n'.join([str(node) for node in self.__nodes])
def nNodes(self):
return len(self.__nodes)
nodes = property(__getNodes, None, None, 'list of a nodes')
totalNodes = property(nNodes, None, None, 'total number of nodes')
totalCores = property(__getTotalCores, None, None, 'total number of cores')
usedCores = property(__getUsedCores, None, None, 'used number of cores')
freeCores = property(__getFreeCores, None, None, 'free number of cores')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Node:
<|reserved_special_token_0|>
def __getName(self):
return self.__name
def __getTotalCores(self):
return self.__totalCores
def __setTotalCores(self, total):
assert total >= 0 and total >= self.__usedCores
self.__totalCores = total
def __getUsedCores(self):
return self.__usedCores
def __setUsedCores(self, used):
assert used > 0 and used <= self.__totalCores
self.__usedCores = used
def __getFreeCores(self):
return self.__totalCores - self.__usedCores
def __str__(self):
return '%s %d (%d used)' % (self.__name, self.__totalCores, self.
__usedCores)
<|reserved_special_token_0|>
def allocate(self, cores):
allocated = min(cores, self.free)
self.__usedCores += allocated
if self.resources is not None:
self.resources.nodeCoresAllocated(allocated)
return allocated
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Resources:
def __init__(self, nodes=None):
self.__nodes = nodes
if self.__nodes is None:
self.__nodes = []
for node in self.__nodes:
node.resources = self
self.__totalCores = 0
self.__usedCores = 0
self.__computeCores()
def __computeCores(self):
total, used = 0, 0
for node in self.__nodes:
total += node.total
used += node.used
self.__totalCores = total
self.__usedCores = used
def __getNodes(self):
return self.__nodes
def __getTotalCores(self):
return self.__totalCores
def __getUsedCores(self):
return self.__usedCores
def __getFreeCores(self):
return self.__totalCores - self.__usedCores
"""
Function called by the node when some cores has been allocated.
This function should track number of used cores in Resources statistics.
Args:
cores (int): number of allocated cores
"""
def nodeCoresAllocated(self, cores):
self.__usedCores += cores
"""
Function called by the node when some cores has been released.
This function should track number of used cores in Resources statistics.
Args:
cores (int): number of released cores
"""
def nodeCoresReleased(self, cores):
self.__usedCores -= cores
"""
Relase allocated resources.
Args:
alloc (Allocation): allocation to release
Raises:
InvalidResourceSpec: when number of cores to release on a node is greater
than number of used cores.
"""
def releaseAllocation(self, alloc):
for node in alloc.nodeAllocations:
node.node.release(node.cores)
def __str__(self):
header = '%d (%d used) cores on %d nodes\n' % (self.__totalCores,
self.__usedCores, len(self.__nodes))
return header + '\n'.join([str(node) for node in self.__nodes])
def nNodes(self):
return len(self.__nodes)
nodes = property(__getNodes, None, None, 'list of a nodes')
totalNodes = property(nNodes, None, None, 'total number of nodes')
totalCores = property(__getTotalCores, None, None, 'total number of cores')
usedCores = property(__getUsedCores, None, None, 'used number of cores')
freeCores = property(__getFreeCores, None, None, 'free number of cores')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Node:
def __init__(self, name=None, totalCores=0, used=0):
self.__name = name
self.__totalCores = totalCores
self.__usedCores = used
self.resources = None
def __getName(self):
return self.__name
def __getTotalCores(self):
return self.__totalCores
def __setTotalCores(self, total):
assert total >= 0 and total >= self.__usedCores
self.__totalCores = total
def __getUsedCores(self):
return self.__usedCores
def __setUsedCores(self, used):
assert used > 0 and used <= self.__totalCores
self.__usedCores = used
def __getFreeCores(self):
return self.__totalCores - self.__usedCores
def __str__(self):
return '%s %d (%d used)' % (self.__name, self.__totalCores, self.
__usedCores)
<|reserved_special_token_0|>
def allocate(self, cores):
allocated = min(cores, self.free)
self.__usedCores += allocated
if self.resources is not None:
self.resources.nodeCoresAllocated(allocated)
return allocated
<|reserved_special_token_0|>
def release(self, cores):
if cores > self.__usedCores:
raise InvalidResourceSpec()
self.__usedCores -= cores
if self.resources is not None:
self.resources.nodeCoresReleased(cores)
name = property(__getName, None, None, 'name of the node')
total = property(__getTotalCores, __setTotalCores, None,
'total number of cores')
used = property(__getUsedCores, __setUsedCores, None,
'number of allocated cores')
free = property(__getFreeCores, None, None, 'number of available cores')
class Resources:
def __init__(self, nodes=None):
self.__nodes = nodes
if self.__nodes is None:
self.__nodes = []
for node in self.__nodes:
node.resources = self
self.__totalCores = 0
self.__usedCores = 0
self.__computeCores()
def __computeCores(self):
total, used = 0, 0
for node in self.__nodes:
total += node.total
used += node.used
self.__totalCores = total
self.__usedCores = used
def __getNodes(self):
return self.__nodes
def __getTotalCores(self):
return self.__totalCores
def __getUsedCores(self):
return self.__usedCores
def __getFreeCores(self):
return self.__totalCores - self.__usedCores
"""
Function called by the node when some cores has been allocated.
This function should track number of used cores in Resources statistics.
Args:
cores (int): number of allocated cores
"""
def nodeCoresAllocated(self, cores):
self.__usedCores += cores
"""
Function called by the node when some cores has been released.
This function should track number of used cores in Resources statistics.
Args:
cores (int): number of released cores
"""
def nodeCoresReleased(self, cores):
self.__usedCores -= cores
"""
Relase allocated resources.
Args:
alloc (Allocation): allocation to release
Raises:
InvalidResourceSpec: when number of cores to release on a node is greater
than number of used cores.
"""
def releaseAllocation(self, alloc):
for node in alloc.nodeAllocations:
node.node.release(node.cores)
def __str__(self):
header = '%d (%d used) cores on %d nodes\n' % (self.__totalCores,
self.__usedCores, len(self.__nodes))
return header + '\n'.join([str(node) for node in self.__nodes])
def nNodes(self):
return len(self.__nodes)
nodes = property(__getNodes, None, None, 'list of a nodes')
totalNodes = property(nNodes, None, None, 'total number of nodes')
totalCores = property(__getTotalCores, None, None, 'total number of cores')
usedCores = property(__getUsedCores, None, None, 'used number of cores')
freeCores = property(__getFreeCores, None, None, 'free number of cores')
<|reserved_special_token_1|>
from qcg.appscheduler.errors import *
class Node:
    """A single compute node tracking its total and allocated core counts."""

    def __init__(self, name=None, totalCores=0, used=0):
        self.__name = name
        self.__totalCores = totalCores
        self.__usedCores = used
        # Back-reference to the owning Resources pool; set by Resources.
        self.resources = None

    @property
    def name(self):
        """Name of the node."""
        return self.__name

    @property
    def total(self):
        """Total number of cores."""
        return self.__totalCores

    @total.setter
    def total(self, total):
        assert total >= 0 and total >= self.__usedCores
        self.__totalCores = total

    @property
    def used(self):
        """Number of allocated cores."""
        return self.__usedCores

    @used.setter
    def used(self, used):
        assert used > 0 and used <= self.__totalCores
        self.__usedCores = used

    @property
    def free(self):
        """Number of available cores."""
        return self.__totalCores - self.__usedCores

    def __str__(self):
        return "%s %d (%d used)" % (self.__name, self.__totalCores, self.__usedCores)

    def allocate(self, cores):
        """Allocate up to ``cores`` cores on this node.

        Args:
            cores (int): maximum number of cores to allocate

        Returns:
            int: number of cores actually allocated
        """
        granted = cores if cores <= self.free else self.free
        self.__usedCores += granted
        if self.resources is not None:
            self.resources.nodeCoresAllocated(granted)
        return granted

    def release(self, cores):
        """Release ``cores`` previously allocated cores.

        Args:
            cores (int): number of cores to release

        Raises:
            InvalidResourceSpec: when more cores are released than are used.
        """
        if cores > self.__usedCores:
            raise InvalidResourceSpec()
        self.__usedCores -= cores
        if self.resources is not None:
            self.resources.nodeCoresReleased(cores)
class Resources:
    """A pool of compute nodes with aggregated core-usage statistics."""

    def __init__(self, nodes=None):
        self.__nodes = [] if nodes is None else nodes
        # Let every node report its allocations back to this pool.
        for node in self.__nodes:
            node.resources = self
        self.__totalCores = 0
        self.__usedCores = 0
        self.__computeCores()

    def __computeCores(self):
        # Recompute the aggregate counters from the per-node state.
        self.__totalCores = sum(node.total for node in self.__nodes)
        self.__usedCores = sum(node.used for node in self.__nodes)

    @property
    def nodes(self):
        """List of the nodes in this pool."""
        return self.__nodes

    @property
    def totalCores(self):
        """Total number of cores."""
        return self.__totalCores

    @property
    def usedCores(self):
        """Used number of cores."""
        return self.__usedCores

    @property
    def freeCores(self):
        """Free number of cores."""
        return self.__totalCores - self.__usedCores

    def nodeCoresAllocated(self, cores):
        """Called by a node when some cores have been allocated on it.

        Tracks the number of used cores in the pool statistics.

        Args:
            cores (int): number of allocated cores
        """
        self.__usedCores += cores

    def nodeCoresReleased(self, cores):
        """Called by a node when some cores have been released on it.

        Tracks the number of used cores in the pool statistics.

        Args:
            cores (int): number of released cores
        """
        self.__usedCores -= cores

    def releaseAllocation(self, alloc):
        """Release all node allocations held by ``alloc``.

        Args:
            alloc (Allocation): allocation to release

        Raises:
            InvalidResourceSpec: when a node is asked to release more cores
                than it currently has in use.
        """
        for nodeAlloc in alloc.nodeAllocations:
            nodeAlloc.node.release(nodeAlloc.cores)

    def __str__(self):
        header = '%d (%d used) cores on %d nodes\n' % (self.__totalCores, self.__usedCores, len(self.__nodes))
        return header + '\n'.join(str(node) for node in self.__nodes)

    def nNodes(self):
        """Return the number of nodes in the pool."""
        return len(self.__nodes)

    totalNodes = property(nNodes, None, None, "total number of nodes")
<|reserved_special_token_1|>
from qcg.appscheduler.errors import *
class Node:
    """A single compute node with a fixed pool of cores that can be
    allocated and released."""

    def __init__(self, name=None, totalCores=0, used=0):
        self.__name = name
        self.__totalCores = totalCores
        self.__usedCores = used
        # Owning Resources pool (set by Resources); used to propagate
        # allocation/release statistics back to the pool.
        self.resources = None

    def __getName(self):
        return self.__name

    def __getTotalCores(self):
        return self.__totalCores

    def __setTotalCores(self, total):
        # The total can never drop below what is already in use.
        assert total >= 0 and total >= self.__usedCores
        self.__totalCores = total

    def __getUsedCores(self):
        return self.__usedCores

    def __setUsedCores(self, used):
        # Bug fix: the original asserted `used > 0`, which made it impossible
        # to reset usage to zero through the property even though release()
        # can legitimately bring it there (and the total setter allows >= 0).
        assert used >= 0 and used <= self.__totalCores
        self.__usedCores = used

    def __getFreeCores(self):
        return self.__totalCores - self.__usedCores

    def __str__(self):
        return "%s %d (%d used)" % (self.__name, self.__totalCores, self.__usedCores)

    def allocate(self, cores):
        """Allocate a maximum number of cores on the node.

        Args:
            cores (int): maximum number of cores to allocate

        Returns:
            int: number of cores actually allocated (may be fewer than
            requested when the node has less free capacity)
        """
        allocated = min(cores, self.free)
        self.__usedCores += allocated
        if self.resources is not None:
            self.resources.nodeCoresAllocated(allocated)
        return allocated

    def release(self, cores):
        """Release a specified number of cores on the node.

        Args:
            cores (int): number of cores to release

        Raises:
            InvalidResourceSpec: when the number of cores to release exceeds
                the number of used cores.
        """
        if cores > self.__usedCores:
            raise InvalidResourceSpec()
        self.__usedCores -= cores
        if self.resources is not None:
            self.resources.nodeCoresReleased(cores)

    name = property(__getName, None, None, "name of the node")
    total = property(__getTotalCores, __setTotalCores, None, "total number of cores")
    used = property(__getUsedCores, __setUsedCores, None, "number of allocated cores")
    free = property(__getFreeCores, None, None, "number of available cores")
class Resources:
    """A pool of compute nodes together with aggregate statistics on total,
    used and free cores."""

    def __init__(self, nodes=None):
        self.__nodes = nodes
        if self.__nodes is None:
            self.__nodes = []
        # Register this pool on every node so allocations/releases performed
        # on a node are reflected in the pool statistics.
        for node in self.__nodes:
            node.resources = self
        self.__totalCores = 0
        self.__usedCores = 0
        self.__computeCores()

    def __computeCores(self):
        # Derive the aggregate counters from the current per-node state.
        total, used = 0, 0
        for node in self.__nodes:
            total += node.total
            used += node.used
        self.__totalCores = total
        self.__usedCores = used

    def __getNodes(self):
        return self.__nodes

    def __getTotalCores(self):
        return self.__totalCores

    def __getUsedCores(self):
        return self.__usedCores

    def __getFreeCores(self):
        return self.__totalCores - self.__usedCores

    def nodeCoresAllocated(self, cores):
        """Called by a node when some cores have been allocated.

        Tracks the number of used cores in the Resources statistics.

        Args:
            cores (int): number of allocated cores
        """
        self.__usedCores += cores

    def nodeCoresReleased(self, cores):
        """Called by a node when some cores have been released.

        Tracks the number of used cores in the Resources statistics.

        Args:
            cores (int): number of released cores
        """
        self.__usedCores -= cores

    def releaseAllocation(self, alloc):
        """Release allocated resources.

        Args:
            alloc (Allocation): allocation to release

        Raises:
            InvalidResourceSpec: when the number of cores to release on a
                node is greater than its number of used cores.
        """
        for node in alloc.nodeAllocations:
            node.node.release(node.cores)

    def __str__(self):
        header = '%d (%d used) cores on %d nodes\n' % (self.__totalCores, self.__usedCores,
                                                       len(self.__nodes))
        return header + '\n'.join([str(node) for node in self.__nodes])

    def nNodes(self):
        """Return the number of nodes in the pool."""
        return len(self.__nodes)

    nodes = property(__getNodes, None, None, "list of a nodes")
    totalNodes = property(nNodes, None, None, "total number of nodes")
    totalCores = property(__getTotalCores, None, None, "total number of cores")
    usedCores = property(__getUsedCores, None, None, "used number of cores")
    freeCores = property(__getFreeCores, None, None, "free number of cores")
|
flexible
|
{
"blob_id": "23a7aa6b9a98bfd4fd43fea1ecfa26cb44969804",
"index": 8061,
"step-1": "<mask token>\n\n\nclass Node:\n <mask token>\n\n def __getName(self):\n return self.__name\n\n def __getTotalCores(self):\n return self.__totalCores\n\n def __setTotalCores(self, total):\n assert total >= 0 and total >= self.__usedCores\n self.__totalCores = total\n\n def __getUsedCores(self):\n return self.__usedCores\n <mask token>\n\n def __getFreeCores(self):\n return self.__totalCores - self.__usedCores\n <mask token>\n <mask token>\n\n def allocate(self, cores):\n allocated = min(cores, self.free)\n self.__usedCores += allocated\n if self.resources is not None:\n self.resources.nodeCoresAllocated(allocated)\n return allocated\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Resources:\n\n def __init__(self, nodes=None):\n self.__nodes = nodes\n if self.__nodes is None:\n self.__nodes = []\n for node in self.__nodes:\n node.resources = self\n self.__totalCores = 0\n self.__usedCores = 0\n self.__computeCores()\n\n def __computeCores(self):\n total, used = 0, 0\n for node in self.__nodes:\n total += node.total\n used += node.used\n self.__totalCores = total\n self.__usedCores = used\n\n def __getNodes(self):\n return self.__nodes\n\n def __getTotalCores(self):\n return self.__totalCores\n\n def __getUsedCores(self):\n return self.__usedCores\n\n def __getFreeCores(self):\n return self.__totalCores - self.__usedCores\n \"\"\"\n Function called by the node when some cores has been allocated.\n This function should track number of used cores in Resources statistics.\n\n Args:\n cores (int): number of allocated cores\n \"\"\"\n\n def nodeCoresAllocated(self, cores):\n self.__usedCores += cores\n \"\"\"\n Function called by the node when some cores has been released.\n This function should track number of used cores in Resources statistics.\n\n Args:\n cores (int): number of released cores\n \"\"\"\n\n def nodeCoresReleased(self, cores):\n self.__usedCores -= cores\n \"\"\"\n Relase allocated 
resources.\n\n Args:\n alloc (Allocation): allocation to release\n\n Raises:\n InvalidResourceSpec: when number of cores to release on a node is greater \n than number of used cores.\n \"\"\"\n\n def releaseAllocation(self, alloc):\n for node in alloc.nodeAllocations:\n node.node.release(node.cores)\n\n def __str__(self):\n header = '%d (%d used) cores on %d nodes\\n' % (self.__totalCores,\n self.__usedCores, len(self.__nodes))\n return header + '\\n'.join([str(node) for node in self.__nodes])\n\n def nNodes(self):\n return len(self.__nodes)\n nodes = property(__getNodes, None, None, 'list of a nodes')\n totalNodes = property(nNodes, None, None, 'total number of nodes')\n totalCores = property(__getTotalCores, None, None, 'total number of cores')\n usedCores = property(__getUsedCores, None, None, 'used number of cores')\n freeCores = property(__getFreeCores, None, None, 'free number of cores')\n",
"step-2": "<mask token>\n\n\nclass Node:\n <mask token>\n\n def __getName(self):\n return self.__name\n\n def __getTotalCores(self):\n return self.__totalCores\n\n def __setTotalCores(self, total):\n assert total >= 0 and total >= self.__usedCores\n self.__totalCores = total\n\n def __getUsedCores(self):\n return self.__usedCores\n\n def __setUsedCores(self, used):\n assert used > 0 and used <= self.__totalCores\n self.__usedCores = used\n\n def __getFreeCores(self):\n return self.__totalCores - self.__usedCores\n\n def __str__(self):\n return '%s %d (%d used)' % (self.__name, self.__totalCores, self.\n __usedCores)\n <mask token>\n\n def allocate(self, cores):\n allocated = min(cores, self.free)\n self.__usedCores += allocated\n if self.resources is not None:\n self.resources.nodeCoresAllocated(allocated)\n return allocated\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Resources:\n\n def __init__(self, nodes=None):\n self.__nodes = nodes\n if self.__nodes is None:\n self.__nodes = []\n for node in self.__nodes:\n node.resources = self\n self.__totalCores = 0\n self.__usedCores = 0\n self.__computeCores()\n\n def __computeCores(self):\n total, used = 0, 0\n for node in self.__nodes:\n total += node.total\n used += node.used\n self.__totalCores = total\n self.__usedCores = used\n\n def __getNodes(self):\n return self.__nodes\n\n def __getTotalCores(self):\n return self.__totalCores\n\n def __getUsedCores(self):\n return self.__usedCores\n\n def __getFreeCores(self):\n return self.__totalCores - self.__usedCores\n \"\"\"\n Function called by the node when some cores has been allocated.\n This function should track number of used cores in Resources statistics.\n\n Args:\n cores (int): number of allocated cores\n \"\"\"\n\n def nodeCoresAllocated(self, cores):\n self.__usedCores += cores\n \"\"\"\n Function called by the node when some cores has been released.\n This function should track number of used cores in 
Resources statistics.\n\n Args:\n cores (int): number of released cores\n \"\"\"\n\n def nodeCoresReleased(self, cores):\n self.__usedCores -= cores\n \"\"\"\n Relase allocated resources.\n\n Args:\n alloc (Allocation): allocation to release\n\n Raises:\n InvalidResourceSpec: when number of cores to release on a node is greater \n than number of used cores.\n \"\"\"\n\n def releaseAllocation(self, alloc):\n for node in alloc.nodeAllocations:\n node.node.release(node.cores)\n\n def __str__(self):\n header = '%d (%d used) cores on %d nodes\\n' % (self.__totalCores,\n self.__usedCores, len(self.__nodes))\n return header + '\\n'.join([str(node) for node in self.__nodes])\n\n def nNodes(self):\n return len(self.__nodes)\n nodes = property(__getNodes, None, None, 'list of a nodes')\n totalNodes = property(nNodes, None, None, 'total number of nodes')\n totalCores = property(__getTotalCores, None, None, 'total number of cores')\n usedCores = property(__getUsedCores, None, None, 'used number of cores')\n freeCores = property(__getFreeCores, None, None, 'free number of cores')\n",
"step-3": "<mask token>\n\n\nclass Node:\n\n def __init__(self, name=None, totalCores=0, used=0):\n self.__name = name\n self.__totalCores = totalCores\n self.__usedCores = used\n self.resources = None\n\n def __getName(self):\n return self.__name\n\n def __getTotalCores(self):\n return self.__totalCores\n\n def __setTotalCores(self, total):\n assert total >= 0 and total >= self.__usedCores\n self.__totalCores = total\n\n def __getUsedCores(self):\n return self.__usedCores\n\n def __setUsedCores(self, used):\n assert used > 0 and used <= self.__totalCores\n self.__usedCores = used\n\n def __getFreeCores(self):\n return self.__totalCores - self.__usedCores\n\n def __str__(self):\n return '%s %d (%d used)' % (self.__name, self.__totalCores, self.\n __usedCores)\n <mask token>\n\n def allocate(self, cores):\n allocated = min(cores, self.free)\n self.__usedCores += allocated\n if self.resources is not None:\n self.resources.nodeCoresAllocated(allocated)\n return allocated\n <mask token>\n\n def release(self, cores):\n if cores > self.__usedCores:\n raise InvalidResourceSpec()\n self.__usedCores -= cores\n if self.resources is not None:\n self.resources.nodeCoresReleased(cores)\n name = property(__getName, None, None, 'name of the node')\n total = property(__getTotalCores, __setTotalCores, None,\n 'total number of cores')\n used = property(__getUsedCores, __setUsedCores, None,\n 'number of allocated cores')\n free = property(__getFreeCores, None, None, 'number of available cores')\n\n\nclass Resources:\n\n def __init__(self, nodes=None):\n self.__nodes = nodes\n if self.__nodes is None:\n self.__nodes = []\n for node in self.__nodes:\n node.resources = self\n self.__totalCores = 0\n self.__usedCores = 0\n self.__computeCores()\n\n def __computeCores(self):\n total, used = 0, 0\n for node in self.__nodes:\n total += node.total\n used += node.used\n self.__totalCores = total\n self.__usedCores = used\n\n def __getNodes(self):\n return self.__nodes\n\n def 
__getTotalCores(self):\n return self.__totalCores\n\n def __getUsedCores(self):\n return self.__usedCores\n\n def __getFreeCores(self):\n return self.__totalCores - self.__usedCores\n \"\"\"\n Function called by the node when some cores has been allocated.\n This function should track number of used cores in Resources statistics.\n\n Args:\n cores (int): number of allocated cores\n \"\"\"\n\n def nodeCoresAllocated(self, cores):\n self.__usedCores += cores\n \"\"\"\n Function called by the node when some cores has been released.\n This function should track number of used cores in Resources statistics.\n\n Args:\n cores (int): number of released cores\n \"\"\"\n\n def nodeCoresReleased(self, cores):\n self.__usedCores -= cores\n \"\"\"\n Relase allocated resources.\n\n Args:\n alloc (Allocation): allocation to release\n\n Raises:\n InvalidResourceSpec: when number of cores to release on a node is greater \n than number of used cores.\n \"\"\"\n\n def releaseAllocation(self, alloc):\n for node in alloc.nodeAllocations:\n node.node.release(node.cores)\n\n def __str__(self):\n header = '%d (%d used) cores on %d nodes\\n' % (self.__totalCores,\n self.__usedCores, len(self.__nodes))\n return header + '\\n'.join([str(node) for node in self.__nodes])\n\n def nNodes(self):\n return len(self.__nodes)\n nodes = property(__getNodes, None, None, 'list of a nodes')\n totalNodes = property(nNodes, None, None, 'total number of nodes')\n totalCores = property(__getTotalCores, None, None, 'total number of cores')\n usedCores = property(__getUsedCores, None, None, 'used number of cores')\n freeCores = property(__getFreeCores, None, None, 'free number of cores')\n",
"step-4": "from qcg.appscheduler.errors import *\n\n\nclass Node:\n\n def __init__(self, name=None, totalCores=0, used=0):\n self.__name = name\n self.__totalCores = totalCores\n self.__usedCores = used\n self.resources = None\n\n def __getName(self):\n return self.__name\n\n def __getTotalCores(self):\n return self.__totalCores\n\n def __setTotalCores(self, total):\n assert total >= 0 and total >= self.__usedCores\n self.__totalCores = total\n\n def __getUsedCores(self):\n return self.__usedCores\n\n def __setUsedCores(self, used):\n assert used > 0 and used <= self.__totalCores\n self.__usedCores = used\n\n def __getFreeCores(self):\n return self.__totalCores - self.__usedCores\n\n def __str__(self):\n return '%s %d (%d used)' % (self.__name, self.__totalCores, self.\n __usedCores)\n \"\"\"\n Allocate maximum number of cores on a node.\n\n Args:\n cores (int): maximum number of cores to allocate\n\n Returns:\n int: number of allocated cores\n \"\"\"\n\n def allocate(self, cores):\n allocated = min(cores, self.free)\n self.__usedCores += allocated\n if self.resources is not None:\n self.resources.nodeCoresAllocated(allocated)\n return allocated\n \"\"\"\n Release specified number of cores on a node.\n\n Args:\n cores (int): number of cores to release\n\n Raises:\n InvalidResourceSpec: when number of cores to release exceeds number of of\n used cores.\n \"\"\"\n\n def release(self, cores):\n if cores > self.__usedCores:\n raise InvalidResourceSpec()\n self.__usedCores -= cores\n if self.resources is not None:\n self.resources.nodeCoresReleased(cores)\n name = property(__getName, None, None, 'name of the node')\n total = property(__getTotalCores, __setTotalCores, None,\n 'total number of cores')\n used = property(__getUsedCores, __setUsedCores, None,\n 'number of allocated cores')\n free = property(__getFreeCores, None, None, 'number of available cores')\n\n\nclass Resources:\n\n def __init__(self, nodes=None):\n self.__nodes = nodes\n if self.__nodes is None:\n 
self.__nodes = []\n for node in self.__nodes:\n node.resources = self\n self.__totalCores = 0\n self.__usedCores = 0\n self.__computeCores()\n\n def __computeCores(self):\n total, used = 0, 0\n for node in self.__nodes:\n total += node.total\n used += node.used\n self.__totalCores = total\n self.__usedCores = used\n\n def __getNodes(self):\n return self.__nodes\n\n def __getTotalCores(self):\n return self.__totalCores\n\n def __getUsedCores(self):\n return self.__usedCores\n\n def __getFreeCores(self):\n return self.__totalCores - self.__usedCores\n \"\"\"\n Function called by the node when some cores has been allocated.\n This function should track number of used cores in Resources statistics.\n\n Args:\n cores (int): number of allocated cores\n \"\"\"\n\n def nodeCoresAllocated(self, cores):\n self.__usedCores += cores\n \"\"\"\n Function called by the node when some cores has been released.\n This function should track number of used cores in Resources statistics.\n\n Args:\n cores (int): number of released cores\n \"\"\"\n\n def nodeCoresReleased(self, cores):\n self.__usedCores -= cores\n \"\"\"\n Relase allocated resources.\n\n Args:\n alloc (Allocation): allocation to release\n\n Raises:\n InvalidResourceSpec: when number of cores to release on a node is greater \n than number of used cores.\n \"\"\"\n\n def releaseAllocation(self, alloc):\n for node in alloc.nodeAllocations:\n node.node.release(node.cores)\n\n def __str__(self):\n header = '%d (%d used) cores on %d nodes\\n' % (self.__totalCores,\n self.__usedCores, len(self.__nodes))\n return header + '\\n'.join([str(node) for node in self.__nodes])\n\n def nNodes(self):\n return len(self.__nodes)\n nodes = property(__getNodes, None, None, 'list of a nodes')\n totalNodes = property(nNodes, None, None, 'total number of nodes')\n totalCores = property(__getTotalCores, None, None, 'total number of cores')\n usedCores = property(__getUsedCores, None, None, 'used number of cores')\n freeCores = 
property(__getFreeCores, None, None, 'free number of cores')\n",
"step-5": "from qcg.appscheduler.errors import *\n\n\nclass Node:\n def __init__(self, name=None, totalCores=0, used=0):\n self.__name = name\n self.__totalCores = totalCores\n self.__usedCores = used\n self.resources = None\n\n def __getName(self):\n return self.__name\n\n def __getTotalCores(self):\n return self.__totalCores\n\n def __setTotalCores(self, total):\n assert total >= 0 and total >= self.__usedCores\n self.__totalCores = total\n\n def __getUsedCores(self):\n return self.__usedCores\n\n def __setUsedCores(self, used):\n assert used > 0 and used <= self.__totalCores\n self.__usedCores = used\n\n def __getFreeCores(self):\n return self.__totalCores - self.__usedCores\n\n def __str__(self):\n return \"%s %d (%d used)\" % (self.__name, self.__totalCores, self.__usedCores)\n\n \"\"\"\n Allocate maximum number of cores on a node.\n\n Args:\n cores (int): maximum number of cores to allocate\n\n Returns:\n int: number of allocated cores\n \"\"\"\n\n def allocate(self, cores):\n allocated = min(cores, self.free)\n self.__usedCores += allocated\n\n if self.resources is not None:\n self.resources.nodeCoresAllocated(allocated)\n\n return allocated\n\n \"\"\"\n Release specified number of cores on a node.\n\n Args:\n cores (int): number of cores to release\n\n Raises:\n InvalidResourceSpec: when number of cores to release exceeds number of of\n used cores.\n \"\"\"\n\n def release(self, cores):\n if cores > self.__usedCores:\n raise InvalidResourceSpec()\n\n self.__usedCores -= cores\n\n if self.resources is not None:\n self.resources.nodeCoresReleased(cores)\n\n name = property(__getName, None, None, \"name of the node\")\n total = property(__getTotalCores, __setTotalCores, None, \"total number of cores\")\n used = property(__getUsedCores, __setUsedCores, None, \"number of allocated cores\")\n free = property(__getFreeCores, None, None, \"number of available cores\")\n\n\nclass Resources:\n\n def __init__(self, nodes=None):\n self.__nodes = nodes\n if self.__nodes 
is None:\n self.__nodes = []\n\n for node in self.__nodes:\n node.resources = self\n\n self.__totalCores = 0\n self.__usedCores = 0\n\n #\t\tprint \"initializing %d nodes\" % len(nodes)\n self.__computeCores()\n\n def __computeCores(self):\n total, used = 0, 0\n for node in self.__nodes:\n total += node.total\n used += node.used\n\n self.__totalCores = total\n self.__usedCores = used\n\n def __getNodes(self):\n return self.__nodes\n\n def __getTotalCores(self):\n return self.__totalCores\n\n def __getUsedCores(self):\n return self.__usedCores\n\n def __getFreeCores(self):\n return self.__totalCores - self.__usedCores\n\n \"\"\"\n Function called by the node when some cores has been allocated.\n This function should track number of used cores in Resources statistics.\n\n Args:\n cores (int): number of allocated cores\n \"\"\"\n\n def nodeCoresAllocated(self, cores):\n self.__usedCores += cores\n\n \"\"\"\n Function called by the node when some cores has been released.\n This function should track number of used cores in Resources statistics.\n\n Args:\n cores (int): number of released cores\n \"\"\"\n\n def nodeCoresReleased(self, cores):\n self.__usedCores -= cores\n\n \"\"\"\n Relase allocated resources.\n\n Args:\n alloc (Allocation): allocation to release\n\n Raises:\n InvalidResourceSpec: when number of cores to release on a node is greater \n than number of used cores.\n \"\"\"\n\n def releaseAllocation(self, alloc):\n for node in alloc.nodeAllocations:\n node.node.release(node.cores)\n\n def __str__(self):\n header = '%d (%d used) cores on %d nodes\\n' % (self.__totalCores, self.__usedCores, \\\n len(self.__nodes))\n return header + '\\n'.join([str(node) for node in self.__nodes])\n\n #\t\tif self.__nodes:\n #\t\t\tfor node in self.__nodes:\n #\t\t\t\tresult.join(\"\\n%s\" % node)\n #\t\treturn result\n\n def nNodes(self):\n return len(self.__nodes)\n\n nodes = property(__getNodes, None, None, \"list of a nodes\")\n totalNodes = property(nNodes, None, None, 
\"total number of nodes\")\n totalCores = property(__getTotalCores, None, None, \"total number of cores\")\n usedCores = property(__getUsedCores, None, None, \"used number of cores\")\n freeCores = property(__getFreeCores, None, None, \"free number of cores\")\n",
"step-ids": [
21,
23,
26,
28,
29
]
}
|
[
21,
23,
26,
28,
29
] |
# encoding: utf-8
from SpiderTools.tool import platform_system
from SpidersLog.file_handler import SafeFileHandler
from Env.parse_yaml import FileConfigParser
from Env import log_variable as lv
from staticparm import root_path
from SpiderTools.tool import get_username
import logging
import logging.handlers
import traceback
class ICrawlerLog:
    """Builds per-purpose file loggers ('spider' / 'middleware') for the crawler."""

    # Mapping from level name to logging-module level constant.
    # NOTE(review): not referenced inside this class — presumably used by
    # callers; confirm before removing.
    level_relations = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warning': logging.WARNING,
        'error': logging.ERROR,
        'crit': logging.CRITICAL
    }

    def __init__(self, name, logger=None):
        # name selects the target log file ('spider' or 'middleware');
        # logger is an optional logging.getLogger() name to reuse.
        self.logger = logger
        self.name = name

    @property
    def save(self, *args, **kwargs):
        '''
        Resolve the log file path for this component and return a logger
        configured with a single file handler appending to that file.

        NOTE(review): a property getter never receives extra arguments, so
        *args/**kwargs here are vestigial — confirm and drop.
        '''
        # Scheduler context injected into every record's format string.
        jobinst_id = lv.get_jobinst_id()
        job_code = lv.get_job_code()
        fire_time = lv.get_fire_time()
        group_code = lv.get_group_code()
        address_code = lv.get_address_code()
        # Create (or fetch the cached) logger instance.
        self.logger = logging.getLogger(self.logger)
        self.logger.setLevel(logging.INFO)
        # Per-platform log directory from the YAML config.
        # NOTE(review): log_path stays unassigned on any other platform and
        # would raise NameError below — confirm only Linux/Windows are run.
        if platform_system() == 'Linux':
            log_path = FileConfigParser().get_path(server=platform_system(),key='log-cb')
        if platform_system() == 'Windows':
            log_path = root_path + FileConfigParser().get_path(server=platform_system(), key='log')
        # File name keyed on the component; same NameError caveat applies to
        # any self.name other than 'spider'/'middleware'.
        if self.name == 'spider':
            name = 'icrawlerspider.spider.log'
        elif self.name == 'middleware':
            name = 'icrawlerspider.middleware.log'
        log_name = log_path + name
        # Base file name of the already-attached handler, if any.
        filename = self.logger.handlers[0].baseFilename.split('\\')[-1] if len(self.logger.handlers) > 0 else ''
        if log_name.split('/')[-1] != filename:
            # Target file changed: drop stale handlers (handles the case of
            # several different file names over the logger's lifetime).
            self.logger.handlers.clear()
        if not self.logger.handlers:
            # Append-mode handler; SafeFileHandler replaces the stock
            # TimedRotatingFileHandler used previously.
            fh = SafeFileHandler(log_name, mode='a', encoding='utf-8')
            # Record format: timestamp, level, scheduler context, message.
            formatter = logging.Formatter('[%(asctime)s][%(levelname)s] ' + '%s %s %s %s %s '
                                          % (group_code, job_code, jobinst_id, fire_time, address_code) + '%(message)s')
            fh.setFormatter(formatter)
            # Attach the handler to the logger.
            self.logger.addHandler(fh)
            # Close the opened file — presumably SafeFileHandler reopens it
            # on demand; confirm against its implementation.
            fh.close()
        return self.logger
def log(name):
    """Decorator factory: wrap a callable with ICrawlerLog-based logging.

    The wrapped function's result is passed through when truthy; a falsy
    result yields None, and any exception is logged and turned into False.
    """
    def decorator(func):
        def inner(*args, **kwargs):
            logger = ICrawlerLog(name).save
            logger.info("{}开始执行".format(func))
            try:
                outcome = func(*args, **kwargs)
                if not outcome:
                    logger.error("{}执行后返回值为空".format(func))
                    return None
                logger.info("{}执行成功".format(func))
                return outcome
            except Exception as exc:
                logger.error("{}程序异常执行失败,程序终止".format(func))
                logger.error(exc)
                return False
        return inner
    return decorator
|
normal
|
{
"blob_id": "63001128d9cb934d6f9d57db668a43ba58f4ece3",
"index": 1679,
"step-1": "<mask token>\n\n\nclass ICrawlerLog:\n <mask token>\n\n def __init__(self, name, logger=None):\n self.logger = logger\n self.name = name\n\n @property\n def save(self, *args, **kwargs):\n \"\"\"\n 指定保存日志的文件路径,日志级别,以及调用文件\n 将日志存入到指定的文件中\n \"\"\"\n jobinst_id = lv.get_jobinst_id()\n job_code = lv.get_job_code()\n fire_time = lv.get_fire_time()\n group_code = lv.get_group_code()\n address_code = lv.get_address_code()\n self.logger = logging.getLogger(self.logger)\n self.logger.setLevel(logging.INFO)\n if platform_system() == 'Linux':\n log_path = FileConfigParser().get_path(server=platform_system(),\n key='log-cb')\n if platform_system() == 'Windows':\n log_path = root_path + FileConfigParser().get_path(server=\n platform_system(), key='log')\n if self.name == 'spider':\n name = 'icrawlerspider.spider.log'\n elif self.name == 'middleware':\n name = 'icrawlerspider.middleware.log'\n log_name = log_path + name\n filename = self.logger.handlers[0].baseFilename.split('\\\\')[-1] if len(\n self.logger.handlers) > 0 else ''\n if log_name.split('/')[-1] != filename:\n self.logger.handlers.clear()\n if not self.logger.handlers:\n fh = SafeFileHandler(log_name, mode='a', encoding='utf-8')\n formatter = logging.Formatter('[%(asctime)s][%(levelname)s] ' +\n '%s %s %s %s %s ' % (group_code, job_code, jobinst_id,\n fire_time, address_code) + '%(message)s')\n fh.setFormatter(formatter)\n self.logger.addHandler(fh)\n fh.close()\n return self.logger\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ICrawlerLog:\n level_relations = {'debug': logging.DEBUG, 'info': logging.INFO,\n 'warning': logging.WARNING, 'error': logging.ERROR, 'crit': logging\n .CRITICAL}\n\n def __init__(self, name, logger=None):\n self.logger = logger\n self.name = name\n\n @property\n def save(self, *args, **kwargs):\n \"\"\"\n 指定保存日志的文件路径,日志级别,以及调用文件\n 将日志存入到指定的文件中\n \"\"\"\n jobinst_id = lv.get_jobinst_id()\n job_code = lv.get_job_code()\n fire_time = lv.get_fire_time()\n group_code = lv.get_group_code()\n address_code = lv.get_address_code()\n self.logger = logging.getLogger(self.logger)\n self.logger.setLevel(logging.INFO)\n if platform_system() == 'Linux':\n log_path = FileConfigParser().get_path(server=platform_system(),\n key='log-cb')\n if platform_system() == 'Windows':\n log_path = root_path + FileConfigParser().get_path(server=\n platform_system(), key='log')\n if self.name == 'spider':\n name = 'icrawlerspider.spider.log'\n elif self.name == 'middleware':\n name = 'icrawlerspider.middleware.log'\n log_name = log_path + name\n filename = self.logger.handlers[0].baseFilename.split('\\\\')[-1] if len(\n self.logger.handlers) > 0 else ''\n if log_name.split('/')[-1] != filename:\n self.logger.handlers.clear()\n if not self.logger.handlers:\n fh = SafeFileHandler(log_name, mode='a', encoding='utf-8')\n formatter = logging.Formatter('[%(asctime)s][%(levelname)s] ' +\n '%s %s %s %s %s ' % (group_code, job_code, jobinst_id,\n fire_time, address_code) + '%(message)s')\n fh.setFormatter(formatter)\n self.logger.addHandler(fh)\n fh.close()\n return self.logger\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ICrawlerLog:\n level_relations = {'debug': logging.DEBUG, 'info': logging.INFO,\n 'warning': logging.WARNING, 'error': logging.ERROR, 'crit': logging\n .CRITICAL}\n\n def __init__(self, name, logger=None):\n self.logger = logger\n self.name = name\n\n @property\n def save(self, *args, **kwargs):\n \"\"\"\n 指定保存日志的文件路径,日志级别,以及调用文件\n 将日志存入到指定的文件中\n \"\"\"\n jobinst_id = lv.get_jobinst_id()\n job_code = lv.get_job_code()\n fire_time = lv.get_fire_time()\n group_code = lv.get_group_code()\n address_code = lv.get_address_code()\n self.logger = logging.getLogger(self.logger)\n self.logger.setLevel(logging.INFO)\n if platform_system() == 'Linux':\n log_path = FileConfigParser().get_path(server=platform_system(),\n key='log-cb')\n if platform_system() == 'Windows':\n log_path = root_path + FileConfigParser().get_path(server=\n platform_system(), key='log')\n if self.name == 'spider':\n name = 'icrawlerspider.spider.log'\n elif self.name == 'middleware':\n name = 'icrawlerspider.middleware.log'\n log_name = log_path + name\n filename = self.logger.handlers[0].baseFilename.split('\\\\')[-1] if len(\n self.logger.handlers) > 0 else ''\n if log_name.split('/')[-1] != filename:\n self.logger.handlers.clear()\n if not self.logger.handlers:\n fh = SafeFileHandler(log_name, mode='a', encoding='utf-8')\n formatter = logging.Formatter('[%(asctime)s][%(levelname)s] ' +\n '%s %s %s %s %s ' % (group_code, job_code, jobinst_id,\n fire_time, address_code) + '%(message)s')\n fh.setFormatter(formatter)\n self.logger.addHandler(fh)\n fh.close()\n return self.logger\n\n\ndef log(name):\n\n def wraaper(func):\n\n def inner(*args, **kwargs):\n log = ICrawlerLog(name).save\n log.info('{}开始执行'.format(func))\n try:\n result = func(*args, **kwargs)\n if result:\n log.info('{}执行成功'.format(func))\n return result\n else:\n log.error('{}执行后返回值为空'.format(func))\n return None\n except Exception as e:\n log.error('{}程序异常执行失败,程序终止'.format(func))\n log.error(e)\n return 
False\n return inner\n return wraaper\n",
"step-4": "from SpiderTools.tool import platform_system\nfrom SpidersLog.file_handler import SafeFileHandler\nfrom Env.parse_yaml import FileConfigParser\nfrom Env import log_variable as lv\nfrom staticparm import root_path\nfrom SpiderTools.tool import get_username\nimport logging\nimport logging.handlers\nimport traceback\n\n\nclass ICrawlerLog:\n level_relations = {'debug': logging.DEBUG, 'info': logging.INFO,\n 'warning': logging.WARNING, 'error': logging.ERROR, 'crit': logging\n .CRITICAL}\n\n def __init__(self, name, logger=None):\n self.logger = logger\n self.name = name\n\n @property\n def save(self, *args, **kwargs):\n \"\"\"\n 指定保存日志的文件路径,日志级别,以及调用文件\n 将日志存入到指定的文件中\n \"\"\"\n jobinst_id = lv.get_jobinst_id()\n job_code = lv.get_job_code()\n fire_time = lv.get_fire_time()\n group_code = lv.get_group_code()\n address_code = lv.get_address_code()\n self.logger = logging.getLogger(self.logger)\n self.logger.setLevel(logging.INFO)\n if platform_system() == 'Linux':\n log_path = FileConfigParser().get_path(server=platform_system(),\n key='log-cb')\n if platform_system() == 'Windows':\n log_path = root_path + FileConfigParser().get_path(server=\n platform_system(), key='log')\n if self.name == 'spider':\n name = 'icrawlerspider.spider.log'\n elif self.name == 'middleware':\n name = 'icrawlerspider.middleware.log'\n log_name = log_path + name\n filename = self.logger.handlers[0].baseFilename.split('\\\\')[-1] if len(\n self.logger.handlers) > 0 else ''\n if log_name.split('/')[-1] != filename:\n self.logger.handlers.clear()\n if not self.logger.handlers:\n fh = SafeFileHandler(log_name, mode='a', encoding='utf-8')\n formatter = logging.Formatter('[%(asctime)s][%(levelname)s] ' +\n '%s %s %s %s %s ' % (group_code, job_code, jobinst_id,\n fire_time, address_code) + '%(message)s')\n fh.setFormatter(formatter)\n self.logger.addHandler(fh)\n fh.close()\n return self.logger\n\n\ndef log(name):\n\n def wraaper(func):\n\n def inner(*args, **kwargs):\n log = 
ICrawlerLog(name).save\n log.info('{}开始执行'.format(func))\n try:\n result = func(*args, **kwargs)\n if result:\n log.info('{}执行成功'.format(func))\n return result\n else:\n log.error('{}执行后返回值为空'.format(func))\n return None\n except Exception as e:\n log.error('{}程序异常执行失败,程序终止'.format(func))\n log.error(e)\n return False\n return inner\n return wraaper\n",
"step-5": "# encoding: utf-8\nfrom SpiderTools.tool import platform_system\nfrom SpidersLog.file_handler import SafeFileHandler\nfrom Env.parse_yaml import FileConfigParser\nfrom Env import log_variable as lv\nfrom staticparm import root_path\nfrom SpiderTools.tool import get_username\nimport logging\nimport logging.handlers\nimport traceback\n\n\nclass ICrawlerLog:\n level_relations = {\n 'debug': logging.DEBUG,\n 'info': logging.INFO,\n 'warning': logging.WARNING,\n 'error': logging.ERROR,\n 'crit': logging.CRITICAL\n } # 日志级别关系映射\n\n def __init__(self, name, logger=None):\n self.logger = logger\n self.name = name\n\n @property\n def save(self, *args, **kwargs):\n '''\n 指定保存日志的文件路径,日志级别,以及调用文件\n 将日志存入到指定的文件中\n '''\n jobinst_id = lv.get_jobinst_id()\n job_code = lv.get_job_code()\n fire_time = lv.get_fire_time()\n group_code = lv.get_group_code()\n address_code = lv.get_address_code()\n\n # year = time.strftime('%Y', time.localtime()) # 获取完整年份\n # month = time.strftime('%m', time.localtime()) # 获取月\n # day = time.strftime('%d', time.localtime()) # 获取日\n\n # 创建一个logger\n self.logger = logging.getLogger(self.logger)\n self.logger.setLevel(logging.INFO)\n # 创建一个handler,用于写入日志文件\n # self.log_time = time.strftime(\"%Y_%m_%d_\")\n\n if platform_system() == 'Linux':\n log_path = FileConfigParser().get_path(server=platform_system(),key='log-cb')\n if platform_system() == 'Windows':\n log_path = root_path + FileConfigParser().get_path(server=platform_system(), key='log')\n # log_path = './Logs/'\n # log_path = '/home/ijep/domain/logs/python/'\n # log_name = log_path + 'icrawlerspider.spider.%s-%s-%s.log' % (year, month, day)\n if self.name == 'spider':\n name = 'icrawlerspider.spider.log'\n elif self.name == 'middleware':\n name = 'icrawlerspider.middleware.log'\n\n log_name = log_path + name\n\n filename = self.logger.handlers[0].baseFilename.split('\\\\')[-1] if len(self.logger.handlers) > 0 else ''\n\n if log_name.split('/')[-1] != filename:\n 
self.logger.handlers.clear() # 多个不同文件名的情况下用这个\n\n if not self.logger.handlers:\n # 追加模式,按照日期来设置日志,handlers中TimedRotatingFileHandler就是按照日期来设置,RotatingFileHandler这个按照文件大小来设置\n # fh = logging.handlers.TimedRotatingFileHandler(log_name, when='D', interval=1, encoding='utf-8')\n fh = SafeFileHandler(log_name, mode='a', encoding='utf-8')\n # fh.setLevel(logging.INFO)\n\n # 定义handler的输出格式\n formatter = logging.Formatter('[%(asctime)s][%(levelname)s] ' + '%s %s %s %s %s '\n % (group_code, job_code, jobinst_id, fire_time, address_code) + '%(message)s')\n # '%(filename)s->%(funcName)s line:%(lineno)d\n\n fh.setFormatter(formatter)\n\n # 给logger添加handler\n self.logger.addHandler(fh)\n\n # 添加下面一句,在记录日志之后移除句柄\n # self.logger.info('记录数据')\n # self.logger.removeHandler(fh)\n # 关闭打开的文件\n fh.close()\n return self.logger\n\n\ndef log(name):\n def wraaper(func):\n def inner(*args, **kwargs): # 如果想返回result必须再包裹一层\n log = ICrawlerLog(name).save\n log.info(\"{}开始执行\".format(func))\n try:\n result = func(*args, **kwargs) # 如果不是在类的函数里使用装饰器就可以这么写,如果这么写会报需要self入参(因为你是用类作为装饰器,函数就不会这样)\n if result:\n log.info(\"{}执行成功\".format(func))\n # log.info(\"结果是: %s\" % result)\n return result\n else:\n log.error(\"{}执行后返回值为空\".format(func))\n return None\n except Exception as e:\n # traceback.print_exc()\n log.error(\"{}程序异常执行失败,程序终止\".format(func))\n log.error(e)\n return False\n\n return inner\n\n return wraaper\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
class Solution(object):

    def isIsomorphic(self, s, t):
        """
        Decide whether strings s and t are isomorphic, i.e. whether a
        one-to-one character mapping turns s into t.

        :type s: str
        :type t: str
        :rtype: bool
        """
        if len(s) != len(t):
            return False
        forward = {}
        backward = {}
        for a, b in zip(s, t):
            # Each character must map consistently in both directions;
            # setdefault records the first pairing and returns it afterwards.
            if forward.setdefault(a, b) != b:
                return False
            if backward.setdefault(b, a) != a:
                return False
        return True
# Manual smoke test: 'bb' -> 'ab' is not isomorphic ('b' would have to map
# to both 'a' and 'b'), so this prints False.
solution = Solution()
s = 'bb'
t = 'ab'
print(solution.isIsomorphic(s, t))
|
normal
|
{
"blob_id": "7fdddf98fc7b588e9b8816ffa22bc24f715d7efe",
"index": 5210,
"step-1": "class Solution(object):\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Solution(object):\n\n def isIsomorphic(self, s, t):\n \"\"\"\n :type s: str\n :type t: str\n :rtype: bool\n \"\"\"\n n1 = len(s)\n n2 = len(t)\n if n1 != n2:\n return False\n else:\n map1 = {}\n map2 = {}\n for i in range(n1):\n if s[i] not in map1 and t[i] not in map2:\n map1.update({s[i]: t[i]})\n map2.update({t[i]: s[i]})\n elif s[i] not in map1 or t[i] not in map2 or map1[s[i]] != t[i\n ] or map2[t[i]] != s[i]:\n return False\n return True\n\n\n<mask token>\n",
"step-3": "class Solution(object):\n\n def isIsomorphic(self, s, t):\n \"\"\"\n :type s: str\n :type t: str\n :rtype: bool\n \"\"\"\n n1 = len(s)\n n2 = len(t)\n if n1 != n2:\n return False\n else:\n map1 = {}\n map2 = {}\n for i in range(n1):\n if s[i] not in map1 and t[i] not in map2:\n map1.update({s[i]: t[i]})\n map2.update({t[i]: s[i]})\n elif s[i] not in map1 or t[i] not in map2 or map1[s[i]] != t[i\n ] or map2[t[i]] != s[i]:\n return False\n return True\n\n\n<mask token>\nprint(solution.isIsomorphic(s, t))\n",
"step-4": "class Solution(object):\n\n def isIsomorphic(self, s, t):\n \"\"\"\n :type s: str\n :type t: str\n :rtype: bool\n \"\"\"\n n1 = len(s)\n n2 = len(t)\n if n1 != n2:\n return False\n else:\n map1 = {}\n map2 = {}\n for i in range(n1):\n if s[i] not in map1 and t[i] not in map2:\n map1.update({s[i]: t[i]})\n map2.update({t[i]: s[i]})\n elif s[i] not in map1 or t[i] not in map2 or map1[s[i]] != t[i\n ] or map2[t[i]] != s[i]:\n return False\n return True\n\n\nsolution = Solution()\ns = 'bb'\nt = 'ab'\nprint(solution.isIsomorphic(s, t))\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
import helpers
import os
import os.path
import json
import imp
import source.freesprints
from pygame.locals import *
class PluginLoader:
available_plugins = None
def __init__(self):
self.checkAvailablePlugins()
def checkAvailablePlugins(self):
print helpers.pluginsPath()
plugin_dirs = [plugin_path for plugin_path in os.listdir(helpers.pluginsPath())] #if os.path.isdir(f)]
self.available_plugins = []
for plugin_path in plugin_dirs:
print plugin_path
if plugin_path == ".DS_Store":
continue
plugin_path_absolute = os.path.join(helpers.pluginsPath(), plugin_path)
json_info_path = os.path.join(plugin_path_absolute, "info.json")
json_info_data = open(json_info_path)
jsonInfo = json.load(json_info_data)
self.available_plugins.append(Plugin(jsonInfo, plugin_path_absolute))
def getAvailablePlugins(self):
return self.available_plugins
class Plugin:
    """Wraps a single on-disk plugin directory.

    Reads the plugin's metadata, imports its Python package and
    instantiates its VisualisationPlugin entry point.
    """
    # Filesystem path of the plugin directory and metadata from info.json;
    # all None until __init__ runs.
    path = None
    name = None
    version = None
    author = None
    # The imported plugin module and its VisualisationPlugin instance.
    module = None
    plugin_object = None
    def __init__(self, info_json, path):
        self.path = path
        self.name = info_json.get("name")
        self.version = info_json.get("version")
        self.author = info_json.get("author")
        print info_json
        self.init_module()
    def init_module(self):
        """Import the plugin package and create its VisualisationPlugin."""
        #module = imp.find_module("pluginModule", [self.path])
        # NOTE(review): imp is deprecated; importlib would be the modern
        # replacement if this code is ever ported to Python 3.
        self.module = imp.load_source("pluginModule", os.path.join(self.path, "__init__.py"))
        print "FIND MODULE:"
        print self.module
        # The plugin module is expected to expose a VisualisationPlugin class
        # taking (app, plugin) -- presumably part of the plugin contract;
        # TODO confirm against the plugin authors' documentation.
        self.plugin_object = self.module.VisualisationPlugin(source.freesprints.get_app(), self)
        #self.plugin_object.start()
        #self.plugin_object.spinCount(123, 0)
    def start(self, race_options):
        """Clear the window to black and hand control to the plugin."""
        source.freesprints.get_app().get_window_surface().fill(Color("black"))
        self.plugin_object.start(race_options)
|
normal
|
{
"blob_id": "639669174435492f43bf51680c2724863017e9d2",
"index": 527,
"step-1": "import helpers\nimport os\nimport os.path\nimport json\nimport imp\nimport source.freesprints\nfrom pygame.locals import *\n\nclass PluginLoader:\n available_plugins = None\n \n def __init__(self):\n self.checkAvailablePlugins()\n \n def checkAvailablePlugins(self):\n print helpers.pluginsPath()\n \n plugin_dirs = [plugin_path for plugin_path in os.listdir(helpers.pluginsPath())] #if os.path.isdir(f)]\n \n self.available_plugins = []\n \n for plugin_path in plugin_dirs:\n print plugin_path\n if plugin_path == \".DS_Store\":\n continue\n \n plugin_path_absolute = os.path.join(helpers.pluginsPath(), plugin_path)\n json_info_path = os.path.join(plugin_path_absolute, \"info.json\")\n \n json_info_data = open(json_info_path)\n \n jsonInfo = json.load(json_info_data)\n \n self.available_plugins.append(Plugin(jsonInfo, plugin_path_absolute))\n \n \n \n def getAvailablePlugins(self):\n return self.available_plugins\n \nclass Plugin:\n path = None\n name = None\n version = None\n author = None\n \n module = None\n plugin_object = None\n \n def __init__(self, info_json, path):\n self.path = path\n \n self.name = info_json.get(\"name\")\n self.version = info_json.get(\"version\")\n self.author = info_json.get(\"author\")\n \n print info_json\n \n self.init_module()\n \n def init_module(self):\n #module = imp.find_module(\"pluginModule\", [self.path])\n self.module = imp.load_source(\"pluginModule\", os.path.join(self.path, \"__init__.py\"))\n \n print \"FIND MODULE:\"\n print self.module\n\n self.plugin_object = self.module.VisualisationPlugin(source.freesprints.get_app(), self)\n #self.plugin_object.start()\n #self.plugin_object.spinCount(123, 0)\n\n def start(self, race_options):\n source.freesprints.get_app().get_window_surface().fill(Color(\"black\"))\n self.plugin_object.start(race_options)\n\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class ProsteUcilnice(object):
<|reserved_special_token_0|>
def __init__(self, ucilnice):
self.ucilnice = set(ucilnice)
self.zasedenost_ucilnic = defaultdict(dict)
self.rezerviranost_ucilnic = defaultdict(dict)
def dodaj_srecanja_semestra(self, semester, teden=None):
for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'
).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in
self.ucilnice]).exclude(ura__isnull=True):
if teden is None or semester.od <= teden + datetime.timedelta(days
=srecanje.dan - 1) <= semester.do:
for i in range(srecanje.trajanje):
self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][
srecanje.ucilnica] = srecanje
def upostevaj_rezervacije_za_teden(self, teden):
self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))
def upostevaj_rezervacije(self, rezervacije):
for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',
queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.
ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):
for ucilnica in rezervacija.ustrezne_ucilnice:
for dan in rezervacija.dnevi():
for ura in range(rezervacija.od, rezervacija.do):
self.rezerviranost_ucilnic[dan.isoweekday(), ura][
ucilnica] = rezervacija
def dobi_termine(self):
termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.
zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for
d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]
return termini
class Konflikt(object):
def __init__(self):
self.srecanja = []
self.rezervacije = []
@property
def st_konfliktov(self):
return len(self.srecanja) + len(self.rezervacije)
def __bool__(self):
return self.st_konfliktov > 0
def __str__(self):
return 'Konflikti:\n rezervacije:\n{}\n predmeti:\n{}'.format(
'\n '.join(map(str, self.rezervacije)), '\n '.join(map(
str, self.srecanja)))
class IskalnikKonfliktov(object):
"""Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico."""
def __init__(self, ucilnice, min_datum, max_datum):
self.ucilnice = set(ucilnice)
self.min_datum = min_datum
self.max_datum = max_datum
self.zasedenost_ucilnic = defaultdict(list)
self.rezerviranost_ucilnic = defaultdict(list)
def dodaj_srecanja(self):
self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.
min_datum, self.max_datum))
def dodaj_srecanja_semestrov(self, semestri):
for s in Srecanje.objects.filter(semester__in=semestri,
ucilnica__in=self.ucilnice).exclude(ura__isnull=True
).select_related('semester', 'predmet', 'ucilnica'):
for d in s.dnevi_med(self.min_datum, self.max_datum):
self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)
def dodaj_rezervacije(self, rezervacije):
"""Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic"""
for r in rezervacije:
for u in r.seznam_ucilnic:
for d in r.dnevi_med(self.min_datum, self.max_datum):
self.rezerviranost_ucilnic[u.pk, d].append(r)
@staticmethod
def za_rezervacije(rezervacije: RezervacijaQuerySet):
"""Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic"""
min_datum = datetime.date.max
max_datum = datetime.date.min
ucilnice = set()
for r in rezervacije:
if r.zacetek < min_datum:
min_datum = r.zacetek
if r.konec > max_datum:
max_datum = r.konec
ucilnice.update(r.seznam_ucilnic)
iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)
iskalnik.dodaj_srecanja()
iskalnik.dodaj_rezervacije(rezervacije)
return iskalnik
def konflikti_z_rezervacijo(self, r: Rezervacija):
if not hasattr(r, 'seznam_ucilnic'):
r.seznam_ucilnic = r.ucilnice.all()
for u in r.seznam_ucilnic:
for d in r.dnevi():
k = self.konflikti(u, d, r.od, r.do, r)
if k:
yield u, d, k
def konflikti(self, ucilnica, datum, od, do, ignore=None):
"""Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`."""
konflikti = Konflikt()
if ucilnica not in self.ucilnice:
raise ValueError(
'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'
.format(ucilnica))
if not self.min_datum <= datum <= self.max_datum:
raise ValueError(
'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'
.format(datum))
for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:
if s != ignore and s.se_po_urah_prekriva(od, do):
konflikti.srecanja.append(s)
for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:
if r != ignore and r.se_po_urah_prekriva(od, do):
konflikti.rezervacije.append(r)
return konflikti
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ProsteUcilniceTermin(Termin):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class ProsteUcilnice(object):
"""Zgradi strukturo, ki omogoca hitro iskanje prekrivanj za dane ucilnice glede na uro in dan v tednu."""
def __init__(self, ucilnice):
self.ucilnice = set(ucilnice)
self.zasedenost_ucilnic = defaultdict(dict)
self.rezerviranost_ucilnic = defaultdict(dict)
def dodaj_srecanja_semestra(self, semester, teden=None):
for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'
).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in
self.ucilnice]).exclude(ura__isnull=True):
if teden is None or semester.od <= teden + datetime.timedelta(days
=srecanje.dan - 1) <= semester.do:
for i in range(srecanje.trajanje):
self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][
srecanje.ucilnica] = srecanje
def upostevaj_rezervacije_za_teden(self, teden):
self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))
def upostevaj_rezervacije(self, rezervacije):
for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',
queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.
ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):
for ucilnica in rezervacija.ustrezne_ucilnice:
for dan in rezervacija.dnevi():
for ura in range(rezervacija.od, rezervacija.do):
self.rezerviranost_ucilnic[dan.isoweekday(), ura][
ucilnica] = rezervacija
def dobi_termine(self):
termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.
zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for
d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]
return termini
class Konflikt(object):
def __init__(self):
self.srecanja = []
self.rezervacije = []
@property
def st_konfliktov(self):
return len(self.srecanja) + len(self.rezervacije)
def __bool__(self):
return self.st_konfliktov > 0
def __str__(self):
return 'Konflikti:\n rezervacije:\n{}\n predmeti:\n{}'.format(
'\n '.join(map(str, self.rezervacije)), '\n '.join(map(
str, self.srecanja)))
class IskalnikKonfliktov(object):
"""Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico."""
def __init__(self, ucilnice, min_datum, max_datum):
self.ucilnice = set(ucilnice)
self.min_datum = min_datum
self.max_datum = max_datum
self.zasedenost_ucilnic = defaultdict(list)
self.rezerviranost_ucilnic = defaultdict(list)
def dodaj_srecanja(self):
self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.
min_datum, self.max_datum))
def dodaj_srecanja_semestrov(self, semestri):
for s in Srecanje.objects.filter(semester__in=semestri,
ucilnica__in=self.ucilnice).exclude(ura__isnull=True
).select_related('semester', 'predmet', 'ucilnica'):
for d in s.dnevi_med(self.min_datum, self.max_datum):
self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)
def dodaj_rezervacije(self, rezervacije):
"""Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic"""
for r in rezervacije:
for u in r.seznam_ucilnic:
for d in r.dnevi_med(self.min_datum, self.max_datum):
self.rezerviranost_ucilnic[u.pk, d].append(r)
@staticmethod
def za_rezervacije(rezervacije: RezervacijaQuerySet):
"""Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic"""
min_datum = datetime.date.max
max_datum = datetime.date.min
ucilnice = set()
for r in rezervacije:
if r.zacetek < min_datum:
min_datum = r.zacetek
if r.konec > max_datum:
max_datum = r.konec
ucilnice.update(r.seznam_ucilnic)
iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)
iskalnik.dodaj_srecanja()
iskalnik.dodaj_rezervacije(rezervacije)
return iskalnik
def konflikti_z_rezervacijo(self, r: Rezervacija):
if not hasattr(r, 'seznam_ucilnic'):
r.seznam_ucilnic = r.ucilnice.all()
for u in r.seznam_ucilnic:
for d in r.dnevi():
k = self.konflikti(u, d, r.od, r.do, r)
if k:
yield u, d, k
def konflikti(self, ucilnica, datum, od, do, ignore=None):
"""Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`."""
konflikti = Konflikt()
if ucilnica not in self.ucilnice:
raise ValueError(
'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'
.format(ucilnica))
if not self.min_datum <= datum <= self.max_datum:
raise ValueError(
'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'
.format(datum))
for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:
if s != ignore and s.se_po_urah_prekriva(od, do):
konflikti.srecanja.append(s)
for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:
if r != ignore and r.se_po_urah_prekriva(od, do):
konflikti.rezervacije.append(r)
return konflikti
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ProsteUcilniceTermin(Termin):
HUE_PRAZEN = 120
HUE_POLN = 0
def __init__(self, dan, ura, ustrezne_ucilnice, zasedene_ucilnice,
rezervirane_ucilnice):
super().__init__(dan, ura)
zasedene_pks = {u.pk for u in zasedene_ucilnice}
rezervirane_pks = {u.pk for u in rezervirane_ucilnice}
self.proste = [u for u in ustrezne_ucilnice if u.pk not in
zasedene_pks and u.pk not in rezervirane_pks]
self.zasedene = [(u, r) for u, r in zasedene_ucilnice.items() if u.
pk not in rezervirane_pks]
self.rezervirane = list(rezervirane_ucilnice.items())
self.prikazane_ucilnice = []
def filtriraj_ucilnice(self, pokazi_zasedene):
vse = [('prosta', u, None) for u in self.proste]
if pokazi_zasedene:
vse.extend([('rezervirana', u, r) for u, r in self.rezervirane])
vse.extend([('zasedena', u, r) for u, r in self.zasedene])
self.prikazane_ucilnice = sorted(vse, key=lambda x: x[1])
def hue(self):
h = self.HUE_PRAZEN if self.proste else self.HUE_POLN
return '{:.0f}'.format(h)
class ProsteUcilnice(object):
"""Zgradi strukturo, ki omogoca hitro iskanje prekrivanj za dane ucilnice glede na uro in dan v tednu."""
def __init__(self, ucilnice):
self.ucilnice = set(ucilnice)
self.zasedenost_ucilnic = defaultdict(dict)
self.rezerviranost_ucilnic = defaultdict(dict)
def dodaj_srecanja_semestra(self, semester, teden=None):
for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'
).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in
self.ucilnice]).exclude(ura__isnull=True):
if teden is None or semester.od <= teden + datetime.timedelta(days
=srecanje.dan - 1) <= semester.do:
for i in range(srecanje.trajanje):
self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][
srecanje.ucilnica] = srecanje
def upostevaj_rezervacije_za_teden(self, teden):
self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))
def upostevaj_rezervacije(self, rezervacije):
for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',
queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.
ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):
for ucilnica in rezervacija.ustrezne_ucilnice:
for dan in rezervacija.dnevi():
for ura in range(rezervacija.od, rezervacija.do):
self.rezerviranost_ucilnic[dan.isoweekday(), ura][
ucilnica] = rezervacija
def dobi_termine(self):
termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.
zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for
d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]
return termini
class Konflikt(object):
def __init__(self):
self.srecanja = []
self.rezervacije = []
@property
def st_konfliktov(self):
return len(self.srecanja) + len(self.rezervacije)
def __bool__(self):
return self.st_konfliktov > 0
def __str__(self):
return 'Konflikti:\n rezervacije:\n{}\n predmeti:\n{}'.format(
'\n '.join(map(str, self.rezervacije)), '\n '.join(map(
str, self.srecanja)))
class IskalnikKonfliktov(object):
"""Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico."""
def __init__(self, ucilnice, min_datum, max_datum):
self.ucilnice = set(ucilnice)
self.min_datum = min_datum
self.max_datum = max_datum
self.zasedenost_ucilnic = defaultdict(list)
self.rezerviranost_ucilnic = defaultdict(list)
def dodaj_srecanja(self):
self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.
min_datum, self.max_datum))
def dodaj_srecanja_semestrov(self, semestri):
for s in Srecanje.objects.filter(semester__in=semestri,
ucilnica__in=self.ucilnice).exclude(ura__isnull=True
).select_related('semester', 'predmet', 'ucilnica'):
for d in s.dnevi_med(self.min_datum, self.max_datum):
self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)
def dodaj_rezervacije(self, rezervacije):
"""Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic"""
for r in rezervacije:
for u in r.seznam_ucilnic:
for d in r.dnevi_med(self.min_datum, self.max_datum):
self.rezerviranost_ucilnic[u.pk, d].append(r)
@staticmethod
def za_rezervacije(rezervacije: RezervacijaQuerySet):
"""Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic"""
min_datum = datetime.date.max
max_datum = datetime.date.min
ucilnice = set()
for r in rezervacije:
if r.zacetek < min_datum:
min_datum = r.zacetek
if r.konec > max_datum:
max_datum = r.konec
ucilnice.update(r.seznam_ucilnic)
iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)
iskalnik.dodaj_srecanja()
iskalnik.dodaj_rezervacije(rezervacije)
return iskalnik
def konflikti_z_rezervacijo(self, r: Rezervacija):
if not hasattr(r, 'seznam_ucilnic'):
r.seznam_ucilnic = r.ucilnice.all()
for u in r.seznam_ucilnic:
for d in r.dnevi():
k = self.konflikti(u, d, r.od, r.do, r)
if k:
yield u, d, k
def konflikti(self, ucilnica, datum, od, do, ignore=None):
"""Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`."""
konflikti = Konflikt()
if ucilnica not in self.ucilnice:
raise ValueError(
'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'
.format(ucilnica))
if not self.min_datum <= datum <= self.max_datum:
raise ValueError(
'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'
.format(datum))
for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:
if s != ignore and s.se_po_urah_prekriva(od, do):
konflikti.srecanja.append(s)
for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:
if r != ignore and r.se_po_urah_prekriva(od, do):
konflikti.rezervacije.append(r)
return konflikti
<|reserved_special_token_1|>
import datetime
from collections import defaultdict
from django.db.models import Prefetch
from urnik.models import Termin, Rezervacija, Ucilnica, DNEVI, MIN_URA, MAX_URA, Srecanje, Semester, RezervacijaQuerySet
class ProsteUcilniceTermin(Termin):
    """A single (day, hour) slot in the free-classrooms grid.

    Splits the candidate classrooms into free, occupied (by a regular
    meeting) and reserved ones, and can build the list of classrooms
    that should be rendered for this slot.
    """

    HUE_PRAZEN = 120
    HUE_POLN = 0

    def __init__(self, dan, ura, ustrezne_ucilnice, zasedene_ucilnice,
                 rezervirane_ucilnice):
        super().__init__(dan, ura)
        # Primary keys of classrooms taken by a meeting resp. a reservation.
        occupied_pks = {ucilnica.pk for ucilnica in zasedene_ucilnice}
        reserved_pks = {ucilnica.pk for ucilnica in rezervirane_ucilnice}
        # Classrooms that are neither occupied nor reserved in this slot.
        self.proste = [ucilnica for ucilnica in ustrezne_ucilnice
                       if ucilnica.pk not in occupied_pks
                       and ucilnica.pk not in reserved_pks]
        # Occupied classrooms paired with the meeting occupying them; a
        # classroom that is also reserved is listed under rezervirane only.
        self.zasedene = [(ucilnica, srecanje)
                         for ucilnica, srecanje in zasedene_ucilnice.items()
                         if ucilnica.pk not in reserved_pks]
        # Reserved classrooms paired with the responsible reservation.
        self.rezervirane = list(rezervirane_ucilnice.items())
        # Filled in by filtriraj_ucilnice(): (state, classroom, reason) triples.
        self.prikazane_ucilnice = []

    def filtriraj_ucilnice(self, pokazi_zasedene):
        """Build the sorted display list; include taken rooms only on request."""
        entries = [('prosta', ucilnica, None) for ucilnica in self.proste]
        if pokazi_zasedene:
            for ucilnica, rezervacija in self.rezervirane:
                entries.append(('rezervirana', ucilnica, rezervacija))
            for ucilnica, srecanje in self.zasedene:
                entries.append(('zasedena', ucilnica, srecanje))
        self.prikazane_ucilnice = sorted(entries, key=lambda entry: entry[1])

    def hue(self):
        """Return the CSS hue for this slot (green when free, red when full)."""
        if self.proste:
            return '{:.0f}'.format(self.HUE_PRAZEN)
        return '{:.0f}'.format(self.HUE_POLN)
class ProsteUcilnice(object):
    """Builds a structure that allows fast overlap lookups for the given
    classrooms, keyed by hour and day of the week."""

    def __init__(self, ucilnice):
        # ucilnice: iterable of Ucilnica instances to track.
        self.ucilnice = set(ucilnice)
        # (dan, ura) -> {ucilnica: srecanje} for regular semester meetings.
        self.zasedenost_ucilnic = defaultdict(dict)
        # (dan, ura) -> {ucilnica: rezervacija} for one-off reservations.
        self.rezerviranost_ucilnic = defaultdict(dict)

    def dodaj_srecanja_semestra(self, semester, teden=None):
        # Index every scheduled meeting of the semester held in one of the
        # tracked classrooms; meetings without an hour are skipped.
        for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'
            ).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in
            self.ucilnice]).exclude(ura__isnull=True):
            # With a concrete week given, skip meetings whose date falls
            # outside the semester's validity range.
            if teden is None or semester.od <= teden + datetime.timedelta(days
                =srecanje.dan - 1) <= semester.do:
                # Mark each hour the meeting spans as occupied.
                for i in range(srecanje.trajanje):
                    self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][
                        srecanje.ucilnica] = srecanje

    def upostevaj_rezervacije_za_teden(self, teden):
        # Convenience wrapper: apply every reservation in the given week.
        self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))

    def upostevaj_rezervacije(self, rezervacije):
        # Index reservations, restricted to tracked classrooms via the
        # Prefetch below (exposed as attribute 'ustrezne_ucilnice').
        for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',
            queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.
            ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):
            for ucilnica in rezervacija.ustrezne_ucilnice:
                for dan in rezervacija.dnevi():
                    for ura in range(rezervacija.od, rezervacija.do):
                        self.rezerviranost_ucilnic[dan.isoweekday(), ura][
                            ucilnica] = rezervacija

    def dobi_termine(self):
        """Return a ProsteUcilniceTermin for every (day, hour) grid cell."""
        termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.
            zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for
            d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]
        return termini
class Konflikt(object):
    """Collects the meetings and reservations clashing with a proposed booking."""

    def __init__(self):
        self.srecanja = []     # clashing regular meetings
        self.rezervacije = []  # clashing one-off reservations

    @property
    def st_konfliktov(self):
        """Total number of recorded conflicts of either kind."""
        return len(self.rezervacije) + len(self.srecanja)

    def __bool__(self):
        # Truthy exactly when at least one conflict was recorded.
        return bool(self.st_konfliktov)

    def __str__(self):
        return 'Konflikti:\n rezervacije:\n{}\n predmeti:\n{}'.format(
            '\n '.join(map(str, self.rezervacije)), '\n '.join(map(
            str, self.srecanja)))
class IskalnikKonfliktov(object):
    """Builds a structure that allows fast overlap lookups by date and classroom."""

    def __init__(self, ucilnice, min_datum, max_datum):
        self.ucilnice = set(ucilnice)
        # Inclusive date range this index covers; lookups outside it raise.
        self.min_datum = min_datum
        self.max_datum = max_datum
        # (ucilnica_pk, date) -> [Srecanje, ...]
        self.zasedenost_ucilnic = defaultdict(list)
        # (ucilnica_pk, date) -> [Rezervacija, ...]
        self.rezerviranost_ucilnic = defaultdict(list)

    def dodaj_srecanja(self):
        """Index meetings of every semester overlapping the covered date range."""
        self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.
            min_datum, self.max_datum))

    def dodaj_srecanja_semestrov(self, semestri):
        # Index every scheduled meeting of the given semesters, one entry
        # per concrete date within the covered range.
        for s in Srecanje.objects.filter(semester__in=semestri,
            ucilnica__in=self.ucilnice).exclude(ura__isnull=True
            ).select_related('semester', 'predmet', 'ucilnica'):
            for d in s.dnevi_med(self.min_datum, self.max_datum):
                self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)

    def dodaj_rezervacije(self, rezervacije):
        """Index reservations. The queryset must be prefetched so that the
        attribute seznam_ucilnic exists."""
        for r in rezervacije:
            for u in r.seznam_ucilnic:
                for d in r.dnevi_med(self.min_datum, self.max_datum):
                    self.rezerviranost_ucilnic[u.pk, d].append(r)

    @staticmethod
    def za_rezervacije(rezervacije: RezervacijaQuerySet):
        """Build an index covering exactly the given reservations.

        The queryset must be prefetched so that the attribute
        seznam_ucilnic exists.
        """
        # Compute the overall date range and classroom set of the reservations.
        min_datum = datetime.date.max
        max_datum = datetime.date.min
        ucilnice = set()
        for r in rezervacije:
            if r.zacetek < min_datum:
                min_datum = r.zacetek
            if r.konec > max_datum:
                max_datum = r.konec
            ucilnice.update(r.seznam_ucilnic)
        iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)
        iskalnik.dodaj_srecanja()
        iskalnik.dodaj_rezervacije(rezervacije)
        return iskalnik

    def konflikti_z_rezervacijo(self, r: Rezervacija):
        # Yield (classroom, date, Konflikt) triples for every slot of the
        # reservation that clashes with something else; r itself is ignored.
        if not hasattr(r, 'seznam_ucilnic'):
            r.seznam_ucilnic = r.ucilnice.all()
        for u in r.seznam_ucilnic:
            for d in r.dnevi():
                k = self.konflikti(u, d, r.od, r.do, r)
                if k:
                    yield u, d, k

    def konflikti(self, ucilnica, datum, od, do, ignore=None):
        """Return the conflicts with an activity that would take place in
        classroom `ucilnica` on `datum` from hour `od` to `do`.

        `ignore` excludes one meeting/reservation (typically the activity
        itself) from the result. Raises ValueError when the index was not
        built for this classroom or date.
        """
        konflikti = Konflikt()
        if ucilnica not in self.ucilnice:
            raise ValueError(
                'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'
                .format(ucilnica))
        if not self.min_datum <= datum <= self.max_datum:
            raise ValueError(
                'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'
                .format(datum))
        for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:
            if s != ignore and s.se_po_urah_prekriva(od, do):
                konflikti.srecanja.append(s)
        for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:
            if r != ignore and r.se_po_urah_prekriva(od, do):
                konflikti.rezervacije.append(r)
        return konflikti
<|reserved_special_token_1|>
import datetime
from collections import defaultdict
from django.db.models import Prefetch
from urnik.models import Termin, Rezervacija, Ucilnica, DNEVI, MIN_URA, MAX_URA, Srecanje, Semester, RezervacijaQuerySet
class ProsteUcilniceTermin(Termin):
    """A single (day, hour) slot in the free-classrooms grid."""
    HUE_PRAZEN = 120 # green: at least one classroom is free
    HUE_POLN = 0 # red: every classroom is taken
    def __init__(self, dan, ura, ustrezne_ucilnice, zasedene_ucilnice, rezervirane_ucilnice):
        super().__init__(dan, ura)
        zasedene_pks = {u.pk for u in zasedene_ucilnice}
        rezervirane_pks = {u.pk for u in rezervirane_ucilnice}
        # All suitable free classrooms.
        self.proste = [u for u in ustrezne_ucilnice if u.pk not in zasedene_pks and u.pk not in rezervirane_pks]
        # All suitable classrooms that are taken by a regular meeting.
        # Values are the reasons for the occupancy.
        self.zasedene = [(u, r) for u, r in zasedene_ucilnice.items() if u.pk not in rezervirane_pks]
        # All suitable classrooms that are taken because they are reserved.
        # Values are the reasons for the occupancy.
        self.rezervirane = list(rezervirane_ucilnice.items())
        # Classrooms that will be displayed, together with state and reason.
        self.prikazane_ucilnice = []
    def filtriraj_ucilnice(self, pokazi_zasedene):
        # Build the sorted display list; include taken rooms only on request.
        vse = [('prosta', u, None) for u in self.proste]
        if pokazi_zasedene:
            vse.extend([('rezervirana', u, r) for u, r in self.rezervirane])
            vse.extend([('zasedena', u, r) for u, r in self.zasedene])
        self.prikazane_ucilnice = sorted(vse, key=lambda x: x[1])
    def hue(self):
        # CSS hue for this slot: green when a room is free, red otherwise.
        h = self.HUE_PRAZEN if self.proste else self.HUE_POLN
        return "{:.0f}".format(h)
class ProsteUcilnice(object):
    """Builds a structure that allows fast overlap lookups for the given
    classrooms, keyed by hour and day of the week."""
    def __init__(self, ucilnice):
        # ucilnice: iterable of Ucilnica instances to track.
        self.ucilnice = set(ucilnice)
        # (dan, ura) -> {ucilnica: srecanje} for regular semester meetings.
        self.zasedenost_ucilnic = defaultdict(dict)
        # (dan, ura) -> {ucilnica: rezervacija} for one-off reservations.
        self.rezerviranost_ucilnic = defaultdict(dict)
    def dodaj_srecanja_semestra(self, semester, teden=None):
        # Index every scheduled meeting of the semester held in one of the
        # tracked classrooms; meetings without an hour are skipped.
        for srecanje in semester.srecanja.select_related('ucilnica', 'predmet').prefetch_related('ucitelji'
                ).filter(ucilnica__in=[u.pk for u in self.ucilnice]).exclude(ura__isnull=True):
            # With a concrete week given, skip meetings whose date falls
            # outside the semester's validity range.
            if teden is None or semester.od <= teden + datetime.timedelta(days=srecanje.dan-1) <= semester.do:
                for i in range(srecanje.trajanje):
                    self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][srecanje.ucilnica] = srecanje
    def upostevaj_rezervacije_za_teden(self, teden):
        # Convenience wrapper: apply every reservation in the given week.
        self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))
    def upostevaj_rezervacije(self, rezervacije):
        # Index reservations, restricted to tracked classrooms via the
        # Prefetch below (exposed as attribute 'ustrezne_ucilnice').
        for rezervacija in rezervacije.prefetch_related(
                Prefetch(
                    'ucilnice',
                    queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.ucilnice]),
                    to_attr='ustrezne_ucilnice'),
                'osebe'):
            for ucilnica in rezervacija.ustrezne_ucilnice:
                for dan in rezervacija.dnevi():
                    for ura in range(rezervacija.od, rezervacija.do):
                        self.rezerviranost_ucilnic[dan.isoweekday(), ura][ucilnica] = rezervacija
    def dobi_termine(self):
        # Return a ProsteUcilniceTermin for every (day, hour) grid cell.
        termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.zasedenost_ucilnic[d, u],
                                        self.rezerviranost_ucilnic[d, u])
                   for d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]
        return termini
class Konflikt(object):
    """Accumulates the meetings and reservations that clash with a proposed activity."""
    def __init__(self):
        self.srecanja = []     # clashing regular meetings
        self.rezervacije = []  # clashing one-off reservations
    @property
    def st_konfliktov(self):
        """Total count of conflicts of both kinds."""
        return len(self.rezervacije) + len(self.srecanja)
    def __bool__(self):
        # Truthy exactly when something was recorded.
        return bool(self.st_konfliktov)
    def __str__(self):
        rezervacije = "\n ".join(map(str, self.rezervacije))
        srecanja = "\n ".join(map(str, self.srecanja))
        return "Konflikti:\n rezervacije:\n{}\n predmeti:\n{}".format(rezervacije, srecanja)
class IskalnikKonfliktov(object):
    """Builds a structure that allows fast overlap lookups by date and classroom."""
    def __init__(self, ucilnice, min_datum, max_datum):
        self.ucilnice = set(ucilnice)
        # Inclusive date range this index covers; lookups outside it raise.
        self.min_datum = min_datum
        self.max_datum = max_datum
        # (ucilnica_pk, date) -> [Srecanje, ...]
        self.zasedenost_ucilnic = defaultdict(list)
        # (ucilnica_pk, date) -> [Rezervacija, ...]
        self.rezerviranost_ucilnic = defaultdict(list)
    def dodaj_srecanja(self):
        # Index meetings of every semester overlapping the covered range.
        self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.min_datum, self.max_datum))
    def dodaj_srecanja_semestrov(self, semestri):
        # One index entry per concrete date a meeting occurs on.
        for s in Srecanje.objects.filter(semester__in=semestri, ucilnica__in=self.ucilnice
                ).exclude(ura__isnull=True).select_related('semester', 'predmet', 'ucilnica'):
            for d in s.dnevi_med(self.min_datum, self.max_datum):
                self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)
    def dodaj_rezervacije(self, rezervacije):
        """Index reservations. The queryset must be prefetched so that the attribute seznam_ucilnic exists."""
        for r in rezervacije:
            for u in r.seznam_ucilnic:
                for d in r.dnevi_med(self.min_datum, self.max_datum):
                    self.rezerviranost_ucilnic[u.pk, d].append(r)
    @staticmethod
    def za_rezervacije(rezervacije: RezervacijaQuerySet):
        """Build an index covering exactly the given reservations.
        The queryset must be prefetched so that the attribute seznam_ucilnic exists."""
        # Compute the overall date range and classroom set of the reservations.
        min_datum = datetime.date.max
        max_datum = datetime.date.min
        ucilnice = set()
        for r in rezervacije:
            if r.zacetek < min_datum:
                min_datum = r.zacetek
            if r.konec > max_datum:
                max_datum = r.konec
            ucilnice.update(r.seznam_ucilnic)
        iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)
        iskalnik.dodaj_srecanja()
        iskalnik.dodaj_rezervacije(rezervacije)
        return iskalnik
    def konflikti_z_rezervacijo(self, r: Rezervacija):
        # Yield (classroom, date, Konflikt) triples for every slot of the
        # reservation that clashes with something else; r itself is ignored.
        if not hasattr(r, 'seznam_ucilnic'):
            r.seznam_ucilnic = r.ucilnice.all()
        for u in r.seznam_ucilnic:
            for d in r.dnevi():
                k = self.konflikti(u, d, r.od, r.do, r)
                if k:
                    yield u, d, k
    def konflikti(self, ucilnica, datum, od, do, ignore=None):
        """Return the conflicts with an activity that would take place in
        classroom `ucilnica` on `datum` from hour `od` to `do`.
        `ignore` excludes one meeting/reservation (typically the activity
        itself). Raises ValueError for classrooms/dates outside the index."""
        konflikti = Konflikt()
        if ucilnica not in self.ucilnice:
            raise ValueError("Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}".format(ucilnica))
        if not (self.min_datum <= datum <= self.max_datum):
            raise ValueError("Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}".format(datum))
        for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:
            if s != ignore and s.se_po_urah_prekriva(od, do):
                konflikti.srecanja.append(s)
        for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:
            if r != ignore and r.se_po_urah_prekriva(od, do):
                konflikti.rezervacije.append(r)
        return konflikti
|
flexible
|
{
"blob_id": "3ce9c0aeb6b4e575fbb3fced52a86a1dcec44706",
"index": 4713,
"step-1": "<mask token>\n\n\nclass ProsteUcilnice(object):\n <mask token>\n\n def __init__(self, ucilnice):\n self.ucilnice = set(ucilnice)\n self.zasedenost_ucilnic = defaultdict(dict)\n self.rezerviranost_ucilnic = defaultdict(dict)\n\n def dodaj_srecanja_semestra(self, semester, teden=None):\n for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'\n ).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in\n self.ucilnice]).exclude(ura__isnull=True):\n if teden is None or semester.od <= teden + datetime.timedelta(days\n =srecanje.dan - 1) <= semester.do:\n for i in range(srecanje.trajanje):\n self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][\n srecanje.ucilnica] = srecanje\n\n def upostevaj_rezervacije_za_teden(self, teden):\n self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))\n\n def upostevaj_rezervacije(self, rezervacije):\n for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',\n queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.\n ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):\n for ucilnica in rezervacija.ustrezne_ucilnice:\n for dan in rezervacija.dnevi():\n for ura in range(rezervacija.od, rezervacija.do):\n self.rezerviranost_ucilnic[dan.isoweekday(), ura][\n ucilnica] = rezervacija\n\n def dobi_termine(self):\n termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.\n zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for\n d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]\n return termini\n\n\nclass Konflikt(object):\n\n def __init__(self):\n self.srecanja = []\n self.rezervacije = []\n\n @property\n def st_konfliktov(self):\n return len(self.srecanja) + len(self.rezervacije)\n\n def __bool__(self):\n return self.st_konfliktov > 0\n\n def __str__(self):\n return 'Konflikti:\\n rezervacije:\\n{}\\n predmeti:\\n{}'.format(\n '\\n '.join(map(str, self.rezervacije)), '\\n '.join(map(\n str, self.srecanja)))\n\n\nclass IskalnikKonfliktov(object):\n 
\"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico.\"\"\"\n\n def __init__(self, ucilnice, min_datum, max_datum):\n self.ucilnice = set(ucilnice)\n self.min_datum = min_datum\n self.max_datum = max_datum\n self.zasedenost_ucilnic = defaultdict(list)\n self.rezerviranost_ucilnic = defaultdict(list)\n\n def dodaj_srecanja(self):\n self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.\n min_datum, self.max_datum))\n\n def dodaj_srecanja_semestrov(self, semestri):\n for s in Srecanje.objects.filter(semester__in=semestri,\n ucilnica__in=self.ucilnice).exclude(ura__isnull=True\n ).select_related('semester', 'predmet', 'ucilnica'):\n for d in s.dnevi_med(self.min_datum, self.max_datum):\n self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)\n\n def dodaj_rezervacije(self, rezervacije):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n for r in rezervacije:\n for u in r.seznam_ucilnic:\n for d in r.dnevi_med(self.min_datum, self.max_datum):\n self.rezerviranost_ucilnic[u.pk, d].append(r)\n\n @staticmethod\n def za_rezervacije(rezervacije: RezervacijaQuerySet):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n min_datum = datetime.date.max\n max_datum = datetime.date.min\n ucilnice = set()\n for r in rezervacije:\n if r.zacetek < min_datum:\n min_datum = r.zacetek\n if r.konec > max_datum:\n max_datum = r.konec\n ucilnice.update(r.seznam_ucilnic)\n iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)\n iskalnik.dodaj_srecanja()\n iskalnik.dodaj_rezervacije(rezervacije)\n return iskalnik\n\n def konflikti_z_rezervacijo(self, r: Rezervacija):\n if not hasattr(r, 'seznam_ucilnic'):\n r.seznam_ucilnic = r.ucilnice.all()\n for u in r.seznam_ucilnic:\n for d in r.dnevi():\n k = self.konflikti(u, d, r.od, r.do, r)\n if k:\n yield u, d, k\n\n def konflikti(self, ucilnica, datum, od, do, ignore=None):\n \"\"\"Vrne konflikte z 
dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`.\"\"\"\n konflikti = Konflikt()\n if ucilnica not in self.ucilnice:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'\n .format(ucilnica))\n if not self.min_datum <= datum <= self.max_datum:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'\n .format(datum))\n for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:\n if s != ignore and s.se_po_urah_prekriva(od, do):\n konflikti.srecanja.append(s)\n for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:\n if r != ignore and r.se_po_urah_prekriva(od, do):\n konflikti.rezervacije.append(r)\n return konflikti\n",
"step-2": "<mask token>\n\n\nclass ProsteUcilniceTermin(Termin):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ProsteUcilnice(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj za dane ucilnice glede na uro in dan v tednu.\"\"\"\n\n def __init__(self, ucilnice):\n self.ucilnice = set(ucilnice)\n self.zasedenost_ucilnic = defaultdict(dict)\n self.rezerviranost_ucilnic = defaultdict(dict)\n\n def dodaj_srecanja_semestra(self, semester, teden=None):\n for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'\n ).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in\n self.ucilnice]).exclude(ura__isnull=True):\n if teden is None or semester.od <= teden + datetime.timedelta(days\n =srecanje.dan - 1) <= semester.do:\n for i in range(srecanje.trajanje):\n self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][\n srecanje.ucilnica] = srecanje\n\n def upostevaj_rezervacije_za_teden(self, teden):\n self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))\n\n def upostevaj_rezervacije(self, rezervacije):\n for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',\n queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.\n ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):\n for ucilnica in rezervacija.ustrezne_ucilnice:\n for dan in rezervacija.dnevi():\n for ura in range(rezervacija.od, rezervacija.do):\n self.rezerviranost_ucilnic[dan.isoweekday(), ura][\n ucilnica] = rezervacija\n\n def dobi_termine(self):\n termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.\n zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for\n d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]\n return termini\n\n\nclass Konflikt(object):\n\n def __init__(self):\n self.srecanja = []\n self.rezervacije = []\n\n @property\n def st_konfliktov(self):\n return len(self.srecanja) + len(self.rezervacije)\n\n def __bool__(self):\n return self.st_konfliktov > 0\n\n def 
__str__(self):\n return 'Konflikti:\\n rezervacije:\\n{}\\n predmeti:\\n{}'.format(\n '\\n '.join(map(str, self.rezervacije)), '\\n '.join(map(\n str, self.srecanja)))\n\n\nclass IskalnikKonfliktov(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico.\"\"\"\n\n def __init__(self, ucilnice, min_datum, max_datum):\n self.ucilnice = set(ucilnice)\n self.min_datum = min_datum\n self.max_datum = max_datum\n self.zasedenost_ucilnic = defaultdict(list)\n self.rezerviranost_ucilnic = defaultdict(list)\n\n def dodaj_srecanja(self):\n self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.\n min_datum, self.max_datum))\n\n def dodaj_srecanja_semestrov(self, semestri):\n for s in Srecanje.objects.filter(semester__in=semestri,\n ucilnica__in=self.ucilnice).exclude(ura__isnull=True\n ).select_related('semester', 'predmet', 'ucilnica'):\n for d in s.dnevi_med(self.min_datum, self.max_datum):\n self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)\n\n def dodaj_rezervacije(self, rezervacije):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n for r in rezervacije:\n for u in r.seznam_ucilnic:\n for d in r.dnevi_med(self.min_datum, self.max_datum):\n self.rezerviranost_ucilnic[u.pk, d].append(r)\n\n @staticmethod\n def za_rezervacije(rezervacije: RezervacijaQuerySet):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n min_datum = datetime.date.max\n max_datum = datetime.date.min\n ucilnice = set()\n for r in rezervacije:\n if r.zacetek < min_datum:\n min_datum = r.zacetek\n if r.konec > max_datum:\n max_datum = r.konec\n ucilnice.update(r.seznam_ucilnic)\n iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)\n iskalnik.dodaj_srecanja()\n iskalnik.dodaj_rezervacije(rezervacije)\n return iskalnik\n\n def konflikti_z_rezervacijo(self, r: Rezervacija):\n if not hasattr(r, 'seznam_ucilnic'):\n r.seznam_ucilnic = r.ucilnice.all()\n for 
u in r.seznam_ucilnic:\n for d in r.dnevi():\n k = self.konflikti(u, d, r.od, r.do, r)\n if k:\n yield u, d, k\n\n def konflikti(self, ucilnica, datum, od, do, ignore=None):\n \"\"\"Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`.\"\"\"\n konflikti = Konflikt()\n if ucilnica not in self.ucilnice:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'\n .format(ucilnica))\n if not self.min_datum <= datum <= self.max_datum:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'\n .format(datum))\n for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:\n if s != ignore and s.se_po_urah_prekriva(od, do):\n konflikti.srecanja.append(s)\n for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:\n if r != ignore and r.se_po_urah_prekriva(od, do):\n konflikti.rezervacije.append(r)\n return konflikti\n",
"step-3": "<mask token>\n\n\nclass ProsteUcilniceTermin(Termin):\n HUE_PRAZEN = 120\n HUE_POLN = 0\n\n def __init__(self, dan, ura, ustrezne_ucilnice, zasedene_ucilnice,\n rezervirane_ucilnice):\n super().__init__(dan, ura)\n zasedene_pks = {u.pk for u in zasedene_ucilnice}\n rezervirane_pks = {u.pk for u in rezervirane_ucilnice}\n self.proste = [u for u in ustrezne_ucilnice if u.pk not in\n zasedene_pks and u.pk not in rezervirane_pks]\n self.zasedene = [(u, r) for u, r in zasedene_ucilnice.items() if u.\n pk not in rezervirane_pks]\n self.rezervirane = list(rezervirane_ucilnice.items())\n self.prikazane_ucilnice = []\n\n def filtriraj_ucilnice(self, pokazi_zasedene):\n vse = [('prosta', u, None) for u in self.proste]\n if pokazi_zasedene:\n vse.extend([('rezervirana', u, r) for u, r in self.rezervirane])\n vse.extend([('zasedena', u, r) for u, r in self.zasedene])\n self.prikazane_ucilnice = sorted(vse, key=lambda x: x[1])\n\n def hue(self):\n h = self.HUE_PRAZEN if self.proste else self.HUE_POLN\n return '{:.0f}'.format(h)\n\n\nclass ProsteUcilnice(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj za dane ucilnice glede na uro in dan v tednu.\"\"\"\n\n def __init__(self, ucilnice):\n self.ucilnice = set(ucilnice)\n self.zasedenost_ucilnic = defaultdict(dict)\n self.rezerviranost_ucilnic = defaultdict(dict)\n\n def dodaj_srecanja_semestra(self, semester, teden=None):\n for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'\n ).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in\n self.ucilnice]).exclude(ura__isnull=True):\n if teden is None or semester.od <= teden + datetime.timedelta(days\n =srecanje.dan - 1) <= semester.do:\n for i in range(srecanje.trajanje):\n self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][\n srecanje.ucilnica] = srecanje\n\n def upostevaj_rezervacije_za_teden(self, teden):\n self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))\n\n def upostevaj_rezervacije(self, 
rezervacije):\n for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',\n queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.\n ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):\n for ucilnica in rezervacija.ustrezne_ucilnice:\n for dan in rezervacija.dnevi():\n for ura in range(rezervacija.od, rezervacija.do):\n self.rezerviranost_ucilnic[dan.isoweekday(), ura][\n ucilnica] = rezervacija\n\n def dobi_termine(self):\n termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.\n zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for\n d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]\n return termini\n\n\nclass Konflikt(object):\n\n def __init__(self):\n self.srecanja = []\n self.rezervacije = []\n\n @property\n def st_konfliktov(self):\n return len(self.srecanja) + len(self.rezervacije)\n\n def __bool__(self):\n return self.st_konfliktov > 0\n\n def __str__(self):\n return 'Konflikti:\\n rezervacije:\\n{}\\n predmeti:\\n{}'.format(\n '\\n '.join(map(str, self.rezervacije)), '\\n '.join(map(\n str, self.srecanja)))\n\n\nclass IskalnikKonfliktov(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico.\"\"\"\n\n def __init__(self, ucilnice, min_datum, max_datum):\n self.ucilnice = set(ucilnice)\n self.min_datum = min_datum\n self.max_datum = max_datum\n self.zasedenost_ucilnic = defaultdict(list)\n self.rezerviranost_ucilnic = defaultdict(list)\n\n def dodaj_srecanja(self):\n self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.\n min_datum, self.max_datum))\n\n def dodaj_srecanja_semestrov(self, semestri):\n for s in Srecanje.objects.filter(semester__in=semestri,\n ucilnica__in=self.ucilnice).exclude(ura__isnull=True\n ).select_related('semester', 'predmet', 'ucilnica'):\n for d in s.dnevi_med(self.min_datum, self.max_datum):\n self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)\n\n def dodaj_rezervacije(self, rezervacije):\n \"\"\"Queryset rezervacije mora biti 
prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n for r in rezervacije:\n for u in r.seznam_ucilnic:\n for d in r.dnevi_med(self.min_datum, self.max_datum):\n self.rezerviranost_ucilnic[u.pk, d].append(r)\n\n @staticmethod\n def za_rezervacije(rezervacije: RezervacijaQuerySet):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n min_datum = datetime.date.max\n max_datum = datetime.date.min\n ucilnice = set()\n for r in rezervacije:\n if r.zacetek < min_datum:\n min_datum = r.zacetek\n if r.konec > max_datum:\n max_datum = r.konec\n ucilnice.update(r.seznam_ucilnic)\n iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)\n iskalnik.dodaj_srecanja()\n iskalnik.dodaj_rezervacije(rezervacije)\n return iskalnik\n\n def konflikti_z_rezervacijo(self, r: Rezervacija):\n if not hasattr(r, 'seznam_ucilnic'):\n r.seznam_ucilnic = r.ucilnice.all()\n for u in r.seznam_ucilnic:\n for d in r.dnevi():\n k = self.konflikti(u, d, r.od, r.do, r)\n if k:\n yield u, d, k\n\n def konflikti(self, ucilnica, datum, od, do, ignore=None):\n \"\"\"Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`.\"\"\"\n konflikti = Konflikt()\n if ucilnica not in self.ucilnice:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'\n .format(ucilnica))\n if not self.min_datum <= datum <= self.max_datum:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'\n .format(datum))\n for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:\n if s != ignore and s.se_po_urah_prekriva(od, do):\n konflikti.srecanja.append(s)\n for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:\n if r != ignore and r.se_po_urah_prekriva(od, do):\n konflikti.rezervacije.append(r)\n return konflikti\n",
"step-4": "import datetime\nfrom collections import defaultdict\nfrom django.db.models import Prefetch\nfrom urnik.models import Termin, Rezervacija, Ucilnica, DNEVI, MIN_URA, MAX_URA, Srecanje, Semester, RezervacijaQuerySet\n\n\nclass ProsteUcilniceTermin(Termin):\n HUE_PRAZEN = 120\n HUE_POLN = 0\n\n def __init__(self, dan, ura, ustrezne_ucilnice, zasedene_ucilnice,\n rezervirane_ucilnice):\n super().__init__(dan, ura)\n zasedene_pks = {u.pk for u in zasedene_ucilnice}\n rezervirane_pks = {u.pk for u in rezervirane_ucilnice}\n self.proste = [u for u in ustrezne_ucilnice if u.pk not in\n zasedene_pks and u.pk not in rezervirane_pks]\n self.zasedene = [(u, r) for u, r in zasedene_ucilnice.items() if u.\n pk not in rezervirane_pks]\n self.rezervirane = list(rezervirane_ucilnice.items())\n self.prikazane_ucilnice = []\n\n def filtriraj_ucilnice(self, pokazi_zasedene):\n vse = [('prosta', u, None) for u in self.proste]\n if pokazi_zasedene:\n vse.extend([('rezervirana', u, r) for u, r in self.rezervirane])\n vse.extend([('zasedena', u, r) for u, r in self.zasedene])\n self.prikazane_ucilnice = sorted(vse, key=lambda x: x[1])\n\n def hue(self):\n h = self.HUE_PRAZEN if self.proste else self.HUE_POLN\n return '{:.0f}'.format(h)\n\n\nclass ProsteUcilnice(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj za dane ucilnice glede na uro in dan v tednu.\"\"\"\n\n def __init__(self, ucilnice):\n self.ucilnice = set(ucilnice)\n self.zasedenost_ucilnic = defaultdict(dict)\n self.rezerviranost_ucilnic = defaultdict(dict)\n\n def dodaj_srecanja_semestra(self, semester, teden=None):\n for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'\n ).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in\n self.ucilnice]).exclude(ura__isnull=True):\n if teden is None or semester.od <= teden + datetime.timedelta(days\n =srecanje.dan - 1) <= semester.do:\n for i in range(srecanje.trajanje):\n self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + 
i][\n srecanje.ucilnica] = srecanje\n\n def upostevaj_rezervacije_za_teden(self, teden):\n self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))\n\n def upostevaj_rezervacije(self, rezervacije):\n for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',\n queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.\n ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):\n for ucilnica in rezervacija.ustrezne_ucilnice:\n for dan in rezervacija.dnevi():\n for ura in range(rezervacija.od, rezervacija.do):\n self.rezerviranost_ucilnic[dan.isoweekday(), ura][\n ucilnica] = rezervacija\n\n def dobi_termine(self):\n termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.\n zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for\n d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]\n return termini\n\n\nclass Konflikt(object):\n\n def __init__(self):\n self.srecanja = []\n self.rezervacije = []\n\n @property\n def st_konfliktov(self):\n return len(self.srecanja) + len(self.rezervacije)\n\n def __bool__(self):\n return self.st_konfliktov > 0\n\n def __str__(self):\n return 'Konflikti:\\n rezervacije:\\n{}\\n predmeti:\\n{}'.format(\n '\\n '.join(map(str, self.rezervacije)), '\\n '.join(map(\n str, self.srecanja)))\n\n\nclass IskalnikKonfliktov(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico.\"\"\"\n\n def __init__(self, ucilnice, min_datum, max_datum):\n self.ucilnice = set(ucilnice)\n self.min_datum = min_datum\n self.max_datum = max_datum\n self.zasedenost_ucilnic = defaultdict(list)\n self.rezerviranost_ucilnic = defaultdict(list)\n\n def dodaj_srecanja(self):\n self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.\n min_datum, self.max_datum))\n\n def dodaj_srecanja_semestrov(self, semestri):\n for s in Srecanje.objects.filter(semester__in=semestri,\n ucilnica__in=self.ucilnice).exclude(ura__isnull=True\n ).select_related('semester', 'predmet', 'ucilnica'):\n for d in 
s.dnevi_med(self.min_datum, self.max_datum):\n self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)\n\n def dodaj_rezervacije(self, rezervacije):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n for r in rezervacije:\n for u in r.seznam_ucilnic:\n for d in r.dnevi_med(self.min_datum, self.max_datum):\n self.rezerviranost_ucilnic[u.pk, d].append(r)\n\n @staticmethod\n def za_rezervacije(rezervacije: RezervacijaQuerySet):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n min_datum = datetime.date.max\n max_datum = datetime.date.min\n ucilnice = set()\n for r in rezervacije:\n if r.zacetek < min_datum:\n min_datum = r.zacetek\n if r.konec > max_datum:\n max_datum = r.konec\n ucilnice.update(r.seznam_ucilnic)\n iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)\n iskalnik.dodaj_srecanja()\n iskalnik.dodaj_rezervacije(rezervacije)\n return iskalnik\n\n def konflikti_z_rezervacijo(self, r: Rezervacija):\n if not hasattr(r, 'seznam_ucilnic'):\n r.seznam_ucilnic = r.ucilnice.all()\n for u in r.seznam_ucilnic:\n for d in r.dnevi():\n k = self.konflikti(u, d, r.od, r.do, r)\n if k:\n yield u, d, k\n\n def konflikti(self, ucilnica, datum, od, do, ignore=None):\n \"\"\"Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`.\"\"\"\n konflikti = Konflikt()\n if ucilnica not in self.ucilnice:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'\n .format(ucilnica))\n if not self.min_datum <= datum <= self.max_datum:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'\n .format(datum))\n for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:\n if s != ignore and s.se_po_urah_prekriva(od, do):\n konflikti.srecanja.append(s)\n for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:\n if r != ignore and r.se_po_urah_prekriva(od, do):\n 
konflikti.rezervacije.append(r)\n return konflikti\n",
"step-5": "import datetime\nfrom collections import defaultdict\n\nfrom django.db.models import Prefetch\n\nfrom urnik.models import Termin, Rezervacija, Ucilnica, DNEVI, MIN_URA, MAX_URA, Srecanje, Semester, RezervacijaQuerySet\n\n\nclass ProsteUcilniceTermin(Termin):\n HUE_PRAZEN = 120 # zelena\n HUE_POLN = 0 # rdeca\n\n def __init__(self, dan, ura, ustrezne_ucilnice, zasedene_ucilnice, rezervirane_ucilnice):\n super().__init__(dan, ura)\n zasedene_pks = {u.pk for u in zasedene_ucilnice}\n rezervirane_pks = {u.pk for u in rezervirane_ucilnice}\n # Vse ustrezne proste ucilnice.\n self.proste = [u for u in ustrezne_ucilnice if u.pk not in zasedene_pks and u.pk not in rezervirane_pks]\n # Vse ustrezne ucilnice, ki so pa zasedene, ker je tam stalno srečanje. Vrednosti so razlogi za zasedenost.\n self.zasedene = [(u, r) for u, r in zasedene_ucilnice.items() if u.pk not in rezervirane_pks]\n # Vse ustrezne ucilnice, ki so pa zasedene, ker so rezervirane. Vrednosti so razlogi za zasedenost.\n self.rezervirane = list(rezervirane_ucilnice.items())\n # ucilnice, ki bodo prikazane, skupaj s stanjem in razlogom\n self.prikazane_ucilnice = []\n\n def filtriraj_ucilnice(self, pokazi_zasedene):\n vse = [('prosta', u, None) for u in self.proste]\n if pokazi_zasedene:\n vse.extend([('rezervirana', u, r) for u, r in self.rezervirane])\n vse.extend([('zasedena', u, r) for u, r in self.zasedene])\n self.prikazane_ucilnice = sorted(vse, key=lambda x: x[1])\n\n def hue(self):\n h = self.HUE_PRAZEN if self.proste else self.HUE_POLN\n return \"{:.0f}\".format(h)\n\n\nclass ProsteUcilnice(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj za dane ucilnice glede na uro in dan v tednu.\"\"\"\n def __init__(self, ucilnice):\n self.ucilnice = set(ucilnice)\n self.zasedenost_ucilnic = defaultdict(dict)\n self.rezerviranost_ucilnic = defaultdict(dict)\n\n def dodaj_srecanja_semestra(self, semester, teden=None):\n for srecanje in semester.srecanja.select_related('ucilnica', 
'predmet').prefetch_related('ucitelji'\n ).filter(ucilnica__in=[u.pk for u in self.ucilnice]).exclude(ura__isnull=True):\n if teden is None or semester.od <= teden + datetime.timedelta(days=srecanje.dan-1) <= semester.do:\n for i in range(srecanje.trajanje):\n self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][srecanje.ucilnica] = srecanje\n\n def upostevaj_rezervacije_za_teden(self, teden):\n self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))\n\n def upostevaj_rezervacije(self, rezervacije):\n for rezervacija in rezervacije.prefetch_related(\n Prefetch(\n 'ucilnice',\n queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.ucilnice]),\n to_attr='ustrezne_ucilnice'),\n 'osebe'):\n for ucilnica in rezervacija.ustrezne_ucilnice:\n for dan in rezervacija.dnevi():\n for ura in range(rezervacija.od, rezervacija.do):\n self.rezerviranost_ucilnic[dan.isoweekday(), ura][ucilnica] = rezervacija\n\n def dobi_termine(self):\n termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.zasedenost_ucilnic[d, u],\n self.rezerviranost_ucilnic[d, u])\n for d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]\n return termini\n\n\nclass Konflikt(object):\n def __init__(self):\n self.srecanja = []\n self.rezervacije = []\n\n @property\n def st_konfliktov(self):\n return len(self.srecanja) + len(self.rezervacije)\n\n def __bool__(self):\n return self.st_konfliktov > 0\n\n def __str__(self):\n return \"Konflikti:\\n rezervacije:\\n{}\\n predmeti:\\n{}\".format(\"\\n \".join(map(str, self.rezervacije)),\n \"\\n \".join(map(str, self.srecanja)))\n\n\nclass IskalnikKonfliktov(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico.\"\"\"\n\n def __init__(self, ucilnice, min_datum, max_datum):\n self.ucilnice = set(ucilnice)\n self.min_datum = min_datum\n self.max_datum = max_datum\n self.zasedenost_ucilnic = defaultdict(list)\n self.rezerviranost_ucilnic = defaultdict(list)\n\n def dodaj_srecanja(self):\n 
self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.min_datum, self.max_datum))\n\n def dodaj_srecanja_semestrov(self, semestri):\n for s in Srecanje.objects.filter(semester__in=semestri, ucilnica__in=self.ucilnice\n ).exclude(ura__isnull=True).select_related('semester', 'predmet', 'ucilnica'):\n for d in s.dnevi_med(self.min_datum, self.max_datum):\n self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)\n\n def dodaj_rezervacije(self, rezervacije):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n for r in rezervacije:\n for u in r.seznam_ucilnic:\n for d in r.dnevi_med(self.min_datum, self.max_datum):\n self.rezerviranost_ucilnic[u.pk, d].append(r)\n\n @staticmethod\n def za_rezervacije(rezervacije: RezervacijaQuerySet):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n min_datum = datetime.date.max\n max_datum = datetime.date.min\n ucilnice = set()\n for r in rezervacije:\n if r.zacetek < min_datum:\n min_datum = r.zacetek\n if r.konec > max_datum:\n max_datum = r.konec\n ucilnice.update(r.seznam_ucilnic)\n\n iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)\n iskalnik.dodaj_srecanja()\n iskalnik.dodaj_rezervacije(rezervacije)\n return iskalnik\n\n def konflikti_z_rezervacijo(self, r: Rezervacija):\n if not hasattr(r, 'seznam_ucilnic'):\n r.seznam_ucilnic = r.ucilnice.all()\n for u in r.seznam_ucilnic:\n for d in r.dnevi():\n k = self.konflikti(u, d, r.od, r.do, r)\n if k:\n yield u, d, k\n\n def konflikti(self, ucilnica, datum, od, do, ignore=None):\n \"\"\"Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`.\"\"\"\n konflikti = Konflikt()\n if ucilnica not in self.ucilnice:\n raise ValueError(\"Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}\".format(ucilnica))\n if not (self.min_datum <= datum <= self.max_datum):\n raise ValueError(\"Struktura iskanja ni bila pripravljena 
za iskanje konfliktov dne {}\".format(datum))\n\n for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:\n if s != ignore and s.se_po_urah_prekriva(od, do):\n konflikti.srecanja.append(s)\n\n for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:\n if r != ignore and r.se_po_urah_prekriva(od, do):\n konflikti.rezervacije.append(r)\n\n return konflikti\n",
"step-ids": [
20,
22,
26,
27,
28
]
}
|
[
20,
22,
26,
27,
28
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
admin.site.register(Evaluacion)
<|reserved_special_token_1|>
from django.contrib import admin
from Evaluacion.models import Evaluacion
admin.site.register(Evaluacion)
|
flexible
|
{
"blob_id": "4ef4e302304ccf2dc92cdebe134e104af47aae20",
"index": 3795,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nadmin.site.register(Evaluacion)\n",
"step-3": "from django.contrib import admin\nfrom Evaluacion.models import Evaluacion\nadmin.site.register(Evaluacion)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def multiply(num1, num2):
return num1 * num2
|
flexible
|
{
"blob_id": "e835e75f444e97ca948ce27504cc9149ea0092f6",
"index": 1946,
"step-1": "<mask token>\n",
"step-2": "def multiply(num1, num2):\n return num1 * num2\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import argparse
p = argparse.ArgumentParser()
p.add_argument("--foo", action="store_true")
args = p.parse_args()
print(args.foo)
|
normal
|
{
"blob_id": "2cc9f8c476026311456857d3395a14a45e2f4b80",
"index": 1460,
"step-1": "<mask token>\n",
"step-2": "<mask token>\np.add_argument('--foo', action='store_true')\n<mask token>\nprint(args.foo)\n",
"step-3": "<mask token>\np = argparse.ArgumentParser()\np.add_argument('--foo', action='store_true')\nargs = p.parse_args()\nprint(args.foo)\n",
"step-4": "import argparse\np = argparse.ArgumentParser()\np.add_argument('--foo', action='store_true')\nargs = p.parse_args()\nprint(args.foo)\n",
"step-5": "import argparse\n\np = argparse.ArgumentParser()\np.add_argument(\"--foo\", action=\"store_true\")\nargs = p.parse_args()\nprint(args.foo)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def parse_lat(lat: int):
lat_str = 'N' if lat >= 0 else 'S'
if 10 > lat > -10:
lat_str += '0'
lat_str += str(abs(lat))
return lat_str
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def download(url: str, dest_folder: str):
if not os.path.exists(dest_folder):
os.makedirs(dest_folder)
filename = url.split('/')[-1].replace(' ', '_')
file_path = os.path.join(dest_folder, filename)
r = requests.get(url, stream=True)
if r.ok:
print('saving to', os.path.abspath(file_path))
with open(file_path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024 * 8):
if chunk:
f.write(chunk)
f.flush()
os.fsync(f.fileno())
else:
print('Download failed: status code {}\n{}'.format(r.status_code, r
.text))
def parse_lat(lat: int):
lat_str = 'N' if lat >= 0 else 'S'
if 10 > lat > -10:
lat_str += '0'
lat_str += str(abs(lat))
return lat_str
def parse_long(long: int):
long_str = 'E' if long >= 0 else 'W'
if 100 > long > -100:
long_str += '0'
if 10 > long > -10:
long_str += '0'
long_str += str(abs(long))
return long_str
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def download(url: str, dest_folder: str):
if not os.path.exists(dest_folder):
os.makedirs(dest_folder)
filename = url.split('/')[-1].replace(' ', '_')
file_path = os.path.join(dest_folder, filename)
r = requests.get(url, stream=True)
if r.ok:
print('saving to', os.path.abspath(file_path))
with open(file_path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024 * 8):
if chunk:
f.write(chunk)
f.flush()
os.fsync(f.fileno())
else:
print('Download failed: status code {}\n{}'.format(r.status_code, r
.text))
def parse_lat(lat: int):
lat_str = 'N' if lat >= 0 else 'S'
if 10 > lat > -10:
lat_str += '0'
lat_str += str(abs(lat))
return lat_str
def parse_long(long: int):
long_str = 'E' if long >= 0 else 'W'
if 100 > long > -100:
long_str += '0'
if 10 > long > -10:
long_str += '0'
long_str += str(abs(long))
return long_str
if __name__ == '__main__':
for lat in range(47, 21, -1):
for long in range(-14, 43, 1):
download(
f'https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip'
, dest_folder='/media/data-ext/aster-gdem')
<|reserved_special_token_1|>
import os
import requests
def download(url: str, dest_folder: str):
if not os.path.exists(dest_folder):
os.makedirs(dest_folder)
filename = url.split('/')[-1].replace(' ', '_')
file_path = os.path.join(dest_folder, filename)
r = requests.get(url, stream=True)
if r.ok:
print('saving to', os.path.abspath(file_path))
with open(file_path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024 * 8):
if chunk:
f.write(chunk)
f.flush()
os.fsync(f.fileno())
else:
print('Download failed: status code {}\n{}'.format(r.status_code, r
.text))
def parse_lat(lat: int):
lat_str = 'N' if lat >= 0 else 'S'
if 10 > lat > -10:
lat_str += '0'
lat_str += str(abs(lat))
return lat_str
def parse_long(long: int):
long_str = 'E' if long >= 0 else 'W'
if 100 > long > -100:
long_str += '0'
if 10 > long > -10:
long_str += '0'
long_str += str(abs(long))
return long_str
if __name__ == '__main__':
for lat in range(47, 21, -1):
for long in range(-14, 43, 1):
download(
f'https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip'
, dest_folder='/media/data-ext/aster-gdem')
<|reserved_special_token_1|>
import os
import requests
def download(url: str, dest_folder: str):
#https://stackoverflow.com/a/56951135/8761164
if not os.path.exists(dest_folder):
os.makedirs(dest_folder) # create folder if it does not exist
filename = url.split('/')[-1].replace(" ", "_") # be careful with file names
file_path = os.path.join(dest_folder, filename)
r = requests.get(url, stream=True)
if r.ok:
print("saving to", os.path.abspath(file_path))
with open(file_path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024 * 8):
if chunk:
f.write(chunk)
f.flush()
os.fsync(f.fileno())
else:
print("Download failed: status code {}\n{}".format(r.status_code, r.text))
def parse_lat(lat: int):
lat_str = 'N' if lat >= 0 else 'S'
if 10 > lat > -10:
lat_str += '0'
lat_str += str(abs(lat))
return lat_str
def parse_long(long: int):
long_str = 'E' if long >= 0 else 'W'
if 100 > long > -100:
long_str += '0'
if 10 > long > -10:
long_str += '0'
long_str += str(abs(long))
return long_str
if __name__=='__main__':
for lat in range(47, 21, -1):
for long in range(-14, 43, 1):
#print(parse_lat(lat), parse_long(long))
#print(f"https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip")
download(f"https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip", dest_folder="/media/data-ext/aster-gdem")
|
flexible
|
{
"blob_id": "0726a4fa3af196e2ba1592019f09afb0e7bb47d7",
"index": 9731,
"step-1": "<mask token>\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef download(url: str, dest_folder: str):\n if not os.path.exists(dest_folder):\n os.makedirs(dest_folder)\n filename = url.split('/')[-1].replace(' ', '_')\n file_path = os.path.join(dest_folder, filename)\n r = requests.get(url, stream=True)\n if r.ok:\n print('saving to', os.path.abspath(file_path))\n with open(file_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024 * 8):\n if chunk:\n f.write(chunk)\n f.flush()\n os.fsync(f.fileno())\n else:\n print('Download failed: status code {}\\n{}'.format(r.status_code, r\n .text))\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\n\ndef parse_long(long: int):\n long_str = 'E' if long >= 0 else 'W'\n if 100 > long > -100:\n long_str += '0'\n if 10 > long > -10:\n long_str += '0'\n long_str += str(abs(long))\n return long_str\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef download(url: str, dest_folder: str):\n if not os.path.exists(dest_folder):\n os.makedirs(dest_folder)\n filename = url.split('/')[-1].replace(' ', '_')\n file_path = os.path.join(dest_folder, filename)\n r = requests.get(url, stream=True)\n if r.ok:\n print('saving to', os.path.abspath(file_path))\n with open(file_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024 * 8):\n if chunk:\n f.write(chunk)\n f.flush()\n os.fsync(f.fileno())\n else:\n print('Download failed: status code {}\\n{}'.format(r.status_code, r\n .text))\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\n\ndef parse_long(long: int):\n long_str = 'E' if long >= 0 else 'W'\n if 100 > long > -100:\n long_str += '0'\n if 10 > long > -10:\n long_str += '0'\n long_str += str(abs(long))\n return long_str\n\n\nif __name__ == '__main__':\n for lat in range(47, 21, -1):\n for long in range(-14, 43, 1):\n download(\n f'https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip'\n , dest_folder='/media/data-ext/aster-gdem')\n",
"step-4": "import os\nimport requests\n\n\ndef download(url: str, dest_folder: str):\n if not os.path.exists(dest_folder):\n os.makedirs(dest_folder)\n filename = url.split('/')[-1].replace(' ', '_')\n file_path = os.path.join(dest_folder, filename)\n r = requests.get(url, stream=True)\n if r.ok:\n print('saving to', os.path.abspath(file_path))\n with open(file_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024 * 8):\n if chunk:\n f.write(chunk)\n f.flush()\n os.fsync(f.fileno())\n else:\n print('Download failed: status code {}\\n{}'.format(r.status_code, r\n .text))\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\n\ndef parse_long(long: int):\n long_str = 'E' if long >= 0 else 'W'\n if 100 > long > -100:\n long_str += '0'\n if 10 > long > -10:\n long_str += '0'\n long_str += str(abs(long))\n return long_str\n\n\nif __name__ == '__main__':\n for lat in range(47, 21, -1):\n for long in range(-14, 43, 1):\n download(\n f'https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip'\n , dest_folder='/media/data-ext/aster-gdem')\n",
"step-5": "import os\nimport requests\n\ndef download(url: str, dest_folder: str):\n #https://stackoverflow.com/a/56951135/8761164\n if not os.path.exists(dest_folder):\n os.makedirs(dest_folder) # create folder if it does not exist\n\n filename = url.split('/')[-1].replace(\" \", \"_\") # be careful with file names\n file_path = os.path.join(dest_folder, filename)\n\n r = requests.get(url, stream=True)\n\n if r.ok:\n print(\"saving to\", os.path.abspath(file_path))\n with open(file_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024 * 8):\n if chunk:\n f.write(chunk)\n f.flush()\n os.fsync(f.fileno())\n else:\n print(\"Download failed: status code {}\\n{}\".format(r.status_code, r.text))\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\ndef parse_long(long: int):\n long_str = 'E' if long >= 0 else 'W'\n if 100 > long > -100:\n long_str += '0'\n if 10 > long > -10:\n long_str += '0'\n long_str += str(abs(long))\n return long_str\n\n\nif __name__=='__main__':\n\n for lat in range(47, 21, -1):\n for long in range(-14, 43, 1):\n #print(parse_lat(lat), parse_long(long))\n #print(f\"https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip\")\n download(f\"https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip\", dest_folder=\"/media/data-ext/aster-gdem\")",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
# coding: utf-8
"""
Picarto.TV API Documentation
The Picarto.TV API documentation Note, for fixed access tokens, the header that needs to be sent is of the format: `Authorization: Bearer yourTokenHere` This can be generated at https://oauth.picarto.tv/ For chat API, see https://docs.picarto.tv/chat/chat.proto - contact via the email below for implementation details
OpenAPI spec version: 1.2.5
Contact: api@picarto.tv
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import models into sdk package
from .models.basic_channel_info import BasicChannelInfo
from .models.basic_follower_info import BasicFollowerInfo
from .models.basic_following_info import BasicFollowingInfo
from .models.categories import Categories
from .models.category import Category
from .models.channel_details import ChannelDetails
from .models.channel_search_results import ChannelSearchResults
from .models.channel_video import ChannelVideo
from .models.channel_videos import ChannelVideos
from .models.description_panel import DescriptionPanel
from .models.event import Event
from .models.events import Events
from .models.language import Language
from .models.languages import Languages
from .models.mobile_notify_settings import MobileNotifySettings
from .models.multi_participant import MultiParticipant
from .models.notification import Notification
from .models.notification_1 import Notification1
from .models.notifications import Notifications
from .models.online_channels import OnlineChannels
from .models.online_details import OnlineDetails
from .models.online_notify_settings import OnlineNotifySettings
from .models.thumbnail import Thumbnail
from .models.user_data import UserData
from .models.user_email_settings import UserEmailSettings
from .models.video_search_result import VideoSearchResult
from .models.video_search_results import VideoSearchResults
from .models.webhook import Webhook
# import apis into sdk package
from .apis.bot_api import BotApi
from .apis.channel_api import ChannelApi
from .apis.multistream_api import MultistreamApi
from .apis.public_api import PublicApi
from .apis.sensitive_api import SensitiveApi
from .apis.user_api import UserApi
from .apis.webhook_api import WebhookApi
# import ApiClient
from .api_client import ApiClient
from .configuration import Configuration
configuration = Configuration()
|
normal
|
{
"blob_id": "939011fca968d5f9250beb29a0bb700200e637df",
"index": 6274,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nconfiguration = Configuration()\n",
"step-3": "<mask token>\nfrom __future__ import absolute_import\nfrom .models.basic_channel_info import BasicChannelInfo\nfrom .models.basic_follower_info import BasicFollowerInfo\nfrom .models.basic_following_info import BasicFollowingInfo\nfrom .models.categories import Categories\nfrom .models.category import Category\nfrom .models.channel_details import ChannelDetails\nfrom .models.channel_search_results import ChannelSearchResults\nfrom .models.channel_video import ChannelVideo\nfrom .models.channel_videos import ChannelVideos\nfrom .models.description_panel import DescriptionPanel\nfrom .models.event import Event\nfrom .models.events import Events\nfrom .models.language import Language\nfrom .models.languages import Languages\nfrom .models.mobile_notify_settings import MobileNotifySettings\nfrom .models.multi_participant import MultiParticipant\nfrom .models.notification import Notification\nfrom .models.notification_1 import Notification1\nfrom .models.notifications import Notifications\nfrom .models.online_channels import OnlineChannels\nfrom .models.online_details import OnlineDetails\nfrom .models.online_notify_settings import OnlineNotifySettings\nfrom .models.thumbnail import Thumbnail\nfrom .models.user_data import UserData\nfrom .models.user_email_settings import UserEmailSettings\nfrom .models.video_search_result import VideoSearchResult\nfrom .models.video_search_results import VideoSearchResults\nfrom .models.webhook import Webhook\nfrom .apis.bot_api import BotApi\nfrom .apis.channel_api import ChannelApi\nfrom .apis.multistream_api import MultistreamApi\nfrom .apis.public_api import PublicApi\nfrom .apis.sensitive_api import SensitiveApi\nfrom .apis.user_api import UserApi\nfrom .apis.webhook_api import WebhookApi\nfrom .api_client import ApiClient\nfrom .configuration import Configuration\nconfiguration = Configuration()\n",
"step-4": "# coding: utf-8\n\n\"\"\"\n Picarto.TV API Documentation\n\n The Picarto.TV API documentation Note, for fixed access tokens, the header that needs to be sent is of the format: `Authorization: Bearer yourTokenHere` This can be generated at https://oauth.picarto.tv/ For chat API, see https://docs.picarto.tv/chat/chat.proto - contact via the email below for implementation details \n\n OpenAPI spec version: 1.2.5\n Contact: api@picarto.tv\n Generated by: https://github.com/swagger-api/swagger-codegen.git\n\"\"\"\n\n\nfrom __future__ import absolute_import\n\n# import models into sdk package\nfrom .models.basic_channel_info import BasicChannelInfo\nfrom .models.basic_follower_info import BasicFollowerInfo\nfrom .models.basic_following_info import BasicFollowingInfo\nfrom .models.categories import Categories\nfrom .models.category import Category\nfrom .models.channel_details import ChannelDetails\nfrom .models.channel_search_results import ChannelSearchResults\nfrom .models.channel_video import ChannelVideo\nfrom .models.channel_videos import ChannelVideos\nfrom .models.description_panel import DescriptionPanel\nfrom .models.event import Event\nfrom .models.events import Events\nfrom .models.language import Language\nfrom .models.languages import Languages\nfrom .models.mobile_notify_settings import MobileNotifySettings\nfrom .models.multi_participant import MultiParticipant\nfrom .models.notification import Notification\nfrom .models.notification_1 import Notification1\nfrom .models.notifications import Notifications\nfrom .models.online_channels import OnlineChannels\nfrom .models.online_details import OnlineDetails\nfrom .models.online_notify_settings import OnlineNotifySettings\nfrom .models.thumbnail import Thumbnail\nfrom .models.user_data import UserData\nfrom .models.user_email_settings import UserEmailSettings\nfrom .models.video_search_result import VideoSearchResult\nfrom .models.video_search_results import VideoSearchResults\nfrom .models.webhook 
import Webhook\n\n# import apis into sdk package\nfrom .apis.bot_api import BotApi\nfrom .apis.channel_api import ChannelApi\nfrom .apis.multistream_api import MultistreamApi\nfrom .apis.public_api import PublicApi\nfrom .apis.sensitive_api import SensitiveApi\nfrom .apis.user_api import UserApi\nfrom .apis.webhook_api import WebhookApi\n\n# import ApiClient\nfrom .api_client import ApiClient\n\nfrom .configuration import Configuration\n\nconfiguration = Configuration()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#Write your function here
def over_nine_thousand(lst):
sum = 0
for number in lst:
sum += number
if (sum > 9000):
break
return sum
#Uncomment the line below when your function is done
print(over_nine_thousand([8000, 900, 120, 5000]))
#9020
|
normal
|
{
"blob_id": "c2f39e33030cbe7c5d4827b47fb28d7604bdbc6d",
"index": 8135,
"step-1": "<mask token>\n",
"step-2": "def over_nine_thousand(lst):\n sum = 0\n for number in lst:\n sum += number\n if sum > 9000:\n break\n return sum\n\n\n<mask token>\n",
"step-3": "def over_nine_thousand(lst):\n sum = 0\n for number in lst:\n sum += number\n if sum > 9000:\n break\n return sum\n\n\nprint(over_nine_thousand([8000, 900, 120, 5000]))\n",
"step-4": "#Write your function here\ndef over_nine_thousand(lst):\n sum = 0\n for number in lst:\n sum += number\n if (sum > 9000):\n break\n return sum\n\n#Uncomment the line below when your function is done\nprint(over_nine_thousand([8000, 900, 120, 5000]))\n\n#9020",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
# Author : Seungyeon Jo
# e-mail : syjo@seculayer.co.kr
# Powered by Seculayer © 2018 AI-Core Team
from mlps.core.data.cnvrtr.ConvertAbstract import ConvertAbstract
class Substr(ConvertAbstract):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def apply(self, data):
result = ''
# check blank
if self._isBlank(data) :
return [result]
s_idx = 0
e_idx = 0
if len(self.arg_list) >= 2 :
s_idx = int(self.arg_list[0])
e_idx = int(self.arg_list[1])
else:
return [result]
if s_idx > len(data):
s_idx = 0
if e_idx > len(data):
e_idx = len(data)
if e_idx == 0:
result = data[s_idx:]
else:
result = data[s_idx:e_idx]
return [result]
if __name__ == "__main__":
_str = "Korea"
print(Substr(arg_list=[0, 1]).apply(_str))
|
normal
|
{
"blob_id": "f704742b9e023a1c3386fed293032fd8196b875e",
"index": 7344,
"step-1": "<mask token>\n\n\nclass Substr(ConvertAbstract):\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Substr(ConvertAbstract):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n def apply(self, data):\n result = ''\n if self._isBlank(data):\n return [result]\n s_idx = 0\n e_idx = 0\n if len(self.arg_list) >= 2:\n s_idx = int(self.arg_list[0])\n e_idx = int(self.arg_list[1])\n else:\n return [result]\n if s_idx > len(data):\n s_idx = 0\n if e_idx > len(data):\n e_idx = len(data)\n if e_idx == 0:\n result = data[s_idx:]\n else:\n result = data[s_idx:e_idx]\n return [result]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Substr(ConvertAbstract):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n def apply(self, data):\n result = ''\n if self._isBlank(data):\n return [result]\n s_idx = 0\n e_idx = 0\n if len(self.arg_list) >= 2:\n s_idx = int(self.arg_list[0])\n e_idx = int(self.arg_list[1])\n else:\n return [result]\n if s_idx > len(data):\n s_idx = 0\n if e_idx > len(data):\n e_idx = len(data)\n if e_idx == 0:\n result = data[s_idx:]\n else:\n result = data[s_idx:e_idx]\n return [result]\n\n\nif __name__ == '__main__':\n _str = 'Korea'\n print(Substr(arg_list=[0, 1]).apply(_str))\n",
"step-4": "from mlps.core.data.cnvrtr.ConvertAbstract import ConvertAbstract\n\n\nclass Substr(ConvertAbstract):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n def apply(self, data):\n result = ''\n if self._isBlank(data):\n return [result]\n s_idx = 0\n e_idx = 0\n if len(self.arg_list) >= 2:\n s_idx = int(self.arg_list[0])\n e_idx = int(self.arg_list[1])\n else:\n return [result]\n if s_idx > len(data):\n s_idx = 0\n if e_idx > len(data):\n e_idx = len(data)\n if e_idx == 0:\n result = data[s_idx:]\n else:\n result = data[s_idx:e_idx]\n return [result]\n\n\nif __name__ == '__main__':\n _str = 'Korea'\n print(Substr(arg_list=[0, 1]).apply(_str))\n",
"step-5": "# -*- coding: utf-8 -*-\n# Author : Seungyeon Jo\n# e-mail : syjo@seculayer.co.kr\n# Powered by Seculayer © 2018 AI-Core Team\n\nfrom mlps.core.data.cnvrtr.ConvertAbstract import ConvertAbstract\n\n\nclass Substr(ConvertAbstract):\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n \n def apply(self, data):\n result = ''\n \n # check blank\n if self._isBlank(data) :\n return [result]\n \n s_idx = 0\n e_idx = 0\n if len(self.arg_list) >= 2 :\n s_idx = int(self.arg_list[0])\n e_idx = int(self.arg_list[1])\n else:\n return [result]\n \n if s_idx > len(data):\n s_idx = 0\n if e_idx > len(data):\n e_idx = len(data)\n \n if e_idx == 0:\n result = data[s_idx:]\n else:\n result = data[s_idx:e_idx]\n \n return [result]\n\n\nif __name__ == \"__main__\":\n _str = \"Korea\"\n print(Substr(arg_list=[0, 1]).apply(_str))\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
import sys
INF = sys.maxsize
def bellman_ford(graph,start):
distance = {}
predecessor = {}
# 거리 값, 이전 정점 초기화
for node in graph:
distance[node] = INF
predecessor[node] = None
distance[start] = 0
# V-1개마큼 반복
for _ in range(len(graph)-1):
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
distance[neigbor] = distance[node] + graph[node][neigbor]
predecessor[neigbor] = node
# 음수 사이클이 존재하는지 (1번더 반복후 V-1번 반복했을때랑 같으면 음수사이클X 다르면 음수사이클 존재)
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
return -1, "그래프에 음수 사이클이 존재합니다."
return distance,graph
# 음수 사이클이 존재하지 않는 그래프
graph = {
'A': {'B': -1, 'C': 4},
'B': {'C': 3, 'D': 2, 'E': 2},
'C': {},
'D': {'B': 1, 'C': 5},
'E': {'D': -3}
}
# 그래프 정보와 시작 정점을 넘김
distance, predecessor = bellman_ford(graph, start='A')
print(distance)
print(predecessor)
# 음수 사이클이 존재하는 그래프
graph = {
'A': {'B': -1, 'C': 4},
'B': {'C': 3, 'D': 2, 'E': 2},
'C': {'A': -5},
'D': {'B': 1, 'C': 5},
'E': {'D': -3}
}
distance, predecessor = bellman_ford(graph, start='A')
print(distance)
print(predecessor)
|
normal
|
{
"blob_id": "8ebf031cb294c69bf744d543b18783d6ac5ef257",
"index": 1910,
"step-1": "<mask token>\n\n\ndef bellman_ford(graph, start):\n distance = {}\n predecessor = {}\n for node in graph:\n distance[node] = INF\n predecessor[node] = None\n distance[start] = 0\n for _ in range(len(graph) - 1):\n for node in graph:\n for neigbor in graph[node]:\n if distance[neigbor] > distance[node] + graph[node][neigbor]:\n distance[neigbor] = distance[node] + graph[node][neigbor]\n predecessor[neigbor] = node\n for node in graph:\n for neigbor in graph[node]:\n if distance[neigbor] > distance[node] + graph[node][neigbor]:\n return -1, '그래프에 음수 사이클이 존재합니다.'\n return distance, graph\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef bellman_ford(graph, start):\n distance = {}\n predecessor = {}\n for node in graph:\n distance[node] = INF\n predecessor[node] = None\n distance[start] = 0\n for _ in range(len(graph) - 1):\n for node in graph:\n for neigbor in graph[node]:\n if distance[neigbor] > distance[node] + graph[node][neigbor]:\n distance[neigbor] = distance[node] + graph[node][neigbor]\n predecessor[neigbor] = node\n for node in graph:\n for neigbor in graph[node]:\n if distance[neigbor] > distance[node] + graph[node][neigbor]:\n return -1, '그래프에 음수 사이클이 존재합니다.'\n return distance, graph\n\n\n<mask token>\nprint(distance)\nprint(predecessor)\n<mask token>\nprint(distance)\nprint(predecessor)\n",
"step-3": "<mask token>\nINF = sys.maxsize\n\n\ndef bellman_ford(graph, start):\n distance = {}\n predecessor = {}\n for node in graph:\n distance[node] = INF\n predecessor[node] = None\n distance[start] = 0\n for _ in range(len(graph) - 1):\n for node in graph:\n for neigbor in graph[node]:\n if distance[neigbor] > distance[node] + graph[node][neigbor]:\n distance[neigbor] = distance[node] + graph[node][neigbor]\n predecessor[neigbor] = node\n for node in graph:\n for neigbor in graph[node]:\n if distance[neigbor] > distance[node] + graph[node][neigbor]:\n return -1, '그래프에 음수 사이클이 존재합니다.'\n return distance, graph\n\n\ngraph = {'A': {'B': -1, 'C': 4}, 'B': {'C': 3, 'D': 2, 'E': 2}, 'C': {},\n 'D': {'B': 1, 'C': 5}, 'E': {'D': -3}}\ndistance, predecessor = bellman_ford(graph, start='A')\nprint(distance)\nprint(predecessor)\ngraph = {'A': {'B': -1, 'C': 4}, 'B': {'C': 3, 'D': 2, 'E': 2}, 'C': {'A': \n -5}, 'D': {'B': 1, 'C': 5}, 'E': {'D': -3}}\ndistance, predecessor = bellman_ford(graph, start='A')\nprint(distance)\nprint(predecessor)\n",
"step-4": "import sys\nINF = sys.maxsize\n\n\ndef bellman_ford(graph, start):\n distance = {}\n predecessor = {}\n for node in graph:\n distance[node] = INF\n predecessor[node] = None\n distance[start] = 0\n for _ in range(len(graph) - 1):\n for node in graph:\n for neigbor in graph[node]:\n if distance[neigbor] > distance[node] + graph[node][neigbor]:\n distance[neigbor] = distance[node] + graph[node][neigbor]\n predecessor[neigbor] = node\n for node in graph:\n for neigbor in graph[node]:\n if distance[neigbor] > distance[node] + graph[node][neigbor]:\n return -1, '그래프에 음수 사이클이 존재합니다.'\n return distance, graph\n\n\ngraph = {'A': {'B': -1, 'C': 4}, 'B': {'C': 3, 'D': 2, 'E': 2}, 'C': {},\n 'D': {'B': 1, 'C': 5}, 'E': {'D': -3}}\ndistance, predecessor = bellman_ford(graph, start='A')\nprint(distance)\nprint(predecessor)\ngraph = {'A': {'B': -1, 'C': 4}, 'B': {'C': 3, 'D': 2, 'E': 2}, 'C': {'A': \n -5}, 'D': {'B': 1, 'C': 5}, 'E': {'D': -3}}\ndistance, predecessor = bellman_ford(graph, start='A')\nprint(distance)\nprint(predecessor)\n",
"step-5": "import sys\nINF = sys.maxsize\ndef bellman_ford(graph,start):\n distance = {}\n predecessor = {}\n \n # 거리 값, 이전 정점 초기화\n for node in graph:\n distance[node] = INF\n predecessor[node] = None\n distance[start] = 0\n\n # V-1개마큼 반복\n for _ in range(len(graph)-1):\n for node in graph:\n for neigbor in graph[node]:\n if distance[neigbor] > distance[node] + graph[node][neigbor]:\n distance[neigbor] = distance[node] + graph[node][neigbor]\n predecessor[neigbor] = node\n \n # 음수 사이클이 존재하는지 (1번더 반복후 V-1번 반복했을때랑 같으면 음수사이클X 다르면 음수사이클 존재)\n for node in graph:\n for neigbor in graph[node]:\n if distance[neigbor] > distance[node] + graph[node][neigbor]:\n return -1, \"그래프에 음수 사이클이 존재합니다.\"\n \n return distance,graph\n\n# 음수 사이클이 존재하지 않는 그래프\ngraph = {\n 'A': {'B': -1, 'C': 4},\n 'B': {'C': 3, 'D': 2, 'E': 2},\n 'C': {},\n 'D': {'B': 1, 'C': 5},\n 'E': {'D': -3}\n}\n\n# 그래프 정보와 시작 정점을 넘김\ndistance, predecessor = bellman_ford(graph, start='A')\n\nprint(distance)\nprint(predecessor)\n\n\n# 음수 사이클이 존재하는 그래프\ngraph = {\n 'A': {'B': -1, 'C': 4},\n 'B': {'C': 3, 'D': 2, 'E': 2},\n 'C': {'A': -5},\n 'D': {'B': 1, 'C': 5},\n 'E': {'D': -3}\n}\n\n\ndistance, predecessor = bellman_ford(graph, start='A')\n\nprint(distance)\nprint(predecessor)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class Category(models.Model):
"""Категории"""
name = models.CharField('Категория', max_length=150)
url = models.SlugField(max_length=160, unique=True)
def __str__(self):
return self.name
class Meta:
verbose_name = 'Категория'
verbose_name_plural = 'Категории'
class Service(models.Model):
"""Услуга"""
PERIOD = (0, ''), (1, '6'), (2, '12'), (3, '24')
title = models.CharField('Название', max_length=100)
description = models.TextField('Описание')
image = models.ImageField('Фото', upload_to='services/', null=True,
blank=True)
employee = models.ManyToManyField(Employee, verbose_name='Cотрудник',
related_name='service_employee')
category = models.ForeignKey(Category, verbose_name='Категория',
on_delete=models.SET_NULL, null=True)
warranty = models.PositiveSmallIntegerField('Гарантийный срок', choices
=PERIOD, help_text='Указать в месяцах')
price = models.DecimalField('Стоимость услуги', max_digits=9,
decimal_places=2, default=0, help_text='Указывать сумму в рублях',
validators=[validators.MinValueValidator(0)])
url = models.SlugField(max_length=130, unique=True)
def __str__(self):
return self.title
class Meta:
verbose_name = 'Услуга'
verbose_name_plural = 'Услуги'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Employee(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta:
verbose_name = 'Сотрудник'
verbose_name_plural = 'Сотрудники'
class Category(models.Model):
"""Категории"""
name = models.CharField('Категория', max_length=150)
url = models.SlugField(max_length=160, unique=True)
def __str__(self):
return self.name
class Meta:
verbose_name = 'Категория'
verbose_name_plural = 'Категории'
class Service(models.Model):
"""Услуга"""
PERIOD = (0, ''), (1, '6'), (2, '12'), (3, '24')
title = models.CharField('Название', max_length=100)
description = models.TextField('Описание')
image = models.ImageField('Фото', upload_to='services/', null=True,
blank=True)
employee = models.ManyToManyField(Employee, verbose_name='Cотрудник',
related_name='service_employee')
category = models.ForeignKey(Category, verbose_name='Категория',
on_delete=models.SET_NULL, null=True)
warranty = models.PositiveSmallIntegerField('Гарантийный срок', choices
=PERIOD, help_text='Указать в месяцах')
price = models.DecimalField('Стоимость услуги', max_digits=9,
decimal_places=2, default=0, help_text='Указывать сумму в рублях',
validators=[validators.MinValueValidator(0)])
url = models.SlugField(max_length=130, unique=True)
def __str__(self):
return self.title
class Meta:
verbose_name = 'Услуга'
verbose_name_plural = 'Услуги'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Employee(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __str__(self):
return self.name
class Meta:
verbose_name = 'Сотрудник'
verbose_name_plural = 'Сотрудники'
class Category(models.Model):
"""Категории"""
name = models.CharField('Категория', max_length=150)
url = models.SlugField(max_length=160, unique=True)
def __str__(self):
return self.name
class Meta:
verbose_name = 'Категория'
verbose_name_plural = 'Категории'
class Service(models.Model):
"""Услуга"""
PERIOD = (0, ''), (1, '6'), (2, '12'), (3, '24')
title = models.CharField('Название', max_length=100)
description = models.TextField('Описание')
image = models.ImageField('Фото', upload_to='services/', null=True,
blank=True)
employee = models.ManyToManyField(Employee, verbose_name='Cотрудник',
related_name='service_employee')
category = models.ForeignKey(Category, verbose_name='Категория',
on_delete=models.SET_NULL, null=True)
warranty = models.PositiveSmallIntegerField('Гарантийный срок', choices
=PERIOD, help_text='Указать в месяцах')
price = models.DecimalField('Стоимость услуги', max_digits=9,
decimal_places=2, default=0, help_text='Указывать сумму в рублях',
validators=[validators.MinValueValidator(0)])
url = models.SlugField(max_length=130, unique=True)
def __str__(self):
return self.title
class Meta:
verbose_name = 'Услуга'
verbose_name_plural = 'Услуги'
<|reserved_special_token_1|>
from django.contrib.auth.models import User
from django.core import validators
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import Group
from django.conf import settings
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def assign_group(sender, instance, created, **kwargs):
"""Сигнал, добавляющий созданного пользователя в группу editors"""
if created:
editors_group = Group.objects.get(name='editors')
instance.groups.add(editors_group)
class Employee(models.Model):
"""Сотрудники"""
name = models.CharField('Имя', max_length=100)
age = models.PositiveSmallIntegerField('Возраст', validators=[
validators.MaxValueValidator(120), validators.MinValueValidator(18)])
position = models.CharField('Должность', max_length=60)
photo = models.ImageField('Фото', upload_to='employees/')
achievements = models.TextField('Достижения', max_length=2000,
help_text=
'Информация об образовании, опыте, квалификации и профессиональных достижениях'
)
def __str__(self):
return self.name
class Meta:
verbose_name = 'Сотрудник'
verbose_name_plural = 'Сотрудники'
class Category(models.Model):
    """Service category: a display name plus a unique slug used in URLs."""
    name = models.CharField('Категория', max_length=150)
    url = models.SlugField(max_length=160, unique=True)

    def __str__(self):
        return self.name


    class Meta:
        verbose_name = 'Категория'
        verbose_name_plural = 'Категории'
class Service(models.Model):
    """Service offered by the company: description, assigned staff,
    category, warranty period, price, and a slug for URLs."""
    # Warranty choices stored as a small int; labels are months
    # ('' means no warranty period is displayed).
    PERIOD = (0, ''), (1, '6'), (2, '12'), (3, '24')
    title = models.CharField('Название', max_length=100)
    description = models.TextField('Описание')
    image = models.ImageField('Фото', upload_to='services/', null=True,
        blank=True)
    # Many employees can provide one service and vice versa.
    employee = models.ManyToManyField(Employee, verbose_name='Cотрудник',
        related_name='service_employee')
    # Keep the service if its category is deleted (FK becomes NULL).
    category = models.ForeignKey(Category, verbose_name='Категория',
        on_delete=models.SET_NULL, null=True)
    warranty = models.PositiveSmallIntegerField('Гарантийный срок', choices
        =PERIOD, help_text='Указать в месяцах')
    price = models.DecimalField('Стоимость услуги', max_digits=9,
        decimal_places=2, default=0, help_text='Указывать сумму в рублях',
        validators=[validators.MinValueValidator(0)])
    url = models.SlugField(max_length=130, unique=True)

    def __str__(self):
        return self.title


    class Meta:
        verbose_name = 'Услуга'
        verbose_name_plural = 'Услуги'
<|reserved_special_token_1|>
from django.contrib.auth.models import User
from django.core import validators
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import Group
from django.conf import settings
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def assign_group(sender, instance, created, **kwargs):
"""Сигнал, добавляющий созданного пользователя в группу editors"""
if created:
editors_group = Group.objects.get(name='editors')
instance.groups.add(editors_group)
class Employee(models.Model):
"""Сотрудники"""
name = models.CharField("Имя", max_length=100)
age = models.PositiveSmallIntegerField("Возраст", validators=[validators.MaxValueValidator(120),
validators.MinValueValidator(18)])
position = models.CharField("Должность", max_length=60)
photo = models.ImageField("Фото", upload_to="employees/")
achievements = models.TextField("Достижения", max_length=2000,
help_text="Информация об образовании, опыте, квалификации и профессиональных достижениях")
def __str__(self):
return self.name
class Meta:
verbose_name = "Сотрудник"
verbose_name_plural = "Сотрудники"
class Category(models.Model):
"""Категории"""
name = models.CharField("Категория", max_length=150)
url = models.SlugField(max_length=160, unique=True)
def __str__(self):
return self.name
class Meta:
verbose_name = "Категория"
verbose_name_plural = "Категории"
class Service(models.Model):
"""Услуга"""
PERIOD = (
(0, ''),
(1, '6'),
(2, '12'),
(3, '24'),
)
title = models.CharField("Название", max_length=100)
description = models.TextField("Описание")
image = models.ImageField("Фото", upload_to="services/", null=True, blank=True)
employee = models.ManyToManyField(Employee, verbose_name="Cотрудник", related_name="service_employee")
category = models.ForeignKey(Category, verbose_name="Категория", on_delete=models.SET_NULL, null=True)
warranty = models.PositiveSmallIntegerField("Гарантийный срок", choices=PERIOD, help_text="Указать в месяцах")
price = models.DecimalField("Стоимость услуги", max_digits=9, decimal_places=2, default=0,
help_text="Указывать сумму в рублях", validators=[validators.MinValueValidator(0)])
url = models.SlugField(max_length=130, unique=True)
def __str__(self):
return self.title
class Meta:
verbose_name = "Услуга"
verbose_name_plural = "Услуги"
|
flexible
|
{
"blob_id": "a139042d0c6fa4941b7149a33b0a48018e9f511b",
"index": 9003,
"step-1": "<mask token>\n\n\nclass Category(models.Model):\n \"\"\"Категории\"\"\"\n name = models.CharField('Категория', max_length=150)\n url = models.SlugField(max_length=160, unique=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n verbose_name = 'Категория'\n verbose_name_plural = 'Категории'\n\n\nclass Service(models.Model):\n \"\"\"Услуга\"\"\"\n PERIOD = (0, ''), (1, '6'), (2, '12'), (3, '24')\n title = models.CharField('Название', max_length=100)\n description = models.TextField('Описание')\n image = models.ImageField('Фото', upload_to='services/', null=True,\n blank=True)\n employee = models.ManyToManyField(Employee, verbose_name='Cотрудник',\n related_name='service_employee')\n category = models.ForeignKey(Category, verbose_name='Категория',\n on_delete=models.SET_NULL, null=True)\n warranty = models.PositiveSmallIntegerField('Гарантийный срок', choices\n =PERIOD, help_text='Указать в месяцах')\n price = models.DecimalField('Стоимость услуги', max_digits=9,\n decimal_places=2, default=0, help_text='Указывать сумму в рублях',\n validators=[validators.MinValueValidator(0)])\n url = models.SlugField(max_length=130, unique=True)\n\n def __str__(self):\n return self.title\n\n\n class Meta:\n verbose_name = 'Услуга'\n verbose_name_plural = 'Услуги'\n",
"step-2": "<mask token>\n\n\nclass Employee(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name = 'Сотрудник'\n verbose_name_plural = 'Сотрудники'\n\n\nclass Category(models.Model):\n \"\"\"Категории\"\"\"\n name = models.CharField('Категория', max_length=150)\n url = models.SlugField(max_length=160, unique=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n verbose_name = 'Категория'\n verbose_name_plural = 'Категории'\n\n\nclass Service(models.Model):\n \"\"\"Услуга\"\"\"\n PERIOD = (0, ''), (1, '6'), (2, '12'), (3, '24')\n title = models.CharField('Название', max_length=100)\n description = models.TextField('Описание')\n image = models.ImageField('Фото', upload_to='services/', null=True,\n blank=True)\n employee = models.ManyToManyField(Employee, verbose_name='Cотрудник',\n related_name='service_employee')\n category = models.ForeignKey(Category, verbose_name='Категория',\n on_delete=models.SET_NULL, null=True)\n warranty = models.PositiveSmallIntegerField('Гарантийный срок', choices\n =PERIOD, help_text='Указать в месяцах')\n price = models.DecimalField('Стоимость услуги', max_digits=9,\n decimal_places=2, default=0, help_text='Указывать сумму в рублях',\n validators=[validators.MinValueValidator(0)])\n url = models.SlugField(max_length=130, unique=True)\n\n def __str__(self):\n return self.title\n\n\n class Meta:\n verbose_name = 'Услуга'\n verbose_name_plural = 'Услуги'\n",
"step-3": "<mask token>\n\n\nclass Employee(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n verbose_name = 'Сотрудник'\n verbose_name_plural = 'Сотрудники'\n\n\nclass Category(models.Model):\n \"\"\"Категории\"\"\"\n name = models.CharField('Категория', max_length=150)\n url = models.SlugField(max_length=160, unique=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n verbose_name = 'Категория'\n verbose_name_plural = 'Категории'\n\n\nclass Service(models.Model):\n \"\"\"Услуга\"\"\"\n PERIOD = (0, ''), (1, '6'), (2, '12'), (3, '24')\n title = models.CharField('Название', max_length=100)\n description = models.TextField('Описание')\n image = models.ImageField('Фото', upload_to='services/', null=True,\n blank=True)\n employee = models.ManyToManyField(Employee, verbose_name='Cотрудник',\n related_name='service_employee')\n category = models.ForeignKey(Category, verbose_name='Категория',\n on_delete=models.SET_NULL, null=True)\n warranty = models.PositiveSmallIntegerField('Гарантийный срок', choices\n =PERIOD, help_text='Указать в месяцах')\n price = models.DecimalField('Стоимость услуги', max_digits=9,\n decimal_places=2, default=0, help_text='Указывать сумму в рублях',\n validators=[validators.MinValueValidator(0)])\n url = models.SlugField(max_length=130, unique=True)\n\n def __str__(self):\n return self.title\n\n\n class Meta:\n verbose_name = 'Услуга'\n verbose_name_plural = 'Услуги'\n",
"step-4": "from django.contrib.auth.models import User\nfrom django.core import validators\nfrom django.db import models\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\nfrom django.contrib.auth.models import Group\nfrom django.conf import settings\n\n\n@receiver(post_save, sender=settings.AUTH_USER_MODEL)\ndef assign_group(sender, instance, created, **kwargs):\n \"\"\"Сигнал, добавляющий созданного пользователя в группу editors\"\"\"\n if created:\n editors_group = Group.objects.get(name='editors')\n instance.groups.add(editors_group)\n\n\nclass Employee(models.Model):\n \"\"\"Сотрудники\"\"\"\n name = models.CharField('Имя', max_length=100)\n age = models.PositiveSmallIntegerField('Возраст', validators=[\n validators.MaxValueValidator(120), validators.MinValueValidator(18)])\n position = models.CharField('Должность', max_length=60)\n photo = models.ImageField('Фото', upload_to='employees/')\n achievements = models.TextField('Достижения', max_length=2000,\n help_text=\n 'Информация об образовании, опыте, квалификации и профессиональных достижениях'\n )\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n verbose_name = 'Сотрудник'\n verbose_name_plural = 'Сотрудники'\n\n\nclass Category(models.Model):\n \"\"\"Категории\"\"\"\n name = models.CharField('Категория', max_length=150)\n url = models.SlugField(max_length=160, unique=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n verbose_name = 'Категория'\n verbose_name_plural = 'Категории'\n\n\nclass Service(models.Model):\n \"\"\"Услуга\"\"\"\n PERIOD = (0, ''), (1, '6'), (2, '12'), (3, '24')\n title = models.CharField('Название', max_length=100)\n description = models.TextField('Описание')\n image = models.ImageField('Фото', upload_to='services/', null=True,\n blank=True)\n employee = models.ManyToManyField(Employee, verbose_name='Cотрудник',\n related_name='service_employee')\n category = models.ForeignKey(Category, verbose_name='Категория',\n 
on_delete=models.SET_NULL, null=True)\n warranty = models.PositiveSmallIntegerField('Гарантийный срок', choices\n =PERIOD, help_text='Указать в месяцах')\n price = models.DecimalField('Стоимость услуги', max_digits=9,\n decimal_places=2, default=0, help_text='Указывать сумму в рублях',\n validators=[validators.MinValueValidator(0)])\n url = models.SlugField(max_length=130, unique=True)\n\n def __str__(self):\n return self.title\n\n\n class Meta:\n verbose_name = 'Услуга'\n verbose_name_plural = 'Услуги'\n",
"step-5": "from django.contrib.auth.models import User\nfrom django.core import validators\nfrom django.db import models\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\nfrom django.contrib.auth.models import Group\n\nfrom django.conf import settings\n\n\n@receiver(post_save, sender=settings.AUTH_USER_MODEL)\ndef assign_group(sender, instance, created, **kwargs):\n \"\"\"Сигнал, добавляющий созданного пользователя в группу editors\"\"\"\n\n if created:\n editors_group = Group.objects.get(name='editors')\n instance.groups.add(editors_group)\n\n\nclass Employee(models.Model):\n \"\"\"Сотрудники\"\"\"\n\n name = models.CharField(\"Имя\", max_length=100)\n age = models.PositiveSmallIntegerField(\"Возраст\", validators=[validators.MaxValueValidator(120),\n validators.MinValueValidator(18)])\n position = models.CharField(\"Должность\", max_length=60)\n photo = models.ImageField(\"Фото\", upload_to=\"employees/\")\n achievements = models.TextField(\"Достижения\", max_length=2000,\n help_text=\"Информация об образовании, опыте, квалификации и профессиональных достижениях\")\n\n def __str__(self):\n return self.name\n\n class Meta:\n verbose_name = \"Сотрудник\"\n verbose_name_plural = \"Сотрудники\"\n\n\nclass Category(models.Model):\n \"\"\"Категории\"\"\"\n\n name = models.CharField(\"Категория\", max_length=150)\n url = models.SlugField(max_length=160, unique=True)\n\n def __str__(self):\n return self.name\n\n class Meta:\n verbose_name = \"Категория\"\n verbose_name_plural = \"Категории\"\n\n\nclass Service(models.Model):\n \"\"\"Услуга\"\"\"\n\n PERIOD = (\n (0, ''),\n (1, '6'),\n (2, '12'),\n (3, '24'),\n )\n\n title = models.CharField(\"Название\", max_length=100)\n description = models.TextField(\"Описание\")\n image = models.ImageField(\"Фото\", upload_to=\"services/\", null=True, blank=True)\n employee = models.ManyToManyField(Employee, verbose_name=\"Cотрудник\", related_name=\"service_employee\")\n category = 
models.ForeignKey(Category, verbose_name=\"Категория\", on_delete=models.SET_NULL, null=True)\n warranty = models.PositiveSmallIntegerField(\"Гарантийный срок\", choices=PERIOD, help_text=\"Указать в месяцах\")\n price = models.DecimalField(\"Стоимость услуги\", max_digits=9, decimal_places=2, default=0,\n help_text=\"Указывать сумму в рублях\", validators=[validators.MinValueValidator(0)])\n url = models.SlugField(max_length=130, unique=True)\n\n def __str__(self):\n return self.title\n\n class Meta:\n verbose_name = \"Услуга\"\n verbose_name_plural = \"Услуги\"\n",
"step-ids": [
8,
9,
10,
14,
15
]
}
|
[
8,
9,
10,
14,
15
] |
<|reserved_special_token_0|>
@torch.jit.script
def swish_jit_fwd(x):
return x * torch.sigmoid(x) * 1.6768
<|reserved_special_token_0|>
class SwishJitAutoFn(torch.autograd.Function):
@staticmethod
def forward(ctx, x):
ctx.save_for_backward(x)
return swish_jit_fwd(x)
@staticmethod
def backward(ctx, grad_output):
x = ctx.saved_tensors[0]
return swish_jit_bwd(x, grad_output)
class Swish(nn.Module):
def forward(self, x):
return SwishJitAutoFn.apply(x)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@torch.jit.script
def swish_jit_fwd(x):
return x * torch.sigmoid(x) * 1.6768
<|reserved_special_token_0|>
class SwishJitAutoFn(torch.autograd.Function):
@staticmethod
def forward(ctx, x):
ctx.save_for_backward(x)
return swish_jit_fwd(x)
@staticmethod
def backward(ctx, grad_output):
x = ctx.saved_tensors[0]
return swish_jit_bwd(x, grad_output)
class Swish(nn.Module):
def forward(self, x):
return SwishJitAutoFn.apply(x)
<|reserved_special_token_0|>
def play_and_train(args, policy, optim):
total_loss = 0
turns = 0
scores = []
while turns < args.bs:
log_probs = []
rewards = []
game = Game(4)
t = time_logging.start()
while True:
x = game.encode()
t = time_logging.end('encode', t)
x = torch.tensor(x, device=args.device, dtype=torch.float32)
x = args.beta * policy(x)
t = time_logging.end('policy', t)
loss = [0]
def sample(x, w=1):
if torch.rand(()) < args.randmove:
m = torch.distributions.Categorical(logits=torch.
zeros_like(x))
else:
m = torch.distributions.Categorical(logits=x)
i = m.sample().item()
loss[0] += x.log_softmax(0)[i].mul(w)
return i
action = sample(x[:3])
score = game.score
if action == 0:
position = sample(x[3:3 + 5])
out = game.play(position)
if action == 1:
position = sample(x[3:3 + 5])
out = game.discard(position)
if action == 2:
target = sample(x[3 + 5:3 + 5 + 5], 0.5)
info = sample(x[3 + 5 + 5:3 + 5 + 5 + 10], 0.5)
if info < 5:
out = game.clue(target, info)
else:
out = game.clue(target, 'rgbyp'[info - 5])
t = time_logging.end('decode', t)
log_probs.append(loss[0])
if out is not None:
rewards.append(-1)
break
if game.gameover:
if game.score == 25:
rewards.append(game.score - score)
else:
rewards.append(-1)
break
rewards.append(game.score - score)
if len(log_probs) >= 3:
turns += len(log_probs)
R = 0
returns = []
for r in rewards[::-1]:
R = r + args.gamma * R
returns.insert(0, R)
returns = torch.tensor(returns, device=args.device, dtype=torch
.float32)
returns = (returns - returns.mean()) / (returns.std() + 1e-05)
for log_prob, R in zip(log_probs, returns):
total_loss += -(log_prob * R)
scores.append(game.score)
total_loss /= turns
optim.zero_grad()
total_loss.backward()
optim.step()
t = time_logging.end('backward & optim', t)
return scores
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@torch.jit.script
def swish_jit_fwd(x):
return x * torch.sigmoid(x) * 1.6768
<|reserved_special_token_0|>
class SwishJitAutoFn(torch.autograd.Function):
@staticmethod
def forward(ctx, x):
ctx.save_for_backward(x)
return swish_jit_fwd(x)
@staticmethod
def backward(ctx, grad_output):
x = ctx.saved_tensors[0]
return swish_jit_bwd(x, grad_output)
class Swish(nn.Module):
def forward(self, x):
return SwishJitAutoFn.apply(x)
def orthogonal_(tensor, gain=1):
"""
Orthogonal initialization (modified version from PyTorch)
"""
if tensor.ndimension() < 2:
raise ValueError('Only tensors with 2 or more dimensions are supported'
)
rows = tensor.size(0)
cols = tensor[0].numel()
flattened = tensor.new_empty(rows, cols).normal_(0, 1)
for i in range(0, rows, cols):
q, r = torch.qr(flattened[i:i + cols].t())
q *= torch.diag(r, 0).sign()
q.t_()
with torch.no_grad():
tensor[i:i + cols].view_as(q).copy_(q)
with torch.no_grad():
tensor.mul_(gain)
return tensor
<|reserved_special_token_0|>
def play_and_train(args, policy, optim):
total_loss = 0
turns = 0
scores = []
while turns < args.bs:
log_probs = []
rewards = []
game = Game(4)
t = time_logging.start()
while True:
x = game.encode()
t = time_logging.end('encode', t)
x = torch.tensor(x, device=args.device, dtype=torch.float32)
x = args.beta * policy(x)
t = time_logging.end('policy', t)
loss = [0]
def sample(x, w=1):
if torch.rand(()) < args.randmove:
m = torch.distributions.Categorical(logits=torch.
zeros_like(x))
else:
m = torch.distributions.Categorical(logits=x)
i = m.sample().item()
loss[0] += x.log_softmax(0)[i].mul(w)
return i
action = sample(x[:3])
score = game.score
if action == 0:
position = sample(x[3:3 + 5])
out = game.play(position)
if action == 1:
position = sample(x[3:3 + 5])
out = game.discard(position)
if action == 2:
target = sample(x[3 + 5:3 + 5 + 5], 0.5)
info = sample(x[3 + 5 + 5:3 + 5 + 5 + 10], 0.5)
if info < 5:
out = game.clue(target, info)
else:
out = game.clue(target, 'rgbyp'[info - 5])
t = time_logging.end('decode', t)
log_probs.append(loss[0])
if out is not None:
rewards.append(-1)
break
if game.gameover:
if game.score == 25:
rewards.append(game.score - score)
else:
rewards.append(-1)
break
rewards.append(game.score - score)
if len(log_probs) >= 3:
turns += len(log_probs)
R = 0
returns = []
for r in rewards[::-1]:
R = r + args.gamma * R
returns.insert(0, R)
returns = torch.tensor(returns, device=args.device, dtype=torch
.float32)
returns = (returns - returns.mean()) / (returns.std() + 1e-05)
for log_prob, R in zip(log_probs, returns):
total_loss += -(log_prob * R)
scores.append(game.score)
total_loss /= turns
optim.zero_grad()
total_loss.backward()
optim.step()
t = time_logging.end('backward & optim', t)
return scores
<|reserved_special_token_0|>
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--lr', type=float, default=1e-05)
parser.add_argument('--bs', type=int, default=10)
parser.add_argument('--n', type=int, default=500)
parser.add_argument('--n_avg', type=int, default=1000)
parser.add_argument('--beta', type=float, default=0.01)
parser.add_argument('--gamma', type=float, default=0.99)
parser.add_argument('--randmove', type=float, default=0.4)
parser.add_argument('--restore', type=str)
parser.add_argument('--device', type=str, required=True)
parser.add_argument('--pickle', type=str, required=True)
args = parser.parse_args()
new = True
torch.save(args, args.pickle)
try:
for res in execute(args):
with open(args.pickle, 'wb') as f:
torch.save(args, f)
torch.save(res, f)
new = False
except:
if new:
os.remove(args.pickle)
raise
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def mean(xs):
xs = list(xs)
return sum(xs) / len(xs)
@torch.jit.script
def swish_jit_fwd(x):
return x * torch.sigmoid(x) * 1.6768
@torch.jit.script
def swish_jit_bwd(x, grad_output):
x_sigmoid = torch.sigmoid(x)
return grad_output * (x_sigmoid * (1 + x * (1 - x_sigmoid))) * 1.6768
class SwishJitAutoFn(torch.autograd.Function):
@staticmethod
def forward(ctx, x):
ctx.save_for_backward(x)
return swish_jit_fwd(x)
@staticmethod
def backward(ctx, grad_output):
x = ctx.saved_tensors[0]
return swish_jit_bwd(x, grad_output)
class Swish(nn.Module):
def forward(self, x):
return SwishJitAutoFn.apply(x)
def orthogonal_(tensor, gain=1):
"""
Orthogonal initialization (modified version from PyTorch)
"""
if tensor.ndimension() < 2:
raise ValueError('Only tensors with 2 or more dimensions are supported'
)
rows = tensor.size(0)
cols = tensor[0].numel()
flattened = tensor.new_empty(rows, cols).normal_(0, 1)
for i in range(0, rows, cols):
q, r = torch.qr(flattened[i:i + cols].t())
q *= torch.diag(r, 0).sign()
q.t_()
with torch.no_grad():
tensor[i:i + cols].view_as(q).copy_(q)
with torch.no_grad():
tensor.mul_(gain)
return tensor
def linear(in_features, out_features, bias=True):
"""
Linear Module initialized properly
"""
m = nn.Linear(in_features, out_features, bias=bias)
orthogonal_(m.weight)
nn.init.zeros_(m.bias)
return m
def play_and_train(args, policy, optim):
total_loss = 0
turns = 0
scores = []
while turns < args.bs:
log_probs = []
rewards = []
game = Game(4)
t = time_logging.start()
while True:
x = game.encode()
t = time_logging.end('encode', t)
x = torch.tensor(x, device=args.device, dtype=torch.float32)
x = args.beta * policy(x)
t = time_logging.end('policy', t)
loss = [0]
def sample(x, w=1):
if torch.rand(()) < args.randmove:
m = torch.distributions.Categorical(logits=torch.
zeros_like(x))
else:
m = torch.distributions.Categorical(logits=x)
i = m.sample().item()
loss[0] += x.log_softmax(0)[i].mul(w)
return i
action = sample(x[:3])
score = game.score
if action == 0:
position = sample(x[3:3 + 5])
out = game.play(position)
if action == 1:
position = sample(x[3:3 + 5])
out = game.discard(position)
if action == 2:
target = sample(x[3 + 5:3 + 5 + 5], 0.5)
info = sample(x[3 + 5 + 5:3 + 5 + 5 + 10], 0.5)
if info < 5:
out = game.clue(target, info)
else:
out = game.clue(target, 'rgbyp'[info - 5])
t = time_logging.end('decode', t)
log_probs.append(loss[0])
if out is not None:
rewards.append(-1)
break
if game.gameover:
if game.score == 25:
rewards.append(game.score - score)
else:
rewards.append(-1)
break
rewards.append(game.score - score)
if len(log_probs) >= 3:
turns += len(log_probs)
R = 0
returns = []
for r in rewards[::-1]:
R = r + args.gamma * R
returns.insert(0, R)
returns = torch.tensor(returns, device=args.device, dtype=torch
.float32)
returns = (returns - returns.mean()) / (returns.std() + 1e-05)
for log_prob, R in zip(log_probs, returns):
total_loss += -(log_prob * R)
scores.append(game.score)
total_loss /= turns
optim.zero_grad()
total_loss.backward()
optim.step()
t = time_logging.end('backward & optim', t)
return scores
def execute(args):
torch.backends.cudnn.benchmark = True
policy = nn.Sequential(linear(2270, args.n), Swish(), linear(args.n,
args.n), Swish(), linear(args.n, args.n), Swish(), linear(args.n,
args.n), Swish(), linear(args.n, 23)).to(args.device)
scores = [0]
optim = torch.optim.Adam(policy.parameters(), lr=args.lr)
if args.restore:
with open(args.restore, 'rb') as f:
torch.load(f)
x = torch.load(f, map_location=args.device)
scores = x['scores']
policy.load_state_dict(x['state'])
t = tqdm.tqdm()
for i in itertools.count(1):
new_scores = play_and_train(args, policy, optim)
scores.extend(new_scores)
if i % 1000 == 0:
print()
print(time_logging.text_statistics())
yield {'args': args, 'state': policy.state_dict(), 'scores': scores
}
avg_score = mean(scores[-args.n_avg:])
t.update(len(new_scores))
t.set_postfix_str('scores={} avg_score={:.2f}'.format(scores[-5:],
avg_score))
t.close()
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--lr', type=float, default=1e-05)
parser.add_argument('--bs', type=int, default=10)
parser.add_argument('--n', type=int, default=500)
parser.add_argument('--n_avg', type=int, default=1000)
parser.add_argument('--beta', type=float, default=0.01)
parser.add_argument('--gamma', type=float, default=0.99)
parser.add_argument('--randmove', type=float, default=0.4)
parser.add_argument('--restore', type=str)
parser.add_argument('--device', type=str, required=True)
parser.add_argument('--pickle', type=str, required=True)
args = parser.parse_args()
new = True
torch.save(args, args.pickle)
try:
for res in execute(args):
with open(args.pickle, 'wb') as f:
torch.save(args, f)
torch.save(res, f)
new = False
except:
if new:
os.remove(args.pickle)
raise
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
# pylint: disable=not-callable, no-member, invalid-name, missing-docstring, arguments-differ
import argparse
import itertools
import os
import torch
import torch.nn as nn
import tqdm
import time_logging
from hanabi import Game
def mean(xs):
    """Return the arithmetic mean of the values in *xs*.

    Accepts any iterable (it is materialized once, so generators work).
    Raises ZeroDivisionError on an empty input.
    """
    values = list(xs)
    total = sum(values)
    return total / len(values)
@torch.jit.script
def swish_jit_fwd(x):
    # Swish activation with a fixed gain of 1.6768 so the output variance
    # stays roughly unit for unit-variance inputs.
    sig = torch.sigmoid(x)
    return x * sig * 1.6768
@torch.jit.script
def swish_jit_bwd(x, grad_output):
    # Analytic derivative of x * sigmoid(x) is
    # sigmoid(x) * (1 + x * (1 - sigmoid(x))); scaled by the same
    # 1.6768 gain as the forward pass.
    sig = torch.sigmoid(x)
    return grad_output * (sig * (1 + x * (1 - sig))) * 1.6768
class SwishJitAutoFn(torch.autograd.Function):
    """Autograd wrapper pairing the scripted Swish forward with its
    hand-written backward (avoids storing intermediate sigmoid results)."""

    @staticmethod
    def forward(ctx, x):
        # Save the raw input; the backward recomputes sigmoid from it.
        ctx.save_for_backward(x)
        return swish_jit_fwd(x)

    @staticmethod
    def backward(ctx, grad_output):
        x = ctx.saved_tensors[0]
        return swish_jit_bwd(x, grad_output)
class Swish(nn.Module):
    """Module form of the scaled Swish activation (see SwishJitAutoFn)."""

    def forward(self, x):
        return SwishJitAutoFn.apply(x)
def orthogonal_(tensor, gain=1):
    '''
    Orthogonal initialization (modified version from PyTorch).

    Fills *tensor* in-place with (semi-)orthogonal blocks: the first
    dimension is processed in chunks of ``cols`` rows, each filled with
    the sign-corrected Q factor of a random Gaussian matrix, then the
    whole tensor is scaled by *gain*.  Returns *tensor* for chaining.
    '''
    if tensor.ndimension() < 2:
        raise ValueError("Only tensors with 2 or more dimensions are supported")
    rows = tensor.size(0)
    cols = tensor[0].numel()
    flat = tensor.new_empty(rows, cols).normal_(0, 1)
    for start in range(0, rows, cols):
        # QR of the transposed chunk yields an orthonormal basis.
        q, r = torch.qr(flat[start:start + cols].t())
        # Sign-correct Q so it is uniform over the orthogonal group
        # (https://arxiv.org/pdf/math-ph/0609050.pdf).
        q *= torch.diag(r, 0).sign()
        q.t_()
        with torch.no_grad():
            tensor[start:start + cols].view_as(q).copy_(q)
    with torch.no_grad():
        tensor.mul_(gain)
    return tensor
def linear(in_features, out_features, bias=True):
    '''
    Linear Module initialized properly.

    Creates an ``nn.Linear`` layer with an orthogonal weight matrix and,
    when present, a zero bias vector.

    :param in_features: size of each input sample
    :param out_features: size of each output sample
    :param bias: whether the layer has an additive bias
    :return: the initialized ``nn.Linear`` module
    '''
    m = nn.Linear(in_features, out_features, bias=bias)
    orthogonal_(m.weight)
    # Bug fix: with bias=False, m.bias is None and nn.init.zeros_ would
    # raise; only initialize the bias when it exists.
    if m.bias is not None:
        nn.init.zeros_(m.bias)
    return m
def play_and_train(args, policy, optim):
    """Play Hanabi games until roughly ``args.bs`` turns are collected,
    then perform one REINFORCE update on *policy*.

    Episodes shorter than 3 turns are discarded (not trained on, not
    counted in the returned scores).  Returns the list of final game
    scores gathered in this batch.
    """
    total_loss = 0
    turns = 0
    scores = []
    while turns < args.bs:
        log_probs = []  # per-turn sum of (weighted) action log-probs
        rewards = []    # per-turn reward: score delta, or -1 on failure
        game = Game(4)  # 4-player game
        t = time_logging.start()
        while True:
            x = game.encode()
            t = time_logging.end("encode", t)
            x = torch.tensor(x, device=args.device, dtype=torch.float32)
            # beta scales the logits (inverse temperature of the policy).
            x = args.beta * policy(x)
            t = time_logging.end("policy", t)
            loss = [0]  # mutable cell accumulating log-probs across sample() calls
            def sample(x, w=1):
                # Exploration: with probability args.randmove draw from a
                # uniform distribution instead of the policy logits.
                if torch.rand(()) < args.randmove:
                    m = torch.distributions.Categorical(logits=torch.zeros_like(x))
                else:
                    m = torch.distributions.Categorical(logits=x)
                i = m.sample().item()
                # Always credit the *policy* log-prob (weight w), even for
                # random moves, so gradients flow through every decision.
                loss[0] += x.log_softmax(0)[i].mul(w)
                return i
            # Logit layout: [0:3] action kind, [3:8] hand position,
            # [8:13] clue target player, [13:23] clue content
            # (5 numbers + 5 colors) — presumably matching game.encode();
            # TODO(review): confirm against the hanabi module.
            action = sample(x[:3])
            score = game.score
            if action == 0:
                position = sample(x[3:3+5])
                out = game.play(position)
            if action == 1:
                position = sample(x[3:3+5])
                out = game.discard(position)
            if action == 2:
                # Clue sub-decisions each get half weight in the log-prob sum.
                target = sample(x[3+5:3+5+5], 0.5)
                info = sample(x[3+5+5:3+5+5+10], 0.5)
                if info < 5:
                    out = game.clue(target, info)
                else:
                    out = game.clue(target, "rgbyp"[info-5])
            t = time_logging.end("decode", t)
            log_probs.append(loss[0])
            # A non-None return from the game marks an illegal move:
            # penalize with -1 and end the episode.
            if out is not None:
                rewards.append(-1)
                break
            if game.gameover:
                if game.score == 25:
                    rewards.append(game.score - score)
                else:
                    # A finished but imperfect game is penalized.
                    rewards.append(-1)
                break
            rewards.append(game.score - score)
        if len(log_probs) >= 3:
            turns += len(log_probs)
            # Discounted returns, computed backwards through the episode.
            R = 0
            returns = []
            for r in rewards[::-1]:
                R = r + args.gamma * R
                returns.insert(0, R)
            returns = torch.tensor(returns, device=args.device, dtype=torch.float32)
            # Normalize returns (mean/std baseline) for variance reduction.
            returns = (returns - returns.mean()) / (returns.std() + 1e-5)
            for log_prob, R in zip(log_probs, returns):
                total_loss += -(log_prob * R)
            scores.append(game.score)
    total_loss /= turns
    optim.zero_grad()
    total_loss.backward()
    optim.step()
    t = time_logging.end("backward & optim", t)
    return scores
def execute(args):
    """Build the policy network, optionally restore a checkpoint, and
    train indefinitely, yielding a checkpoint dict after every batch.

    Yields dicts with keys ``'args'``, ``'state'`` (policy state_dict)
    and ``'scores'`` (all game scores so far).
    """
    torch.backends.cudnn.benchmark = True
    # 2270 input features (encoded game state) -> 23 output logits
    # (3 actions + 5 positions + 5 clue targets + 10 clue contents).
    policy = nn.Sequential(
        linear(2270, args.n), Swish(),
        linear(args.n, args.n), Swish(),
        linear(args.n, args.n), Swish(),
        linear(args.n, args.n), Swish(),
        linear(args.n, 23)
    ).to(args.device)
    scores = [0]
    optim = torch.optim.Adam(policy.parameters(), lr=args.lr)
    if args.restore:
        with open(args.restore, 'rb') as f:
            # The checkpoint file holds two pickled objects: the saved
            # args (discarded here) followed by the training state.
            # NOTE(review): the optimizer state is not restored.
            torch.load(f)
            x = torch.load(f, map_location=args.device)
            scores = x['scores']
            policy.load_state_dict(x['state'])
    t = tqdm.tqdm()
    for i in itertools.count(1):
        new_scores = play_and_train(args, policy, optim)
        scores.extend(new_scores)
        if i % 1000 == 0:
            print()
            print(time_logging.text_statistics())
        yield {
            'args': args,
            'state': policy.state_dict(),
            'scores': scores,
        }
        avg_score = mean(scores[-args.n_avg:])
        t.update(len(new_scores))
        t.set_postfix_str("scores={} avg_score={:.2f}".format(scores[-5:], avg_score))
    t.close()
def main():
    """Parse command-line arguments and run the training loop.

    A checkpoint (the args namespace followed by the latest result dict) is
    written to ``args.pickle`` after every yielded batch.  If training dies
    before the first real checkpoint is written, the placeholder pickle
    created up front is removed again.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--lr", type=float, default=1e-5)
    parser.add_argument("--bs", type=int, default=10)
    parser.add_argument("--n", type=int, default=500)
    parser.add_argument("--n_avg", type=int, default=1000)
    parser.add_argument("--beta", type=float, default=0.01)
    parser.add_argument("--gamma", type=float, default=0.99)
    parser.add_argument("--randmove", type=float, default=0.4)
    parser.add_argument("--restore", type=str)
    parser.add_argument("--device", type=str, required=True)
    parser.add_argument("--pickle", type=str, required=True)
    args = parser.parse_args()

    new = True
    # Reserve the checkpoint path immediately so path problems surface early.
    torch.save(args, args.pickle)
    try:
        for res in execute(args):
            with open(args.pickle, 'wb') as f:
                torch.save(args, f)
                torch.save(res, f)
            new = False
    except BaseException:
        # Was a bare `except:` (flake8 E722); BaseException keeps identical
        # semantics (also covers KeyboardInterrupt) while being explicit.
        # Remove the placeholder only if no real checkpoint was ever written.
        if new:
            os.remove(args.pickle)
        raise
# Run the CLI entry point only when executed as a script (not on import).
if __name__ == "__main__":
    main()
|
flexible
|
{
"blob_id": "070330f8d343ff65852c5fbb9a3e96fe1bfc55b5",
"index": 8816,
"step-1": "<mask token>\n\n\n@torch.jit.script\ndef swish_jit_fwd(x):\n return x * torch.sigmoid(x) * 1.6768\n\n\n<mask token>\n\n\nclass SwishJitAutoFn(torch.autograd.Function):\n\n @staticmethod\n def forward(ctx, x):\n ctx.save_for_backward(x)\n return swish_jit_fwd(x)\n\n @staticmethod\n def backward(ctx, grad_output):\n x = ctx.saved_tensors[0]\n return swish_jit_bwd(x, grad_output)\n\n\nclass Swish(nn.Module):\n\n def forward(self, x):\n return SwishJitAutoFn.apply(x)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@torch.jit.script\ndef swish_jit_fwd(x):\n return x * torch.sigmoid(x) * 1.6768\n\n\n<mask token>\n\n\nclass SwishJitAutoFn(torch.autograd.Function):\n\n @staticmethod\n def forward(ctx, x):\n ctx.save_for_backward(x)\n return swish_jit_fwd(x)\n\n @staticmethod\n def backward(ctx, grad_output):\n x = ctx.saved_tensors[0]\n return swish_jit_bwd(x, grad_output)\n\n\nclass Swish(nn.Module):\n\n def forward(self, x):\n return SwishJitAutoFn.apply(x)\n\n\n<mask token>\n\n\ndef play_and_train(args, policy, optim):\n total_loss = 0\n turns = 0\n scores = []\n while turns < args.bs:\n log_probs = []\n rewards = []\n game = Game(4)\n t = time_logging.start()\n while True:\n x = game.encode()\n t = time_logging.end('encode', t)\n x = torch.tensor(x, device=args.device, dtype=torch.float32)\n x = args.beta * policy(x)\n t = time_logging.end('policy', t)\n loss = [0]\n\n def sample(x, w=1):\n if torch.rand(()) < args.randmove:\n m = torch.distributions.Categorical(logits=torch.\n zeros_like(x))\n else:\n m = torch.distributions.Categorical(logits=x)\n i = m.sample().item()\n loss[0] += x.log_softmax(0)[i].mul(w)\n return i\n action = sample(x[:3])\n score = game.score\n if action == 0:\n position = sample(x[3:3 + 5])\n out = game.play(position)\n if action == 1:\n position = sample(x[3:3 + 5])\n out = game.discard(position)\n if action == 2:\n target = sample(x[3 + 5:3 + 5 + 5], 0.5)\n info = sample(x[3 + 5 + 5:3 + 5 + 5 + 10], 0.5)\n if info < 5:\n out = game.clue(target, info)\n else:\n out = game.clue(target, 'rgbyp'[info - 5])\n t = time_logging.end('decode', t)\n log_probs.append(loss[0])\n if out is not None:\n rewards.append(-1)\n break\n if game.gameover:\n if game.score == 25:\n rewards.append(game.score - score)\n else:\n rewards.append(-1)\n break\n rewards.append(game.score - score)\n if len(log_probs) >= 3:\n turns += len(log_probs)\n R = 0\n returns = []\n for r in rewards[::-1]:\n R = r + args.gamma * R\n returns.insert(0, R)\n 
returns = torch.tensor(returns, device=args.device, dtype=torch\n .float32)\n returns = (returns - returns.mean()) / (returns.std() + 1e-05)\n for log_prob, R in zip(log_probs, returns):\n total_loss += -(log_prob * R)\n scores.append(game.score)\n total_loss /= turns\n optim.zero_grad()\n total_loss.backward()\n optim.step()\n t = time_logging.end('backward & optim', t)\n return scores\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@torch.jit.script\ndef swish_jit_fwd(x):\n return x * torch.sigmoid(x) * 1.6768\n\n\n<mask token>\n\n\nclass SwishJitAutoFn(torch.autograd.Function):\n\n @staticmethod\n def forward(ctx, x):\n ctx.save_for_backward(x)\n return swish_jit_fwd(x)\n\n @staticmethod\n def backward(ctx, grad_output):\n x = ctx.saved_tensors[0]\n return swish_jit_bwd(x, grad_output)\n\n\nclass Swish(nn.Module):\n\n def forward(self, x):\n return SwishJitAutoFn.apply(x)\n\n\ndef orthogonal_(tensor, gain=1):\n \"\"\"\n Orthogonal initialization (modified version from PyTorch)\n \"\"\"\n if tensor.ndimension() < 2:\n raise ValueError('Only tensors with 2 or more dimensions are supported'\n )\n rows = tensor.size(0)\n cols = tensor[0].numel()\n flattened = tensor.new_empty(rows, cols).normal_(0, 1)\n for i in range(0, rows, cols):\n q, r = torch.qr(flattened[i:i + cols].t())\n q *= torch.diag(r, 0).sign()\n q.t_()\n with torch.no_grad():\n tensor[i:i + cols].view_as(q).copy_(q)\n with torch.no_grad():\n tensor.mul_(gain)\n return tensor\n\n\n<mask token>\n\n\ndef play_and_train(args, policy, optim):\n total_loss = 0\n turns = 0\n scores = []\n while turns < args.bs:\n log_probs = []\n rewards = []\n game = Game(4)\n t = time_logging.start()\n while True:\n x = game.encode()\n t = time_logging.end('encode', t)\n x = torch.tensor(x, device=args.device, dtype=torch.float32)\n x = args.beta * policy(x)\n t = time_logging.end('policy', t)\n loss = [0]\n\n def sample(x, w=1):\n if torch.rand(()) < args.randmove:\n m = torch.distributions.Categorical(logits=torch.\n zeros_like(x))\n else:\n m = torch.distributions.Categorical(logits=x)\n i = m.sample().item()\n loss[0] += x.log_softmax(0)[i].mul(w)\n return i\n action = sample(x[:3])\n score = game.score\n if action == 0:\n position = sample(x[3:3 + 5])\n out = game.play(position)\n if action == 1:\n position = sample(x[3:3 + 5])\n out = game.discard(position)\n if action == 2:\n target = sample(x[3 + 5:3 + 5 + 5], 
0.5)\n info = sample(x[3 + 5 + 5:3 + 5 + 5 + 10], 0.5)\n if info < 5:\n out = game.clue(target, info)\n else:\n out = game.clue(target, 'rgbyp'[info - 5])\n t = time_logging.end('decode', t)\n log_probs.append(loss[0])\n if out is not None:\n rewards.append(-1)\n break\n if game.gameover:\n if game.score == 25:\n rewards.append(game.score - score)\n else:\n rewards.append(-1)\n break\n rewards.append(game.score - score)\n if len(log_probs) >= 3:\n turns += len(log_probs)\n R = 0\n returns = []\n for r in rewards[::-1]:\n R = r + args.gamma * R\n returns.insert(0, R)\n returns = torch.tensor(returns, device=args.device, dtype=torch\n .float32)\n returns = (returns - returns.mean()) / (returns.std() + 1e-05)\n for log_prob, R in zip(log_probs, returns):\n total_loss += -(log_prob * R)\n scores.append(game.score)\n total_loss /= turns\n optim.zero_grad()\n total_loss.backward()\n optim.step()\n t = time_logging.end('backward & optim', t)\n return scores\n\n\n<mask token>\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--lr', type=float, default=1e-05)\n parser.add_argument('--bs', type=int, default=10)\n parser.add_argument('--n', type=int, default=500)\n parser.add_argument('--n_avg', type=int, default=1000)\n parser.add_argument('--beta', type=float, default=0.01)\n parser.add_argument('--gamma', type=float, default=0.99)\n parser.add_argument('--randmove', type=float, default=0.4)\n parser.add_argument('--restore', type=str)\n parser.add_argument('--device', type=str, required=True)\n parser.add_argument('--pickle', type=str, required=True)\n args = parser.parse_args()\n new = True\n torch.save(args, args.pickle)\n try:\n for res in execute(args):\n with open(args.pickle, 'wb') as f:\n torch.save(args, f)\n torch.save(res, f)\n new = False\n except:\n if new:\n os.remove(args.pickle)\n raise\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef mean(xs):\n xs = list(xs)\n return sum(xs) / len(xs)\n\n\n@torch.jit.script\ndef swish_jit_fwd(x):\n return x * torch.sigmoid(x) * 1.6768\n\n\n@torch.jit.script\ndef swish_jit_bwd(x, grad_output):\n x_sigmoid = torch.sigmoid(x)\n return grad_output * (x_sigmoid * (1 + x * (1 - x_sigmoid))) * 1.6768\n\n\nclass SwishJitAutoFn(torch.autograd.Function):\n\n @staticmethod\n def forward(ctx, x):\n ctx.save_for_backward(x)\n return swish_jit_fwd(x)\n\n @staticmethod\n def backward(ctx, grad_output):\n x = ctx.saved_tensors[0]\n return swish_jit_bwd(x, grad_output)\n\n\nclass Swish(nn.Module):\n\n def forward(self, x):\n return SwishJitAutoFn.apply(x)\n\n\ndef orthogonal_(tensor, gain=1):\n \"\"\"\n Orthogonal initialization (modified version from PyTorch)\n \"\"\"\n if tensor.ndimension() < 2:\n raise ValueError('Only tensors with 2 or more dimensions are supported'\n )\n rows = tensor.size(0)\n cols = tensor[0].numel()\n flattened = tensor.new_empty(rows, cols).normal_(0, 1)\n for i in range(0, rows, cols):\n q, r = torch.qr(flattened[i:i + cols].t())\n q *= torch.diag(r, 0).sign()\n q.t_()\n with torch.no_grad():\n tensor[i:i + cols].view_as(q).copy_(q)\n with torch.no_grad():\n tensor.mul_(gain)\n return tensor\n\n\ndef linear(in_features, out_features, bias=True):\n \"\"\"\n Linear Module initialized properly\n \"\"\"\n m = nn.Linear(in_features, out_features, bias=bias)\n orthogonal_(m.weight)\n nn.init.zeros_(m.bias)\n return m\n\n\ndef play_and_train(args, policy, optim):\n total_loss = 0\n turns = 0\n scores = []\n while turns < args.bs:\n log_probs = []\n rewards = []\n game = Game(4)\n t = time_logging.start()\n while True:\n x = game.encode()\n t = time_logging.end('encode', t)\n x = torch.tensor(x, device=args.device, dtype=torch.float32)\n x = args.beta * policy(x)\n t = time_logging.end('policy', t)\n loss = [0]\n\n def sample(x, w=1):\n if torch.rand(()) < args.randmove:\n m = torch.distributions.Categorical(logits=torch.\n 
zeros_like(x))\n else:\n m = torch.distributions.Categorical(logits=x)\n i = m.sample().item()\n loss[0] += x.log_softmax(0)[i].mul(w)\n return i\n action = sample(x[:3])\n score = game.score\n if action == 0:\n position = sample(x[3:3 + 5])\n out = game.play(position)\n if action == 1:\n position = sample(x[3:3 + 5])\n out = game.discard(position)\n if action == 2:\n target = sample(x[3 + 5:3 + 5 + 5], 0.5)\n info = sample(x[3 + 5 + 5:3 + 5 + 5 + 10], 0.5)\n if info < 5:\n out = game.clue(target, info)\n else:\n out = game.clue(target, 'rgbyp'[info - 5])\n t = time_logging.end('decode', t)\n log_probs.append(loss[0])\n if out is not None:\n rewards.append(-1)\n break\n if game.gameover:\n if game.score == 25:\n rewards.append(game.score - score)\n else:\n rewards.append(-1)\n break\n rewards.append(game.score - score)\n if len(log_probs) >= 3:\n turns += len(log_probs)\n R = 0\n returns = []\n for r in rewards[::-1]:\n R = r + args.gamma * R\n returns.insert(0, R)\n returns = torch.tensor(returns, device=args.device, dtype=torch\n .float32)\n returns = (returns - returns.mean()) / (returns.std() + 1e-05)\n for log_prob, R in zip(log_probs, returns):\n total_loss += -(log_prob * R)\n scores.append(game.score)\n total_loss /= turns\n optim.zero_grad()\n total_loss.backward()\n optim.step()\n t = time_logging.end('backward & optim', t)\n return scores\n\n\ndef execute(args):\n torch.backends.cudnn.benchmark = True\n policy = nn.Sequential(linear(2270, args.n), Swish(), linear(args.n,\n args.n), Swish(), linear(args.n, args.n), Swish(), linear(args.n,\n args.n), Swish(), linear(args.n, 23)).to(args.device)\n scores = [0]\n optim = torch.optim.Adam(policy.parameters(), lr=args.lr)\n if args.restore:\n with open(args.restore, 'rb') as f:\n torch.load(f)\n x = torch.load(f, map_location=args.device)\n scores = x['scores']\n policy.load_state_dict(x['state'])\n t = tqdm.tqdm()\n for i in itertools.count(1):\n new_scores = play_and_train(args, policy, optim)\n 
scores.extend(new_scores)\n if i % 1000 == 0:\n print()\n print(time_logging.text_statistics())\n yield {'args': args, 'state': policy.state_dict(), 'scores': scores\n }\n avg_score = mean(scores[-args.n_avg:])\n t.update(len(new_scores))\n t.set_postfix_str('scores={} avg_score={:.2f}'.format(scores[-5:],\n avg_score))\n t.close()\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--lr', type=float, default=1e-05)\n parser.add_argument('--bs', type=int, default=10)\n parser.add_argument('--n', type=int, default=500)\n parser.add_argument('--n_avg', type=int, default=1000)\n parser.add_argument('--beta', type=float, default=0.01)\n parser.add_argument('--gamma', type=float, default=0.99)\n parser.add_argument('--randmove', type=float, default=0.4)\n parser.add_argument('--restore', type=str)\n parser.add_argument('--device', type=str, required=True)\n parser.add_argument('--pickle', type=str, required=True)\n args = parser.parse_args()\n new = True\n torch.save(args, args.pickle)\n try:\n for res in execute(args):\n with open(args.pickle, 'wb') as f:\n torch.save(args, f)\n torch.save(res, f)\n new = False\n except:\n if new:\n os.remove(args.pickle)\n raise\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "# pylint: disable=not-callable, no-member, invalid-name, missing-docstring, arguments-differ\nimport argparse\nimport itertools\nimport os\n\nimport torch\nimport torch.nn as nn\nimport tqdm\n\nimport time_logging\nfrom hanabi import Game\n\n\ndef mean(xs):\n xs = list(xs)\n return sum(xs) / len(xs)\n\n\n@torch.jit.script\ndef swish_jit_fwd(x):\n return x * torch.sigmoid(x) * 1.6768\n\n\n@torch.jit.script\ndef swish_jit_bwd(x, grad_output):\n x_sigmoid = torch.sigmoid(x)\n return grad_output * (x_sigmoid * (1 + x * (1 - x_sigmoid))) * 1.6768\n\n\nclass SwishJitAutoFn(torch.autograd.Function):\n @staticmethod\n def forward(ctx, x):\n ctx.save_for_backward(x)\n return swish_jit_fwd(x)\n\n @staticmethod\n def backward(ctx, grad_output):\n x = ctx.saved_tensors[0]\n return swish_jit_bwd(x, grad_output)\n\n\nclass Swish(nn.Module):\n def forward(self, x):\n return SwishJitAutoFn.apply(x)\n\n\ndef orthogonal_(tensor, gain=1):\n '''\n Orthogonal initialization (modified version from PyTorch)\n '''\n if tensor.ndimension() < 2:\n raise ValueError(\"Only tensors with 2 or more dimensions are supported\")\n\n rows = tensor.size(0)\n cols = tensor[0].numel()\n flattened = tensor.new_empty(rows, cols).normal_(0, 1)\n\n for i in range(0, rows, cols):\n # Compute the qr factorization\n q, r = torch.qr(flattened[i:i + cols].t())\n # Make Q uniform according to https://arxiv.org/pdf/math-ph/0609050.pdf\n q *= torch.diag(r, 0).sign()\n q.t_()\n\n with torch.no_grad():\n tensor[i:i + cols].view_as(q).copy_(q)\n\n with torch.no_grad():\n tensor.mul_(gain)\n return tensor\n\n\ndef linear(in_features, out_features, bias=True):\n '''\n Linear Module initialized properly\n '''\n m = nn.Linear(in_features, out_features, bias=bias)\n orthogonal_(m.weight)\n nn.init.zeros_(m.bias)\n return m\n\n\ndef play_and_train(args, policy, optim):\n total_loss = 0\n turns = 0\n scores = []\n\n while turns < args.bs:\n log_probs = []\n rewards = []\n\n game = Game(4)\n t = 
time_logging.start()\n while True:\n x = game.encode()\n t = time_logging.end(\"encode\", t)\n x = torch.tensor(x, device=args.device, dtype=torch.float32)\n x = args.beta * policy(x)\n t = time_logging.end(\"policy\", t)\n\n loss = [0]\n def sample(x, w=1):\n if torch.rand(()) < args.randmove:\n m = torch.distributions.Categorical(logits=torch.zeros_like(x))\n else:\n m = torch.distributions.Categorical(logits=x)\n i = m.sample().item()\n loss[0] += x.log_softmax(0)[i].mul(w)\n return i\n\n action = sample(x[:3])\n score = game.score\n\n if action == 0:\n position = sample(x[3:3+5])\n out = game.play(position)\n\n if action == 1:\n position = sample(x[3:3+5])\n out = game.discard(position)\n\n if action == 2:\n target = sample(x[3+5:3+5+5], 0.5)\n info = sample(x[3+5+5:3+5+5+10], 0.5)\n if info < 5:\n out = game.clue(target, info)\n else:\n out = game.clue(target, \"rgbyp\"[info-5])\n\n t = time_logging.end(\"decode\", t)\n\n log_probs.append(loss[0])\n if out is not None:\n rewards.append(-1)\n break\n\n if game.gameover:\n if game.score == 25:\n rewards.append(game.score - score)\n else:\n rewards.append(-1)\n break\n\n rewards.append(game.score - score)\n\n if len(log_probs) >= 3:\n turns += len(log_probs)\n R = 0\n returns = []\n for r in rewards[::-1]:\n R = r + args.gamma * R\n returns.insert(0, R)\n returns = torch.tensor(returns, device=args.device, dtype=torch.float32)\n returns = (returns - returns.mean()) / (returns.std() + 1e-5)\n for log_prob, R in zip(log_probs, returns):\n total_loss += -(log_prob * R)\n\n scores.append(game.score)\n\n total_loss /= turns\n\n optim.zero_grad()\n total_loss.backward()\n optim.step()\n t = time_logging.end(\"backward & optim\", t)\n\n return scores\n\n\ndef execute(args):\n torch.backends.cudnn.benchmark = True\n\n policy = nn.Sequential(\n linear(2270, args.n), Swish(),\n linear(args.n, args.n), Swish(),\n linear(args.n, args.n), Swish(),\n linear(args.n, args.n), Swish(),\n linear(args.n, 23)\n ).to(args.device)\n\n 
scores = [0]\n\n optim = torch.optim.Adam(policy.parameters(), lr=args.lr)\n\n if args.restore:\n with open(args.restore, 'rb') as f:\n torch.load(f)\n x = torch.load(f, map_location=args.device)\n scores = x['scores']\n policy.load_state_dict(x['state'])\n\n t = tqdm.tqdm()\n for i in itertools.count(1):\n new_scores = play_and_train(args, policy, optim)\n scores.extend(new_scores)\n\n if i % 1000 == 0:\n print()\n print(time_logging.text_statistics())\n yield {\n 'args': args,\n 'state': policy.state_dict(),\n 'scores': scores,\n }\n\n avg_score = mean(scores[-args.n_avg:])\n t.update(len(new_scores))\n t.set_postfix_str(\"scores={} avg_score={:.2f}\".format(scores[-5:], avg_score))\n\n t.close()\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--lr\", type=float, default=1e-5)\n parser.add_argument(\"--bs\", type=int, default=10)\n parser.add_argument(\"--n\", type=int, default=500)\n parser.add_argument(\"--n_avg\", type=int, default=1000)\n parser.add_argument(\"--beta\", type=float, default=0.01)\n parser.add_argument(\"--gamma\", type=float, default=0.99)\n parser.add_argument(\"--randmove\", type=float, default=0.4)\n parser.add_argument(\"--restore\", type=str)\n\n parser.add_argument(\"--device\", type=str, required=True)\n\n parser.add_argument(\"--pickle\", type=str, required=True)\n args = parser.parse_args()\n\n new = True\n torch.save(args, args.pickle)\n try:\n for res in execute(args):\n with open(args.pickle, 'wb') as f:\n torch.save(args, f)\n torch.save(res, f)\n new = False\n except:\n if new:\n os.remove(args.pickle)\n raise\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
6,
7,
9,
14,
16
]
}
|
[
6,
7,
9,
14,
16
] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base PXE Interface Methods
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import pxe_utils as pxe_utils
from ironic.drivers.modules import deploy_utils
# Module-level logger and metrics collector, keyed by this module's name.
LOG = logging.getLogger(__name__)
METRICS = metrics_utils.get_metrics_logger(__name__)
# driver_info keys that must be present for the PXE boot interface to work.
REQUIRED_PROPERTIES = {
    'deploy_kernel': _("UUID (from Glance) of the deployment kernel. "
                       "Required."),
    'deploy_ramdisk': _("UUID (from Glance) of the ramdisk that is "
                        "mounted at boot time. Required."),
}
# driver_info keys that tune boot behaviour but may be omitted.
OPTIONAL_PROPERTIES = {
    'force_persistent_boot_device': _("True to enable persistent behavior "
                                      "when the boot device is set during "
                                      "deploy and cleaning operations. "
                                      "Defaults to False. Optional."),
}
# driver_info keys required only when the node is put into rescue mode.
RESCUE_PROPERTIES = {
    'rescue_kernel': _('UUID (from Glance) of the rescue kernel. This value '
                       'is required for rescue mode.'),
    'rescue_ramdisk': _('UUID (from Glance) of the rescue ramdisk with agent '
                        'that is used at node rescue time. This value is '
                        'required for rescue mode.'),
}
# Union of all of the above; this is what get_properties() advertises.
COMMON_PROPERTIES = REQUIRED_PROPERTIES.copy()
COMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)
COMMON_PROPERTIES.update(RESCUE_PROPERTIES)
class PXEBaseMixin(object):
    """Common PXE boot-interface behaviour shared by PXE-based drivers."""

    def get_properties(self):
        """Return the properties of the interface.

        :returns: dictionary of <property name>:<property description> entries.
        """
        return COMMON_PROPERTIES

    @METRICS.timer('PXEBaseMixin.clean_up_ramdisk')
    def clean_up_ramdisk(self, task):
        """Cleans up the boot of ironic ramdisk.

        This method cleans up the PXE environment that was setup for booting
        the deploy or rescue ramdisk. It unlinks the deploy/rescue
        kernel/ramdisk in the node's directory in tftproot and removes it's PXE
        config.

        Whether a 'deploy' or 'rescue' environment is cleaned up is derived
        from the node's state via deploy_utils.rescue_or_deploy_mode(); the
        previous docstring advertised a ``mode`` parameter that this method
        never accepted.

        :param task: a task from TaskManager.
        :returns: None
        """
        node = task.node
        # Pick 'deploy' or 'rescue' cleanup based on the node's current state.
        mode = deploy_utils.rescue_or_deploy_mode(node)
        try:
            images_info = pxe_utils.get_image_info(node, mode=mode)
        except exception.MissingParameterValue as e:
            # Best effort: without the image parameters we cannot locate the
            # cached images, so log the problem and skip the image cleanup.
            LOG.warning('Could not get %(mode)s image info '
                        'to clean up images for node %(node)s: %(err)s',
                        {'mode': mode, 'node': node.uuid, 'err': e})
        else:
            pxe_utils.clean_up_pxe_env(task, images_info)

    @METRICS.timer('PXEBaseMixin.validate_rescue')
    def validate_rescue(self, task):
        """Validate that the node has required properties for rescue.

        :param task: a TaskManager instance with the node being checked
        :raises: MissingParameterValue if node is missing one or more required
            parameters
        """
        pxe_utils.parse_driver_info(task.node, mode='rescue')
|
normal
|
{
"blob_id": "d56fa4ea999d8af887e5f68296bfb20ad535e6ad",
"index": 6748,
"step-1": "<mask token>\n\n\nclass PXEBaseMixin(object):\n\n def get_properties(self):\n \"\"\"Return the properties of the interface.\n\n :returns: dictionary of <property name>:<property description> entries.\n \"\"\"\n return COMMON_PROPERTIES\n\n @METRICS.timer('PXEBaseMixin.clean_up_ramdisk')\n def clean_up_ramdisk(self, task):\n \"\"\"Cleans up the boot of ironic ramdisk.\n\n This method cleans up the PXE environment that was setup for booting\n the deploy or rescue ramdisk. It unlinks the deploy/rescue\n kernel/ramdisk in the node's directory in tftproot and removes it's PXE\n config.\n\n :param task: a task from TaskManager.\n :param mode: Label indicating a deploy or rescue operation\n was carried out on the node. Supported values are 'deploy' and\n 'rescue'. Defaults to 'deploy', indicating deploy operation was\n carried out.\n :returns: None\n \"\"\"\n node = task.node\n mode = deploy_utils.rescue_or_deploy_mode(node)\n try:\n images_info = pxe_utils.get_image_info(node, mode=mode)\n except exception.MissingParameterValue as e:\n LOG.warning(\n 'Could not get %(mode)s image info to clean up images for node %(node)s: %(err)s'\n , {'mode': mode, 'node': node.uuid, 'err': e})\n else:\n pxe_utils.clean_up_pxe_env(task, images_info)\n\n @METRICS.timer('PXEBaseMixin.validate_rescue')\n def validate_rescue(self, task):\n \"\"\"Validate that the node has required properties for rescue.\n\n :param task: a TaskManager instance with the node being checked\n :raises: MissingParameterValue if node is missing one or more required\n parameters\n \"\"\"\n pxe_utils.parse_driver_info(task.node, mode='rescue')\n",
"step-2": "<mask token>\nCOMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)\nCOMMON_PROPERTIES.update(RESCUE_PROPERTIES)\n\n\nclass PXEBaseMixin(object):\n\n def get_properties(self):\n \"\"\"Return the properties of the interface.\n\n :returns: dictionary of <property name>:<property description> entries.\n \"\"\"\n return COMMON_PROPERTIES\n\n @METRICS.timer('PXEBaseMixin.clean_up_ramdisk')\n def clean_up_ramdisk(self, task):\n \"\"\"Cleans up the boot of ironic ramdisk.\n\n This method cleans up the PXE environment that was setup for booting\n the deploy or rescue ramdisk. It unlinks the deploy/rescue\n kernel/ramdisk in the node's directory in tftproot and removes it's PXE\n config.\n\n :param task: a task from TaskManager.\n :param mode: Label indicating a deploy or rescue operation\n was carried out on the node. Supported values are 'deploy' and\n 'rescue'. Defaults to 'deploy', indicating deploy operation was\n carried out.\n :returns: None\n \"\"\"\n node = task.node\n mode = deploy_utils.rescue_or_deploy_mode(node)\n try:\n images_info = pxe_utils.get_image_info(node, mode=mode)\n except exception.MissingParameterValue as e:\n LOG.warning(\n 'Could not get %(mode)s image info to clean up images for node %(node)s: %(err)s'\n , {'mode': mode, 'node': node.uuid, 'err': e})\n else:\n pxe_utils.clean_up_pxe_env(task, images_info)\n\n @METRICS.timer('PXEBaseMixin.validate_rescue')\n def validate_rescue(self, task):\n \"\"\"Validate that the node has required properties for rescue.\n\n :param task: a TaskManager instance with the node being checked\n :raises: MissingParameterValue if node is missing one or more required\n parameters\n \"\"\"\n pxe_utils.parse_driver_info(task.node, mode='rescue')\n",
"step-3": "<mask token>\nLOG = logging.getLogger(__name__)\nMETRICS = metrics_utils.get_metrics_logger(__name__)\nREQUIRED_PROPERTIES = {'deploy_kernel': _(\n 'UUID (from Glance) of the deployment kernel. Required.'),\n 'deploy_ramdisk': _(\n 'UUID (from Glance) of the ramdisk that is mounted at boot time. Required.'\n )}\nOPTIONAL_PROPERTIES = {'force_persistent_boot_device': _(\n 'True to enable persistent behavior when the boot device is set during deploy and cleaning operations. Defaults to False. Optional.'\n )}\nRESCUE_PROPERTIES = {'rescue_kernel': _(\n 'UUID (from Glance) of the rescue kernel. This value is required for rescue mode.'\n ), 'rescue_ramdisk': _(\n 'UUID (from Glance) of the rescue ramdisk with agent that is used at node rescue time. This value is required for rescue mode.'\n )}\nCOMMON_PROPERTIES = REQUIRED_PROPERTIES.copy()\nCOMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)\nCOMMON_PROPERTIES.update(RESCUE_PROPERTIES)\n\n\nclass PXEBaseMixin(object):\n\n def get_properties(self):\n \"\"\"Return the properties of the interface.\n\n :returns: dictionary of <property name>:<property description> entries.\n \"\"\"\n return COMMON_PROPERTIES\n\n @METRICS.timer('PXEBaseMixin.clean_up_ramdisk')\n def clean_up_ramdisk(self, task):\n \"\"\"Cleans up the boot of ironic ramdisk.\n\n This method cleans up the PXE environment that was setup for booting\n the deploy or rescue ramdisk. It unlinks the deploy/rescue\n kernel/ramdisk in the node's directory in tftproot and removes it's PXE\n config.\n\n :param task: a task from TaskManager.\n :param mode: Label indicating a deploy or rescue operation\n was carried out on the node. Supported values are 'deploy' and\n 'rescue'. 
Defaults to 'deploy', indicating deploy operation was\n carried out.\n :returns: None\n \"\"\"\n node = task.node\n mode = deploy_utils.rescue_or_deploy_mode(node)\n try:\n images_info = pxe_utils.get_image_info(node, mode=mode)\n except exception.MissingParameterValue as e:\n LOG.warning(\n 'Could not get %(mode)s image info to clean up images for node %(node)s: %(err)s'\n , {'mode': mode, 'node': node.uuid, 'err': e})\n else:\n pxe_utils.clean_up_pxe_env(task, images_info)\n\n @METRICS.timer('PXEBaseMixin.validate_rescue')\n def validate_rescue(self, task):\n \"\"\"Validate that the node has required properties for rescue.\n\n :param task: a TaskManager instance with the node being checked\n :raises: MissingParameterValue if node is missing one or more required\n parameters\n \"\"\"\n pxe_utils.parse_driver_info(task.node, mode='rescue')\n",
"step-4": "<mask token>\nfrom ironic_lib import metrics_utils\nfrom oslo_log import log as logging\nfrom ironic.common import exception\nfrom ironic.common.i18n import _\nfrom ironic.common import pxe_utils as pxe_utils\nfrom ironic.drivers.modules import deploy_utils\nLOG = logging.getLogger(__name__)\nMETRICS = metrics_utils.get_metrics_logger(__name__)\nREQUIRED_PROPERTIES = {'deploy_kernel': _(\n 'UUID (from Glance) of the deployment kernel. Required.'),\n 'deploy_ramdisk': _(\n 'UUID (from Glance) of the ramdisk that is mounted at boot time. Required.'\n )}\nOPTIONAL_PROPERTIES = {'force_persistent_boot_device': _(\n 'True to enable persistent behavior when the boot device is set during deploy and cleaning operations. Defaults to False. Optional.'\n )}\nRESCUE_PROPERTIES = {'rescue_kernel': _(\n 'UUID (from Glance) of the rescue kernel. This value is required for rescue mode.'\n ), 'rescue_ramdisk': _(\n 'UUID (from Glance) of the rescue ramdisk with agent that is used at node rescue time. This value is required for rescue mode.'\n )}\nCOMMON_PROPERTIES = REQUIRED_PROPERTIES.copy()\nCOMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)\nCOMMON_PROPERTIES.update(RESCUE_PROPERTIES)\n\n\nclass PXEBaseMixin(object):\n\n def get_properties(self):\n \"\"\"Return the properties of the interface.\n\n :returns: dictionary of <property name>:<property description> entries.\n \"\"\"\n return COMMON_PROPERTIES\n\n @METRICS.timer('PXEBaseMixin.clean_up_ramdisk')\n def clean_up_ramdisk(self, task):\n \"\"\"Cleans up the boot of ironic ramdisk.\n\n This method cleans up the PXE environment that was setup for booting\n the deploy or rescue ramdisk. It unlinks the deploy/rescue\n kernel/ramdisk in the node's directory in tftproot and removes it's PXE\n config.\n\n :param task: a task from TaskManager.\n :param mode: Label indicating a deploy or rescue operation\n was carried out on the node. Supported values are 'deploy' and\n 'rescue'. 
Defaults to 'deploy', indicating deploy operation was\n carried out.\n :returns: None\n \"\"\"\n node = task.node\n mode = deploy_utils.rescue_or_deploy_mode(node)\n try:\n images_info = pxe_utils.get_image_info(node, mode=mode)\n except exception.MissingParameterValue as e:\n LOG.warning(\n 'Could not get %(mode)s image info to clean up images for node %(node)s: %(err)s'\n , {'mode': mode, 'node': node.uuid, 'err': e})\n else:\n pxe_utils.clean_up_pxe_env(task, images_info)\n\n @METRICS.timer('PXEBaseMixin.validate_rescue')\n def validate_rescue(self, task):\n \"\"\"Validate that the node has required properties for rescue.\n\n :param task: a TaskManager instance with the node being checked\n :raises: MissingParameterValue if node is missing one or more required\n parameters\n \"\"\"\n pxe_utils.parse_driver_info(task.node, mode='rescue')\n",
"step-5": "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\"\"\"\nBase PXE Interface Methods\n\"\"\"\n\nfrom ironic_lib import metrics_utils\nfrom oslo_log import log as logging\n\nfrom ironic.common import exception\nfrom ironic.common.i18n import _\nfrom ironic.common import pxe_utils as pxe_utils\nfrom ironic.drivers.modules import deploy_utils\nLOG = logging.getLogger(__name__)\n\nMETRICS = metrics_utils.get_metrics_logger(__name__)\n\nREQUIRED_PROPERTIES = {\n 'deploy_kernel': _(\"UUID (from Glance) of the deployment kernel. \"\n \"Required.\"),\n 'deploy_ramdisk': _(\"UUID (from Glance) of the ramdisk that is \"\n \"mounted at boot time. Required.\"),\n}\nOPTIONAL_PROPERTIES = {\n 'force_persistent_boot_device': _(\"True to enable persistent behavior \"\n \"when the boot device is set during \"\n \"deploy and cleaning operations. \"\n \"Defaults to False. Optional.\"),\n}\nRESCUE_PROPERTIES = {\n 'rescue_kernel': _('UUID (from Glance) of the rescue kernel. This value '\n 'is required for rescue mode.'),\n 'rescue_ramdisk': _('UUID (from Glance) of the rescue ramdisk with agent '\n 'that is used at node rescue time. 
This value is '\n 'required for rescue mode.'),\n}\nCOMMON_PROPERTIES = REQUIRED_PROPERTIES.copy()\nCOMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)\nCOMMON_PROPERTIES.update(RESCUE_PROPERTIES)\n\n\nclass PXEBaseMixin(object):\n\n def get_properties(self):\n \"\"\"Return the properties of the interface.\n\n :returns: dictionary of <property name>:<property description> entries.\n \"\"\"\n return COMMON_PROPERTIES\n\n @METRICS.timer('PXEBaseMixin.clean_up_ramdisk')\n def clean_up_ramdisk(self, task):\n \"\"\"Cleans up the boot of ironic ramdisk.\n\n This method cleans up the PXE environment that was setup for booting\n the deploy or rescue ramdisk. It unlinks the deploy/rescue\n kernel/ramdisk in the node's directory in tftproot and removes it's PXE\n config.\n\n :param task: a task from TaskManager.\n :param mode: Label indicating a deploy or rescue operation\n was carried out on the node. Supported values are 'deploy' and\n 'rescue'. Defaults to 'deploy', indicating deploy operation was\n carried out.\n :returns: None\n \"\"\"\n node = task.node\n mode = deploy_utils.rescue_or_deploy_mode(node)\n try:\n images_info = pxe_utils.get_image_info(node, mode=mode)\n except exception.MissingParameterValue as e:\n LOG.warning('Could not get %(mode)s image info '\n 'to clean up images for node %(node)s: %(err)s',\n {'mode': mode, 'node': node.uuid, 'err': e})\n else:\n pxe_utils.clean_up_pxe_env(task, images_info)\n\n @METRICS.timer('PXEBaseMixin.validate_rescue')\n def validate_rescue(self, task):\n \"\"\"Validate that the node has required properties for rescue.\n\n :param task: a TaskManager instance with the node being checked\n :raises: MissingParameterValue if node is missing one or more required\n parameters\n \"\"\"\n pxe_utils.parse_driver_info(task.node, mode='rescue')\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2020- Spyder Project Contributors
#
# Released under the terms of the MIT License
# ----------------------------------------------------------------------------
"""Tests for the execution of pylint."""
# Standard library imports
from io import open
import os.path as osp
from unittest.mock import Mock, MagicMock
# Third party imports
import pytest
from qtpy.QtCore import Signal, QObject
# Local imports
from spyder.plugins.pylint.plugin import Pylint
from spyder.plugins.pylint.widgets.pylintgui import PylintWidget
from spyder.plugins.pylint.utils import get_pylintrc_path
# pylint: disable=redefined-outer-name
# Name of the configuration file pylint searches for in each directory.
PYLINTRC_FILENAME = ".pylintrc"
# Constants for dir name keys
# In Python 3 and Spyder 5, replace with enum
# NOTE(review): "e" doubles as the no-pylintrc sentinel and is also listed
# under good-names in PYLINTRC_TEST_CONTENTS below — presumably so the
# "no config found" case produces no bad-name report; confirm.
NO_DIR = "e"
SCRIPT_DIR = "SCRIPT_DIR"
WORKING_DIR = "WORKING_DIR"
PROJECT_DIR = "PROJECT_DIR"
HOME_DIR = "HOME_DIR"
ALL_DIR = "ALL_DIR"
# Directories that may hold a .pylintrc; this is also the iteration order
# used by the fixtures below when creating the temporary search paths.
DIR_LIST = [SCRIPT_DIR, WORKING_DIR, PROJECT_DIR, HOME_DIR]
DIR_LIST_ALL = [NO_DIR] + DIR_LIST + [ALL_DIR]
# Test script content: a few stdlib imports plus one "<DIR_KEY> = <idx>"
# assignment per key; a module docstring line is prepended before use.
PYLINT_TEST_SCRIPT = "import math\nimport os\nimport sys\n" + "\n".join(
    [dir_name + " = " + str(idx) for idx, dir_name in enumerate(DIR_LIST_ALL)])
PYLINT_TEST_SCRIPT = "\"\"\"Docstring.\"\"\"\n" + PYLINT_TEST_SCRIPT + "\n"
# Template .pylintrc contents; {bad_names} is filled in per test location.
PYLINTRC_TEST_CONTENTS = """
[MESSAGES CONTROL]
enable=blacklisted-name
[BASIC]
bad-names={bad_names}
good-names=e
"""
class MainWindowMock(QObject):
    """Minimal stand-in for the Spyder main window used by the plugin."""

    sig_editor_focus_changed = Signal(str)

    def __init__(self):
        super().__init__(None)
        # Expose the class-level signal on the mocked editor so the
        # plugin can connect to it as it would on the real main window.
        self.editor = Mock()
        self.editor.sig_editor_focus_changed = self.sig_editor_focus_changed
        self.projects = MagicMock()
@pytest.fixture
def pylintrc_search_paths(tmp_path_factory):
    """Construct temporary .pylintrc search paths."""
    paths = {}
    for dir_name in DIR_LIST:
        paths[dir_name] = str(tmp_path_factory.mktemp(dir_name))
    return paths
@pytest.fixture
def pylint_test_script(pylintrc_search_paths):
    """Write a script for testing Pylint to a temporary directory."""
    target = osp.join(pylintrc_search_paths[SCRIPT_DIR], "test_script.py")
    with open(target, mode="w", encoding="utf-8", newline="\n") as handle:
        handle.write(PYLINT_TEST_SCRIPT)
    return target
@pytest.fixture
def pylint_test_scripts(pylintrc_search_paths):
    # The generated scripts always live in the script directory.
    base_dir = pylintrc_search_paths[SCRIPT_DIR]

    def _pylint_test_scripts(filenames):
        """Write scripts for testing Pylint to a temporary directory."""
        paths = []
        for name in filenames:
            path = osp.join(base_dir, name)
            with open(path, mode="w", encoding="utf-8",
                      newline="\n") as handle:
                handle.write(PYLINT_TEST_SCRIPT)
            paths.append(path)
        return paths

    return _pylint_test_scripts
@pytest.fixture(
    params=[
        [], [SCRIPT_DIR], [WORKING_DIR], [PROJECT_DIR], [HOME_DIR],
        [SCRIPT_DIR, HOME_DIR], [WORKING_DIR, PROJECT_DIR],
        [SCRIPT_DIR, PROJECT_DIR], [PROJECT_DIR, HOME_DIR],
        [SCRIPT_DIR, WORKING_DIR, PROJECT_DIR, HOME_DIR]],
    ids=["None", "Script", "Working", "Project", "Home", "Script & Home",
         "Working & Project", "Script & Working", "Project & Home", "All"])
def pylintrc_files(pylintrc_search_paths, request):
    """Store test .pylintrc files at the paths and determine the result."""
    search_paths = pylintrc_search_paths
    pylintrc_locations = request.param
    # The first search path (in dict order) holding a .pylintrc wins.
    first_hit = next(
        ((name, path) for name, path in search_paths.items()
         if name in pylintrc_locations),
        None)
    if first_hit is None:
        expected_path = None
        bad_names = [NO_DIR]
    else:
        expected_path = osp.join(first_hit[1], PYLINTRC_FILENAME)
        bad_names = [ALL_DIR, first_hit[0]]
    # Write a .pylintrc at each selected location, naming that location
    # (plus ALL_DIR) as a blacklisted name.
    for location in pylintrc_locations:
        contents = PYLINTRC_TEST_CONTENTS.format(
            bad_names=", ".join([location, ALL_DIR]))
        rc_path = osp.join(search_paths[location], PYLINTRC_FILENAME)
        with open(rc_path, mode="w", encoding="utf-8",
                  newline="\n") as rc_file:
            rc_file.write(contents)
    return search_paths, expected_path, bad_names
def test_get_pylintrc_path(pylintrc_files, mocker):
    """Check that get_pylintrc_path finds the expected file in the hierarchy."""
    search_paths, expected_path, __ = pylintrc_files
    home = search_paths[HOME_DIR]
    # Pin pylint's notion of the user's home to the temporary one.
    mocker.patch("pylint.config.os.path.expanduser", return_value=home)
    result = get_pylintrc_path(
        search_paths=list(search_paths.values()), home_path=home)
    assert result == expected_path
def test_pylint_widget_noproject(pylint_test_script, mocker, qtbot):
    """Test that pylint works without errors with no project open."""
    window = MainWindowMock()
    window.projects.get_active_project_path = mocker.MagicMock(
        return_value=None)
    plugin = Pylint(parent=window)
    widget = PylintWidget(parent=plugin)
    widget.analyze(filename=pylint_test_script)
    # Analysis runs asynchronously; wait until results are attached.
    qtbot.waitUntil(
        lambda: widget.get_data(pylint_test_script)[1] is not None,
        timeout=5000)
    data = widget.get_data(filename=pylint_test_script)
    print(data)  # aid debugging on CI failures
    assert data
    assert data[0] is not None
    assert data[1] is not None
def test_pylint_widget_pylintrc(
        pylint_test_script, pylintrc_files, mocker, qtbot):
    """Test that entire pylint widget gets results depending on pylintrc."""
    search_paths, __, bad_names = pylintrc_files
    # Point home, working and project directories at the temporary paths.
    mocker.patch("pylint.config.os.path.expanduser",
                 return_value=search_paths[HOME_DIR])
    mocker.patch("spyder.plugins.pylint.widgets.pylintgui.getcwd_or_home",
                 return_value=search_paths[WORKING_DIR])
    mocker.patch("spyder.plugins.pylint.widgets.pylintgui.osp.expanduser",
                 return_value=search_paths[HOME_DIR])
    window = MainWindowMock()
    window.projects.get_active_project_path = mocker.MagicMock(
        return_value=search_paths[PROJECT_DIR])
    plugin = Pylint(parent=window)
    widget = PylintWidget(parent=plugin)
    widget.analyze(filename=pylint_test_script)
    # Analysis runs asynchronously; wait until results are attached.
    qtbot.waitUntil(
        lambda: widget.get_data(pylint_test_script)[1] is not None,
        timeout=5000)
    data = widget.get_data(filename=pylint_test_script)
    print(data)  # aid debugging on CI failures
    assert data
    # Convention ("C:") messages are where the bad-name reports land.
    conventions = data[1][3]["C:"]
    assert conventions
    assert len(conventions) == len(bad_names)
    # Each expected bad name must be reported exactly once.
    for bad_name in bad_names:
        assert sum(bad_name in message[2] for message in conventions) == 1
def test_pylint_max_history_conf(pylint_test_scripts, mocker):
    """Regression test for checking max_entries configuration.

    For further information see spyder-ide/spyder#12884
    """
    # Build a Pylint widget with no active project.
    window = MainWindowMock()
    window.projects.get_active_project_path = mocker.MagicMock(
        return_value=None)
    widget = PylintWidget(parent=Pylint(parent=window))
    widget.filecombo.clear()
    scripts = pylint_test_scripts(
        ["test_script_{}.py".format(n) for n in range(3)])

    # Shrink the history to two entries before adding any file.
    widget.parent.set_option('max_entries', 2)
    widget.change_history_limit(2)
    assert widget.parent.get_option('max_entries') == 2

    widget.set_filename(filename=scripts[0])
    assert widget.filecombo.count() == 1

    # Adding two more files must evict the oldest entry.
    widget.set_filename(filename=scripts[1])
    widget.set_filename(filename=scripts[2])
    assert widget.filecombo.count() == 2
    assert 'test_script_2.py' in widget.curr_filenames[0]
    assert 'test_script_1.py' in widget.curr_filenames[1]

    # Tightening the limit to one keeps only the most recent file.
    widget.parent.set_option('max_entries', 1)
    widget.change_history_limit(1)
    assert widget.filecombo.count() == 1
    assert 'test_script_2.py' in widget.curr_filenames[0]
# Allow running this test module directly: verbose output plus a
# summary of warnings (-rw).
if __name__ == "__main__":
    pytest.main([osp.basename(__file__), '-vv', '-rw'])
|
normal
|
{
"blob_id": "22792937415a8ee4cecff2a9683c435abe54bdab",
"index": 5516,
"step-1": "<mask token>\n\n\nclass MainWindowMock(QObject):\n sig_editor_focus_changed = Signal(str)\n\n def __init__(self):\n super(MainWindowMock, self).__init__(None)\n self.editor = Mock()\n self.editor.sig_editor_focus_changed = self.sig_editor_focus_changed\n self.projects = MagicMock()\n\n\n<mask token>\n\n\n@pytest.fixture\ndef pylint_test_script(pylintrc_search_paths):\n \"\"\"Write a script for testing Pylint to a temporary directory.\"\"\"\n script_path = osp.join(pylintrc_search_paths[SCRIPT_DIR], 'test_script.py')\n with open(script_path, mode='w', encoding='utf-8', newline='\\n'\n ) as script_file:\n script_file.write(PYLINT_TEST_SCRIPT)\n return script_path\n\n\n@pytest.fixture\ndef pylint_test_scripts(pylintrc_search_paths):\n\n def _pylint_test_scripts(filenames):\n \"\"\"Write scripts for testing Pylint to a temporary directory.\"\"\"\n script_paths = []\n for filename in filenames:\n script_path = osp.join(pylintrc_search_paths[SCRIPT_DIR], filename)\n with open(script_path, mode='w', encoding='utf-8', newline='\\n'\n ) as script_file:\n script_file.write(PYLINT_TEST_SCRIPT)\n script_paths.append(script_path)\n return script_paths\n return _pylint_test_scripts\n\n\n@pytest.fixture(params=[[], [SCRIPT_DIR], [WORKING_DIR], [PROJECT_DIR], [\n HOME_DIR], [SCRIPT_DIR, HOME_DIR], [WORKING_DIR, PROJECT_DIR], [\n SCRIPT_DIR, PROJECT_DIR], [PROJECT_DIR, HOME_DIR], [SCRIPT_DIR,\n WORKING_DIR, PROJECT_DIR, HOME_DIR]], ids=['None', 'Script', 'Working',\n 'Project', 'Home', 'Script & Home', 'Working & Project',\n 'Script & Working', 'Project & Home', 'All'])\ndef pylintrc_files(pylintrc_search_paths, request):\n \"\"\"Store test .pylintrc files at the paths and determine the result.\"\"\"\n search_paths = pylintrc_search_paths\n pylintrc_locations = request.param\n bad_names = [ALL_DIR]\n for search_path_name, search_path in search_paths.items():\n if search_path_name in pylintrc_locations:\n expected_path = osp.join(search_path, PYLINTRC_FILENAME)\n 
bad_names += [search_path_name]\n break\n else:\n expected_path = None\n bad_names = [NO_DIR]\n for location in pylintrc_locations:\n pylintrc_test_contents = PYLINTRC_TEST_CONTENTS.format(bad_names=\n ', '.join([location, ALL_DIR]))\n pylintrc_path = osp.join(search_paths[location], PYLINTRC_FILENAME)\n with open(pylintrc_path, mode='w', encoding='utf-8', newline='\\n'\n ) as rc_file:\n rc_file.write(pylintrc_test_contents)\n return search_paths, expected_path, bad_names\n\n\ndef test_get_pylintrc_path(pylintrc_files, mocker):\n \"\"\"Test that get_pylintrc_path finds the expected one in the hiearchy.\"\"\"\n search_paths, expected_path, __ = pylintrc_files\n mocker.patch('pylint.config.os.path.expanduser', return_value=\n search_paths[HOME_DIR])\n actual_path = get_pylintrc_path(search_paths=list(search_paths.values()\n ), home_path=search_paths[HOME_DIR])\n assert actual_path == expected_path\n\n\ndef test_pylint_widget_noproject(pylint_test_script, mocker, qtbot):\n \"\"\"Test that pylint works without errors with no project open.\"\"\"\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=None)\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.analyze(filename=pylint_test_script)\n qtbot.waitUntil(lambda : pylint_widget.get_data(pylint_test_script)[1]\n is not None, timeout=5000)\n pylint_data = pylint_widget.get_data(filename=pylint_test_script)\n print(pylint_data)\n assert pylint_data\n assert pylint_data[0] is not None\n assert pylint_data[1] is not None\n\n\ndef test_pylint_widget_pylintrc(pylint_test_script, pylintrc_files, mocker,\n qtbot):\n \"\"\"Test that entire pylint widget gets results depending on pylintrc.\"\"\"\n search_paths, __, bad_names = pylintrc_files\n mocker.patch('pylint.config.os.path.expanduser', return_value=\n search_paths[HOME_DIR])\n mocker.patch('spyder.plugins.pylint.widgets.pylintgui.getcwd_or_home',\n 
return_value=search_paths[WORKING_DIR])\n mocker.patch('spyder.plugins.pylint.widgets.pylintgui.osp.expanduser',\n return_value=search_paths[HOME_DIR])\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=search_paths[PROJECT_DIR])\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.analyze(filename=pylint_test_script)\n qtbot.waitUntil(lambda : pylint_widget.get_data(pylint_test_script)[1]\n is not None, timeout=5000)\n pylint_data = pylint_widget.get_data(filename=pylint_test_script)\n print(pylint_data)\n assert pylint_data\n conventions = pylint_data[1][3]['C:']\n assert conventions\n assert len(conventions) == len(bad_names)\n assert all([(sum([(bad_name in message[2]) for message in conventions]) ==\n 1) for bad_name in bad_names])\n\n\ndef test_pylint_max_history_conf(pylint_test_scripts, mocker):\n \"\"\"Regression test for checking max_entries configuration.\n\n For further information see spyder-ide/spyder#12884\n \"\"\"\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=None)\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.filecombo.clear()\n script_0, script_1, script_2 = pylint_test_scripts(['test_script_{}.py'\n .format(n) for n in range(3)])\n pylint_widget.parent.set_option('max_entries', 2)\n pylint_widget.change_history_limit(2)\n assert pylint_widget.parent.get_option('max_entries') == 2\n pylint_widget.set_filename(filename=script_0)\n assert pylint_widget.filecombo.count() == 1\n pylint_widget.set_filename(filename=script_1)\n pylint_widget.set_filename(filename=script_2)\n assert pylint_widget.filecombo.count() == 2\n assert 'test_script_2.py' in pylint_widget.curr_filenames[0]\n assert 'test_script_1.py' in pylint_widget.curr_filenames[1]\n pylint_widget.parent.set_option('max_entries', 1)\n 
pylint_widget.change_history_limit(1)\n assert pylint_widget.filecombo.count() == 1\n assert 'test_script_2.py' in pylint_widget.curr_filenames[0]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MainWindowMock(QObject):\n sig_editor_focus_changed = Signal(str)\n\n def __init__(self):\n super(MainWindowMock, self).__init__(None)\n self.editor = Mock()\n self.editor.sig_editor_focus_changed = self.sig_editor_focus_changed\n self.projects = MagicMock()\n\n\n@pytest.fixture\ndef pylintrc_search_paths(tmp_path_factory):\n \"\"\"Construct temporary .pylintrc search paths.\"\"\"\n search_paths = {dir_name: str(tmp_path_factory.mktemp(dir_name)) for\n dir_name in DIR_LIST}\n return search_paths\n\n\n@pytest.fixture\ndef pylint_test_script(pylintrc_search_paths):\n \"\"\"Write a script for testing Pylint to a temporary directory.\"\"\"\n script_path = osp.join(pylintrc_search_paths[SCRIPT_DIR], 'test_script.py')\n with open(script_path, mode='w', encoding='utf-8', newline='\\n'\n ) as script_file:\n script_file.write(PYLINT_TEST_SCRIPT)\n return script_path\n\n\n@pytest.fixture\ndef pylint_test_scripts(pylintrc_search_paths):\n\n def _pylint_test_scripts(filenames):\n \"\"\"Write scripts for testing Pylint to a temporary directory.\"\"\"\n script_paths = []\n for filename in filenames:\n script_path = osp.join(pylintrc_search_paths[SCRIPT_DIR], filename)\n with open(script_path, mode='w', encoding='utf-8', newline='\\n'\n ) as script_file:\n script_file.write(PYLINT_TEST_SCRIPT)\n script_paths.append(script_path)\n return script_paths\n return _pylint_test_scripts\n\n\n@pytest.fixture(params=[[], [SCRIPT_DIR], [WORKING_DIR], [PROJECT_DIR], [\n HOME_DIR], [SCRIPT_DIR, HOME_DIR], [WORKING_DIR, PROJECT_DIR], [\n SCRIPT_DIR, PROJECT_DIR], [PROJECT_DIR, HOME_DIR], [SCRIPT_DIR,\n WORKING_DIR, PROJECT_DIR, HOME_DIR]], ids=['None', 'Script', 'Working',\n 'Project', 'Home', 'Script & Home', 'Working & Project',\n 'Script & Working', 'Project & Home', 'All'])\ndef pylintrc_files(pylintrc_search_paths, request):\n \"\"\"Store test .pylintrc files at the paths and determine the result.\"\"\"\n search_paths = pylintrc_search_paths\n 
pylintrc_locations = request.param\n bad_names = [ALL_DIR]\n for search_path_name, search_path in search_paths.items():\n if search_path_name in pylintrc_locations:\n expected_path = osp.join(search_path, PYLINTRC_FILENAME)\n bad_names += [search_path_name]\n break\n else:\n expected_path = None\n bad_names = [NO_DIR]\n for location in pylintrc_locations:\n pylintrc_test_contents = PYLINTRC_TEST_CONTENTS.format(bad_names=\n ', '.join([location, ALL_DIR]))\n pylintrc_path = osp.join(search_paths[location], PYLINTRC_FILENAME)\n with open(pylintrc_path, mode='w', encoding='utf-8', newline='\\n'\n ) as rc_file:\n rc_file.write(pylintrc_test_contents)\n return search_paths, expected_path, bad_names\n\n\ndef test_get_pylintrc_path(pylintrc_files, mocker):\n \"\"\"Test that get_pylintrc_path finds the expected one in the hiearchy.\"\"\"\n search_paths, expected_path, __ = pylintrc_files\n mocker.patch('pylint.config.os.path.expanduser', return_value=\n search_paths[HOME_DIR])\n actual_path = get_pylintrc_path(search_paths=list(search_paths.values()\n ), home_path=search_paths[HOME_DIR])\n assert actual_path == expected_path\n\n\ndef test_pylint_widget_noproject(pylint_test_script, mocker, qtbot):\n \"\"\"Test that pylint works without errors with no project open.\"\"\"\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=None)\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.analyze(filename=pylint_test_script)\n qtbot.waitUntil(lambda : pylint_widget.get_data(pylint_test_script)[1]\n is not None, timeout=5000)\n pylint_data = pylint_widget.get_data(filename=pylint_test_script)\n print(pylint_data)\n assert pylint_data\n assert pylint_data[0] is not None\n assert pylint_data[1] is not None\n\n\ndef test_pylint_widget_pylintrc(pylint_test_script, pylintrc_files, mocker,\n qtbot):\n \"\"\"Test that entire pylint widget gets results depending on 
pylintrc.\"\"\"\n search_paths, __, bad_names = pylintrc_files\n mocker.patch('pylint.config.os.path.expanduser', return_value=\n search_paths[HOME_DIR])\n mocker.patch('spyder.plugins.pylint.widgets.pylintgui.getcwd_or_home',\n return_value=search_paths[WORKING_DIR])\n mocker.patch('spyder.plugins.pylint.widgets.pylintgui.osp.expanduser',\n return_value=search_paths[HOME_DIR])\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=search_paths[PROJECT_DIR])\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.analyze(filename=pylint_test_script)\n qtbot.waitUntil(lambda : pylint_widget.get_data(pylint_test_script)[1]\n is not None, timeout=5000)\n pylint_data = pylint_widget.get_data(filename=pylint_test_script)\n print(pylint_data)\n assert pylint_data\n conventions = pylint_data[1][3]['C:']\n assert conventions\n assert len(conventions) == len(bad_names)\n assert all([(sum([(bad_name in message[2]) for message in conventions]) ==\n 1) for bad_name in bad_names])\n\n\ndef test_pylint_max_history_conf(pylint_test_scripts, mocker):\n \"\"\"Regression test for checking max_entries configuration.\n\n For further information see spyder-ide/spyder#12884\n \"\"\"\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=None)\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.filecombo.clear()\n script_0, script_1, script_2 = pylint_test_scripts(['test_script_{}.py'\n .format(n) for n in range(3)])\n pylint_widget.parent.set_option('max_entries', 2)\n pylint_widget.change_history_limit(2)\n assert pylint_widget.parent.get_option('max_entries') == 2\n pylint_widget.set_filename(filename=script_0)\n assert pylint_widget.filecombo.count() == 1\n pylint_widget.set_filename(filename=script_1)\n pylint_widget.set_filename(filename=script_2)\n assert 
pylint_widget.filecombo.count() == 2\n assert 'test_script_2.py' in pylint_widget.curr_filenames[0]\n assert 'test_script_1.py' in pylint_widget.curr_filenames[1]\n pylint_widget.parent.set_option('max_entries', 1)\n pylint_widget.change_history_limit(1)\n assert pylint_widget.filecombo.count() == 1\n assert 'test_script_2.py' in pylint_widget.curr_filenames[0]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass MainWindowMock(QObject):\n sig_editor_focus_changed = Signal(str)\n\n def __init__(self):\n super(MainWindowMock, self).__init__(None)\n self.editor = Mock()\n self.editor.sig_editor_focus_changed = self.sig_editor_focus_changed\n self.projects = MagicMock()\n\n\n@pytest.fixture\ndef pylintrc_search_paths(tmp_path_factory):\n \"\"\"Construct temporary .pylintrc search paths.\"\"\"\n search_paths = {dir_name: str(tmp_path_factory.mktemp(dir_name)) for\n dir_name in DIR_LIST}\n return search_paths\n\n\n@pytest.fixture\ndef pylint_test_script(pylintrc_search_paths):\n \"\"\"Write a script for testing Pylint to a temporary directory.\"\"\"\n script_path = osp.join(pylintrc_search_paths[SCRIPT_DIR], 'test_script.py')\n with open(script_path, mode='w', encoding='utf-8', newline='\\n'\n ) as script_file:\n script_file.write(PYLINT_TEST_SCRIPT)\n return script_path\n\n\n@pytest.fixture\ndef pylint_test_scripts(pylintrc_search_paths):\n\n def _pylint_test_scripts(filenames):\n \"\"\"Write scripts for testing Pylint to a temporary directory.\"\"\"\n script_paths = []\n for filename in filenames:\n script_path = osp.join(pylintrc_search_paths[SCRIPT_DIR], filename)\n with open(script_path, mode='w', encoding='utf-8', newline='\\n'\n ) as script_file:\n script_file.write(PYLINT_TEST_SCRIPT)\n script_paths.append(script_path)\n return script_paths\n return _pylint_test_scripts\n\n\n@pytest.fixture(params=[[], [SCRIPT_DIR], [WORKING_DIR], [PROJECT_DIR], [\n HOME_DIR], [SCRIPT_DIR, HOME_DIR], [WORKING_DIR, PROJECT_DIR], [\n SCRIPT_DIR, PROJECT_DIR], [PROJECT_DIR, HOME_DIR], [SCRIPT_DIR,\n WORKING_DIR, PROJECT_DIR, HOME_DIR]], ids=['None', 'Script', 'Working',\n 'Project', 'Home', 'Script & Home', 'Working & Project',\n 'Script & Working', 'Project & Home', 'All'])\ndef pylintrc_files(pylintrc_search_paths, request):\n \"\"\"Store test .pylintrc files at the paths and determine the result.\"\"\"\n search_paths = pylintrc_search_paths\n 
pylintrc_locations = request.param\n bad_names = [ALL_DIR]\n for search_path_name, search_path in search_paths.items():\n if search_path_name in pylintrc_locations:\n expected_path = osp.join(search_path, PYLINTRC_FILENAME)\n bad_names += [search_path_name]\n break\n else:\n expected_path = None\n bad_names = [NO_DIR]\n for location in pylintrc_locations:\n pylintrc_test_contents = PYLINTRC_TEST_CONTENTS.format(bad_names=\n ', '.join([location, ALL_DIR]))\n pylintrc_path = osp.join(search_paths[location], PYLINTRC_FILENAME)\n with open(pylintrc_path, mode='w', encoding='utf-8', newline='\\n'\n ) as rc_file:\n rc_file.write(pylintrc_test_contents)\n return search_paths, expected_path, bad_names\n\n\ndef test_get_pylintrc_path(pylintrc_files, mocker):\n \"\"\"Test that get_pylintrc_path finds the expected one in the hiearchy.\"\"\"\n search_paths, expected_path, __ = pylintrc_files\n mocker.patch('pylint.config.os.path.expanduser', return_value=\n search_paths[HOME_DIR])\n actual_path = get_pylintrc_path(search_paths=list(search_paths.values()\n ), home_path=search_paths[HOME_DIR])\n assert actual_path == expected_path\n\n\ndef test_pylint_widget_noproject(pylint_test_script, mocker, qtbot):\n \"\"\"Test that pylint works without errors with no project open.\"\"\"\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=None)\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.analyze(filename=pylint_test_script)\n qtbot.waitUntil(lambda : pylint_widget.get_data(pylint_test_script)[1]\n is not None, timeout=5000)\n pylint_data = pylint_widget.get_data(filename=pylint_test_script)\n print(pylint_data)\n assert pylint_data\n assert pylint_data[0] is not None\n assert pylint_data[1] is not None\n\n\ndef test_pylint_widget_pylintrc(pylint_test_script, pylintrc_files, mocker,\n qtbot):\n \"\"\"Test that entire pylint widget gets results depending on 
pylintrc.\"\"\"\n search_paths, __, bad_names = pylintrc_files\n mocker.patch('pylint.config.os.path.expanduser', return_value=\n search_paths[HOME_DIR])\n mocker.patch('spyder.plugins.pylint.widgets.pylintgui.getcwd_or_home',\n return_value=search_paths[WORKING_DIR])\n mocker.patch('spyder.plugins.pylint.widgets.pylintgui.osp.expanduser',\n return_value=search_paths[HOME_DIR])\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=search_paths[PROJECT_DIR])\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.analyze(filename=pylint_test_script)\n qtbot.waitUntil(lambda : pylint_widget.get_data(pylint_test_script)[1]\n is not None, timeout=5000)\n pylint_data = pylint_widget.get_data(filename=pylint_test_script)\n print(pylint_data)\n assert pylint_data\n conventions = pylint_data[1][3]['C:']\n assert conventions\n assert len(conventions) == len(bad_names)\n assert all([(sum([(bad_name in message[2]) for message in conventions]) ==\n 1) for bad_name in bad_names])\n\n\ndef test_pylint_max_history_conf(pylint_test_scripts, mocker):\n \"\"\"Regression test for checking max_entries configuration.\n\n For further information see spyder-ide/spyder#12884\n \"\"\"\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=None)\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.filecombo.clear()\n script_0, script_1, script_2 = pylint_test_scripts(['test_script_{}.py'\n .format(n) for n in range(3)])\n pylint_widget.parent.set_option('max_entries', 2)\n pylint_widget.change_history_limit(2)\n assert pylint_widget.parent.get_option('max_entries') == 2\n pylint_widget.set_filename(filename=script_0)\n assert pylint_widget.filecombo.count() == 1\n pylint_widget.set_filename(filename=script_1)\n pylint_widget.set_filename(filename=script_2)\n assert 
pylint_widget.filecombo.count() == 2\n assert 'test_script_2.py' in pylint_widget.curr_filenames[0]\n assert 'test_script_1.py' in pylint_widget.curr_filenames[1]\n pylint_widget.parent.set_option('max_entries', 1)\n pylint_widget.change_history_limit(1)\n assert pylint_widget.filecombo.count() == 1\n assert 'test_script_2.py' in pylint_widget.curr_filenames[0]\n\n\nif __name__ == '__main__':\n pytest.main([osp.basename(__file__), '-vv', '-rw'])\n",
"step-4": "<mask token>\nfrom io import open\nimport os.path as osp\nfrom unittest.mock import Mock, MagicMock\nimport pytest\nfrom qtpy.QtCore import Signal, QObject\nfrom spyder.plugins.pylint.plugin import Pylint\nfrom spyder.plugins.pylint.widgets.pylintgui import PylintWidget\nfrom spyder.plugins.pylint.utils import get_pylintrc_path\nPYLINTRC_FILENAME = '.pylintrc'\nNO_DIR = 'e'\nSCRIPT_DIR = 'SCRIPT_DIR'\nWORKING_DIR = 'WORKING_DIR'\nPROJECT_DIR = 'PROJECT_DIR'\nHOME_DIR = 'HOME_DIR'\nALL_DIR = 'ALL_DIR'\nDIR_LIST = [SCRIPT_DIR, WORKING_DIR, PROJECT_DIR, HOME_DIR]\nDIR_LIST_ALL = [NO_DIR] + DIR_LIST + [ALL_DIR]\nPYLINT_TEST_SCRIPT = \"\"\"import math\nimport os\nimport sys\n\"\"\" + '\\n'.join([(\n dir_name + ' = ' + str(idx)) for idx, dir_name in enumerate(DIR_LIST_ALL)])\nPYLINT_TEST_SCRIPT = '\"\"\"Docstring.\"\"\"\\n' + PYLINT_TEST_SCRIPT + '\\n'\nPYLINTRC_TEST_CONTENTS = \"\"\"\n[MESSAGES CONTROL]\nenable=blacklisted-name\n\n[BASIC]\nbad-names={bad_names}\ngood-names=e\n\"\"\"\n\n\nclass MainWindowMock(QObject):\n sig_editor_focus_changed = Signal(str)\n\n def __init__(self):\n super(MainWindowMock, self).__init__(None)\n self.editor = Mock()\n self.editor.sig_editor_focus_changed = self.sig_editor_focus_changed\n self.projects = MagicMock()\n\n\n@pytest.fixture\ndef pylintrc_search_paths(tmp_path_factory):\n \"\"\"Construct temporary .pylintrc search paths.\"\"\"\n search_paths = {dir_name: str(tmp_path_factory.mktemp(dir_name)) for\n dir_name in DIR_LIST}\n return search_paths\n\n\n@pytest.fixture\ndef pylint_test_script(pylintrc_search_paths):\n \"\"\"Write a script for testing Pylint to a temporary directory.\"\"\"\n script_path = osp.join(pylintrc_search_paths[SCRIPT_DIR], 'test_script.py')\n with open(script_path, mode='w', encoding='utf-8', newline='\\n'\n ) as script_file:\n script_file.write(PYLINT_TEST_SCRIPT)\n return script_path\n\n\n@pytest.fixture\ndef pylint_test_scripts(pylintrc_search_paths):\n\n def _pylint_test_scripts(filenames):\n 
\"\"\"Write scripts for testing Pylint to a temporary directory.\"\"\"\n script_paths = []\n for filename in filenames:\n script_path = osp.join(pylintrc_search_paths[SCRIPT_DIR], filename)\n with open(script_path, mode='w', encoding='utf-8', newline='\\n'\n ) as script_file:\n script_file.write(PYLINT_TEST_SCRIPT)\n script_paths.append(script_path)\n return script_paths\n return _pylint_test_scripts\n\n\n@pytest.fixture(params=[[], [SCRIPT_DIR], [WORKING_DIR], [PROJECT_DIR], [\n HOME_DIR], [SCRIPT_DIR, HOME_DIR], [WORKING_DIR, PROJECT_DIR], [\n SCRIPT_DIR, PROJECT_DIR], [PROJECT_DIR, HOME_DIR], [SCRIPT_DIR,\n WORKING_DIR, PROJECT_DIR, HOME_DIR]], ids=['None', 'Script', 'Working',\n 'Project', 'Home', 'Script & Home', 'Working & Project',\n 'Script & Working', 'Project & Home', 'All'])\ndef pylintrc_files(pylintrc_search_paths, request):\n \"\"\"Store test .pylintrc files at the paths and determine the result.\"\"\"\n search_paths = pylintrc_search_paths\n pylintrc_locations = request.param\n bad_names = [ALL_DIR]\n for search_path_name, search_path in search_paths.items():\n if search_path_name in pylintrc_locations:\n expected_path = osp.join(search_path, PYLINTRC_FILENAME)\n bad_names += [search_path_name]\n break\n else:\n expected_path = None\n bad_names = [NO_DIR]\n for location in pylintrc_locations:\n pylintrc_test_contents = PYLINTRC_TEST_CONTENTS.format(bad_names=\n ', '.join([location, ALL_DIR]))\n pylintrc_path = osp.join(search_paths[location], PYLINTRC_FILENAME)\n with open(pylintrc_path, mode='w', encoding='utf-8', newline='\\n'\n ) as rc_file:\n rc_file.write(pylintrc_test_contents)\n return search_paths, expected_path, bad_names\n\n\ndef test_get_pylintrc_path(pylintrc_files, mocker):\n \"\"\"Test that get_pylintrc_path finds the expected one in the hiearchy.\"\"\"\n search_paths, expected_path, __ = pylintrc_files\n mocker.patch('pylint.config.os.path.expanduser', return_value=\n search_paths[HOME_DIR])\n actual_path = 
get_pylintrc_path(search_paths=list(search_paths.values()\n ), home_path=search_paths[HOME_DIR])\n assert actual_path == expected_path\n\n\ndef test_pylint_widget_noproject(pylint_test_script, mocker, qtbot):\n \"\"\"Test that pylint works without errors with no project open.\"\"\"\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=None)\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.analyze(filename=pylint_test_script)\n qtbot.waitUntil(lambda : pylint_widget.get_data(pylint_test_script)[1]\n is not None, timeout=5000)\n pylint_data = pylint_widget.get_data(filename=pylint_test_script)\n print(pylint_data)\n assert pylint_data\n assert pylint_data[0] is not None\n assert pylint_data[1] is not None\n\n\ndef test_pylint_widget_pylintrc(pylint_test_script, pylintrc_files, mocker,\n qtbot):\n \"\"\"Test that entire pylint widget gets results depending on pylintrc.\"\"\"\n search_paths, __, bad_names = pylintrc_files\n mocker.patch('pylint.config.os.path.expanduser', return_value=\n search_paths[HOME_DIR])\n mocker.patch('spyder.plugins.pylint.widgets.pylintgui.getcwd_or_home',\n return_value=search_paths[WORKING_DIR])\n mocker.patch('spyder.plugins.pylint.widgets.pylintgui.osp.expanduser',\n return_value=search_paths[HOME_DIR])\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=search_paths[PROJECT_DIR])\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.analyze(filename=pylint_test_script)\n qtbot.waitUntil(lambda : pylint_widget.get_data(pylint_test_script)[1]\n is not None, timeout=5000)\n pylint_data = pylint_widget.get_data(filename=pylint_test_script)\n print(pylint_data)\n assert pylint_data\n conventions = pylint_data[1][3]['C:']\n assert conventions\n assert len(conventions) == len(bad_names)\n assert all([(sum([(bad_name 
in message[2]) for message in conventions]) ==\n 1) for bad_name in bad_names])\n\n\ndef test_pylint_max_history_conf(pylint_test_scripts, mocker):\n \"\"\"Regression test for checking max_entries configuration.\n\n For further information see spyder-ide/spyder#12884\n \"\"\"\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=None)\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.filecombo.clear()\n script_0, script_1, script_2 = pylint_test_scripts(['test_script_{}.py'\n .format(n) for n in range(3)])\n pylint_widget.parent.set_option('max_entries', 2)\n pylint_widget.change_history_limit(2)\n assert pylint_widget.parent.get_option('max_entries') == 2\n pylint_widget.set_filename(filename=script_0)\n assert pylint_widget.filecombo.count() == 1\n pylint_widget.set_filename(filename=script_1)\n pylint_widget.set_filename(filename=script_2)\n assert pylint_widget.filecombo.count() == 2\n assert 'test_script_2.py' in pylint_widget.curr_filenames[0]\n assert 'test_script_1.py' in pylint_widget.curr_filenames[1]\n pylint_widget.parent.set_option('max_entries', 1)\n pylint_widget.change_history_limit(1)\n assert pylint_widget.filecombo.count() == 1\n assert 'test_script_2.py' in pylint_widget.curr_filenames[0]\n\n\nif __name__ == '__main__':\n pytest.main([osp.basename(__file__), '-vv', '-rw'])\n",
"step-5": "# -*- coding: utf-8 -*-\n# ----------------------------------------------------------------------------\n# Copyright © 2020- Spyder Project Contributors\n#\n# Released under the terms of the MIT License\n# ----------------------------------------------------------------------------\n\n\"\"\"Tests for the execution of pylint.\"\"\"\n\n# Standard library imports\nfrom io import open\nimport os.path as osp\nfrom unittest.mock import Mock, MagicMock\n\n# Third party imports\nimport pytest\nfrom qtpy.QtCore import Signal, QObject\n\n# Local imports\nfrom spyder.plugins.pylint.plugin import Pylint\nfrom spyder.plugins.pylint.widgets.pylintgui import PylintWidget\nfrom spyder.plugins.pylint.utils import get_pylintrc_path\n\n# pylint: disable=redefined-outer-name\n\nPYLINTRC_FILENAME = \".pylintrc\"\n\n# Constants for dir name keys\n# In Python 3 and Spyder 5, replace with enum\nNO_DIR = \"e\"\nSCRIPT_DIR = \"SCRIPT_DIR\"\nWORKING_DIR = \"WORKING_DIR\"\nPROJECT_DIR = \"PROJECT_DIR\"\nHOME_DIR = \"HOME_DIR\"\nALL_DIR = \"ALL_DIR\"\n\nDIR_LIST = [SCRIPT_DIR, WORKING_DIR, PROJECT_DIR, HOME_DIR]\nDIR_LIST_ALL = [NO_DIR] + DIR_LIST + [ALL_DIR]\n\nPYLINT_TEST_SCRIPT = \"import math\\nimport os\\nimport sys\\n\" + \"\\n\".join(\n [dir_name + \" = \" + str(idx) for idx, dir_name in enumerate(DIR_LIST_ALL)])\nPYLINT_TEST_SCRIPT = \"\\\"\\\"\\\"Docstring.\\\"\\\"\\\"\\n\" + PYLINT_TEST_SCRIPT + \"\\n\"\n\nPYLINTRC_TEST_CONTENTS = \"\"\"\n[MESSAGES CONTROL]\nenable=blacklisted-name\n\n[BASIC]\nbad-names={bad_names}\ngood-names=e\n\"\"\"\n\n\nclass MainWindowMock(QObject):\n sig_editor_focus_changed = Signal(str)\n\n def __init__(self):\n super(MainWindowMock, self).__init__(None)\n self.editor = Mock()\n self.editor.sig_editor_focus_changed = self.sig_editor_focus_changed\n self.projects = MagicMock()\n\n\n@pytest.fixture\ndef pylintrc_search_paths(tmp_path_factory):\n \"\"\"Construct temporary .pylintrc search paths.\"\"\"\n search_paths = {dir_name: 
str(tmp_path_factory.mktemp(dir_name))\n for dir_name in DIR_LIST}\n return search_paths\n\n\n@pytest.fixture\ndef pylint_test_script(pylintrc_search_paths):\n \"\"\"Write a script for testing Pylint to a temporary directory.\"\"\"\n script_path = osp.join(\n pylintrc_search_paths[SCRIPT_DIR], \"test_script.py\")\n with open(script_path, mode=\"w\",\n encoding=\"utf-8\", newline=\"\\n\") as script_file:\n script_file.write(PYLINT_TEST_SCRIPT)\n\n return script_path\n\n\n@pytest.fixture\ndef pylint_test_scripts(pylintrc_search_paths):\n def _pylint_test_scripts(filenames):\n \"\"\"Write scripts for testing Pylint to a temporary directory.\"\"\"\n script_paths = []\n for filename in filenames:\n script_path = osp.join(\n pylintrc_search_paths[SCRIPT_DIR], filename)\n with open(script_path, mode=\"w\",\n encoding=\"utf-8\", newline=\"\\n\") as script_file:\n script_file.write(PYLINT_TEST_SCRIPT)\n script_paths.append(script_path)\n return script_paths\n return _pylint_test_scripts\n\n\n@pytest.fixture(\n params=[\n [], [SCRIPT_DIR], [WORKING_DIR], [PROJECT_DIR], [HOME_DIR],\n [SCRIPT_DIR, HOME_DIR], [WORKING_DIR, PROJECT_DIR],\n [SCRIPT_DIR, PROJECT_DIR], [PROJECT_DIR, HOME_DIR],\n [SCRIPT_DIR, WORKING_DIR, PROJECT_DIR, HOME_DIR]],\n ids=[\"None\", \"Script\", \"Working\", \"Project\", \"Home\", \"Script & Home\",\n \"Working & Project\", \"Script & Working\", \"Project & Home\", \"All\"])\ndef pylintrc_files(pylintrc_search_paths, request):\n \"\"\"Store test .pylintrc files at the paths and determine the result.\"\"\"\n search_paths = pylintrc_search_paths\n\n # Determine the bad names that should be reported\n pylintrc_locations = request.param\n bad_names = [ALL_DIR]\n for search_path_name, search_path in search_paths.items():\n if search_path_name in pylintrc_locations:\n expected_path = osp.join(search_path, PYLINTRC_FILENAME)\n bad_names += [search_path_name]\n break\n else:\n expected_path = None\n bad_names = [NO_DIR]\n\n # Store the selected pylintrc files 
at the designated paths\n for location in pylintrc_locations:\n pylintrc_test_contents = PYLINTRC_TEST_CONTENTS.format(\n bad_names=\", \".join([location, ALL_DIR]))\n pylintrc_path = osp.join(search_paths[location], PYLINTRC_FILENAME)\n with open(pylintrc_path, mode=\"w\",\n encoding=\"utf-8\", newline=\"\\n\") as rc_file:\n rc_file.write(pylintrc_test_contents)\n return search_paths, expected_path, bad_names\n\n\ndef test_get_pylintrc_path(pylintrc_files, mocker):\n \"\"\"Test that get_pylintrc_path finds the expected one in the hiearchy.\"\"\"\n search_paths, expected_path, __ = pylintrc_files\n mocker.patch(\"pylint.config.os.path.expanduser\",\n return_value=search_paths[HOME_DIR])\n actual_path = get_pylintrc_path(\n search_paths=list(search_paths.values()),\n home_path=search_paths[HOME_DIR],\n )\n assert actual_path == expected_path\n\n\ndef test_pylint_widget_noproject(pylint_test_script, mocker, qtbot):\n \"\"\"Test that pylint works without errors with no project open.\"\"\"\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=None)\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.analyze(filename=pylint_test_script)\n qtbot.waitUntil(\n lambda: pylint_widget.get_data(pylint_test_script)[1] is not None,\n timeout=5000)\n pylint_data = pylint_widget.get_data(filename=pylint_test_script)\n print(pylint_data)\n assert pylint_data\n assert pylint_data[0] is not None\n assert pylint_data[1] is not None\n\n\ndef test_pylint_widget_pylintrc(\n pylint_test_script, pylintrc_files, mocker, qtbot):\n \"\"\"Test that entire pylint widget gets results depending on pylintrc.\"\"\"\n search_paths, __, bad_names = pylintrc_files\n mocker.patch(\"pylint.config.os.path.expanduser\",\n return_value=search_paths[HOME_DIR])\n mocker.patch(\"spyder.plugins.pylint.widgets.pylintgui.getcwd_or_home\",\n return_value=search_paths[WORKING_DIR])\n 
mocker.patch(\"spyder.plugins.pylint.widgets.pylintgui.osp.expanduser\",\n return_value=search_paths[HOME_DIR])\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=search_paths[PROJECT_DIR])\n pylint_sw = Pylint(parent=main_window)\n\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.analyze(filename=pylint_test_script)\n qtbot.waitUntil(\n lambda: pylint_widget.get_data(pylint_test_script)[1] is not None,\n timeout=5000)\n pylint_data = pylint_widget.get_data(filename=pylint_test_script)\n print(pylint_data)\n assert pylint_data\n conventions = pylint_data[1][3][\"C:\"]\n assert conventions\n assert len(conventions) == len(bad_names)\n assert all([sum([bad_name in message[2] for message in conventions]) == 1\n for bad_name in bad_names])\n\n\ndef test_pylint_max_history_conf(pylint_test_scripts, mocker):\n \"\"\"Regression test for checking max_entries configuration.\n\n For further information see spyder-ide/spyder#12884\n \"\"\"\n # Create the pylint widget for code analysis\n main_window = MainWindowMock()\n main_window.projects.get_active_project_path = mocker.MagicMock(\n return_value=None)\n pylint_sw = Pylint(parent=main_window)\n pylint_widget = PylintWidget(parent=pylint_sw)\n pylint_widget.filecombo.clear()\n\n script_0, script_1, script_2 = pylint_test_scripts(\n [\"test_script_{}.py\".format(n) for n in range(3)])\n\n # Change the max_entry to 2\n pylint_widget.parent.set_option('max_entries', 2)\n pylint_widget.change_history_limit(2)\n assert pylint_widget.parent.get_option('max_entries') == 2\n\n # Call to set_filename\n pylint_widget.set_filename(filename=script_0)\n assert pylint_widget.filecombo.count() == 1\n\n # Add to more filenames\n pylint_widget.set_filename(filename=script_1)\n pylint_widget.set_filename(filename=script_2)\n\n assert pylint_widget.filecombo.count() == 2\n\n assert 'test_script_2.py' in pylint_widget.curr_filenames[0]\n assert 'test_script_1.py' in 
pylint_widget.curr_filenames[1]\n\n # Change the max entry to 1\n pylint_widget.parent.set_option('max_entries', 1)\n pylint_widget.change_history_limit(1)\n\n assert pylint_widget.filecombo.count() == 1\n\n assert 'test_script_2.py' in pylint_widget.curr_filenames[0]\n\n\nif __name__ == \"__main__\":\n pytest.main([osp.basename(__file__), '-vv', '-rw'])\n",
"step-ids": [
10,
11,
12,
14,
15
]
}
|
[
10,
11,
12,
14,
15
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
try:
conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd
='root', db='test')
cur = conn.cursor()
cur.execute('SELECT user_id, user_name FROM cap_user')
row_count = cur.rowcount
for r in cur.fetchall():
print('userId is %s, userName is %s' % r)
except Exception as e:
print(e)
finally:
if cur is not None:
cur.close()
if conn is not None:
conn.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
conn = None
cur = None
try:
conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd
='root', db='test')
cur = conn.cursor()
cur.execute('SELECT user_id, user_name FROM cap_user')
row_count = cur.rowcount
for r in cur.fetchall():
print('userId is %s, userName is %s' % r)
except Exception as e:
print(e)
finally:
if cur is not None:
cur.close()
if conn is not None:
conn.close()
<|reserved_special_token_1|>
import pymysql
conn = None
cur = None
try:
conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd
='root', db='test')
cur = conn.cursor()
cur.execute('SELECT user_id, user_name FROM cap_user')
row_count = cur.rowcount
for r in cur.fetchall():
print('userId is %s, userName is %s' % r)
except Exception as e:
print(e)
finally:
if cur is not None:
cur.close()
if conn is not None:
conn.close()
<|reserved_special_token_1|>
import pymysql
conn = None
cur = None
try:
conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='root', db='test')
cur = conn.cursor()
cur.execute("SELECT user_id, user_name FROM cap_user")
row_count = cur.rowcount
# row_number = cur.rownumber
for r in cur.fetchall():
print("userId is %s, userName is %s" % r)
except Exception as e:
print(e)
finally:
if cur is not None:
cur.close()
if conn is not None:
conn.close()
|
flexible
|
{
"blob_id": "e5b5874f060bdf93ac4fadaf556aa4182619d077",
"index": 2033,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntry:\n conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd\n ='root', db='test')\n cur = conn.cursor()\n cur.execute('SELECT user_id, user_name FROM cap_user')\n row_count = cur.rowcount\n for r in cur.fetchall():\n print('userId is %s, userName is %s' % r)\nexcept Exception as e:\n print(e)\nfinally:\n if cur is not None:\n cur.close()\n if conn is not None:\n conn.close()\n",
"step-3": "<mask token>\nconn = None\ncur = None\ntry:\n conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd\n ='root', db='test')\n cur = conn.cursor()\n cur.execute('SELECT user_id, user_name FROM cap_user')\n row_count = cur.rowcount\n for r in cur.fetchall():\n print('userId is %s, userName is %s' % r)\nexcept Exception as e:\n print(e)\nfinally:\n if cur is not None:\n cur.close()\n if conn is not None:\n conn.close()\n",
"step-4": "import pymysql\nconn = None\ncur = None\ntry:\n conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd\n ='root', db='test')\n cur = conn.cursor()\n cur.execute('SELECT user_id, user_name FROM cap_user')\n row_count = cur.rowcount\n for r in cur.fetchall():\n print('userId is %s, userName is %s' % r)\nexcept Exception as e:\n print(e)\nfinally:\n if cur is not None:\n cur.close()\n if conn is not None:\n conn.close()\n",
"step-5": "import pymysql\n\n\nconn = None\ncur = None\ntry:\n conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='root', db='test')\n cur = conn.cursor()\n cur.execute(\"SELECT user_id, user_name FROM cap_user\")\n row_count = cur.rowcount\n # row_number = cur.rownumber\n for r in cur.fetchall():\n print(\"userId is %s, userName is %s\" % r)\nexcept Exception as e:\n print(e)\nfinally:\n if cur is not None:\n cur.close()\n if conn is not None:\n conn.close()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def postorder(self, node: 'TreeNode', p: 'TreeNode', q: 'TreeNode'
) ->'TreeNode':
"""
@return: p, q, their lca, or None
Improvement: record how many nodes are found to do early return
"""
if not node:
return None
if node == p or node == q:
return node
left = self.postorder(node.left, p, q)
right = self.postorder(node.right, p, q)
if left:
if right:
return node
else:
return left
elif right:
return right
else:
return None
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def postorder(self, node: 'TreeNode', p: 'TreeNode', q: 'TreeNode'
) ->'TreeNode':
"""
@return: p, q, their lca, or None
Improvement: record how many nodes are found to do early return
"""
if not node:
return None
if node == p or node == q:
return node
left = self.postorder(node.left, p, q)
right = self.postorder(node.right, p, q)
if left:
if right:
return node
else:
return left
elif right:
return right
else:
return None
def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q:
'TreeNode') ->'TreeNode':
return self.postorder(root, p, q)
<|reserved_special_token_1|>
'''
236. Lowest Common Ancestor of a Binary Tree
https://leetcode.com/problems/lowest-common-ancestor-of-a-binary-tree/
Given a binary tree, find the lowest common ancestor (LCA) of two given nodes in the tree.
According to the definition of LCA on Wikipedia:
“The lowest common ancestor is defined between two nodes p and q as the lowest node in T that
has both p and q as descendants (where we allow a node to be a descendant of itself).”
Given the following binary tree: root = [3,5,1,6,2,0,8,null,null,7,4]
Example 1:
Input: root = [3,5,1,6,2,0,8,null,null,7,4], p = 5, q = 1
Output: 3
Explanation: The LCA of nodes 5 and 1 is 3.
Example 2:
Input: root = [3,5,1,6,2,0,8,null,null,7,4], p = 5, q = 4
Output: 5
Explanation: The LCA of nodes 5 and 4 is 5, since a node can be a descendant of
itself according to the LCA definition.
Note:
All of the nodes' values will be unique.
p and q are different and both values will exist in the binary tree.
'''
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def postorder(self, node: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':
'''
@return: p, q, their lca, or None
Improvement: record how many nodes are found to do early return
'''
if not node:
return None
if node == p or node == q:
# node is p, q or their lca
return node
left = self.postorder(node.left, p, q)
right = self.postorder(node.right, p, q)
if left:
if right:
return node # p,q is in left and right, node is lca
else:
return left # left is p or q
else:
if right:
return right # right is p or q
else:
return None # p or q not in node or its children
def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':
return self.postorder(root, p , q)
|
flexible
|
{
"blob_id": "ec9184fa3562ef6015801edf316faa0097d1eb57",
"index": 4821,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def postorder(self, node: 'TreeNode', p: 'TreeNode', q: 'TreeNode'\n ) ->'TreeNode':\n \"\"\"\n @return: p, q, their lca, or None\n Improvement: record how many nodes are found to do early return\n \"\"\"\n if not node:\n return None\n if node == p or node == q:\n return node\n left = self.postorder(node.left, p, q)\n right = self.postorder(node.right, p, q)\n if left:\n if right:\n return node\n else:\n return left\n elif right:\n return right\n else:\n return None\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def postorder(self, node: 'TreeNode', p: 'TreeNode', q: 'TreeNode'\n ) ->'TreeNode':\n \"\"\"\n @return: p, q, their lca, or None\n Improvement: record how many nodes are found to do early return\n \"\"\"\n if not node:\n return None\n if node == p or node == q:\n return node\n left = self.postorder(node.left, p, q)\n right = self.postorder(node.right, p, q)\n if left:\n if right:\n return node\n else:\n return left\n elif right:\n return right\n else:\n return None\n\n def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q:\n 'TreeNode') ->'TreeNode':\n return self.postorder(root, p, q)\n",
"step-5": "'''\n236. Lowest Common Ancestor of a Binary Tree\nhttps://leetcode.com/problems/lowest-common-ancestor-of-a-binary-tree/\n\nGiven a binary tree, find the lowest common ancestor (LCA) of two given nodes in the tree.\n\nAccording to the definition of LCA on Wikipedia:\n“The lowest common ancestor is defined between two nodes p and q as the lowest node in T that\nhas both p and q as descendants (where we allow a node to be a descendant of itself).”\n\nGiven the following binary tree: root = [3,5,1,6,2,0,8,null,null,7,4]\n\nExample 1:\n\nInput: root = [3,5,1,6,2,0,8,null,null,7,4], p = 5, q = 1\nOutput: 3\nExplanation: The LCA of nodes 5 and 1 is 3.\n \nExample 2:\n\nInput: root = [3,5,1,6,2,0,8,null,null,7,4], p = 5, q = 4\nOutput: 5\nExplanation: The LCA of nodes 5 and 4 is 5, since a node can be a descendant of\nitself according to the LCA definition.\n \nNote:\n\nAll of the nodes' values will be unique.\np and q are different and both values will exist in the binary tree.\n'''\n# Definition for a binary tree node.\n# class TreeNode:\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\n\nclass Solution:\n def postorder(self, node: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':\n '''\n @return: p, q, their lca, or None\n Improvement: record how many nodes are found to do early return\n '''\n if not node:\n return None\n \n if node == p or node == q:\n # node is p, q or their lca\n return node\n \n left = self.postorder(node.left, p, q)\n right = self.postorder(node.right, p, q)\n \n if left:\n if right:\n return node # p,q is in left and right, node is lca\n else:\n return left # left is p or q\n else:\n if right:\n return right # right is p or q\n else:\n return None # p or q not in node or its children\n \n def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':\n return self.postorder(root, p , q)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):
def setUp(self):
self.options = forcebalance.parser.gen_opts_defaults.copy()
self.options.update({'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [
'bro.itp']})
os.chdir(self.options['root'])
self.logger.debug('\nUsing the following options:\n%s\n' % str(self
.options))
self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]
self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}
)
self.ff = forcebalance.forcefield.FF(self.options)
self.objective = forcebalance.objective.Objective(self.options,
self.tgt_opts, self.ff)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ObjectiveTests(object):
def test_target_zero_order_terms(self):
"""Check zero order target terms"""
obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),
Order=0)
self.assertEqual(type(obj), dict)
self.assertTrue('X' in obj)
self.assertNotEqual(int(obj['X']), 0)
self.assertTrue('G' in obj)
self.assertFalse(obj['G'].any())
self.assertTrue('H' in obj)
self.assertEqual(obj['H'], numpy.diag([1] * self.ff.np))
def test_target_first_order_terms(self):
"""Check first order target terms"""
obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),
Order=1)
self.assertEqual(type(obj), dict)
self.assertTrue('X' in obj)
self.assertTrue('G' in obj)
self.assertTrue('H' in obj)
def test_target_second_order_terms(self):
"""Check second order target terms"""
obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),
Order=2)
self.assertEqual(type(obj), dict)
self.assertTrue('X' in obj)
self.assertTrue('G' in obj)
self.assertTrue('H' in obj)
def test_indicate(self):
"""Check objective.indicate() runs without errors"""
self.objective.Indicate()
class TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):
def setUp(self):
self.options = forcebalance.parser.gen_opts_defaults.copy()
self.options.update({'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [
'water.itp']})
os.chdir(self.options['root'])
self.logger.debug('\nUsing the following options:\n%s\n' % str(self
.options))
self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]
self.tgt_opts[0].update({'type': 'ABINITIO_GMX', 'name': 'cluster-06'})
self.ff = forcebalance.forcefield.FF(self.options)
self.objective = forcebalance.objective.Objective(self.options,
self.tgt_opts, self.ff)
def shortDescription(self):
return super(TestWaterObjective, self).shortDescription(
) + ' (AbInitio_GMX target)'
class TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):
def setUp(self):
self.options = forcebalance.parser.gen_opts_defaults.copy()
self.options.update({'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [
'bro.itp']})
os.chdir(self.options['root'])
self.logger.debug('\nUsing the following options:\n%s\n' % str(self
.options))
self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]
self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}
)
self.ff = forcebalance.forcefield.FF(self.options)
self.objective = forcebalance.objective.Objective(self.options,
self.tgt_opts, self.ff)
def shortDescription(self):
return super(TestBromineObjective, self).shortDescription(
) + ' (Liquid_GMX target)'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestPenalty(ForceBalanceTestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class ObjectiveTests(object):
def test_target_zero_order_terms(self):
"""Check zero order target terms"""
obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),
Order=0)
self.assertEqual(type(obj), dict)
self.assertTrue('X' in obj)
self.assertNotEqual(int(obj['X']), 0)
self.assertTrue('G' in obj)
self.assertFalse(obj['G'].any())
self.assertTrue('H' in obj)
self.assertEqual(obj['H'], numpy.diag([1] * self.ff.np))
def test_target_first_order_terms(self):
"""Check first order target terms"""
obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),
Order=1)
self.assertEqual(type(obj), dict)
self.assertTrue('X' in obj)
self.assertTrue('G' in obj)
self.assertTrue('H' in obj)
def test_target_second_order_terms(self):
"""Check second order target terms"""
obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),
Order=2)
self.assertEqual(type(obj), dict)
self.assertTrue('X' in obj)
self.assertTrue('G' in obj)
self.assertTrue('H' in obj)
def test_indicate(self):
"""Check objective.indicate() runs without errors"""
self.objective.Indicate()
class TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):
def setUp(self):
self.options = forcebalance.parser.gen_opts_defaults.copy()
self.options.update({'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [
'water.itp']})
os.chdir(self.options['root'])
self.logger.debug('\nUsing the following options:\n%s\n' % str(self
.options))
self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]
self.tgt_opts[0].update({'type': 'ABINITIO_GMX', 'name': 'cluster-06'})
self.ff = forcebalance.forcefield.FF(self.options)
self.objective = forcebalance.objective.Objective(self.options,
self.tgt_opts, self.ff)
def shortDescription(self):
return super(TestWaterObjective, self).shortDescription(
) + ' (AbInitio_GMX target)'
class TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):
def setUp(self):
self.options = forcebalance.parser.gen_opts_defaults.copy()
self.options.update({'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [
'bro.itp']})
os.chdir(self.options['root'])
self.logger.debug('\nUsing the following options:\n%s\n' % str(self
.options))
self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]
self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}
)
self.ff = forcebalance.forcefield.FF(self.options)
self.objective = forcebalance.objective.Objective(self.options,
self.tgt_opts, self.ff)
def shortDescription(self):
return super(TestBromineObjective, self).shortDescription(
) + ' (Liquid_GMX target)'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestImplemented(ForceBalanceTestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class TestPenalty(ForceBalanceTestCase):
def setUp(self):
self.options = forcebalance.parser.gen_opts_defaults.copy()
self.options.update({'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [
'cc-pvdz-overlap-original.gbs']})
os.chdir(self.options['root'])
self.ff = forcebalance.forcefield.FF(self.options)
self.np = self.ff.np
self.penalties = []
for ptype in forcebalance.objective.Penalty.Pen_Names.keys():
penalty = forcebalance.objective.Penalty(ptype, self.ff, self.
options['penalty_additive'], self.options[
'penalty_multiplicative'], self.options[
'penalty_hyperbolic_b'], self.options['penalty_alpha'])
self.penalties.append(penalty)
def test_penalty_compute(self):
"""Check penalty computation functions"""
objective = {'G': numpy.zeros(9), 'H': numpy.diag((1,) * 9), 'X': 1}
for penalty in self.penalties:
result = penalty.compute([1] * self.np, objective)
self.assertEqual(tuple, type(result))
class ObjectiveTests(object):
def test_target_zero_order_terms(self):
"""Check zero order target terms"""
obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),
Order=0)
self.assertEqual(type(obj), dict)
self.assertTrue('X' in obj)
self.assertNotEqual(int(obj['X']), 0)
self.assertTrue('G' in obj)
self.assertFalse(obj['G'].any())
self.assertTrue('H' in obj)
self.assertEqual(obj['H'], numpy.diag([1] * self.ff.np))
def test_target_first_order_terms(self):
"""Check first order target terms"""
obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),
Order=1)
self.assertEqual(type(obj), dict)
self.assertTrue('X' in obj)
self.assertTrue('G' in obj)
self.assertTrue('H' in obj)
def test_target_second_order_terms(self):
"""Check second order target terms"""
obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),
Order=2)
self.assertEqual(type(obj), dict)
self.assertTrue('X' in obj)
self.assertTrue('G' in obj)
self.assertTrue('H' in obj)
def test_indicate(self):
"""Check objective.indicate() runs without errors"""
self.objective.Indicate()
class TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):
def setUp(self):
self.options = forcebalance.parser.gen_opts_defaults.copy()
self.options.update({'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [
'water.itp']})
os.chdir(self.options['root'])
self.logger.debug('\nUsing the following options:\n%s\n' % str(self
.options))
self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]
self.tgt_opts[0].update({'type': 'ABINITIO_GMX', 'name': 'cluster-06'})
self.ff = forcebalance.forcefield.FF(self.options)
self.objective = forcebalance.objective.Objective(self.options,
self.tgt_opts, self.ff)
def shortDescription(self):
return super(TestWaterObjective, self).shortDescription(
) + ' (AbInitio_GMX target)'
class TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):
def setUp(self):
self.options = forcebalance.parser.gen_opts_defaults.copy()
self.options.update({'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [
'bro.itp']})
os.chdir(self.options['root'])
self.logger.debug('\nUsing the following options:\n%s\n' % str(self
.options))
self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]
self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}
)
self.ff = forcebalance.forcefield.FF(self.options)
self.objective = forcebalance.objective.Objective(self.options,
self.tgt_opts, self.ff)
def shortDescription(self):
return super(TestBromineObjective, self).shortDescription(
) + ' (Liquid_GMX target)'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from __future__ import absolute_import
from builtins import str
from builtins import object
import unittest
import sys, os, re
import forcebalance
import abc
import numpy
from __init__ import ForceBalanceTestCase
class TestImplemented(ForceBalanceTestCase):
def test_implemented_targets_derived_from_target(self):
"""Check classes listed in Implemented_Targets are derived from Target"""
for key in forcebalance.objective.Implemented_Targets.keys():
self.logger.debug("Assert %s is subclass of target\n" % str(forcebalance.objective.Implemented_Targets[key]))
self.assertTrue(issubclass(forcebalance.objective.Implemented_Targets[key],forcebalance.target.Target))
def test_no_unlisted_classes_derived_from_Target(self):
"""Check for unknown omissions from Implemented_Targets
Check to make sure any classes derived from Target are either
listed in Implemented_Targets or in the exclusion list in this
test case
"""
self.skipTest("Not sure if test is working properly.")
forcebalance_modules=[module[:-3] for module in os.listdir(forcebalance.__path__[0])
if re.compile(".*\.py$").match(module)
and module not in ["__init__.py"]]
for module in forcebalance_modules:
# LPW: I don't think dcdlib should be imported this way.
print(module)
if module == "_dcdlib": continue
m = __import__('forcebalance.' + module)
objs = dir(eval('m.' + module))
print(objs)
for obj in objs:
obj = eval('m.'+module+'.'+obj)
if type(obj) == abc.ABCMeta:
implemented = [i for i in forcebalance.objective.Implemented_Targets.values()]
# list of documented exceptions
# Basically, platform-independent targets are excluded.
exclude = ['Target',
'AbInitio',
'Interaction',
'Interaction_GMX',
'Liquid',
'Lipid',
'BindingEnergy',
'LeastSquares',
'Vibration',
'Thermo',
'Hydration',
'Moments']
print(obj)
if obj not in implemented and obj.__name__ not in exclude:
self.fail("Unknown class '%s' not listed in Implemented_Targets" % obj.__name__)
class TestPenalty(ForceBalanceTestCase):
def setUp(self):
self.options=forcebalance.parser.gen_opts_defaults.copy()
self.options.update({
'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01,
'jobtype': 'NEWTON',
'forcefield': ['cc-pvdz-overlap-original.gbs']})
os.chdir(self.options['root'])
self.ff = forcebalance.forcefield.FF(self.options)
self.np=self.ff.np
self.penalties = []
for ptype in forcebalance.objective.Penalty.Pen_Names.keys():
penalty = forcebalance.objective.Penalty(ptype,
self.ff,
self.options['penalty_additive'],
self.options['penalty_multiplicative'],
self.options['penalty_hyperbolic_b'],
self.options['penalty_alpha'])
self.penalties.append(penalty)
def test_penalty_compute(self):
"""Check penalty computation functions"""
objective = {'G': numpy.zeros((9)),
'H': numpy.diag((1,)*9),
'X': 1}
for penalty in self.penalties:
result=penalty.compute([1]*self.np, objective)
self.assertEqual(tuple, type(result))
# more tests go here
class ObjectiveTests(object):
def test_target_zero_order_terms(self):
"""Check zero order target terms"""
obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=0)
self.assertEqual(type(obj),dict)
self.assertTrue("X" in obj)
self.assertNotEqual(int(obj["X"]), 0)
self.assertTrue("G" in obj)
self.assertFalse(obj["G"].any())
self.assertTrue("H" in obj)
self.assertEqual(obj["H"], numpy.diag([1]*self.ff.np))
def test_target_first_order_terms(self):
"""Check first order target terms"""
obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=1)
self.assertEqual(type(obj),dict)
self.assertTrue("X" in obj)
self.assertTrue("G" in obj)
self.assertTrue("H" in obj)
def test_target_second_order_terms(self):
"""Check second order target terms"""
obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=2)
self.assertEqual(type(obj),dict)
self.assertTrue("X" in obj)
self.assertTrue("G" in obj)
self.assertTrue("H" in obj)
def test_indicate(self):
"""Check objective.indicate() runs without errors"""
self.objective.Indicate()
class TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):
def setUp(self):
self.options=forcebalance.parser.gen_opts_defaults.copy()
self.options.update({
'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01,
'jobtype': 'NEWTON',
'forcefield': ['water.itp']})
os.chdir(self.options['root'])
self.logger.debug("\nUsing the following options:\n%s\n" % str(self.options))
self.tgt_opts = [ forcebalance.parser.tgt_opts_defaults.copy() ]
self.tgt_opts[0].update({"type" : "ABINITIO_GMX", "name" : "cluster-06"})
self.ff = forcebalance.forcefield.FF(self.options)
self.objective = forcebalance.objective.Objective(self.options, self.tgt_opts,self.ff)
def shortDescription(self):
return super(TestWaterObjective, self).shortDescription() + " (AbInitio_GMX target)"
class TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):
    """Run the shared ObjectiveTests against a liquid bromine Liquid_GMX target."""

    def setUp(self):
        # General options: additive penalty, Newton optimizer, bro.itp FF.
        opts = forcebalance.parser.gen_opts_defaults.copy()
        opts.update({'root': os.getcwd() + '/test/files',
                     'penalty_additive': 0.01,
                     'jobtype': 'NEWTON',
                     'forcefield': ['bro.itp']})
        self.options = opts
        os.chdir(opts['root'])
        self.logger.debug("\nUsing the following options:\n%s\n" % str(opts))
        # One liquid-phase GROMACS target.
        target = forcebalance.parser.tgt_opts_defaults.copy()
        target.update({"type": "LIQUID_GMX", "name": "LiquidBromine"})
        self.tgt_opts = [target]
        self.ff = forcebalance.forcefield.FF(opts)
        self.objective = forcebalance.objective.Objective(opts, self.tgt_opts, self.ff)

    def shortDescription(self):
        return super(TestBromineObjective, self).shortDescription() + " (Liquid_GMX target)"
# Allow this test module to be executed directly as a script.
if __name__ == '__main__': 
    unittest.main()
|
flexible
|
{
"blob_id": "f91e1fdc31b2fe1aef15757576d847c617a86201",
"index": 1121,
"step-1": "<mask token>\n\n\nclass TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'bro.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}\n )\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ObjectiveTests(object):\n\n def test_target_zero_order_terms(self):\n \"\"\"Check zero order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=0)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertNotEqual(int(obj['X']), 0)\n self.assertTrue('G' in obj)\n self.assertFalse(obj['G'].any())\n self.assertTrue('H' in obj)\n self.assertEqual(obj['H'], numpy.diag([1] * self.ff.np))\n\n def test_target_first_order_terms(self):\n \"\"\"Check first order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=1)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_target_second_order_terms(self):\n \"\"\"Check second order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=2)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_indicate(self):\n \"\"\"Check objective.indicate() runs without errors\"\"\"\n self.objective.Indicate()\n\n\nclass TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'water.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'ABINITIO_GMX', 'name': 'cluster-06'})\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def shortDescription(self):\n return super(TestWaterObjective, self).shortDescription(\n ) + ' 
(AbInitio_GMX target)'\n\n\nclass TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'bro.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}\n )\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def shortDescription(self):\n return super(TestBromineObjective, self).shortDescription(\n ) + ' (Liquid_GMX target)'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestPenalty(ForceBalanceTestCase):\n <mask token>\n <mask token>\n\n\nclass ObjectiveTests(object):\n\n def test_target_zero_order_terms(self):\n \"\"\"Check zero order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=0)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertNotEqual(int(obj['X']), 0)\n self.assertTrue('G' in obj)\n self.assertFalse(obj['G'].any())\n self.assertTrue('H' in obj)\n self.assertEqual(obj['H'], numpy.diag([1] * self.ff.np))\n\n def test_target_first_order_terms(self):\n \"\"\"Check first order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=1)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_target_second_order_terms(self):\n \"\"\"Check second order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=2)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_indicate(self):\n \"\"\"Check objective.indicate() runs without errors\"\"\"\n self.objective.Indicate()\n\n\nclass TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'water.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'ABINITIO_GMX', 'name': 'cluster-06'})\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def 
shortDescription(self):\n return super(TestWaterObjective, self).shortDescription(\n ) + ' (AbInitio_GMX target)'\n\n\nclass TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'bro.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}\n )\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def shortDescription(self):\n return super(TestBromineObjective, self).shortDescription(\n ) + ' (Liquid_GMX target)'\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass TestImplemented(ForceBalanceTestCase):\n <mask token>\n <mask token>\n\n\nclass TestPenalty(ForceBalanceTestCase):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'cc-pvdz-overlap-original.gbs']})\n os.chdir(self.options['root'])\n self.ff = forcebalance.forcefield.FF(self.options)\n self.np = self.ff.np\n self.penalties = []\n for ptype in forcebalance.objective.Penalty.Pen_Names.keys():\n penalty = forcebalance.objective.Penalty(ptype, self.ff, self.\n options['penalty_additive'], self.options[\n 'penalty_multiplicative'], self.options[\n 'penalty_hyperbolic_b'], self.options['penalty_alpha'])\n self.penalties.append(penalty)\n\n def test_penalty_compute(self):\n \"\"\"Check penalty computation functions\"\"\"\n objective = {'G': numpy.zeros(9), 'H': numpy.diag((1,) * 9), 'X': 1}\n for penalty in self.penalties:\n result = penalty.compute([1] * self.np, objective)\n self.assertEqual(tuple, type(result))\n\n\nclass ObjectiveTests(object):\n\n def test_target_zero_order_terms(self):\n \"\"\"Check zero order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=0)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertNotEqual(int(obj['X']), 0)\n self.assertTrue('G' in obj)\n self.assertFalse(obj['G'].any())\n self.assertTrue('H' in obj)\n self.assertEqual(obj['H'], numpy.diag([1] * self.ff.np))\n\n def test_target_first_order_terms(self):\n \"\"\"Check first order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=1)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_target_second_order_terms(self):\n \"\"\"Check second order target terms\"\"\"\n obj = 
self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=2)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_indicate(self):\n \"\"\"Check objective.indicate() runs without errors\"\"\"\n self.objective.Indicate()\n\n\nclass TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'water.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'ABINITIO_GMX', 'name': 'cluster-06'})\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def shortDescription(self):\n return super(TestWaterObjective, self).shortDescription(\n ) + ' (AbInitio_GMX target)'\n\n\nclass TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'bro.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}\n )\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def shortDescription(self):\n return super(TestBromineObjective, self).shortDescription(\n ) + ' (Liquid_GMX target)'\n\n\n<mask token>\n",
"step-5": "from __future__ import absolute_import\nfrom builtins import str\nfrom builtins import object\nimport unittest\nimport sys, os, re\nimport forcebalance\nimport abc\nimport numpy\nfrom __init__ import ForceBalanceTestCase\n\nclass TestImplemented(ForceBalanceTestCase):\n def test_implemented_targets_derived_from_target(self):\n \"\"\"Check classes listed in Implemented_Targets are derived from Target\"\"\"\n for key in forcebalance.objective.Implemented_Targets.keys():\n self.logger.debug(\"Assert %s is subclass of target\\n\" % str(forcebalance.objective.Implemented_Targets[key]))\n self.assertTrue(issubclass(forcebalance.objective.Implemented_Targets[key],forcebalance.target.Target))\n \n def test_no_unlisted_classes_derived_from_Target(self):\n \"\"\"Check for unknown omissions from Implemented_Targets\n \n Check to make sure any classes derived from Target are either\n listed in Implemented_Targets or in the exclusion list in this\n test case\n \"\"\"\n self.skipTest(\"Not sure if test is working properly.\")\n forcebalance_modules=[module[:-3] for module in os.listdir(forcebalance.__path__[0])\n if re.compile(\".*\\.py$\").match(module)\n and module not in [\"__init__.py\"]]\n for module in forcebalance_modules:\n # LPW: I don't think dcdlib should be imported this way.\n print(module)\n if module == \"_dcdlib\": continue\n m = __import__('forcebalance.' + module)\n objs = dir(eval('m.' 
+ module))\n print(objs)\n for obj in objs:\n obj = eval('m.'+module+'.'+obj)\n if type(obj) == abc.ABCMeta:\n implemented = [i for i in forcebalance.objective.Implemented_Targets.values()]\n # list of documented exceptions\n # Basically, platform-independent targets are excluded.\n exclude = ['Target',\n 'AbInitio',\n 'Interaction',\n 'Interaction_GMX',\n 'Liquid',\n 'Lipid',\n 'BindingEnergy',\n 'LeastSquares',\n 'Vibration',\n 'Thermo',\n 'Hydration',\n 'Moments']\n print(obj)\n if obj not in implemented and obj.__name__ not in exclude:\n self.fail(\"Unknown class '%s' not listed in Implemented_Targets\" % obj.__name__)\n\nclass TestPenalty(ForceBalanceTestCase):\n def setUp(self):\n self.options=forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({\n 'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01,\n 'jobtype': 'NEWTON',\n 'forcefield': ['cc-pvdz-overlap-original.gbs']})\n os.chdir(self.options['root'])\n\n self.ff = forcebalance.forcefield.FF(self.options)\n self.np=self.ff.np\n\n self.penalties = []\n for ptype in forcebalance.objective.Penalty.Pen_Names.keys():\n penalty = forcebalance.objective.Penalty(ptype,\n self.ff,\n self.options['penalty_additive'],\n self.options['penalty_multiplicative'],\n self.options['penalty_hyperbolic_b'],\n self.options['penalty_alpha'])\n self.penalties.append(penalty)\n\n def test_penalty_compute(self):\n \"\"\"Check penalty computation functions\"\"\"\n objective = {'G': numpy.zeros((9)),\n 'H': numpy.diag((1,)*9),\n 'X': 1}\n for penalty in self.penalties:\n result=penalty.compute([1]*self.np, objective)\n self.assertEqual(tuple, type(result))\n # more tests go here\n \nclass ObjectiveTests(object): \n def test_target_zero_order_terms(self):\n \"\"\"Check zero order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=0)\n self.assertEqual(type(obj),dict)\n self.assertTrue(\"X\" in obj)\n self.assertNotEqual(int(obj[\"X\"]), 0)\n \n self.assertTrue(\"G\" 
in obj)\n self.assertFalse(obj[\"G\"].any())\n \n self.assertTrue(\"H\" in obj)\n self.assertEqual(obj[\"H\"], numpy.diag([1]*self.ff.np))\n \n def test_target_first_order_terms(self):\n \"\"\"Check first order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=1)\n self.assertEqual(type(obj),dict)\n self.assertTrue(\"X\" in obj)\n self.assertTrue(\"G\" in obj)\n self.assertTrue(\"H\" in obj)\n \n def test_target_second_order_terms(self):\n \"\"\"Check second order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=2)\n self.assertEqual(type(obj),dict)\n self.assertTrue(\"X\" in obj)\n self.assertTrue(\"G\" in obj)\n self.assertTrue(\"H\" in obj)\n \n def test_indicate(self):\n \"\"\"Check objective.indicate() runs without errors\"\"\"\n self.objective.Indicate()\n\nclass TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):\n def setUp(self):\n self.options=forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({\n 'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01,\n 'jobtype': 'NEWTON',\n 'forcefield': ['water.itp']})\n os.chdir(self.options['root'])\n \n self.logger.debug(\"\\nUsing the following options:\\n%s\\n\" % str(self.options))\n\n self.tgt_opts = [ forcebalance.parser.tgt_opts_defaults.copy() ]\n self.tgt_opts[0].update({\"type\" : \"ABINITIO_GMX\", \"name\" : \"cluster-06\"})\n self.ff = forcebalance.forcefield.FF(self.options)\n \n self.objective = forcebalance.objective.Objective(self.options, self.tgt_opts,self.ff)\n \n def shortDescription(self):\n return super(TestWaterObjective, self).shortDescription() + \" (AbInitio_GMX target)\"\n \nclass TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):\n def setUp(self):\n self.options=forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({\n 'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01,\n 'jobtype': 'NEWTON',\n 'forcefield': ['bro.itp']})\n 
os.chdir(self.options['root'])\n \n self.logger.debug(\"\\nUsing the following options:\\n%s\\n\" % str(self.options))\n\n self.tgt_opts = [ forcebalance.parser.tgt_opts_defaults.copy() ]\n self.tgt_opts[0].update({\"type\" : \"LIQUID_GMX\", \"name\" : \"LiquidBromine\"})\n self.ff = forcebalance.forcefield.FF(self.options)\n \n self.objective = forcebalance.objective.Objective(self.options, self.tgt_opts,self.ff)\n \n def shortDescription(self):\n return super(TestBromineObjective, self).shortDescription() + \" (Liquid_GMX target)\"\n\nif __name__ == '__main__': \n unittest.main()\n",
"step-ids": [
2,
11,
12,
15,
20
]
}
|
[
2,
11,
12,
15,
20
] |
<|reserved_special_token_0|>
class BalancedForestTest(unittest.TestCase):
def test1(self):
expected = 10
c = [1, 1, 1, 18, 10, 11, 5, 6]
edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]
self.assertEqual(balancedForest(c, edges), expected)
<|reserved_special_token_0|>
def test3(self):
expected = 19
c = [15, 12, 8, 14, 13]
edges = [[4, 5], [1, 2], [1, 3], [1, 4]]
self.assertEqual(balancedForest(c, edges), expected)
<|reserved_special_token_0|>
def test5(self):
expected = -1
c = [1, 3, 5]
edges = [[1, 3], [1, 2]]
self.assertEqual(balancedForest(c, edges), expected)
def test6(self):
expected = -1
c = [7, 7, 4, 1, 1, 1]
edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]
self.assertEqual(balancedForest(c, edges), expected)
def test7(self):
expected = 0
c = [1, 3, 4, 4]
edges = [(1, 2), (1, 3), (1, 4)]
self.assertEqual(balancedForest(c, edges), expected)
def test8(self):
expected = 297
c = [100, 99, 98, 100, 99, 98]
edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
self.assertEqual(balancedForest(c, edges), expected)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Node(object):
<|reserved_special_token_0|>
def __init__(self, value, indentifier):
super(Node, self).__init__()
self.value = value
self.identifier = indentifier
self.next = None
class Graph(object):
"""docstring for Graph"""
def __init__(self, values, edges):
super(Graph, self).__init__()
self.node_values = values
self.vertices = len(values)
self.edges = edges
self.graph = [None] * self.vertices
self.grand_sum = sum(self.node_values)
def build_adjacency_list(self):
for edge in self.edges:
fro = edge[0] - 1
to = edge[1] - 1
node = Node(self.node_values[to], to)
node.next = self.graph[fro]
self.graph[fro] = node
node = Node(self.node_values[fro], fro)
node.next = self.graph[to]
self.graph[to] = node
def print_graph(self):
for i in range(self.vertices):
node = self.graph[i]
print('Vertex:', i)
while node != None:
print(node.value, node.identifier)
node = node.next
print('<<' * 20)
def get_tree_nodes(self, start_node, nodes, edge, total):
if start_node == None:
return nodes
while start_node != None:
if start_node.identifier == edge[0
] or start_node.identifier == edge[2
] or start_node.identifier in nodes:
print('skipping ', start_node.identifier)
else:
print('adding ', start_node.identifier)
nodes.append(start_node.identifier)
total[0] += start_node.value
next_n = self.graph[start_node.identifier]
self.get_tree_nodes(next_n, nodes, edge, total)
start_node = start_node.next
return nodes
def split_and_compute_tree_sum(self, t1_nodes=[], t2_nodes=[], edge=[],
ton=False):
t1_total = 0
t2_total = 0
total = [0]
start_node = self.graph[edge[1]]
if start_node.next != None:
t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)
if len(t2_nodes) == 0 and edge[1] != edge[2]:
t2_nodes.append(edge[1])
total[0] += self.node_values[edge[1]]
t2_total = total[0]
if not ton and t2_total < self.grand_sum / 2:
for i in range(self.vertices):
if i not in t2_nodes:
t1_nodes.append(i)
t1_total = self.grand_sum - t2_total
print('t2_nodes', t2_nodes)
print('t2_total', t2_total)
return t1_total, t2_total
def check(self, tree1_total, tree2_total, tree3_total):
print('###' * 10)
print('FINAL tree1_total: ', tree1_total)
print('FINAL tree2_total: ', tree2_total)
print('FINAL tree3_total: ', tree3_total)
print('###' * 10)
if (tree1_total == tree2_total or tree1_total == tree3_total or
tree2_total == tree3_total):
mx = max(tree1_total, tree2_total, tree3_total)
if [tree1_total, tree2_total, tree3_total].count(mx) >= 2:
ret = mx - min(tree1_total, tree2_total, tree3_total)
return ret, True
return -1, False
def split_tree_into_two(self):
ret = -1
found = False
global skipped
for entry in range(self.vertices):
tree1_nodes = []
tree2_nodes = []
tree3_nodes = []
temp_nodes = []
n = self.graph[entry]
while n != None:
edge = [entry, n.identifier, -1]
if n.identifier <= entry:
n = n.next
skipped += 1
continue
print('##MAIN##. SPLIT POINT EDGE: ', edge)
tree1_nodes = []
tree2_nodes = []
tree1_total, tree2_total = self.split_and_compute_tree_sum(
tree1_nodes, tree2_nodes, edge)
print('ORIGINALS: ', tree1_total, tree2_total)
if min(tree1_total, tree2_total) < self.grand_sum / 3 or max(
tree1_total, tree2_total) > 2 * self.grand_sum / 3:
n = n.next
continue
if tree1_total > tree2_total:
ret, found = self.find_third_tree(tree1_total,
tree2_total, tree1_nodes, 1, edge[1])
elif tree2_total > tree1_total:
ret, found = self.find_third_tree(tree1_total,
tree2_total, tree2_nodes, 2, edge[0])
elif tree1_total == tree2_total:
ret = tree1_total
found = True
else:
found = True
if found:
break
n = n.next
if found:
break
return ret
def find_third_tree(self, tree1_total, tree2_total, nodes, t=1, m=0):
ret, found = -1, False
global skipped
consumed = []
for i in range(len(nodes)):
skip_n = nodes[i]
consumed.append(skip_n)
n = self.graph[skip_n]
while n != None:
if n.identifier in consumed:
n = n.next
skipped += 1
continue
edge = [skip_n, n.identifier, m]
print('2. SPLIT POINT EDGE: ', edge)
print('tree1_total', tree1_total)
tree3_nodes = []
temp_nodes = []
_, tree3_total = self.split_and_compute_tree_sum(temp_nodes,
tree3_nodes, edge, True)
if t == 1:
ret, found = self.check(tree1_total - tree3_total,
tree2_total, tree3_total)
elif t == 2:
ret, found = self.check(tree1_total, tree2_total -
tree3_total, tree3_total)
if found:
break
n = n.next
if found:
break
return ret, found
<|reserved_special_token_0|>
class BalancedForestTest(unittest.TestCase):
def test1(self):
expected = 10
c = [1, 1, 1, 18, 10, 11, 5, 6]
edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]
self.assertEqual(balancedForest(c, edges), expected)
def test2(self):
expected = 13
c = [12, 7, 11, 17, 20, 10]
edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
self.assertEqual(balancedForest(c, edges), expected)
def test3(self):
expected = 19
c = [15, 12, 8, 14, 13]
edges = [[4, 5], [1, 2], [1, 3], [1, 4]]
self.assertEqual(balancedForest(c, edges), expected)
def test4(self):
expected = 2
c = [1, 2, 2, 1, 1]
edges = [[1, 2], [1, 3], [3, 5], [1, 4]]
self.assertEqual(balancedForest(c, edges), expected)
def test5(self):
expected = -1
c = [1, 3, 5]
edges = [[1, 3], [1, 2]]
self.assertEqual(balancedForest(c, edges), expected)
def test6(self):
expected = -1
c = [7, 7, 4, 1, 1, 1]
edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]
self.assertEqual(balancedForest(c, edges), expected)
def test7(self):
expected = 0
c = [1, 3, 4, 4]
edges = [(1, 2), (1, 3), (1, 4)]
self.assertEqual(balancedForest(c, edges), expected)
def test8(self):
expected = 297
c = [100, 99, 98, 100, 99, 98]
edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
self.assertEqual(balancedForest(c, edges), expected)
def test9(self):
expected = 4
c = [12, 10, 8, 12, 14, 12]
edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]
self.assertEqual(balancedForest(c, edges), expected)
print('SKIPPED', skipped)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Node(object):
    """One entry in a singly linked adjacency list: a neighbour's value and id."""

    def __init__(self, value, indentifier):
        # NOTE(review): 'indentifier' is a misspelling of 'identifier'; the
        # parameter name is kept so any keyword callers keep working.
        super(Node, self).__init__()
        self.value = value             # cost of the vertex this entry refers to
        self.identifier = indentifier  # zero-based vertex index
        self.next = None               # next entry in the list, or None
class Graph(object):
    """Vertex-weighted undirected graph stored as singly linked adjacency lists.

    Vertex ``i`` (0-based) carries cost ``values[i]``; ``edges`` is the
    1-based edge list from the puzzle input.  The class brute-force searches
    for two edge cuts that split the graph into three balanceable pieces
    (balanced-forest style problem).  Traversal methods print verbose
    progress to stdout and bump the module-level ``skipped`` counter.
    """

    def __init__(self, values, edges):
        """Record vertex costs and edges; call build_adjacency_list() next."""
        super(Graph, self).__init__()
        self.node_values = values
        self.vertices = len(values)
        self.edges = edges
        # graph[i] is the head Node of vertex i's adjacency list (or None).
        self.graph = [None] * self.vertices
        # Total cost of all vertices; every candidate split is compared to it.
        self.grand_sum = sum(self.node_values)

    def build_adjacency_list(self):
        """Fill self.graph from the 1-based edge list, inserting both directions."""
        for edge in self.edges:
            fro = edge[0] - 1  # convert 1-based input ids to 0-based indices
            to = edge[1] - 1
            node = Node(self.node_values[to], to)
            node.next = self.graph[fro]
            self.graph[fro] = node
            node = Node(self.node_values[fro], fro)
            node.next = self.graph[to]
            self.graph[to] = node

    def print_graph(self):
        """Debug helper: dump every vertex's adjacency list to stdout."""
        for i in range(self.vertices):
            node = self.graph[i]
            print('Vertex:', i)
            while node is not None:
                print(node.value, node.identifier)
                node = node.next
            print('<<' * 20)

    def get_tree_nodes(self, start_node, nodes, edge, total):
        """Depth-first collect the component reachable from ``start_node``.

        ``edge`` is ``[u, v, m]``: vertices ``edge[0]`` and ``edge[2]`` act
        as barriers that are never entered, which is what detaches the
        subtree.  Visited vertex ids are appended to ``nodes`` and their
        values accumulated into ``total[0]`` (a one-element list used as a
        mutable accumulator).  Returns ``nodes``.
        """
        if start_node is None:
            return nodes
        while start_node is not None:
            if (start_node.identifier == edge[0]
                    or start_node.identifier == edge[2]
                    or start_node.identifier in nodes):
                print('skipping ', start_node.identifier)
            else:
                print('adding ', start_node.identifier)
                nodes.append(start_node.identifier)
                total[0] += start_node.value
                next_n = self.graph[start_node.identifier]
                # Recurse into the newly added vertex's neighbours.
                self.get_tree_nodes(next_n, nodes, edge, total)
            start_node = start_node.next
        return nodes

    def split_and_compute_tree_sum(self, t1_nodes=None, t2_nodes=None,
                                   edge=None, ton=False):
        """Cut the graph at ``edge`` and return ``(t1_total, t2_total)``.

        ``t2`` is the component hanging off ``edge[1]`` away from the barrier
        vertices ``edge[0]``/``edge[2]``; ``t1`` is the remainder.  The t1
        side (node list and total) is only filled in when ``ton`` is False
        and t2's sum is below half the grand total; otherwise ``t1_total``
        is returned as 0.

        Bug fix: the list parameters previously defaulted to mutable ``[]``
        objects, which Python shares across calls; they are now created
        fresh on each call.
        """
        if t1_nodes is None:
            t1_nodes = []
        if t2_nodes is None:
            t2_nodes = []
        if edge is None:
            edge = []
        t1_total = 0
        t2_total = 0
        total = [0]  # mutable accumulator filled by get_tree_nodes()
        start_node = self.graph[edge[1]]
        if start_node.next is not None:
            t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)
        if len(t2_nodes) == 0 and edge[1] != edge[2]:
            # Nothing was collected: treat edge[1] itself as the whole
            # detached component.
            t2_nodes.append(edge[1])
            total[0] += self.node_values[edge[1]]
        t2_total = total[0]
        if not ton and t2_total < self.grand_sum / 2:
            for i in range(self.vertices):
                if i not in t2_nodes:
                    t1_nodes.append(i)
            t1_total = self.grand_sum - t2_total
        print('t2_nodes', t2_nodes)
        print('t2_total', t2_total)
        return t1_total, t2_total

    def check(self, tree1_total, tree2_total, tree3_total):
        """Test whether three tree sums can be balanced.

        Returns ``(cost, True)`` when at least two sums are equal and the
        maximum sum occurs at least twice; ``cost`` is max - min (what must
        be added to the smallest tree).  Otherwise returns ``(-1, False)``.
        """
        print('###' * 10)
        print('FINAL tree1_total: ', tree1_total)
        print('FINAL tree2_total: ', tree2_total)
        print('FINAL tree3_total: ', tree3_total)
        print('###' * 10)
        if (tree1_total == tree2_total or tree1_total == tree3_total or
                tree2_total == tree3_total):
            mx = max(tree1_total, tree2_total, tree3_total)
            if [tree1_total, tree2_total, tree3_total].count(mx) >= 2:
                ret = mx - min(tree1_total, tree2_total, tree3_total)
                return ret, True
        return -1, False

    def split_tree_into_two(self):
        """Try every edge as the first cut, then search for a second cut.

        Returns the cost from the first balanced split found, or -1 when no
        balanced split exists.  Increments the module-level ``skipped``
        counter for edges pruned from the search.
        """
        ret = -1
        found = False
        global skipped
        for entry in range(self.vertices):
            n = self.graph[entry]
            while n is not None:
                edge = [entry, n.identifier, -1]
                if n.identifier <= entry:
                    # Each undirected edge appears twice; process it once.
                    n = n.next
                    skipped += 1
                    continue
                print('##MAIN##. SPLIT POINT EDGE: ', edge)
                tree1_nodes = []
                tree2_nodes = []
                tree1_total, tree2_total = self.split_and_compute_tree_sum(
                    tree1_nodes, tree2_nodes, edge)
                print('ORIGINALS: ', tree1_total, tree2_total)
                # A balanced 3-way split needs both parts within
                # [grand_sum/3, 2*grand_sum/3]; otherwise prune this edge.
                if min(tree1_total, tree2_total) < self.grand_sum / 3 or max(
                        tree1_total, tree2_total) > 2 * self.grand_sum / 3:
                    n = n.next
                    continue
                if tree1_total > tree2_total:
                    ret, found = self.find_third_tree(
                        tree1_total, tree2_total, tree1_nodes, 1, edge[1])
                elif tree2_total > tree1_total:
                    ret, found = self.find_third_tree(
                        tree1_total, tree2_total, tree2_nodes, 2, edge[0])
                elif tree1_total == tree2_total:
                    ret = tree1_total
                    found = True
                else:
                    found = True  # unreachable: one of the branches above always holds
                if found:
                    break
                n = n.next
            if found:
                break
        return ret

    def find_third_tree(self, tree1_total, tree2_total, nodes, t=1, m=0):
        """Search the larger component (vertex ids in ``nodes``) for a second cut.

        ``t`` selects which original tree is being subdivided (1 -> tree1,
        2 -> tree2) and ``m`` is the vertex across the first cut, passed as
        an extra barrier.  Returns ``(cost, found)``.
        """
        ret, found = -1, False
        global skipped
        consumed = []  # vertices already tried as cut origins
        for skip_n in nodes:
            consumed.append(skip_n)
            n = self.graph[skip_n]
            while n is not None:
                if n.identifier in consumed:
                    n = n.next
                    skipped += 1
                    continue
                edge = [skip_n, n.identifier, m]
                print('2. SPLIT POINT EDGE: ', edge)
                print('tree1_total', tree1_total)
                tree3_nodes = []
                _, tree3_total = self.split_and_compute_tree_sum(
                    [], tree3_nodes, edge, True)
                if t == 1:
                    ret, found = self.check(
                        tree1_total - tree3_total, tree2_total, tree3_total)
                elif t == 2:
                    ret, found = self.check(
                        tree1_total, tree2_total - tree3_total, tree3_total)
                if found:
                    break
                n = n.next
            if found:
                break
        return ret, found
def balancedForest(values, edges):
    """Build a Graph from ``values``/``edges`` and run the two-cut search.

    Returns the cost found by Graph.split_tree_into_two(), which is -1 when
    no balanced split exists.
    """
    forest = Graph(values, edges)
    forest.build_adjacency_list()
    forest.print_graph()  # debug dump of the adjacency lists
    return forest.split_tree_into_two()
<|reserved_special_token_0|>
class BalancedForestTest(unittest.TestCase):
    """Regression tests for balancedForest() on small hand-built graphs."""

    def test1(self):
        costs = [1, 1, 1, 18, 10, 11, 5, 6]
        links = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]
        self.assertEqual(balancedForest(costs, links), 10)

    def test2(self):
        costs = [12, 7, 11, 17, 20, 10]
        links = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
        self.assertEqual(balancedForest(costs, links), 13)

    def test3(self):
        costs = [15, 12, 8, 14, 13]
        links = [[4, 5], [1, 2], [1, 3], [1, 4]]
        self.assertEqual(balancedForest(costs, links), 19)

    def test4(self):
        costs = [1, 2, 2, 1, 1]
        links = [[1, 2], [1, 3], [3, 5], [1, 4]]
        self.assertEqual(balancedForest(costs, links), 2)

    def test5(self):
        costs = [1, 3, 5]
        links = [[1, 3], [1, 2]]
        self.assertEqual(balancedForest(costs, links), -1)

    def test6(self):
        costs = [7, 7, 4, 1, 1, 1]
        links = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]
        self.assertEqual(balancedForest(costs, links), -1)

    def test7(self):
        costs = [1, 3, 4, 4]
        links = [(1, 2), (1, 3), (1, 4)]
        self.assertEqual(balancedForest(costs, links), 0)

    def test8(self):
        costs = [100, 99, 98, 100, 99, 98]
        links = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
        self.assertEqual(balancedForest(costs, links), 297)

    def test9(self):
        costs = [12, 10, 8, 12, 14, 12]
        links = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]
        self.assertEqual(balancedForest(costs, links), 4)
        # Report how many symmetric/duplicate edges the search skipped.
        print('SKIPPED', skipped)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Node(object):
"""docstring for Node"""
def __init__(self, value, indentifier):
super(Node, self).__init__()
self.value = value
self.identifier = indentifier
self.next = None
# Module-level debug counter of skipped edges; preserved so the test suite's
# final ``print('SKIPPED', skipped)`` report keeps working.
skipped = globals().get('skipped', 0)


class Graph(object):
    """Undirected vertex-weighted graph stored as linked adjacency lists.

    Solves the "balanced forest" problem: cut two edges of the tree so that
    two of the three resulting components can be made equal in total weight,
    returning the minimal weight that must be added to achieve the balance
    (or -1 when impossible).
    """

    def __init__(self, values, edges):
        """Store vertex weights and the 1-based edge list.

        The adjacency structure itself is built lazily by
        :meth:`build_adjacency_list`.
        """
        super(Graph, self).__init__()
        self.node_values = values
        self.vertices = len(values)
        self.edges = edges
        self.graph = [None] * self.vertices  # head Node of each adjacency chain
        self.grand_sum = sum(self.node_values)  # total weight of the whole tree

    def build_adjacency_list(self):
        """Populate ``self.graph`` with a Node chain per vertex (both ends)."""
        for edge in self.edges:
            fro = edge[0] - 1  # input edges are 1-based; storage is 0-based
            to = edge[1] - 1
            node = Node(self.node_values[to], to)
            node.next = self.graph[fro]
            self.graph[fro] = node
            # Undirected graph: mirror the edge on the destination side too.
            node = Node(self.node_values[fro], fro)
            node.next = self.graph[to]
            self.graph[to] = node

    def print_graph(self):
        """Debug dump of every adjacency chain."""
        for i in range(self.vertices):
            node = self.graph[i]
            print('Vertex:', i)
            while node is not None:
                print(node.value, node.identifier)
                node = node.next
            print('<<' * 20)

    def get_tree_nodes(self, start_node, nodes, edge, total):
        """Depth-first collect every vertex reachable from ``start_node``.

        The walk never crosses the cut endpoints ``edge[0]``/``edge[2]``.
        Visited vertex ids are appended to ``nodes`` (also the visited set)
        and their weights accumulated into ``total[0]`` (one-element list
        used as an in/out accumulator).  Returns ``nodes``.
        """
        if start_node is None:
            return nodes
        while start_node is not None:
            ident = start_node.identifier
            if ident == edge[0] or ident == edge[2] or ident in nodes:
                print('skipping ', ident)
            else:
                print('adding ', ident)
                nodes.append(ident)
                total[0] += start_node.value
                next_n = self.graph[ident]
                self.get_tree_nodes(next_n, nodes, edge, total)
            start_node = start_node.next
        return nodes

    def split_and_compute_tree_sum(self, t1_nodes=None, t2_nodes=None,
                                   edge=None, ton=False):
        """Cut ``edge`` and return ``(remaining_total, subtree_total)``.

        ``t2_nodes`` receives the subtree hanging off ``edge[1]``;
        ``t1_nodes`` receives the complement (unless ``ton`` suppresses it).
        BUG FIX: the defaults used to be mutable lists (``[]``), which Python
        shares across calls; they are now ``None`` sentinels expanded per call.
        """
        if t1_nodes is None:
            t1_nodes = []
        if t2_nodes is None:
            t2_nodes = []
        if edge is None:
            edge = []
        t1_total = 0
        t2_total = 0
        total = [0]
        start_node = self.graph[edge[1]]
        if start_node.next is not None:
            t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)
        if not t2_nodes and edge[1] != edge[2]:
            # Leaf subtree: it contains just the cut edge's far endpoint.
            t2_nodes.append(edge[1])
            total[0] += self.node_values[edge[1]]
        t2_total = total[0]
        if not ton and t2_total < self.grand_sum / 2:
            # Complement side: every vertex not in the subtree.
            for i in range(self.vertices):
                if i not in t2_nodes:
                    t1_nodes.append(i)
            t1_total = self.grand_sum - t2_total
        print('t2_nodes', t2_nodes)
        print('t2_total', t2_total)
        return t1_total, t2_total

    def check(self, tree1_total, tree2_total, tree3_total):
        """Return ``(cost, True)`` when two components tie as the maximum.

        ``cost`` is the weight to add to the smallest component to match the
        tied maximum; otherwise ``(-1, False)``.
        """
        print('###' * 10)
        print('FINAL tree1_total: ', tree1_total)
        print('FINAL tree2_total: ', tree2_total)
        print('FINAL tree3_total: ', tree3_total)
        print('###' * 10)
        if (tree1_total == tree2_total or tree1_total == tree3_total or
                tree2_total == tree3_total):
            mx = max(tree1_total, tree2_total, tree3_total)
            if [tree1_total, tree2_total, tree3_total].count(mx) >= 2:
                return mx - min(tree1_total, tree2_total, tree3_total), True
        return -1, False

    def split_tree_into_two(self):
        """Try every first cut edge; return the minimal balancing cost or -1."""
        ret = -1
        found = False
        global skipped
        for entry in range(self.vertices):
            n = self.graph[entry]
            while n is not None:
                edge = [entry, n.identifier, -1]
                if n.identifier <= entry:
                    # Each undirected edge is stored twice; process only the
                    # (low, high) orientation once.
                    n = n.next
                    skipped += 1
                    continue
                print('##MAIN##. SPLIT POINT EDGE: ', edge)
                tree1_nodes = []
                tree2_nodes = []
                tree1_total, tree2_total = self.split_and_compute_tree_sum(
                    tree1_nodes, tree2_nodes, edge)
                print('ORIGINALS: ', tree1_total, tree2_total)
                if (min(tree1_total, tree2_total) < self.grand_sum / 3 or
                        max(tree1_total, tree2_total) > 2 * self.grand_sum / 3):
                    # Too lopsided: no second cut can balance this split.
                    n = n.next
                    continue
                if tree1_total > tree2_total:
                    ret, found = self.find_third_tree(
                        tree1_total, tree2_total, tree1_nodes, 1, edge[1])
                elif tree2_total > tree1_total:
                    ret, found = self.find_third_tree(
                        tree1_total, tree2_total, tree2_nodes, 2, edge[0])
                else:
                    # Both halves already equal: add a zero-weight third node.
                    ret = tree1_total
                    found = True
                if found:
                    break
                n = n.next
            if found:
                break
        return ret

    def find_third_tree(self, tree1_total, tree2_total, nodes, t=1, m=0):
        """Search the larger component for a second cut that balances all three.

        ``nodes`` lists the vertices of the component to re-cut, ``t`` tells
        which first-cut component it is (1 or 2), and ``m`` is the vertex on
        the far side of the first cut that the walk must not cross.
        Returns ``(cost, found)``.
        """
        ret, found = -1, False
        global skipped
        consumed = []
        for i in range(len(nodes)):
            skip_n = nodes[i]
            consumed.append(skip_n)
            n = self.graph[skip_n]
            while n is not None:
                if n.identifier in consumed:
                    n = n.next
                    skipped += 1
                    continue
                edge = [skip_n, n.identifier, m]
                print('2. SPLIT POINT EDGE: ', edge)
                print('tree1_total', tree1_total)
                tree3_nodes = []
                temp_nodes = []
                _, tree3_total = self.split_and_compute_tree_sum(
                    temp_nodes, tree3_nodes, edge, True)
                if t == 1:
                    ret, found = self.check(
                        tree1_total - tree3_total, tree2_total, tree3_total)
                elif t == 2:
                    ret, found = self.check(
                        tree1_total, tree2_total - tree3_total, tree3_total)
                if found:
                    break
                n = n.next
            if found:
                break
        return ret, found
def balancedForest(values, edges):
    """Build the graph for (values, edges) and return the balancing cost or -1."""
    forest = Graph(values, edges)
    forest.build_adjacency_list()
    forest.print_graph()
    return forest.split_tree_into_two()
import unittest
class BalancedForestTest(unittest.TestCase):
    """End-to-end checks of balancedForest against known answers."""

    def test1(self):
        weights = [1, 1, 1, 18, 10, 11, 5, 6]
        links = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]
        self.assertEqual(balancedForest(weights, links), 10)

    def test2(self):
        weights = [12, 7, 11, 17, 20, 10]
        links = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
        self.assertEqual(balancedForest(weights, links), 13)

    def test3(self):
        weights = [15, 12, 8, 14, 13]
        links = [[4, 5], [1, 2], [1, 3], [1, 4]]
        self.assertEqual(balancedForest(weights, links), 19)

    def test4(self):
        weights = [1, 2, 2, 1, 1]
        links = [[1, 2], [1, 3], [3, 5], [1, 4]]
        self.assertEqual(balancedForest(weights, links), 2)

    def test5(self):
        # Three nodes that cannot be balanced at all.
        weights = [1, 3, 5]
        links = [[1, 3], [1, 2]]
        self.assertEqual(balancedForest(weights, links), -1)

    def test6(self):
        weights = [7, 7, 4, 1, 1, 1]
        links = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]
        self.assertEqual(balancedForest(weights, links), -1)

    def test7(self):
        # Already balanced: cost is zero.
        weights = [1, 3, 4, 4]
        links = [(1, 2), (1, 3), (1, 4)]
        self.assertEqual(balancedForest(weights, links), 0)

    def test8(self):
        weights = [100, 99, 98, 100, 99, 98]
        links = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
        self.assertEqual(balancedForest(weights, links), 297)

    def test9(self):
        weights = [12, 10, 8, 12, 14, 12]
        links = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]
        self.assertEqual(balancedForest(weights, links), 4)
    # NOTE(review): runs at class-creation time, before any test executes,
    # so it reports the counter's value at import — presumably intentional
    # debug output; confirm before relying on it.
    print('SKIPPED', skipped)
# Run the unit-test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
<|reserved_special_token_1|>
skipped = 0
class Node(object):
"""docstring for Node"""
def __init__(self, value, indentifier):
super(Node, self).__init__()
self.value = value
self.identifier = indentifier
self.next = None
class Graph(object):
"""docstring for Graph"""
def __init__(self, values, edges):
super(Graph, self).__init__()
self.node_values = values
self.vertices = len(values)
self.edges = edges
self.graph = [None] * self.vertices
# self.edges.sort()
self.grand_sum = sum(self.node_values)
def build_adjacency_list(self):
for edge in self.edges:
fro = edge[0] - 1
to = edge[1]- 1
# Adding the node to the source node
node = Node(self.node_values[to], to)
node.next = self.graph[fro]
self.graph[fro] = node
# Adding the source node to the destination as
# it is the undirected graph
node = Node(self.node_values[fro], fro)
node.next = self.graph[to]
self.graph[to] = node
def print_graph(self):
for i in range(self.vertices):
node = self.graph[i]
print("Vertex:", i)
while(node!=None):
print(node.value, node.identifier)
node = node.next
print("<<"*20)
def get_tree_nodes(self, start_node, nodes, edge, total):
if(start_node==None):
return nodes
while(start_node!=None):
if(start_node.identifier==edge[0] or start_node.identifier==edge[2] or (start_node.identifier in nodes)):
print("skipping ", start_node.identifier)
else:
print("adding ", start_node.identifier)
nodes.append(start_node.identifier)
total[0] += start_node.value
next_n = self.graph[start_node.identifier]
self.get_tree_nodes(next_n, nodes, edge, total)
start_node = start_node.next
return nodes
def split_and_compute_tree_sum(self, t1_nodes = [], t2_nodes = [], edge=[], ton = False):
t1_total = 0
t2_total = 0
total = [0]
start_node = self.graph[edge[1]]
if(start_node.next != None):
t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)
if(len(t2_nodes)==0 and edge[1]!=edge[2]):
t2_nodes.append(edge[1])
total[0] += self.node_values[edge[1]]
t2_total = total[0]
if(not ton and t2_total < self.grand_sum/2):
for i in range(self.vertices):
if(i not in t2_nodes):
t1_nodes.append(i)
t1_total = self.grand_sum - t2_total
print("t2_nodes", t2_nodes)
print("t2_total", t2_total)
return t1_total, t2_total
def check(self, tree1_total, tree2_total, tree3_total):
print("###"*10)
print("FINAL tree1_total: ", tree1_total)
print("FINAL tree2_total: ", tree2_total)
print("FINAL tree3_total: ", tree3_total)
print("###"*10)
if (tree1_total == tree2_total) or (tree1_total == tree3_total) or (tree2_total == tree3_total):
mx = max(tree1_total, tree2_total, tree3_total)
if([tree1_total, tree2_total, tree3_total].count(mx) >= 2):
ret = mx - min(tree1_total, tree2_total, tree3_total)
return ret, True
return -1, False
def split_tree_into_two(self):
ret = -1
found = False
global skipped
for entry in range(self.vertices):
tree1_nodes = []
tree2_nodes = []
tree3_nodes = []
temp_nodes = []
n = self.graph[entry]
while(n!=None):
edge = [entry, n.identifier, -1]
if(n.identifier <= entry):
n = n.next
skipped += 1
continue
print("##MAIN##. SPLIT POINT EDGE: ", edge)
tree1_nodes = []
tree2_nodes = []
tree1_total, tree2_total = self.split_and_compute_tree_sum(tree1_nodes, tree2_nodes, edge)
print("ORIGINALS: ", tree1_total, tree2_total)
if(min(tree1_total, tree2_total) < self.grand_sum/3 or (max(tree1_total, tree2_total) > (2*self.grand_sum)/3)):
n = n.next
continue
if(tree1_total > tree2_total):
ret, found = self.find_third_tree(tree1_total, tree2_total,tree1_nodes, 1, edge[1])
elif(tree2_total > tree1_total):
ret, found = self.find_third_tree(tree1_total, tree2_total,tree2_nodes, 2, edge[0])
elif (tree1_total == tree2_total):
ret = tree1_total
found = True
else:
found = True
if(found):
break
n = n.next
if(found):
break
return ret
def find_third_tree(self, tree1_total, tree2_total, nodes, t = 1, m=0):
ret , found = -1, False
global skipped
consumed = []
for i in range(len(nodes)):
skip_n = nodes[i]
consumed.append(skip_n)
n = self.graph[skip_n]
while(n!=None):
if(n.identifier in consumed):
n = n.next
skipped += 1
continue
edge = [skip_n, n.identifier, m]
print("2. SPLIT POINT EDGE: ", edge)
print("tree1_total",tree1_total)
tree3_nodes = []
temp_nodes = []
_,tree3_total = self.split_and_compute_tree_sum(temp_nodes, tree3_nodes, edge, True)
if(t==1):
ret , found = self.check(tree1_total - tree3_total, tree2_total, tree3_total)
elif(t==2):
ret , found = self.check(tree1_total, tree2_total - tree3_total, tree3_total)
if(found):
break
n = n.next
if(found):
break
return ret, found
def balancedForest(values, edges):
mygraph = Graph(values, edges)
mygraph.build_adjacency_list()
mygraph.print_graph()
return mygraph.split_tree_into_two()
import unittest
class BalancedForestTest(unittest.TestCase):
def test1(self):
expected = 10
c = [1, 1, 1, 18, 10, 11, 5, 6]
edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]
self.assertEqual(balancedForest(c, edges), expected)
def test2(self):
expected = 13
c = [12, 7, 11, 17, 20, 10]
edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
self.assertEqual(balancedForest(c, edges), expected)
def test3(self):
expected = 19
c = [15, 12, 8, 14, 13]
edges = [[4,5],[1,2],[1,3],[1,4]]
self.assertEqual(balancedForest(c, edges), expected)
def test4(self):
expected = 2
c = [1,2,2,1,1]
edges = [[1,2],[1,3],[3,5],[1,4]]
self.assertEqual(balancedForest(c, edges), expected)
def test5(self):
expected = -1
c = [1,3,5]
edges = [[1,3],[1,2]]
self.assertEqual(balancedForest(c, edges), expected)
def test6(self):
expected = -1
c = [7, 7, 4, 1, 1, 1]
edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]
self.assertEqual(balancedForest(c, edges), expected)
def test7(self):
expected = 0
c = [1, 3, 4, 4]
edges = [(1, 2), (1, 3), (1, 4)]
self.assertEqual(balancedForest(c, edges), expected)
def test8(self):
expected = 297
c = [100, 99, 98, 100, 99, 98]
edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
self.assertEqual(balancedForest(c, edges), expected)
def test9(self):
expected = 4
c = [12, 10, 8, 12, 14, 12]
edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]
self.assertEqual(balancedForest(c, edges), expected)
print("SKIPPED", skipped)
if __name__ == '__main__':
unittest.main()
|
flexible
|
{
"blob_id": "e361215c44305f1ecc1cbe9e19345ee08bdd30f5",
"index": 2393,
"step-1": "<mask token>\n\n\nclass BalancedForestTest(unittest.TestCase):\n\n def test1(self):\n expected = 10\n c = [1, 1, 1, 18, 10, 11, 5, 6]\n edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]\n self.assertEqual(balancedForest(c, edges), expected)\n <mask token>\n\n def test3(self):\n expected = 19\n c = [15, 12, 8, 14, 13]\n edges = [[4, 5], [1, 2], [1, 3], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n <mask token>\n\n def test5(self):\n expected = -1\n c = [1, 3, 5]\n edges = [[1, 3], [1, 2]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test6(self):\n expected = -1\n c = [7, 7, 4, 1, 1, 1]\n edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test7(self):\n expected = 0\n c = [1, 3, 4, 4]\n edges = [(1, 2), (1, 3), (1, 4)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test8(self):\n expected = 297\n c = [100, 99, 98, 100, 99, 98]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Node(object):\n <mask token>\n\n def __init__(self, value, indentifier):\n super(Node, self).__init__()\n self.value = value\n self.identifier = indentifier\n self.next = None\n\n\nclass Graph(object):\n \"\"\"docstring for Graph\"\"\"\n\n def __init__(self, values, edges):\n super(Graph, self).__init__()\n self.node_values = values\n self.vertices = len(values)\n self.edges = edges\n self.graph = [None] * self.vertices\n self.grand_sum = sum(self.node_values)\n\n def build_adjacency_list(self):\n for edge in self.edges:\n fro = edge[0] - 1\n to = edge[1] - 1\n node = Node(self.node_values[to], to)\n node.next = self.graph[fro]\n self.graph[fro] = node\n node = Node(self.node_values[fro], fro)\n node.next = self.graph[to]\n self.graph[to] = node\n\n def print_graph(self):\n for i in range(self.vertices):\n node = self.graph[i]\n print('Vertex:', i)\n while node != None:\n print(node.value, node.identifier)\n node = node.next\n print('<<' * 20)\n\n def get_tree_nodes(self, start_node, nodes, edge, total):\n if start_node == None:\n return nodes\n while start_node != None:\n if start_node.identifier == edge[0\n ] or start_node.identifier == edge[2\n ] or start_node.identifier in nodes:\n print('skipping ', start_node.identifier)\n else:\n print('adding ', start_node.identifier)\n nodes.append(start_node.identifier)\n total[0] += start_node.value\n next_n = self.graph[start_node.identifier]\n self.get_tree_nodes(next_n, nodes, edge, total)\n start_node = start_node.next\n return nodes\n\n def split_and_compute_tree_sum(self, t1_nodes=[], t2_nodes=[], edge=[],\n ton=False):\n t1_total = 0\n t2_total = 0\n total = [0]\n start_node = self.graph[edge[1]]\n if start_node.next != None:\n t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)\n if len(t2_nodes) == 0 and edge[1] != edge[2]:\n t2_nodes.append(edge[1])\n total[0] += self.node_values[edge[1]]\n t2_total = total[0]\n if not ton and t2_total < self.grand_sum / 2:\n for 
i in range(self.vertices):\n if i not in t2_nodes:\n t1_nodes.append(i)\n t1_total = self.grand_sum - t2_total\n print('t2_nodes', t2_nodes)\n print('t2_total', t2_total)\n return t1_total, t2_total\n\n def check(self, tree1_total, tree2_total, tree3_total):\n print('###' * 10)\n print('FINAL tree1_total: ', tree1_total)\n print('FINAL tree2_total: ', tree2_total)\n print('FINAL tree3_total: ', tree3_total)\n print('###' * 10)\n if (tree1_total == tree2_total or tree1_total == tree3_total or \n tree2_total == tree3_total):\n mx = max(tree1_total, tree2_total, tree3_total)\n if [tree1_total, tree2_total, tree3_total].count(mx) >= 2:\n ret = mx - min(tree1_total, tree2_total, tree3_total)\n return ret, True\n return -1, False\n\n def split_tree_into_two(self):\n ret = -1\n found = False\n global skipped\n for entry in range(self.vertices):\n tree1_nodes = []\n tree2_nodes = []\n tree3_nodes = []\n temp_nodes = []\n n = self.graph[entry]\n while n != None:\n edge = [entry, n.identifier, -1]\n if n.identifier <= entry:\n n = n.next\n skipped += 1\n continue\n print('##MAIN##. 
SPLIT POINT EDGE: ', edge)\n tree1_nodes = []\n tree2_nodes = []\n tree1_total, tree2_total = self.split_and_compute_tree_sum(\n tree1_nodes, tree2_nodes, edge)\n print('ORIGINALS: ', tree1_total, tree2_total)\n if min(tree1_total, tree2_total) < self.grand_sum / 3 or max(\n tree1_total, tree2_total) > 2 * self.grand_sum / 3:\n n = n.next\n continue\n if tree1_total > tree2_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree1_nodes, 1, edge[1])\n elif tree2_total > tree1_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree2_nodes, 2, edge[0])\n elif tree1_total == tree2_total:\n ret = tree1_total\n found = True\n else:\n found = True\n if found:\n break\n n = n.next\n if found:\n break\n return ret\n\n def find_third_tree(self, tree1_total, tree2_total, nodes, t=1, m=0):\n ret, found = -1, False\n global skipped\n consumed = []\n for i in range(len(nodes)):\n skip_n = nodes[i]\n consumed.append(skip_n)\n n = self.graph[skip_n]\n while n != None:\n if n.identifier in consumed:\n n = n.next\n skipped += 1\n continue\n edge = [skip_n, n.identifier, m]\n print('2. 
SPLIT POINT EDGE: ', edge)\n print('tree1_total', tree1_total)\n tree3_nodes = []\n temp_nodes = []\n _, tree3_total = self.split_and_compute_tree_sum(temp_nodes,\n tree3_nodes, edge, True)\n if t == 1:\n ret, found = self.check(tree1_total - tree3_total,\n tree2_total, tree3_total)\n elif t == 2:\n ret, found = self.check(tree1_total, tree2_total -\n tree3_total, tree3_total)\n if found:\n break\n n = n.next\n if found:\n break\n return ret, found\n\n\n<mask token>\n\n\nclass BalancedForestTest(unittest.TestCase):\n\n def test1(self):\n expected = 10\n c = [1, 1, 1, 18, 10, 11, 5, 6]\n edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test2(self):\n expected = 13\n c = [12, 7, 11, 17, 20, 10]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test3(self):\n expected = 19\n c = [15, 12, 8, 14, 13]\n edges = [[4, 5], [1, 2], [1, 3], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test4(self):\n expected = 2\n c = [1, 2, 2, 1, 1]\n edges = [[1, 2], [1, 3], [3, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test5(self):\n expected = -1\n c = [1, 3, 5]\n edges = [[1, 3], [1, 2]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test6(self):\n expected = -1\n c = [7, 7, 4, 1, 1, 1]\n edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test7(self):\n expected = 0\n c = [1, 3, 4, 4]\n edges = [(1, 2), (1, 3), (1, 4)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test8(self):\n expected = 297\n c = [100, 99, 98, 100, 99, 98]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test9(self):\n expected = 4\n c = [12, 10, 8, 12, 14, 12]\n edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]\n self.assertEqual(balancedForest(c, edges), 
expected)\n print('SKIPPED', skipped)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Node(object):\n \"\"\"docstring for Node\"\"\"\n\n def __init__(self, value, indentifier):\n super(Node, self).__init__()\n self.value = value\n self.identifier = indentifier\n self.next = None\n\n\nclass Graph(object):\n \"\"\"docstring for Graph\"\"\"\n\n def __init__(self, values, edges):\n super(Graph, self).__init__()\n self.node_values = values\n self.vertices = len(values)\n self.edges = edges\n self.graph = [None] * self.vertices\n self.grand_sum = sum(self.node_values)\n\n def build_adjacency_list(self):\n for edge in self.edges:\n fro = edge[0] - 1\n to = edge[1] - 1\n node = Node(self.node_values[to], to)\n node.next = self.graph[fro]\n self.graph[fro] = node\n node = Node(self.node_values[fro], fro)\n node.next = self.graph[to]\n self.graph[to] = node\n\n def print_graph(self):\n for i in range(self.vertices):\n node = self.graph[i]\n print('Vertex:', i)\n while node != None:\n print(node.value, node.identifier)\n node = node.next\n print('<<' * 20)\n\n def get_tree_nodes(self, start_node, nodes, edge, total):\n if start_node == None:\n return nodes\n while start_node != None:\n if start_node.identifier == edge[0\n ] or start_node.identifier == edge[2\n ] or start_node.identifier in nodes:\n print('skipping ', start_node.identifier)\n else:\n print('adding ', start_node.identifier)\n nodes.append(start_node.identifier)\n total[0] += start_node.value\n next_n = self.graph[start_node.identifier]\n self.get_tree_nodes(next_n, nodes, edge, total)\n start_node = start_node.next\n return nodes\n\n def split_and_compute_tree_sum(self, t1_nodes=[], t2_nodes=[], edge=[],\n ton=False):\n t1_total = 0\n t2_total = 0\n total = [0]\n start_node = self.graph[edge[1]]\n if start_node.next != None:\n t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)\n if len(t2_nodes) == 0 and edge[1] != edge[2]:\n t2_nodes.append(edge[1])\n total[0] += self.node_values[edge[1]]\n t2_total = total[0]\n if not ton and t2_total < 
self.grand_sum / 2:\n for i in range(self.vertices):\n if i not in t2_nodes:\n t1_nodes.append(i)\n t1_total = self.grand_sum - t2_total\n print('t2_nodes', t2_nodes)\n print('t2_total', t2_total)\n return t1_total, t2_total\n\n def check(self, tree1_total, tree2_total, tree3_total):\n print('###' * 10)\n print('FINAL tree1_total: ', tree1_total)\n print('FINAL tree2_total: ', tree2_total)\n print('FINAL tree3_total: ', tree3_total)\n print('###' * 10)\n if (tree1_total == tree2_total or tree1_total == tree3_total or \n tree2_total == tree3_total):\n mx = max(tree1_total, tree2_total, tree3_total)\n if [tree1_total, tree2_total, tree3_total].count(mx) >= 2:\n ret = mx - min(tree1_total, tree2_total, tree3_total)\n return ret, True\n return -1, False\n\n def split_tree_into_two(self):\n ret = -1\n found = False\n global skipped\n for entry in range(self.vertices):\n tree1_nodes = []\n tree2_nodes = []\n tree3_nodes = []\n temp_nodes = []\n n = self.graph[entry]\n while n != None:\n edge = [entry, n.identifier, -1]\n if n.identifier <= entry:\n n = n.next\n skipped += 1\n continue\n print('##MAIN##. 
SPLIT POINT EDGE: ', edge)\n tree1_nodes = []\n tree2_nodes = []\n tree1_total, tree2_total = self.split_and_compute_tree_sum(\n tree1_nodes, tree2_nodes, edge)\n print('ORIGINALS: ', tree1_total, tree2_total)\n if min(tree1_total, tree2_total) < self.grand_sum / 3 or max(\n tree1_total, tree2_total) > 2 * self.grand_sum / 3:\n n = n.next\n continue\n if tree1_total > tree2_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree1_nodes, 1, edge[1])\n elif tree2_total > tree1_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree2_nodes, 2, edge[0])\n elif tree1_total == tree2_total:\n ret = tree1_total\n found = True\n else:\n found = True\n if found:\n break\n n = n.next\n if found:\n break\n return ret\n\n def find_third_tree(self, tree1_total, tree2_total, nodes, t=1, m=0):\n ret, found = -1, False\n global skipped\n consumed = []\n for i in range(len(nodes)):\n skip_n = nodes[i]\n consumed.append(skip_n)\n n = self.graph[skip_n]\n while n != None:\n if n.identifier in consumed:\n n = n.next\n skipped += 1\n continue\n edge = [skip_n, n.identifier, m]\n print('2. 
SPLIT POINT EDGE: ', edge)\n print('tree1_total', tree1_total)\n tree3_nodes = []\n temp_nodes = []\n _, tree3_total = self.split_and_compute_tree_sum(temp_nodes,\n tree3_nodes, edge, True)\n if t == 1:\n ret, found = self.check(tree1_total - tree3_total,\n tree2_total, tree3_total)\n elif t == 2:\n ret, found = self.check(tree1_total, tree2_total -\n tree3_total, tree3_total)\n if found:\n break\n n = n.next\n if found:\n break\n return ret, found\n\n\ndef balancedForest(values, edges):\n mygraph = Graph(values, edges)\n mygraph.build_adjacency_list()\n mygraph.print_graph()\n return mygraph.split_tree_into_two()\n\n\n<mask token>\n\n\nclass BalancedForestTest(unittest.TestCase):\n\n def test1(self):\n expected = 10\n c = [1, 1, 1, 18, 10, 11, 5, 6]\n edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test2(self):\n expected = 13\n c = [12, 7, 11, 17, 20, 10]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test3(self):\n expected = 19\n c = [15, 12, 8, 14, 13]\n edges = [[4, 5], [1, 2], [1, 3], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test4(self):\n expected = 2\n c = [1, 2, 2, 1, 1]\n edges = [[1, 2], [1, 3], [3, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test5(self):\n expected = -1\n c = [1, 3, 5]\n edges = [[1, 3], [1, 2]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test6(self):\n expected = -1\n c = [7, 7, 4, 1, 1, 1]\n edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test7(self):\n expected = 0\n c = [1, 3, 4, 4]\n edges = [(1, 2), (1, 3), (1, 4)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test8(self):\n expected = 297\n c = [100, 99, 98, 100, 99, 98]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), 
expected)\n\n def test9(self):\n expected = 4\n c = [12, 10, 8, 12, 14, 12]\n edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]\n self.assertEqual(balancedForest(c, edges), expected)\n print('SKIPPED', skipped)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Node(object):\n \"\"\"docstring for Node\"\"\"\n\n def __init__(self, value, indentifier):\n super(Node, self).__init__()\n self.value = value\n self.identifier = indentifier\n self.next = None\n\n\nclass Graph(object):\n \"\"\"docstring for Graph\"\"\"\n\n def __init__(self, values, edges):\n super(Graph, self).__init__()\n self.node_values = values\n self.vertices = len(values)\n self.edges = edges\n self.graph = [None] * self.vertices\n self.grand_sum = sum(self.node_values)\n\n def build_adjacency_list(self):\n for edge in self.edges:\n fro = edge[0] - 1\n to = edge[1] - 1\n node = Node(self.node_values[to], to)\n node.next = self.graph[fro]\n self.graph[fro] = node\n node = Node(self.node_values[fro], fro)\n node.next = self.graph[to]\n self.graph[to] = node\n\n def print_graph(self):\n for i in range(self.vertices):\n node = self.graph[i]\n print('Vertex:', i)\n while node != None:\n print(node.value, node.identifier)\n node = node.next\n print('<<' * 20)\n\n def get_tree_nodes(self, start_node, nodes, edge, total):\n if start_node == None:\n return nodes\n while start_node != None:\n if start_node.identifier == edge[0\n ] or start_node.identifier == edge[2\n ] or start_node.identifier in nodes:\n print('skipping ', start_node.identifier)\n else:\n print('adding ', start_node.identifier)\n nodes.append(start_node.identifier)\n total[0] += start_node.value\n next_n = self.graph[start_node.identifier]\n self.get_tree_nodes(next_n, nodes, edge, total)\n start_node = start_node.next\n return nodes\n\n def split_and_compute_tree_sum(self, t1_nodes=[], t2_nodes=[], edge=[],\n ton=False):\n t1_total = 0\n t2_total = 0\n total = [0]\n start_node = self.graph[edge[1]]\n if start_node.next != None:\n t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)\n if len(t2_nodes) == 0 and edge[1] != edge[2]:\n t2_nodes.append(edge[1])\n total[0] += self.node_values[edge[1]]\n t2_total = total[0]\n if not ton and t2_total < 
self.grand_sum / 2:\n for i in range(self.vertices):\n if i not in t2_nodes:\n t1_nodes.append(i)\n t1_total = self.grand_sum - t2_total\n print('t2_nodes', t2_nodes)\n print('t2_total', t2_total)\n return t1_total, t2_total\n\n def check(self, tree1_total, tree2_total, tree3_total):\n print('###' * 10)\n print('FINAL tree1_total: ', tree1_total)\n print('FINAL tree2_total: ', tree2_total)\n print('FINAL tree3_total: ', tree3_total)\n print('###' * 10)\n if (tree1_total == tree2_total or tree1_total == tree3_total or \n tree2_total == tree3_total):\n mx = max(tree1_total, tree2_total, tree3_total)\n if [tree1_total, tree2_total, tree3_total].count(mx) >= 2:\n ret = mx - min(tree1_total, tree2_total, tree3_total)\n return ret, True\n return -1, False\n\n def split_tree_into_two(self):\n ret = -1\n found = False\n global skipped\n for entry in range(self.vertices):\n tree1_nodes = []\n tree2_nodes = []\n tree3_nodes = []\n temp_nodes = []\n n = self.graph[entry]\n while n != None:\n edge = [entry, n.identifier, -1]\n if n.identifier <= entry:\n n = n.next\n skipped += 1\n continue\n print('##MAIN##. 
SPLIT POINT EDGE: ', edge)\n tree1_nodes = []\n tree2_nodes = []\n tree1_total, tree2_total = self.split_and_compute_tree_sum(\n tree1_nodes, tree2_nodes, edge)\n print('ORIGINALS: ', tree1_total, tree2_total)\n if min(tree1_total, tree2_total) < self.grand_sum / 3 or max(\n tree1_total, tree2_total) > 2 * self.grand_sum / 3:\n n = n.next\n continue\n if tree1_total > tree2_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree1_nodes, 1, edge[1])\n elif tree2_total > tree1_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree2_nodes, 2, edge[0])\n elif tree1_total == tree2_total:\n ret = tree1_total\n found = True\n else:\n found = True\n if found:\n break\n n = n.next\n if found:\n break\n return ret\n\n def find_third_tree(self, tree1_total, tree2_total, nodes, t=1, m=0):\n ret, found = -1, False\n global skipped\n consumed = []\n for i in range(len(nodes)):\n skip_n = nodes[i]\n consumed.append(skip_n)\n n = self.graph[skip_n]\n while n != None:\n if n.identifier in consumed:\n n = n.next\n skipped += 1\n continue\n edge = [skip_n, n.identifier, m]\n print('2. 
SPLIT POINT EDGE: ', edge)\n print('tree1_total', tree1_total)\n tree3_nodes = []\n temp_nodes = []\n _, tree3_total = self.split_and_compute_tree_sum(temp_nodes,\n tree3_nodes, edge, True)\n if t == 1:\n ret, found = self.check(tree1_total - tree3_total,\n tree2_total, tree3_total)\n elif t == 2:\n ret, found = self.check(tree1_total, tree2_total -\n tree3_total, tree3_total)\n if found:\n break\n n = n.next\n if found:\n break\n return ret, found\n\n\ndef balancedForest(values, edges):\n mygraph = Graph(values, edges)\n mygraph.build_adjacency_list()\n mygraph.print_graph()\n return mygraph.split_tree_into_two()\n\n\n<mask token>\n\n\nclass BalancedForestTest(unittest.TestCase):\n\n def test1(self):\n expected = 10\n c = [1, 1, 1, 18, 10, 11, 5, 6]\n edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test2(self):\n expected = 13\n c = [12, 7, 11, 17, 20, 10]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test3(self):\n expected = 19\n c = [15, 12, 8, 14, 13]\n edges = [[4, 5], [1, 2], [1, 3], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test4(self):\n expected = 2\n c = [1, 2, 2, 1, 1]\n edges = [[1, 2], [1, 3], [3, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test5(self):\n expected = -1\n c = [1, 3, 5]\n edges = [[1, 3], [1, 2]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test6(self):\n expected = -1\n c = [7, 7, 4, 1, 1, 1]\n edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test7(self):\n expected = 0\n c = [1, 3, 4, 4]\n edges = [(1, 2), (1, 3), (1, 4)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test8(self):\n expected = 297\n c = [100, 99, 98, 100, 99, 98]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), 
expected)\n\n def test9(self):\n expected = 4\n c = [12, 10, 8, 12, 14, 12]\n edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]\n self.assertEqual(balancedForest(c, edges), expected)\n print('SKIPPED', skipped)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "skipped = 0\n\nclass Node(object):\n \"\"\"docstring for Node\"\"\"\n def __init__(self, value, indentifier):\n super(Node, self).__init__()\n self.value = value\n self.identifier = indentifier\n self.next = None\n\n\nclass Graph(object):\n \"\"\"docstring for Graph\"\"\"\n def __init__(self, values, edges):\n super(Graph, self).__init__()\n self.node_values = values\n self.vertices = len(values)\n self.edges = edges\n self.graph = [None] * self.vertices\n # self.edges.sort()\n self.grand_sum = sum(self.node_values)\n\n def build_adjacency_list(self):\n for edge in self.edges:\n fro = edge[0] - 1\n to = edge[1]- 1\n\n # Adding the node to the source node\n node = Node(self.node_values[to], to)\n node.next = self.graph[fro]\n self.graph[fro] = node\n\n # Adding the source node to the destination as \n # it is the undirected graph \n node = Node(self.node_values[fro], fro)\n node.next = self.graph[to]\n self.graph[to] = node\n\n \n def print_graph(self):\n for i in range(self.vertices):\n node = self.graph[i]\n print(\"Vertex:\", i)\n while(node!=None):\n print(node.value, node.identifier)\n node = node.next\n print(\"<<\"*20)\n\n def get_tree_nodes(self, start_node, nodes, edge, total):\n\n if(start_node==None):\n return nodes\n\n while(start_node!=None):\n if(start_node.identifier==edge[0] or start_node.identifier==edge[2] or (start_node.identifier in nodes)):\n print(\"skipping \", start_node.identifier)\n else:\n print(\"adding \", start_node.identifier)\n nodes.append(start_node.identifier)\n total[0] += start_node.value\n next_n = self.graph[start_node.identifier]\n self.get_tree_nodes(next_n, nodes, edge, total)\n start_node = start_node.next\n return nodes\n\n\n def split_and_compute_tree_sum(self, t1_nodes = [], t2_nodes = [], edge=[], ton = False):\n t1_total = 0\n t2_total = 0\n total = [0]\n \n start_node = self.graph[edge[1]]\n if(start_node.next != None):\n t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)\n\n 
if(len(t2_nodes)==0 and edge[1]!=edge[2]):\n t2_nodes.append(edge[1])\n total[0] += self.node_values[edge[1]]\n\n t2_total = total[0]\n if(not ton and t2_total < self.grand_sum/2):\n for i in range(self.vertices):\n if(i not in t2_nodes):\n t1_nodes.append(i)\n\n t1_total = self.grand_sum - t2_total\n\n print(\"t2_nodes\", t2_nodes)\n print(\"t2_total\", t2_total)\n\n return t1_total, t2_total\n\n\n def check(self, tree1_total, tree2_total, tree3_total):\n print(\"###\"*10)\n print(\"FINAL tree1_total: \", tree1_total)\n print(\"FINAL tree2_total: \", tree2_total)\n print(\"FINAL tree3_total: \", tree3_total)\n print(\"###\"*10)\n\n if (tree1_total == tree2_total) or (tree1_total == tree3_total) or (tree2_total == tree3_total):\n mx = max(tree1_total, tree2_total, tree3_total)\n if([tree1_total, tree2_total, tree3_total].count(mx) >= 2):\n ret = mx - min(tree1_total, tree2_total, tree3_total)\n return ret, True\n return -1, False\n\n def split_tree_into_two(self):\n ret = -1\n found = False\n global skipped\n\n for entry in range(self.vertices):\n tree1_nodes = []\n tree2_nodes = []\n tree3_nodes = []\n temp_nodes = []\n\n n = self.graph[entry]\n while(n!=None):\n edge = [entry, n.identifier, -1]\n if(n.identifier <= entry):\n n = n.next\n skipped += 1\n continue\n print(\"##MAIN##. 
SPLIT POINT EDGE: \", edge)\n tree1_nodes = []\n tree2_nodes = []\n tree1_total, tree2_total = self.split_and_compute_tree_sum(tree1_nodes, tree2_nodes, edge)\n print(\"ORIGINALS: \", tree1_total, tree2_total)\n if(min(tree1_total, tree2_total) < self.grand_sum/3 or (max(tree1_total, tree2_total) > (2*self.grand_sum)/3)):\n n = n.next\n continue\n\n if(tree1_total > tree2_total):\n ret, found = self.find_third_tree(tree1_total, tree2_total,tree1_nodes, 1, edge[1])\n elif(tree2_total > tree1_total):\n ret, found = self.find_third_tree(tree1_total, tree2_total,tree2_nodes, 2, edge[0])\n elif (tree1_total == tree2_total):\n ret = tree1_total\n found = True\n else:\n found = True\n if(found):\n break\n n = n.next\n if(found):\n break\n return ret\n\n\n def find_third_tree(self, tree1_total, tree2_total, nodes, t = 1, m=0):\n\n ret , found = -1, False\n global skipped\n consumed = []\n\n for i in range(len(nodes)):\n skip_n = nodes[i]\n consumed.append(skip_n)\n n = self.graph[skip_n]\n while(n!=None):\n if(n.identifier in consumed):\n n = n.next\n skipped += 1\n continue\n edge = [skip_n, n.identifier, m]\n print(\"2. 
SPLIT POINT EDGE: \", edge)\n print(\"tree1_total\",tree1_total)\n tree3_nodes = []\n temp_nodes = []\n _,tree3_total = self.split_and_compute_tree_sum(temp_nodes, tree3_nodes, edge, True)\n if(t==1):\n ret , found = self.check(tree1_total - tree3_total, tree2_total, tree3_total)\n elif(t==2):\n ret , found = self.check(tree1_total, tree2_total - tree3_total, tree3_total)\n if(found):\n break\n n = n.next\n if(found):\n break\n\n return ret, found\n\n\ndef balancedForest(values, edges):\n mygraph = Graph(values, edges)\n mygraph.build_adjacency_list()\n mygraph.print_graph()\n return mygraph.split_tree_into_two()\n\nimport unittest\n\nclass BalancedForestTest(unittest.TestCase):\n def test1(self):\n expected = 10\n c = [1, 1, 1, 18, 10, 11, 5, 6]\n edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test2(self):\n expected = 13\n c = [12, 7, 11, 17, 20, 10]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test3(self):\n expected = 19\n c = [15, 12, 8, 14, 13]\n edges = [[4,5],[1,2],[1,3],[1,4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test4(self):\n expected = 2\n c = [1,2,2,1,1]\n edges = [[1,2],[1,3],[3,5],[1,4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test5(self):\n expected = -1\n c = [1,3,5]\n edges = [[1,3],[1,2]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test6(self):\n expected = -1\n c = [7, 7, 4, 1, 1, 1]\n edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test7(self):\n expected = 0\n c = [1, 3, 4, 4]\n edges = [(1, 2), (1, 3), (1, 4)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test8(self):\n expected = 297\n c = [100, 99, 98, 100, 99, 98]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test9(self):\n 
expected = 4\n c = [12, 10, 8, 12, 14, 12]\n edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n print(\"SKIPPED\", skipped)\n\n\nif __name__ == '__main__':\n unittest.main()",
"step-ids": [
7,
22,
24,
25,
28
]
}
|
[
7,
22,
24,
25,
28
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
G = 1000000000
M = 1000000
K = 1000
|
flexible
|
{
"blob_id": "f765f54a89a98a5f61c70a37379860f170444c0a",
"index": 4069,
"step-1": "<mask token>\n",
"step-2": "G = 1000000000\nM = 1000000\nK = 1000\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
mycursor.execute('select * from question')
<|reserved_special_token_0|>
for user in users:
print(user)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
mydb = mysql.connector.connect(user='seyed', password='Sm13481353', host=
'localhost', database='telegram_bot', auth_plugin='mysql_native_password')
mycursor = mydb.cursor()
query = 'insert into question(update_id,chat_id) values (40,20)'
mycursor.execute('select * from question')
users = mycursor.fetchall()
for user in users:
print(user)
<|reserved_special_token_1|>
import mysql.connector
mydb = mysql.connector.connect(user='seyed', password='Sm13481353', host=
'localhost', database='telegram_bot', auth_plugin='mysql_native_password')
mycursor = mydb.cursor()
query = 'insert into question(update_id,chat_id) values (40,20)'
mycursor.execute('select * from question')
users = mycursor.fetchall()
for user in users:
print(user)
<|reserved_special_token_1|>
import mysql.connector
# config = {
# "user":"root",
# "password":"Sm13481353",
# "host":"3"
# }
mydb = mysql.connector.connect(
user="seyed",
password="Sm13481353",
host="localhost",
database="telegram_bot",
auth_plugin="mysql_native_password"
)
mycursor = mydb.cursor()
query = "insert into question(update_id,chat_id) values (40,20)"
# mycursor.execute(query)
# mydb.commit()
mycursor.execute("select * from question")
users = mycursor.fetchall()
for user in users:
print(user)
|
flexible
|
{
"blob_id": "a29a904290cb733ac7b526a75e0c218b952e2266",
"index": 4630,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nmycursor.execute('select * from question')\n<mask token>\nfor user in users:\n print(user)\n",
"step-3": "<mask token>\nmydb = mysql.connector.connect(user='seyed', password='Sm13481353', host=\n 'localhost', database='telegram_bot', auth_plugin='mysql_native_password')\nmycursor = mydb.cursor()\nquery = 'insert into question(update_id,chat_id) values (40,20)'\nmycursor.execute('select * from question')\nusers = mycursor.fetchall()\nfor user in users:\n print(user)\n",
"step-4": "import mysql.connector\nmydb = mysql.connector.connect(user='seyed', password='Sm13481353', host=\n 'localhost', database='telegram_bot', auth_plugin='mysql_native_password')\nmycursor = mydb.cursor()\nquery = 'insert into question(update_id,chat_id) values (40,20)'\nmycursor.execute('select * from question')\nusers = mycursor.fetchall()\nfor user in users:\n print(user)\n",
"step-5": "import mysql.connector\n# config = {\n# \"user\":\"root\",\n# \"password\":\"Sm13481353\",\n# \"host\":\"3\"\n# }\nmydb = mysql.connector.connect(\n user=\"seyed\",\n password=\"Sm13481353\",\n host=\"localhost\",\n database=\"telegram_bot\",\n auth_plugin=\"mysql_native_password\"\n )\nmycursor = mydb.cursor()\nquery = \"insert into question(update_id,chat_id) values (40,20)\"\n# mycursor.execute(query)\n# mydb.commit()\nmycursor.execute(\"select * from question\")\nusers = mycursor.fetchall()\nfor user in users:\n print(user)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
'''
Created on Nov 20, 2012
@author: shriram
'''
import xml.etree.ElementTree as ET
from xml.sax.saxutils import escape
'''
Annotating only Sparse and Non Sparse Lines
'''
class Trainer:
def html_escape(self,text):
html_escape_table = {
'"': """,
"'": "'"
}
return escape(text, html_escape_table)
def train(self, preprocessedxml, xmlname):
f = open('../TrainingData/htmls/train'+xmlname+'.html','w')
f.write('<html><body><form action="http://localhost/cgi-bin/TableProcessor.py" method="post">')
f.write('<input type="hidden" name="xmlname" value="'+xmlname +'"/>')
i = 0
pageno = 0
colno = 0
for page in preprocessedxml:
f.write('<div class="page"><input type="hidden" name="pagebegin'+str(pageno)+'" value="'+str(colno)+'"/>')
for col in page:
f.write('<div class="col"><input type="hidden" name="colbegin'+str(colno)+'" value="'+str(i)+'"/>')
for tup in col:
f.write('<div><select id="docparams" name="docparams'+ str(i) +'">')
f.write('<option value="sparse">Sparse</option>')
f.write('<option value="nonsparse" selected="selected">Not Sparse</option>')
f.write("</select><input type='hidden' name='texttag"+str(i)+"' value='"+ self.html_escape(ET.tostring(tup[1],'utf-8',"xml")) + "'/>"+ ET.tostring(tup[1]) +"</div>")
i += 1
f.write('<input type="hidden" name="colend'+str(colno)+'" value="'+str(i)+'"/><div>')
colno += 1
f.write('<input type="hidden" name="pageend'+str(pageno)+'" value="'+str(colno)+'"/> <div>')
pageno += 1
f.write('<input type="submit" value="Done!"/></form></body></html>')
f.close()
def readAnnotatedXml(self,xmlname):
f = open(xmlname)
preprocessedxml = list()
col = list()
for line in f:
if(line == "=============================== PAGE ===================================\n"):
pagelist = list()
preprocessedxml.append(pagelist)
elif(line == "=============================== COL ===================================\n"):
col = list()
pagelist.append(col)
else:
tup0 = line[:line.find(" ")]
tup1 = line[line.find(" ")+1:]
col.append([tup0,ET.fromstring(tup1)])
return preprocessedxml
def readAnnotatedxmlforTableDecomposition(self, xmlname):
f = open(xmlname)
table = list()
for line in f:
if(line.strip() == ''):
continue
tup0 = line[:line.find("\t")]
tup1 = line[line.find("\t")+1:]
table.append([tup0,ET.fromstring(tup1)])
return table
|
normal
|
{
"blob_id": "22e24e8dd49367ae57d1980c4addf48d65c5e897",
"index": 7851,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Trainer:\n\n def html_escape(self, text):\n html_escape_table = {'\"': '"', \"'\": '''}\n return escape(text, html_escape_table)\n\n def train(self, preprocessedxml, xmlname):\n f = open('../TrainingData/htmls/train' + xmlname + '.html', 'w')\n f.write(\n '<html><body><form action=\"http://localhost/cgi-bin/TableProcessor.py\" method=\"post\">'\n )\n f.write('<input type=\"hidden\" name=\"xmlname\" value=\"' + xmlname + '\"/>'\n )\n i = 0\n pageno = 0\n colno = 0\n for page in preprocessedxml:\n f.write(\n '<div class=\"page\"><input type=\"hidden\" name=\"pagebegin' +\n str(pageno) + '\" value=\"' + str(colno) + '\"/>')\n for col in page:\n f.write(\n '<div class=\"col\"><input type=\"hidden\" name=\"colbegin' +\n str(colno) + '\" value=\"' + str(i) + '\"/>')\n for tup in col:\n f.write('<div><select id=\"docparams\" name=\"docparams' +\n str(i) + '\">')\n f.write('<option value=\"sparse\">Sparse</option>')\n f.write(\n '<option value=\"nonsparse\" selected=\"selected\">Not Sparse</option>'\n )\n f.write(\"</select><input type='hidden' name='texttag\" +\n str(i) + \"' value='\" + self.html_escape(ET.tostring\n (tup[1], 'utf-8', 'xml')) + \"'/>\" + ET.tostring(tup\n [1]) + '</div>')\n i += 1\n f.write('<input type=\"hidden\" name=\"colend' + str(colno) +\n '\" value=\"' + str(i) + '\"/><div>')\n colno += 1\n f.write('<input type=\"hidden\" name=\"pageend' + str(pageno) +\n '\" value=\"' + str(colno) + '\"/> <div>')\n pageno += 1\n f.write('<input type=\"submit\" value=\"Done!\"/></form></body></html>')\n f.close()\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Trainer:\n\n def html_escape(self, text):\n html_escape_table = {'\"': '"', \"'\": '''}\n return escape(text, html_escape_table)\n\n def train(self, preprocessedxml, xmlname):\n f = open('../TrainingData/htmls/train' + xmlname + '.html', 'w')\n f.write(\n '<html><body><form action=\"http://localhost/cgi-bin/TableProcessor.py\" method=\"post\">'\n )\n f.write('<input type=\"hidden\" name=\"xmlname\" value=\"' + xmlname + '\"/>'\n )\n i = 0\n pageno = 0\n colno = 0\n for page in preprocessedxml:\n f.write(\n '<div class=\"page\"><input type=\"hidden\" name=\"pagebegin' +\n str(pageno) + '\" value=\"' + str(colno) + '\"/>')\n for col in page:\n f.write(\n '<div class=\"col\"><input type=\"hidden\" name=\"colbegin' +\n str(colno) + '\" value=\"' + str(i) + '\"/>')\n for tup in col:\n f.write('<div><select id=\"docparams\" name=\"docparams' +\n str(i) + '\">')\n f.write('<option value=\"sparse\">Sparse</option>')\n f.write(\n '<option value=\"nonsparse\" selected=\"selected\">Not Sparse</option>'\n )\n f.write(\"</select><input type='hidden' name='texttag\" +\n str(i) + \"' value='\" + self.html_escape(ET.tostring\n (tup[1], 'utf-8', 'xml')) + \"'/>\" + ET.tostring(tup\n [1]) + '</div>')\n i += 1\n f.write('<input type=\"hidden\" name=\"colend' + str(colno) +\n '\" value=\"' + str(i) + '\"/><div>')\n colno += 1\n f.write('<input type=\"hidden\" name=\"pageend' + str(pageno) +\n '\" value=\"' + str(colno) + '\"/> <div>')\n pageno += 1\n f.write('<input type=\"submit\" value=\"Done!\"/></form></body></html>')\n f.close()\n <mask token>\n\n def readAnnotatedxmlforTableDecomposition(self, xmlname):\n f = open(xmlname)\n table = list()\n for line in f:\n if line.strip() == '':\n continue\n tup0 = line[:line.find('\\t')]\n tup1 = line[line.find('\\t') + 1:]\n table.append([tup0, ET.fromstring(tup1)])\n return table\n",
"step-4": "<mask token>\nimport xml.etree.ElementTree as ET\nfrom xml.sax.saxutils import escape\n<mask token>\n\n\nclass Trainer:\n\n def html_escape(self, text):\n html_escape_table = {'\"': '"', \"'\": '''}\n return escape(text, html_escape_table)\n\n def train(self, preprocessedxml, xmlname):\n f = open('../TrainingData/htmls/train' + xmlname + '.html', 'w')\n f.write(\n '<html><body><form action=\"http://localhost/cgi-bin/TableProcessor.py\" method=\"post\">'\n )\n f.write('<input type=\"hidden\" name=\"xmlname\" value=\"' + xmlname + '\"/>'\n )\n i = 0\n pageno = 0\n colno = 0\n for page in preprocessedxml:\n f.write(\n '<div class=\"page\"><input type=\"hidden\" name=\"pagebegin' +\n str(pageno) + '\" value=\"' + str(colno) + '\"/>')\n for col in page:\n f.write(\n '<div class=\"col\"><input type=\"hidden\" name=\"colbegin' +\n str(colno) + '\" value=\"' + str(i) + '\"/>')\n for tup in col:\n f.write('<div><select id=\"docparams\" name=\"docparams' +\n str(i) + '\">')\n f.write('<option value=\"sparse\">Sparse</option>')\n f.write(\n '<option value=\"nonsparse\" selected=\"selected\">Not Sparse</option>'\n )\n f.write(\"</select><input type='hidden' name='texttag\" +\n str(i) + \"' value='\" + self.html_escape(ET.tostring\n (tup[1], 'utf-8', 'xml')) + \"'/>\" + ET.tostring(tup\n [1]) + '</div>')\n i += 1\n f.write('<input type=\"hidden\" name=\"colend' + str(colno) +\n '\" value=\"' + str(i) + '\"/><div>')\n colno += 1\n f.write('<input type=\"hidden\" name=\"pageend' + str(pageno) +\n '\" value=\"' + str(colno) + '\"/> <div>')\n pageno += 1\n f.write('<input type=\"submit\" value=\"Done!\"/></form></body></html>')\n f.close()\n\n def readAnnotatedXml(self, xmlname):\n f = open(xmlname)\n preprocessedxml = list()\n col = list()\n for line in f:\n if (line ==\n \"\"\"=============================== PAGE ===================================\n\"\"\"\n ):\n pagelist = list()\n preprocessedxml.append(pagelist)\n elif line == \"\"\"=============================== 
COL ===================================\n\"\"\":\n col = list()\n pagelist.append(col)\n else:\n tup0 = line[:line.find(' ')]\n tup1 = line[line.find(' ') + 1:]\n col.append([tup0, ET.fromstring(tup1)])\n return preprocessedxml\n\n def readAnnotatedxmlforTableDecomposition(self, xmlname):\n f = open(xmlname)\n table = list()\n for line in f:\n if line.strip() == '':\n continue\n tup0 = line[:line.find('\\t')]\n tup1 = line[line.find('\\t') + 1:]\n table.append([tup0, ET.fromstring(tup1)])\n return table\n",
"step-5": "'''\nCreated on Nov 20, 2012\n\n@author: shriram\n'''\nimport xml.etree.ElementTree as ET\nfrom xml.sax.saxutils import escape\n\n'''\n Annotating only Sparse and Non Sparse Lines\n'''\nclass Trainer:\n def html_escape(self,text):\n html_escape_table = {\n '\"': \""\",\n \"'\": \"'\"\n }\n return escape(text, html_escape_table)\n \n def train(self, preprocessedxml, xmlname):\n f = open('../TrainingData/htmls/train'+xmlname+'.html','w')\n f.write('<html><body><form action=\"http://localhost/cgi-bin/TableProcessor.py\" method=\"post\">')\n f.write('<input type=\"hidden\" name=\"xmlname\" value=\"'+xmlname +'\"/>')\n i = 0\n pageno = 0\n colno = 0\n for page in preprocessedxml:\n f.write('<div class=\"page\"><input type=\"hidden\" name=\"pagebegin'+str(pageno)+'\" value=\"'+str(colno)+'\"/>')\n for col in page:\n f.write('<div class=\"col\"><input type=\"hidden\" name=\"colbegin'+str(colno)+'\" value=\"'+str(i)+'\"/>')\n for tup in col:\n f.write('<div><select id=\"docparams\" name=\"docparams'+ str(i) +'\">')\n f.write('<option value=\"sparse\">Sparse</option>')\n f.write('<option value=\"nonsparse\" selected=\"selected\">Not Sparse</option>')\n f.write(\"</select><input type='hidden' name='texttag\"+str(i)+\"' value='\"+ self.html_escape(ET.tostring(tup[1],'utf-8',\"xml\")) + \"'/>\"+ ET.tostring(tup[1]) +\"</div>\")\n i += 1\n f.write('<input type=\"hidden\" name=\"colend'+str(colno)+'\" value=\"'+str(i)+'\"/><div>')\n colno += 1\n f.write('<input type=\"hidden\" name=\"pageend'+str(pageno)+'\" value=\"'+str(colno)+'\"/> <div>')\n pageno += 1\n f.write('<input type=\"submit\" value=\"Done!\"/></form></body></html>')\n f.close()\n \n def readAnnotatedXml(self,xmlname):\n f = open(xmlname)\n preprocessedxml = list()\n col = list()\n for line in f:\n if(line == \"=============================== PAGE ===================================\\n\"):\n pagelist = list()\n preprocessedxml.append(pagelist)\n elif(line == \"=============================== COL 
===================================\\n\"):\n col = list()\n pagelist.append(col)\n else:\n tup0 = line[:line.find(\" \")]\n tup1 = line[line.find(\" \")+1:]\n col.append([tup0,ET.fromstring(tup1)])\n \n return preprocessedxml\n \n def readAnnotatedxmlforTableDecomposition(self, xmlname):\n f = open(xmlname)\n table = list()\n for line in f:\n if(line.strip() == ''):\n continue\n tup0 = line[:line.find(\"\\t\")]\n tup1 = line[line.find(\"\\t\")+1:]\n table.append([tup0,ET.fromstring(tup1)])\n return table\n \n \n \n \n \n ",
"step-ids": [
0,
3,
4,
6,
7
]
}
|
[
0,
3,
4,
6,
7
] |
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def searchInsert(self, nums, target: int):
n = len(nums)
left = 0
right = n - 1
while left <= right:
mid = (left + right) // 2
if nums[mid] == target:
return mid
if nums[mid] < target:
left = mid + 1
else:
right = mid - 1
return left
<|reserved_special_token_0|>
def searchInsert02(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
if nums[-1] < target:
return size
left = 0
right = size - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
return left
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def searchInsert(self, nums, target: int):
n = len(nums)
left = 0
right = n - 1
while left <= right:
mid = (left + right) // 2
if nums[mid] == target:
return mid
if nums[mid] < target:
left = mid + 1
else:
right = mid - 1
return left
def searchInsert01(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
left = 0
right = size
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
return left
def searchInsert02(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
if nums[-1] < target:
return size
left = 0
right = size - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
return left
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def searchInsert(self, nums, target: int):
n = len(nums)
left = 0
right = n - 1
while left <= right:
mid = (left + right) // 2
if nums[mid] == target:
return mid
if nums[mid] < target:
left = mid + 1
else:
right = mid - 1
return left
def searchInsert01(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
left = 0
right = size
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
return left
def searchInsert02(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
if nums[-1] < target:
return size
left = 0
right = size - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
return left
if __name__ == '__main__':
nums = [1, 3, 5, 6]
target = 7
s = Solution()
print(s.searchInsert01(nums, target))
<|reserved_special_token_1|>
# coding:utf-8
class Solution:
def searchInsert(self, nums, target: int):
n = len(nums)
left = 0
right = n - 1
# 返回大于等于target的第一个索引则用left,否则用right
while left <= right:
mid = (left + right) // 2
if nums[mid] == target:
return mid
if nums[mid] < target:
left = mid + 1
else:
right = mid - 1
return left
# 如果写两个分支
def searchInsert01(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
left = 0
right = size
while left < right:
mid = left + (right - left) // 2
# 此处中位数小于目标值则排除掉,否则得包含中位数
if nums[mid] < target:
left = mid + 1
else: # >=
right = mid
return left
# 如果写两个分支
# 范围为[0,size-1]
def searchInsert02(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
if nums[-1] < target:
return size
left = 0
right = size - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else: # >=
right = mid
return left
if __name__ == '__main__':
nums = [1, 3, 5, 6]
target = 7
s = Solution()
# print(s.searchInsert(nums, target))
print(s.searchInsert01(nums, target))
|
flexible
|
{
"blob_id": "9ec1cca08fac2fd976c1f596f7d340befc4eb339",
"index": 2020,
"step-1": "class Solution:\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Solution:\n\n def searchInsert(self, nums, target: int):\n n = len(nums)\n left = 0\n right = n - 1\n while left <= right:\n mid = (left + right) // 2\n if nums[mid] == target:\n return mid\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid - 1\n return left\n <mask token>\n\n def searchInsert02(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n if nums[-1] < target:\n return size\n left = 0\n right = size - 1\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid\n return left\n\n\n<mask token>\n",
"step-3": "class Solution:\n\n def searchInsert(self, nums, target: int):\n n = len(nums)\n left = 0\n right = n - 1\n while left <= right:\n mid = (left + right) // 2\n if nums[mid] == target:\n return mid\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid - 1\n return left\n\n def searchInsert01(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n left = 0\n right = size\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid\n return left\n\n def searchInsert02(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n if nums[-1] < target:\n return size\n left = 0\n right = size - 1\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid\n return left\n\n\n<mask token>\n",
"step-4": "class Solution:\n\n def searchInsert(self, nums, target: int):\n n = len(nums)\n left = 0\n right = n - 1\n while left <= right:\n mid = (left + right) // 2\n if nums[mid] == target:\n return mid\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid - 1\n return left\n\n def searchInsert01(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n left = 0\n right = size\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid\n return left\n\n def searchInsert02(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n if nums[-1] < target:\n return size\n left = 0\n right = size - 1\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid\n return left\n\n\nif __name__ == '__main__':\n nums = [1, 3, 5, 6]\n target = 7\n s = Solution()\n print(s.searchInsert01(nums, target))\n",
"step-5": "# coding:utf-8\nclass Solution:\n def searchInsert(self, nums, target: int):\n n = len(nums)\n left = 0\n right = n - 1\n # 返回大于等于target的第一个索引则用left,否则用right\n while left <= right:\n mid = (left + right) // 2\n if nums[mid] == target:\n return mid\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid - 1\n return left\n\n # 如果写两个分支\n def searchInsert01(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n left = 0\n right = size\n while left < right:\n mid = left + (right - left) // 2\n # 此处中位数小于目标值则排除掉,否则得包含中位数\n if nums[mid] < target:\n left = mid + 1\n else: # >=\n right = mid\n return left\n\n # 如果写两个分支\n # 范围为[0,size-1]\n def searchInsert02(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n if nums[-1] < target:\n return size\n left = 0\n right = size - 1\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else: # >=\n right = mid\n return left\n\n\nif __name__ == '__main__':\n nums = [1, 3, 5, 6]\n target = 7\n s = Solution()\n # print(s.searchInsert(nums, target))\n print(s.searchInsert01(nums, target))\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
#!/usr/bin/python
#coding:utf-8
import glob, os
#from collections import OrderedDict
aa = os.popen("grep -E 'register|cp' all.log |grep -v 'bohan' | awk '{ print $6 }' > /opt/csvt01/logs/tmp.txt").read().strip()
#os.system("grep -E 'register|cp' all.log |grep -v 'bohan' | awk '{ print $6 }' > /opt/csvt01/logs/tmp.txt")
#bb = aa.split('-')[1]
res = []
fileName = file('/opt/csvt01/logs/tmp.txt')
while True:
line = fileName.readline()
if len(line) ==0:break
a = line.split('-')[1]
res.append(a)
fileName.close()
#print res
a = {}
for i in res:
if res.count(i)>1:
a[i] = res.count(i)
#print (a)
def fun(s):
d = sorted(s.iteritems(),key=lambda t:t[1],reverse=True)
return d
d = fun(a)
for i in d:
print i[0]
|
normal
|
{
"blob_id": "fc8f3be408f4d21de2ae18776cd60177c82bea77",
"index": 2068,
"step-1": "#!/usr/bin/python \n#coding:utf-8 \nimport glob, os\n#from collections import OrderedDict\naa = os.popen(\"grep -E 'register|cp' all.log |grep -v 'bohan' | awk '{ print $6 }' > /opt/csvt01/logs/tmp.txt\").read().strip()\n#os.system(\"grep -E 'register|cp' all.log |grep -v 'bohan' | awk '{ print $6 }' > /opt/csvt01/logs/tmp.txt\")\n#bb = aa.split('-')[1]\n\nres = []\n\n\nfileName = file('/opt/csvt01/logs/tmp.txt')\n\nwhile True:\n line = fileName.readline()\n if len(line) ==0:break\n a = line.split('-')[1]\n res.append(a)\n \nfileName.close()\n#print res\n\na = {}\nfor i in res:\n if res.count(i)>1:\n a[i] = res.count(i)\n#print (a)\n\n\n\ndef fun(s):\n d = sorted(s.iteritems(),key=lambda t:t[1],reverse=True)\n return d\n \nd = fun(a)\n\nfor i in d:\n print i[0]\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import pymel.core as pm
from alShaders import *
class AEalLayerColorTemplate(alShadersTemplate):
controls = {}
params = {}
def setup(self):
self.params.clear()
self.params["layer1"] = Param("layer1", "Layer 1", "The background layer (will be blended over black if its alpha is not 1.", "rgb", presets=None)
self.params["layer1a"] = Param("layer1a", "Layer 1 Alpha", "The alpha of the background layer", "float", presets=None)
self.params["layer1blend"] = Param("layer1blend", "Mode", "Blend mode for the background layer.", "enum", presets=None)
self.params["layer2"] = Param("layer2", "Layer 2", "The color plugged in here will be blended over the layers below according to its alpha and blend mode.", "rgb", presets=None)
self.params["layer2a"] = Param("layer2a", "Layer 2 Alpha", "The alpha used to blend this layer over the layers below.", "float", presets=None)
self.params["layer2blend"] = Param("layer2blend", "Mode", "The blend mode used to blend this layer over the layers below.", "enum", presets=None)
self.params["layer3"] = Param("layer3", "Layer 3", "The color plugged in here will be blended over the layers below according to its alpha and blend mode.", "rgb", presets=None)
self.params["layer3a"] = Param("layer3a", "Layer 3 Alpha", "The alpha used to blend this layer over the layers below.", "float", presets=None)
self.params["layer3blend"] = Param("layer3blend", "Mode", "The blend mode used to blend this layer over the layers below.", "enum", presets=None)
self.params["layer4"] = Param("layer4", "Layer 4", "The color plugged in here will be blended over the layers below according to its alpha and blend mode.", "rgb", presets=None)
self.params["layer4a"] = Param("layer4a", "Layer 4 Alpha", "The alpha used to blend this layer over the layers below.", "float", presets=None)
self.params["layer4blend"] = Param("layer4blend", "Mode", "The blend mode used to blend this layer over the layers below.", "enum", presets=None)
self.params["layer5"] = Param("layer5", "Layer 5", "The color plugged in here will be blended over the layers below according to its alpha and blend mode.", "rgb", presets=None)
self.params["layer5a"] = Param("layer5a", "Layer 5 Alpha", "The alpha used to blend this layer over the layers below.", "float", presets=None)
self.params["layer5blend"] = Param("layer5blend", "Mode", "The blend mode used to blend this layer over the layers below.", "enum", presets=None)
self.params["layer6"] = Param("layer6", "Layer 6", "The color plugged in here will be blended over the layers below according to its alpha and blend mode.", "rgb", presets=None)
self.params["layer6a"] = Param("layer6a", "Layer 6 Alpha", "The alpha used to blend this layer over the layers below.", "float", presets=None)
self.params["layer6blend"] = Param("layer6blend", "Mode", "The blend mode used to blend this layer over the layers below.", "enum", presets=None)
self.params["layer7"] = Param("layer7", "Layer 7", "The color plugged in here will be blended over the layers below according to its alpha and blend mode.", "rgb", presets=None)
self.params["layer7a"] = Param("layer7a", "Layer 7 Alpha", "The alpha used to blend this layer over the layers below.", "float", presets=None)
self.params["layer7blend"] = Param("layer7blend", "Mode", "The blend mode used to blend this layer over the layers below.", "enum", presets=None)
self.params["layer8"] = Param("layer8", "Layer 8", "The color plugged in here will be blended over the layers below according to its alpha and blend mode.", "rgb", presets=None)
self.params["layer8a"] = Param("layer8a", "Layer 8 Alpha", "The alpha used to blend this layer over the layers below.", "float", presets=None)
self.params["layer8blend"] = Param("layer8blend", "Mode", "The blend mode used to blend this layer over the layers below.", "enum", presets=None)
self.addSwatch()
self.beginScrollLayout()
self.addCustomRgb("layer1")
self.addCustomFlt("layer1a")
self.addControl("layer1blend", label="Mode", annotation="Blend mode for the background layer.")
self.addCustomRgb("layer2")
self.addCustomFlt("layer2a")
self.addControl("layer2blend", label="Mode", annotation="The blend mode used to blend this layer over the layers below.")
self.addCustomRgb("layer3")
self.addCustomFlt("layer3a")
self.addControl("layer3blend", label="Mode", annotation="The blend mode used to blend this layer over the layers below.")
self.addCustomRgb("layer4")
self.addCustomFlt("layer4a")
self.addControl("layer4blend", label="Mode", annotation="The blend mode used to blend this layer over the layers below.")
self.addCustomRgb("layer5")
self.addCustomFlt("layer5a")
self.addControl("layer5blend", label="Mode", annotation="The blend mode used to blend this layer over the layers below.")
self.addCustomRgb("layer6")
self.addCustomFlt("layer6a")
self.addControl("layer6blend", label="Mode", annotation="The blend mode used to blend this layer over the layers below.")
self.addCustomRgb("layer7")
self.addCustomFlt("layer7a")
self.addControl("layer7blend", label="Mode", annotation="The blend mode used to blend this layer over the layers below.")
self.addCustomRgb("layer8")
self.addCustomFlt("layer8a")
self.addControl("layer8blend", label="Mode", annotation="The blend mode used to blend this layer over the layers below.")
pm.mel.AEdependNodeTemplate(self.nodeName)
self.addExtraControls()
self.endScrollLayout()
|
normal
|
{
"blob_id": "c847e7abe36b62c4518bb535789064e22b5f1db7",
"index": 5750,
"step-1": "<mask token>\n\n\nclass AEalLayerColorTemplate(alShadersTemplate):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass AEalLayerColorTemplate(alShadersTemplate):\n <mask token>\n <mask token>\n\n def setup(self):\n self.params.clear()\n self.params['layer1'] = Param('layer1', 'Layer 1',\n 'The background layer (will be blended over black if its alpha is not 1.'\n , 'rgb', presets=None)\n self.params['layer1a'] = Param('layer1a', 'Layer 1 Alpha',\n 'The alpha of the background layer', 'float', presets=None)\n self.params['layer1blend'] = Param('layer1blend', 'Mode',\n 'Blend mode for the background layer.', 'enum', presets=None)\n self.params['layer2'] = Param('layer2', 'Layer 2',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer2a'] = Param('layer2a', 'Layer 2 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer2blend'] = Param('layer2blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer3'] = Param('layer3', 'Layer 3',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer3a'] = Param('layer3a', 'Layer 3 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer3blend'] = Param('layer3blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer4'] = Param('layer4', 'Layer 4',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer4a'] = Param('layer4a', 'Layer 4 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer4blend'] = Param('layer4blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', 
presets=None)\n self.params['layer5'] = Param('layer5', 'Layer 5',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer5a'] = Param('layer5a', 'Layer 5 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer5blend'] = Param('layer5blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer6'] = Param('layer6', 'Layer 6',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer6a'] = Param('layer6a', 'Layer 6 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer6blend'] = Param('layer6blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer7'] = Param('layer7', 'Layer 7',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer7a'] = Param('layer7a', 'Layer 7 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer7blend'] = Param('layer7blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer8'] = Param('layer8', 'Layer 8',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer8a'] = Param('layer8a', 'Layer 8 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer8blend'] = Param('layer8blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.addSwatch()\n 
self.beginScrollLayout()\n self.addCustomRgb('layer1')\n self.addCustomFlt('layer1a')\n self.addControl('layer1blend', label='Mode', annotation=\n 'Blend mode for the background layer.')\n self.addCustomRgb('layer2')\n self.addCustomFlt('layer2a')\n self.addControl('layer2blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer3')\n self.addCustomFlt('layer3a')\n self.addControl('layer3blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer4')\n self.addCustomFlt('layer4a')\n self.addControl('layer4blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer5')\n self.addCustomFlt('layer5a')\n self.addControl('layer5blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer6')\n self.addCustomFlt('layer6a')\n self.addControl('layer6blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer7')\n self.addCustomFlt('layer7a')\n self.addControl('layer7blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer8')\n self.addCustomFlt('layer8a')\n self.addControl('layer8blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n pm.mel.AEdependNodeTemplate(self.nodeName)\n self.addExtraControls()\n self.endScrollLayout()\n",
"step-3": "<mask token>\n\n\nclass AEalLayerColorTemplate(alShadersTemplate):\n controls = {}\n params = {}\n\n def setup(self):\n self.params.clear()\n self.params['layer1'] = Param('layer1', 'Layer 1',\n 'The background layer (will be blended over black if its alpha is not 1.'\n , 'rgb', presets=None)\n self.params['layer1a'] = Param('layer1a', 'Layer 1 Alpha',\n 'The alpha of the background layer', 'float', presets=None)\n self.params['layer1blend'] = Param('layer1blend', 'Mode',\n 'Blend mode for the background layer.', 'enum', presets=None)\n self.params['layer2'] = Param('layer2', 'Layer 2',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer2a'] = Param('layer2a', 'Layer 2 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer2blend'] = Param('layer2blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer3'] = Param('layer3', 'Layer 3',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer3a'] = Param('layer3a', 'Layer 3 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer3blend'] = Param('layer3blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer4'] = Param('layer4', 'Layer 4',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer4a'] = Param('layer4a', 'Layer 4 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer4blend'] = Param('layer4blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', 
presets=None)\n self.params['layer5'] = Param('layer5', 'Layer 5',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer5a'] = Param('layer5a', 'Layer 5 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer5blend'] = Param('layer5blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer6'] = Param('layer6', 'Layer 6',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer6a'] = Param('layer6a', 'Layer 6 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer6blend'] = Param('layer6blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer7'] = Param('layer7', 'Layer 7',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer7a'] = Param('layer7a', 'Layer 7 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer7blend'] = Param('layer7blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer8'] = Param('layer8', 'Layer 8',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer8a'] = Param('layer8a', 'Layer 8 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer8blend'] = Param('layer8blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.addSwatch()\n 
self.beginScrollLayout()\n self.addCustomRgb('layer1')\n self.addCustomFlt('layer1a')\n self.addControl('layer1blend', label='Mode', annotation=\n 'Blend mode for the background layer.')\n self.addCustomRgb('layer2')\n self.addCustomFlt('layer2a')\n self.addControl('layer2blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer3')\n self.addCustomFlt('layer3a')\n self.addControl('layer3blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer4')\n self.addCustomFlt('layer4a')\n self.addControl('layer4blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer5')\n self.addCustomFlt('layer5a')\n self.addControl('layer5blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer6')\n self.addCustomFlt('layer6a')\n self.addControl('layer6blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer7')\n self.addCustomFlt('layer7a')\n self.addControl('layer7blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer8')\n self.addCustomFlt('layer8a')\n self.addControl('layer8blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n pm.mel.AEdependNodeTemplate(self.nodeName)\n self.addExtraControls()\n self.endScrollLayout()\n",
"step-4": "import pymel.core as pm\nfrom alShaders import *\n\n\nclass AEalLayerColorTemplate(alShadersTemplate):\n controls = {}\n params = {}\n\n def setup(self):\n self.params.clear()\n self.params['layer1'] = Param('layer1', 'Layer 1',\n 'The background layer (will be blended over black if its alpha is not 1.'\n , 'rgb', presets=None)\n self.params['layer1a'] = Param('layer1a', 'Layer 1 Alpha',\n 'The alpha of the background layer', 'float', presets=None)\n self.params['layer1blend'] = Param('layer1blend', 'Mode',\n 'Blend mode for the background layer.', 'enum', presets=None)\n self.params['layer2'] = Param('layer2', 'Layer 2',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer2a'] = Param('layer2a', 'Layer 2 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer2blend'] = Param('layer2blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer3'] = Param('layer3', 'Layer 3',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer3a'] = Param('layer3a', 'Layer 3 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer3blend'] = Param('layer3blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer4'] = Param('layer4', 'Layer 4',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer4a'] = Param('layer4a', 'Layer 4 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer4blend'] = Param('layer4blend', 'Mode',\n 'The blend mode used to blend this layer 
over the layers below.',\n 'enum', presets=None)\n self.params['layer5'] = Param('layer5', 'Layer 5',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer5a'] = Param('layer5a', 'Layer 5 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer5blend'] = Param('layer5blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer6'] = Param('layer6', 'Layer 6',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer6a'] = Param('layer6a', 'Layer 6 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer6blend'] = Param('layer6blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer7'] = Param('layer7', 'Layer 7',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer7a'] = Param('layer7a', 'Layer 7 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer7blend'] = Param('layer7blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n self.params['layer8'] = Param('layer8', 'Layer 8',\n 'The color plugged in here will be blended over the layers below according to its alpha and blend mode.'\n , 'rgb', presets=None)\n self.params['layer8a'] = Param('layer8a', 'Layer 8 Alpha',\n 'The alpha used to blend this layer over the layers below.',\n 'float', presets=None)\n self.params['layer8blend'] = Param('layer8blend', 'Mode',\n 'The blend mode used to blend this layer over the layers below.',\n 'enum', presets=None)\n 
self.addSwatch()\n self.beginScrollLayout()\n self.addCustomRgb('layer1')\n self.addCustomFlt('layer1a')\n self.addControl('layer1blend', label='Mode', annotation=\n 'Blend mode for the background layer.')\n self.addCustomRgb('layer2')\n self.addCustomFlt('layer2a')\n self.addControl('layer2blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer3')\n self.addCustomFlt('layer3a')\n self.addControl('layer3blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer4')\n self.addCustomFlt('layer4a')\n self.addControl('layer4blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer5')\n self.addCustomFlt('layer5a')\n self.addControl('layer5blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer6')\n self.addCustomFlt('layer6a')\n self.addControl('layer6blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer7')\n self.addCustomFlt('layer7a')\n self.addControl('layer7blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n self.addCustomRgb('layer8')\n self.addCustomFlt('layer8a')\n self.addControl('layer8blend', label='Mode', annotation=\n 'The blend mode used to blend this layer over the layers below.')\n pm.mel.AEdependNodeTemplate(self.nodeName)\n self.addExtraControls()\n self.endScrollLayout()\n",
"step-5": "import pymel.core as pm\nfrom alShaders import *\n\nclass AEalLayerColorTemplate(alShadersTemplate):\n\tcontrols = {}\n\tparams = {}\n\tdef setup(self):\n\t\tself.params.clear()\n\t\tself.params[\"layer1\"] = Param(\"layer1\", \"Layer 1\", \"The background layer (will be blended over black if its alpha is not 1.\", \"rgb\", presets=None)\n\t\tself.params[\"layer1a\"] = Param(\"layer1a\", \"Layer 1 Alpha\", \"The alpha of the background layer\", \"float\", presets=None)\n\t\tself.params[\"layer1blend\"] = Param(\"layer1blend\", \"Mode\", \"Blend mode for the background layer.\", \"enum\", presets=None)\n\t\tself.params[\"layer2\"] = Param(\"layer2\", \"Layer 2\", \"The color plugged in here will be blended over the layers below according to its alpha and blend mode.\", \"rgb\", presets=None)\n\t\tself.params[\"layer2a\"] = Param(\"layer2a\", \"Layer 2 Alpha\", \"The alpha used to blend this layer over the layers below.\", \"float\", presets=None)\n\t\tself.params[\"layer2blend\"] = Param(\"layer2blend\", \"Mode\", \"The blend mode used to blend this layer over the layers below.\", \"enum\", presets=None)\n\t\tself.params[\"layer3\"] = Param(\"layer3\", \"Layer 3\", \"The color plugged in here will be blended over the layers below according to its alpha and blend mode.\", \"rgb\", presets=None)\n\t\tself.params[\"layer3a\"] = Param(\"layer3a\", \"Layer 3 Alpha\", \"The alpha used to blend this layer over the layers below.\", \"float\", presets=None)\n\t\tself.params[\"layer3blend\"] = Param(\"layer3blend\", \"Mode\", \"The blend mode used to blend this layer over the layers below.\", \"enum\", presets=None)\n\t\tself.params[\"layer4\"] = Param(\"layer4\", \"Layer 4\", \"The color plugged in here will be blended over the layers below according to its alpha and blend mode.\", \"rgb\", presets=None)\n\t\tself.params[\"layer4a\"] = Param(\"layer4a\", \"Layer 4 Alpha\", \"The alpha used to blend this layer over the layers below.\", \"float\", 
presets=None)\n\t\tself.params[\"layer4blend\"] = Param(\"layer4blend\", \"Mode\", \"The blend mode used to blend this layer over the layers below.\", \"enum\", presets=None)\n\t\tself.params[\"layer5\"] = Param(\"layer5\", \"Layer 5\", \"The color plugged in here will be blended over the layers below according to its alpha and blend mode.\", \"rgb\", presets=None)\n\t\tself.params[\"layer5a\"] = Param(\"layer5a\", \"Layer 5 Alpha\", \"The alpha used to blend this layer over the layers below.\", \"float\", presets=None)\n\t\tself.params[\"layer5blend\"] = Param(\"layer5blend\", \"Mode\", \"The blend mode used to blend this layer over the layers below.\", \"enum\", presets=None)\n\t\tself.params[\"layer6\"] = Param(\"layer6\", \"Layer 6\", \"The color plugged in here will be blended over the layers below according to its alpha and blend mode.\", \"rgb\", presets=None)\n\t\tself.params[\"layer6a\"] = Param(\"layer6a\", \"Layer 6 Alpha\", \"The alpha used to blend this layer over the layers below.\", \"float\", presets=None)\n\t\tself.params[\"layer6blend\"] = Param(\"layer6blend\", \"Mode\", \"The blend mode used to blend this layer over the layers below.\", \"enum\", presets=None)\n\t\tself.params[\"layer7\"] = Param(\"layer7\", \"Layer 7\", \"The color plugged in here will be blended over the layers below according to its alpha and blend mode.\", \"rgb\", presets=None)\n\t\tself.params[\"layer7a\"] = Param(\"layer7a\", \"Layer 7 Alpha\", \"The alpha used to blend this layer over the layers below.\", \"float\", presets=None)\n\t\tself.params[\"layer7blend\"] = Param(\"layer7blend\", \"Mode\", \"The blend mode used to blend this layer over the layers below.\", \"enum\", presets=None)\n\t\tself.params[\"layer8\"] = Param(\"layer8\", \"Layer 8\", \"The color plugged in here will be blended over the layers below according to its alpha and blend mode.\", \"rgb\", presets=None)\n\t\tself.params[\"layer8a\"] = Param(\"layer8a\", \"Layer 8 Alpha\", \"The alpha used to blend 
this layer over the layers below.\", \"float\", presets=None)\n\t\tself.params[\"layer8blend\"] = Param(\"layer8blend\", \"Mode\", \"The blend mode used to blend this layer over the layers below.\", \"enum\", presets=None)\n\n\t\tself.addSwatch()\n\t\tself.beginScrollLayout()\n\n\t\tself.addCustomRgb(\"layer1\")\n\t\tself.addCustomFlt(\"layer1a\")\n\t\tself.addControl(\"layer1blend\", label=\"Mode\", annotation=\"Blend mode for the background layer.\")\n\t\tself.addCustomRgb(\"layer2\")\n\t\tself.addCustomFlt(\"layer2a\")\n\t\tself.addControl(\"layer2blend\", label=\"Mode\", annotation=\"The blend mode used to blend this layer over the layers below.\")\n\t\tself.addCustomRgb(\"layer3\")\n\t\tself.addCustomFlt(\"layer3a\")\n\t\tself.addControl(\"layer3blend\", label=\"Mode\", annotation=\"The blend mode used to blend this layer over the layers below.\")\n\t\tself.addCustomRgb(\"layer4\")\n\t\tself.addCustomFlt(\"layer4a\")\n\t\tself.addControl(\"layer4blend\", label=\"Mode\", annotation=\"The blend mode used to blend this layer over the layers below.\")\n\t\tself.addCustomRgb(\"layer5\")\n\t\tself.addCustomFlt(\"layer5a\")\n\t\tself.addControl(\"layer5blend\", label=\"Mode\", annotation=\"The blend mode used to blend this layer over the layers below.\")\n\t\tself.addCustomRgb(\"layer6\")\n\t\tself.addCustomFlt(\"layer6a\")\n\t\tself.addControl(\"layer6blend\", label=\"Mode\", annotation=\"The blend mode used to blend this layer over the layers below.\")\n\t\tself.addCustomRgb(\"layer7\")\n\t\tself.addCustomFlt(\"layer7a\")\n\t\tself.addControl(\"layer7blend\", label=\"Mode\", annotation=\"The blend mode used to blend this layer over the layers below.\")\n\t\tself.addCustomRgb(\"layer8\")\n\t\tself.addCustomFlt(\"layer8a\")\n\t\tself.addControl(\"layer8blend\", label=\"Mode\", annotation=\"The blend mode used to blend this layer over the layers below.\")\n\n\t\tpm.mel.AEdependNodeTemplate(self.nodeName)\n\t\tself.addExtraControls()\n\n\t\tself.endScrollLayout()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def fn1():
print('One')
def fn2():
print('Two')
def fn3():
print('Three')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def fn1():
print('One')
def fn2():
print('Two')
def fn3():
print('Three')
<|reserved_special_token_0|>
def sub(one, two):
c = one - two
print(c)
print(type(c))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def fn1():
print('One')
def fn2():
print('Two')
def fn3():
print('Three')
<|reserved_special_token_0|>
def add(one, two):
c = one + two
print(c)
print(type(c))
def sub(one, two):
c = one - two
print(c)
print(type(c))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print('\n')
def fn1():
print('One')
def fn2():
print('Two')
def fn3():
print('Three')
<|reserved_special_token_0|>
fndict[keynames[1]]()
fndict['C']()
def add(one, two):
c = one + two
print(c)
print(type(c))
def sub(one, two):
c = one - two
print(c)
print(type(c))
<|reserved_special_token_0|>
trainee[1](10, 4)
print('\n PROVERKA TIPA', type(trainee[1]))
print('\n PROVERKA TIPA', type(trainee[1](10, 4)))
<|reserved_special_token_1|>
print('\n')
# Первый вариант
def fn1():
print("One")
def fn2():
print("Two")
def fn3():
print("Three")
fndict = {"A": fn1, "B": fn2, "C": fn3}
keynames = ["A", "B", "C"]
fndict[keynames[1]]()
fndict['C']()
# Второй вариант
def add(one,two):
c = one+two
print(c)
print(type(c))
def sub(one,two):
c = one-two
print(c)
print(type(c))
trainee = {1:add, 2:sub}
trainee[1](10,4)
print('\n PROVERKA TIPA', type(trainee[1]))
print('\n PROVERKA TIPA', type(trainee[1](10,4)))
|
flexible
|
{
"blob_id": "dc226a646af32d052c6d51832b95a340d6986e08",
"index": 489,
"step-1": "<mask token>\n\n\ndef fn1():\n print('One')\n\n\ndef fn2():\n print('Two')\n\n\ndef fn3():\n print('Three')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef fn1():\n print('One')\n\n\ndef fn2():\n print('Two')\n\n\ndef fn3():\n print('Three')\n\n\n<mask token>\n\n\ndef sub(one, two):\n c = one - two\n print(c)\n print(type(c))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef fn1():\n print('One')\n\n\ndef fn2():\n print('Two')\n\n\ndef fn3():\n print('Three')\n\n\n<mask token>\n\n\ndef add(one, two):\n c = one + two\n print(c)\n print(type(c))\n\n\ndef sub(one, two):\n c = one - two\n print(c)\n print(type(c))\n\n\n<mask token>\n",
"step-4": "print('\\n')\n\n\ndef fn1():\n print('One')\n\n\ndef fn2():\n print('Two')\n\n\ndef fn3():\n print('Three')\n\n\n<mask token>\nfndict[keynames[1]]()\nfndict['C']()\n\n\ndef add(one, two):\n c = one + two\n print(c)\n print(type(c))\n\n\ndef sub(one, two):\n c = one - two\n print(c)\n print(type(c))\n\n\n<mask token>\ntrainee[1](10, 4)\nprint('\\n PROVERKA TIPA', type(trainee[1]))\nprint('\\n PROVERKA TIPA', type(trainee[1](10, 4)))\n",
"step-5": "\nprint('\\n')\n\n#\tПервый вариант\n\ndef fn1():\n print(\"One\")\n\ndef fn2():\n print(\"Two\")\n\ndef fn3():\n print(\"Three\")\n\nfndict = {\"A\": fn1, \"B\": fn2, \"C\": fn3}\n\nkeynames = [\"A\", \"B\", \"C\"]\n\nfndict[keynames[1]]()\nfndict['C']()\n\n\n#\t\tВторой вариант\n\ndef add(one,two):\n\tc = one+two\n\tprint(c)\n\tprint(type(c))\n\ndef sub(one,two):\n\tc = one-two\n\tprint(c)\n\tprint(type(c))\n\ntrainee = {1:add, 2:sub}\n\ntrainee[1](10,4)\nprint('\\n PROVERKA TIPA', type(trainee[1]))\nprint('\\n PROVERKA TIPA', type(trainee[1](10,4)))\n",
"step-ids": [
3,
4,
5,
6,
8
]
}
|
[
3,
4,
5,
6,
8
] |
# -*- coding: GB18030 -*-
import inspect
import os,sys
import subprocess
from lib.common.utils import *
from lib.common.loger import loger
from lib.common import checker
from lib.common.logreader import LogReader
import shutil
from lib.common.XmlHandler import *
from lib.common.Dict import *
class baseModule(object):
    """Base class for test-framework modules.

    Holds the state every concrete module shares (paths, allocated ports,
    backup flags, log readers) and the common operations (start/stop,
    conf and dict editing, log checking).  Concrete modules override the
    hook methods (build_relation, set_listen_port, start, set_req, check,
    reqdata, ...) as needed.
    """
    def __init__(self):
        self.sys = Shell_System()
        self.path = None
        # Path of the module binary.
        self.bin_path = None
        # Path of the module configuration directory.
        self.conf_path = None
        # Path of the module dictionary directory.
        self.dict_path = None
        # Path of the module log directory.
        self.log_path = None
        # Ports allocated to this module.
        self.port = []
        # Number of ports this module needs.
        self.port_num = 0
        # Module name.
        self.type = None
        # Whether conf should be backed up / restored.
        self.conf_bak_flag = False
        # Whether dict should be backed up / restored.
        self.dict_back_flag = False
        # Downstream modules wired to this one (filled by add_relation).
        # Bug fix: this was never initialized, so add_relation raised
        # AttributeError unless a subclass happened to define it.
        self.module_rel_set = []
        # The fields below are initialized by each concrete module as needed.
        # Notice log file name.
        self.ntlogname = None
        # Warning/fatal log file name.
        self.wflogname = None
        self.nt_logreader = None
        self.wf_logreader = None

    def add_relation(self, module):
        """Record a downstream relation to an already-constructed module.

        ``module`` is an instance of another module class.
        """
        self.module_rel_set.append(module)
        loger.info("Topology is %s ----> %s", self.type, getattr(module, "type"))
        return 0

    def build_relation(self):
        """Hook: wire this module to its downstream modules.

        Must be implemented by modules that have downstream dependencies.
        """
        pass

    def get_port(self):
        """Return the list of ports allocated to this module."""
        return self.port

    def set_listen_port(self):
        """Hook: write the allocated listen port(s) into the module conf."""
        pass

    def start(self):
        """Hook: start the module.

        Implementations can verify startup via
        checker.check_process_exist(processpath) or
        checker.check_port_exist(port).
        """
        pass

    def stop(self):
        """Stop the module by killing the process at self.bin_path."""
        # "is not None" replaces the deprecated (Py3-invalid) "<>" operator.
        if self.bin_path is not None and os.path.exists(self.bin_path):
            kill_process(self.bin_path)
            loger.debug("kill process %s" % (self.bin_path))
        else:
            loger.warning("module [%s] has not bin_path!" % (self.type))

    def bak_or_revert_env(self):
        """Back up or restore the module environment according to the flags.

        Cleans the log directory, renames any core files, then backs up
        (first run) or restores (later runs) conf and dict:
        - if path.robotbak does not exist, path is backed up;
        - if path.robotbak exists, it is copied back over path.
        """
        # Clean the log directory.
        if self.log_path is not None:
            cmd = "rm -rf " + self.log_path
            loger.debug(cmd)
            self.sys.shell(cmd)
        # Rename core files so later runs can tell old cores from new ones.
        rename_cores(self.path)
        # Back up / restore conf.
        if self.conf_bak_flag:
            bak_or_revert(self.conf_path)
        # Back up / restore dict.
        if self.dict_back_flag:
            bak_or_revert(self.dict_path)
        return 0

    def __conf_op(self, optype, confid, k, v=None):
        """Shared implementation of the three conf operations.

        optype: 0 = set, 1 = get, 2 = delete.
        Exposed through set_conf / get_conf / delete_conf.
        """
        if self.path is None:
            raise AssertionError("get modulepath error[%s]" % (self.path))
        path, seg = getconfitem(self.path, self.type, confid)
        if path is None:
            raise AssertionError("set conf error[%s][%s][%s][%s]" % (self.type, confid, k, v))
        conf = UbConfigParser(path, seg)
        if optype == 0:
            conf.set(k, v)
            return
        if optype == 1:
            return conf.get(k)
        if optype == 2:
            conf.delete(k)
            return

    def set_conf(self, confid, k, v):
        """Set conf item k to v; confid is the id registered in conf.xml."""
        return self.__conf_op(0, confid, str(k), str(v))

    def get_conf(self, confid, k):
        """Return the value of conf item k."""
        return self.__conf_op(1, confid, str(k))

    def delete_conf(self, confid, k):
        """Delete conf item k."""
        return self.__conf_op(2, confid, str(k))

    def set_dict(self, dictid, *line_item):
        """Append one dict line; each element of line_item fills one column."""
        path, seg = getdictitem(self.type, dictid)
        real_path = os.path.join(self.path, path)
        dicth = DictHandler(real_path, seg)
        dicth.set_dict(line_item)

    def clear_dict(self, dictid):
        """Clear the dictionary registered under dictid."""
        path, seg = getdictitem(self.type, dictid)
        real_path = os.path.join(self.path, path)
        # Bug fix: DictHandler takes (path, seg), consistent with set_dict;
        # the original passed a spurious extra ``self`` argument.
        dicth = DictHandler(real_path, seg)
        dicth.clear_dict()

    # The interfaces below are test helpers.
    def check_notice_log_has(self, regex):
        """Return True if the notice log matches regex, else False.

        The log reader is created lazily on first use.
        """
        if self.nt_logreader is None:
            nt_log_path = os.path.join(self.log_path, self.ntlogname)
            self.nt_logreader = LogReader(nt_log_path)
        return checker.check_log_contain(self.nt_logreader, regex)

    def check_wf_log_has(self, regex):
        """Return True if the warning/fatal log matches regex, else False.

        The log reader is created lazily on first use.
        """
        if self.wf_logreader is None:
            wf_log_path = os.path.join(self.log_path, self.wflogname)
            self.wf_logreader = LogReader(wf_log_path)
        return checker.check_log_contain(self.wf_logreader, regex)

    def check_fatal(self):
        """Return True if the warning/fatal log contains a FATAL line."""
        regex = "^FATAL.*"
        return self.check_wf_log_has(regex)

    def set_req(self, reqresjs=None, *agrs):
        """Hook: build the request (note: this is not dictionary setup)."""
        pass

    def set_res(self):
        """Hook: build the response.

        Bug fix: the original definition was missing ``self`` and could not
        be called on an instance.
        """
        pass

    def common_check(self):
        """Generic post-run check: dump recent logs, log cores, fail on FATAL.

        Takes no arguments; raises AssertionError when a FATAL is found.
        """
        # Dump the tail of both logs for diagnosis.
        if self.nt_logreader is None:
            nt_log_path = os.path.join(self.log_path, self.ntlogname)
            self.nt_logreader = LogReader(nt_log_path)
        if self.wf_logreader is None:
            wf_log_path = os.path.join(self.log_path, self.wflogname)
            self.wf_logreader = LogReader(wf_log_path)
        loger.diagnose("Module[%s] wf logs:\n%s" % (self.type, self.wf_logreader.read_fatal_and_last_lines(10)))
        loger.diagnose("Module[%s] notice logs:\n%s" % (self.type, self.nt_logreader.read_last_lines(10)))
        # Check for core files.
        log_cores(self.path)
        # Check for FATAL lines.
        if self.check_fatal():
            raise AssertionError("There FATAL in module[%s]" % (self.type))

    def check(self, checkjs=None):
        """Hook: module-specific result check."""
        pass

    def reqdata(self):
        """Hook: expose this module's request as JSON on an internal field."""
        pass

    def get_used_port(self):
        """Return the list of ports currently in use on this machine."""
        used_port_list = self.sys.shell("netstat -na 2>/dev/null|grep \":\"|awk -F \"[ :]\" '{print $17}'", output="true")[1].splitlines()
        return used_port_list
def test_system():
    """Ad-hoc unit test for Shell_System.

    Exercises shell() with and without captured output, including commands
    that are expected to fail ("rm b.txt", "ttt").
    """
    npatSys = Shell_System()
    npatSys.shell("echo '12345' > a.txt")
    npatSys.shell("rm b.txt")
    npatSys.shell("cat a.txt b.txt", output=True)
    npatSys.shell("ttt")
    npatSys.shell("ttt", output=True)
    used_port_list = npatSys.shell("netstat -na 2>/dev/null|grep \":\"|awk -F \"[ :]\" '{print $17}'", output="true")[1].splitlines()
    # print-as-function keeps the file parseable under both Python 2 and 3;
    # the original Python-2-only "print x" statement is a SyntaxError in 3.
    print(used_port_list)
if __name__ == '__main__':
    # Smoke test: construct the base module and show the shell-helper type.
    # print() form replaces the Python-2-only print statement.
    mm = baseModule()
    print(type(mm.sys))
|
normal
|
{
"blob_id": "a74d27d9e31872100b4f22512abe9de7d9277de7",
"index": 2970,
"step-1": "# -*- coding: GB18030 -*-\nimport inspect\nimport os,sys\nimport subprocess\nfrom lib.common.utils import *\nfrom lib.common.loger import loger\nfrom lib.common import checker\nfrom lib.common.logreader import LogReader\nimport shutil\nfrom lib.common.XmlHandler import *\nfrom lib.common.Dict import *\n\nclass baseModule(object):\n def __init__(self):\n self.sys = Shell_System()\n self.path =None\n #模块bin 路径\n self.bin_path = None\n #模块配置路径\n self.conf_path = None\n #模块字典路径\n self.dict_path = None\n #log路径\n self.log_path = None\n #用于存储被分配得到的端口\n self.port=[]\n #用于表示本模块需要设置的端口数目\n self.port_num = 0\n #用于表示模块名\n self.type=None\n #是否进行conf 备份flag\n self.conf_bak_flag = False\n #是否进行dict备份\n self.dict_back_flag = False\n #以下变量根据需要在各个module中初始化\n #notice 日志名称\n self.ntlogname = None\n #WF日志名称\n self.wflogname = None\n self.nt_logreader = None\n self.wf_logreader = None\n \n def add_relation(self,module):\n \"\"\"\n @note: 参数传递的是已经生成的其他module的实例\n 具体关联关系的建立\n \"\"\"\n self.module_rel_set.append(module)\n loger.info(\"Topology is %s ----> %s\",self.type,getattr(module,\"type\"))\n return 0\n\n def build_relation(self):\n \"\"\"\n @note: 如果有下游模块必须实现改方法\n 建本模块和下游模块关系\n \"\"\"\n pass\n \n def get_port(self):\n \"\"\"\n @note: 返回本模块申请的端口list\n \"\"\"\n return self.port\n\n def set_listen_port(self):\n \"\"\"\n @note:各模块实现设置对用的conf\n \"\"\"\n pass\n\n def start(self):\n \"\"\"\n @note: 启动模块\n 注意可通过端口或进程是否存在判断是否启动成功\n checker.check_process_exist(processpath)\n checker.check_port_exist(port)\n \"\"\"\n pass\n\n def stop(self):\n \"\"\"\n @note:停止运行\n 默认通过self.bin_path实现\n \"\"\"\n if self.bin_path <> None and os.path.exists(self.bin_path):\n kill_process(self.bin_path)\n loger.debug(\"kill process %s\"%(self.bin_path))\n else:\n loger.warning(\"module [%s] has not bin_path!\"%(self.type))\n\n def bak_or_revert_env(self):\n \"\"\"\n @note:根据bakflag 进行bak 操作\n 默认进行两项bak conf dict\n 如果path.robotbak不存在,则将path备份\n - 如果path.dtsbak存在,则用path.robotbak覆盖path\n \"\"\"\n 
#清理log目录\n if self.log_path is not None:\n cmd = \"rm -rf \" + self.log_path\n loger.debug(cmd)\n self.sys.shell(cmd)\n # 重命名core\n rename_cores(self.path)\n #备份恢复conf\n if self.conf_bak_flag:\n bak_or_revert(self.conf_path)\n #备份恢复dict\n if self.dict_back_flag:\n bak_or_revert(self.dict_path)\n return 0\n \n def __conf_op(self, optype, confid, k, v=None):\n \"\"\"\n @note: 封装 获取,删除、设置3种conf操作方法\n optype为操作类型 0:设置、1:获取、2:删除\n 对外接口由 set_conf、get_conf、delete_conf\n \"\"\"\n if self.path is None:\n raise AssertionError(\"get modulepath error[%s]\"%(self.path))\n path, seg = getconfitem(self.path, self.type, confid)\n if path is None:\n raise AssertionError(\"set conf error[%s][%s][%s][%s]\"%(self.type, confid, k , v))\n conf = UbConfigParser(path, seg)\n if optype == 0:\n conf.set(k , v)\n return \n if optype == 1:\n return conf.get(k)\n if optype == 2:\n conf.delete(k)\n return\n \n def set_conf(self, confid, k, v):\n \"\"\"\n @note:设置conf\n confid为conf.xml中注册id\n \"\"\"\n return self.__conf_op(0, confid, str(k), str(v)) \n\n def get_conf(self, confid, k):\n return self.__conf_op(1, confid, str(k))\n\n def delete_conf(self, confid, k):\n return self.__conf_op(2, confid, str(k))\n \n def set_dict(self, dictid, *line_item):\n \"\"\"\n @note:设置字典数据 将数据设置进不同的列中\n \"\"\"\n path, seg = getdictitem(self.type, dictid) \n real_path = os.path.join(self.path, path)\n dicth = DictHandler(real_path, seg)\n dicth.set_dict(line_item)\n\n def clear_dict(self, dictid):\n \"\"\"\n @note:清理字典\n \"\"\"\n path, seg = getdictitem(self.type, dictid) \n real_path = os.path.join(self.path, path)\n dicth = DictHandler(self, real_path, seg)\n dicth.clear_dict()\n\n #以下接口为测试接口\n def check_notice_log_has(self, regex):\n \"\"\"\n @note:检查 notice log中是否包含某项\n regex为匹配正则表达式\n return: 包含返回True、否则为False \n \"\"\"\n if self.nt_logreader == None:\n nt_log_path = os.path.join(self.log_path, self.ntlogname)\n self.nt_logreader = LogReader(nt_log_path)\n return 
checker.check_log_contain(self.nt_logreader,regex)\n \n def check_wf_log_has(self, regex):\n \"\"\"\n 检查wf日志包含某项\n regex为匹配正则表达式\n return: 包含返回True、否则为False \n \"\"\"\n if self.wf_logreader == None:\n wf_log_path = os.path.join(self.log_path, self.wflogname)\n self.wf_logreader = LogReader(wf_log_path)\n return checker.check_log_contain(self.wf_logreader, regex)\n \n def check_fatal(self):\n \"\"\"\n @note:检查结果中是否包含fatal\n return: 包含fatal 返回 True, 否则返回false\n \"\"\"\n regex=\"^FATAL.*\"\n return self.check_wf_log_has(regex)\n\n \n def set_req(self, reqresjs=None, *agrs):\n \"\"\"\n @note:设置请求\n 注意不是字典设置\n \"\"\"\n pass\n\n def set_res():\n \"\"\"\n @note:设置返回\n \"\"\"\n pass\n\n def common_check(self):\n \"\"\"\n 通用commoncheck接口\n 该接口无传入参数\n 一般用作fatal、core等检查\n \"\"\"\n #将log打印出\n if self.nt_logreader == None:\n nt_log_path = os.path.join(self.log_path, self.ntlogname)\n self.nt_logreader = LogReader(nt_log_path)\n if self.wf_logreader == None:\n wf_log_path = os.path.join(self.log_path, self.wflogname)\n self.wf_logreader = LogReader(wf_log_path)\n loger.diagnose(\"Module[%s] wf logs:\\n%s\"%(self.type, self.wf_logreader.read_fatal_and_last_lines(10)))\n loger.diagnose(\"Module[%s] notice logs:\\n%s\"%(self.type, self.nt_logreader.read_last_lines(10)))\n #检查core\n log_cores(self.path)\n #检查FATAL\n if self.check_fatal():\n raise AssertionError(\"There FATAL in module[%s]\"%(self.type))\n \n def check(self, checkjs=None):\n \"\"\"\n @note:check接口\n \"\"\"\n pass\n \n def reqdata(self):\n '''\n @note: 将各个模块的req形成json赋值给内部变量\n '''\n pass\n\n def get_used_port(self):\n \"\"\"\n @note:获得该模块所在机器的空闲端口号 \n \"\"\"\n used_port_list = self.sys.shell(\"netstat -na 2>/dev/null|grep \\\":\\\"|awk -F \\\"[ :]\\\" '{print $17}'\",output = \"true\")[1].splitlines()\n return used_port_list\n\ndef test_system():\n \"单元测试\"\n npatSys = Shell_System()\n npatSys.shell(\"echo '12345' > a.txt\")\n npatSys.shell(\"rm b.txt\")\n npatSys.shell(\"cat a.txt b.txt\", output = True)\n 
npatSys.shell(\"ttt\")\n npatSys.shell(\"ttt\", output = True)\n used_port_list = npatSys.shell(\"netstat -na 2>/dev/null|grep \\\":\\\"|awk -F \\\"[ :]\\\" '{print $17}'\",output = \"true\")[1].splitlines()\n print used_port_list\n\nif __name__ == '__main__':\n mm = baseModule()\n print type(mm.sys)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import tensorflow as tf
def build_shared_network(x, add_summaries=False):
    """Build the conv trunk shared by the policy and value heads.

    Args:
        x: input image batch (float tensor).
        add_summaries: when True, attach an activation summary to each layer.
    Returns:
        The output tensor of the 256-unit dense layer ("fc1").
    """
    first_conv = tf.layers.conv2d(x, 16, 8, 4, activation=tf.nn.relu, name="conv1")
    second_conv = tf.layers.conv2d(first_conv, 32, 4, 2, activation=tf.nn.relu, name="conv2")
    dense_out = tf.layers.dense(tf.layers.flatten(second_conv), 256, name="fc1")
    if add_summaries:
        for layer_out in (first_conv, second_conv, dense_out):
            tf.contrib.layers.summarize_activation(layer_out)
    return dense_out
class PolicyEstimator():
    """A3C policy head: shared conv trunk plus a softmax over actions.

    Note: the constructor parameter is spelled ``num_ouptuts`` (sic); it is
    kept unchanged for backward compatibility with existing callers.
    """
    def __init__(self, num_ouptuts, reuse=False, trainable=True):
        self.num_outputs = num_ouptuts
        # Inputs: stacked 84x84x4 uint8 frames, per-example float targets
        # (advantages), and the ids of the actions actually taken.
        self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.uint8, name="X")
        self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name="Y")
        self.actions = tf.placeholder(shape=[None], dtype=tf.int32, name="actions")
        # Scale raw pixel values into [0, 1].
        x = tf.to_float(self.states) / 255.0
        batch_size = tf.shape(self.states)[0]
        with tf.variable_scope("shared", reuse=reuse):
            fc1 = build_shared_network(x, add_summaries=(not reuse))
        with tf.variable_scope("policy_net"):
            self.logits = tf.layers.dense(fc1, num_ouptuts, activation=None)
            # Small epsilon keeps tf.log finite.
            self.probs = tf.nn.softmax(self.logits) + 1e-8
            self.predictions = {"logits": self.logits, "probs": self.probs}
            # Policy entropy, used below as an exploration bonus.
            self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs), 1, name="entropy")
            self.entropy_mean = tf.reduce_mean(self.entropy, name="entropy_mean")
            # Treat probs as one flat array: each example's row starts at
            # i * num_actions, so row offset + action id addresses the
            # probability of the action actually taken; tf.gather then pulls
            # out exactly those probabilities.
            gather_indices = tf.range(batch_size) * tf.shape(self.probs)[1] + self.actions
            self.picked_action_probs = tf.gather(tf.reshape(self.probs, [-1]), gather_indices)
            # Policy-gradient loss with an entropy regularizer (weight 0.01).
            self.losses = - (tf.log(self.picked_action_probs) * self.targets + 0.01*self.entropy)
            self.loss = tf.reduce_sum(self.losses, name="loss")
            tf.summary.scalar(self.loss.op.name, self.loss)
            tf.summary.scalar(self.entropy_mean.op.name, self.entropy_mean)
            tf.summary.histogram(self.entropy.op.name, self.entropy)
            if trainable:
                self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, 0.0, 1e-6)
                self.grads_and_vars = self.optimizer.compute_gradients(self.loss)
                # Drop None gradients so apply_gradients does not break on
                # variables unrelated to this loss.
                self.grads_and_vars = [[grad, var] for grad, var in self.grads_and_vars if grad is not None]
                # NOTE(review): workers apply gradients themselves; train_op
                # is kept for compatibility even though it looks unused here.
                self.train_op = self.optimizer.apply_gradients(self.grads_and_vars, global_step=tf.train.get_global_step())
        # Merge only the summaries created under the current variable scope.
        # (A redundant first filter on "policy_net"/"shared" names that was
        # immediately overwritten has been removed; behavior is unchanged.)
        var_scope_name = tf.get_variable_scope().name
        summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)
        summaries = [s for s in summary_ops if var_scope_name in s.name]
        self.summaries = tf.summary.merge(summaries)
class ValueEstimator():
    """Value-function head of an A3C-style network (TF1 graph mode).

    Estimates V(s) from a stack of 4 grayscale 84x84 frames, sharing the
    convolutional trunk (variable scope "shared") with the policy network.

    Args:
        reuse: reuse the shared trunk's variables instead of creating them
            (True for every network built after the first one).
        trainable: when True, also build the RMSProp gradient/update ops.
    """

    def __init__(self, reuse=False, trainable=True):
        # Graph inputs: raw uint8 frame stacks and the regression targets
        # (presumably empirical returns -- confirm against the worker code).
        self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.uint8, name="X")
        self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name="Y")

        # Scale pixels from [0, 255] down to [0, 1].
        x = tf.to_float(self.states) / 255.0

        with tf.variable_scope("shared", reuse=reuse):
            fc1 = build_shared_network(x, add_summaries=(not reuse))

        with tf.variable_scope("value_net"):
            self.logits = tf.layers.dense(fc1, 1, activation=None)
            # Collapse the trailing size-1 axis: [batch, 1] -> [batch].
            self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name="logits")

        # Squared-error loss against the value targets.
        self.losses = tf.squared_difference(self.logits, self.targets)
        self.loss = tf.reduce_sum(self.losses, name="loss")

        self.predictions = {"logits": self.logits}

        # TensorBoard summaries over predicted values and targets; `prefix`
        # is whatever variable scope the caller built this network inside.
        prefix = tf.get_variable_scope().name
        tf.summary.scalar(self.loss.name, self.loss)
        tf.summary.scalar("{}/max_value".format(prefix), tf.reduce_max(self.logits))
        tf.summary.scalar("{}/min_value".format(prefix), tf.reduce_min(self.logits))
        tf.summary.scalar("{}/mean_value".format(prefix), tf.reduce_mean(self.logits))
        tf.summary.scalar("{}/reward_max".format(prefix), tf.reduce_max(self.targets))
        tf.summary.scalar("{}/reward_min".format(prefix), tf.reduce_min(self.targets))
        tf.summary.scalar("{}/reward_mean".format(prefix), tf.reduce_mean(self.targets))
        tf.summary.histogram("{}/reward_targets".format(prefix), self.targets)
        tf.summary.histogram("{}/values".format(prefix), self.logits)

        if trainable:
            self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, 0.0, 1e-6)
            self.grads_and_vars = self.optimizer.compute_gradients(self.loss)
            # Drop variables with no gradient w.r.t. this loss so
            # apply_gradients() does not receive None entries.
            self.grads_and_vars = [[grad, var] for grad, var in self.grads_and_vars if grad is not None]
            self.train_op = self.optimizer.apply_gradients(self.grads_and_vars, global_step=tf.train.get_global_step())

        # Merge only the summaries created under this network's scope.
        # (The original first filtered on "policy_net"/"shared" names and
        # then immediately overwrote the result; that dead assignment --
        # copied from the policy network -- has been removed.)
        var_scope_name = tf.get_variable_scope().name
        summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)
        summaries = [s for s in summary_ops if var_scope_name in s.name]
        self.summaries = tf.summary.merge(summaries)
|
normal
|
{
"blob_id": "0fbf8efd39f583581c46fcd3f84c65a7787145cd",
"index": 847,
"step-1": "<mask token>\n\n\nclass PolicyEstimator:\n <mask token>\n\n\nclass ValueEstimator:\n\n def __init__(self, reuse=False, trainable=True):\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n x = tf.to_float(self.states) / 255.0\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('value_net'):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\n 'logits')\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n self.predictions = {'logits': self.logits}\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar('{}/max_value'.format(prefix), tf.reduce_max(\n self.logits))\n tf.summary.scalar('{}/min_value'.format(prefix), tf.reduce_min(\n self.logits))\n tf.summary.scalar('{}/mean_value'.format(prefix), tf.\n reduce_mean(self.logits))\n tf.summary.scalar('{}/reward_max'.format(prefix), tf.reduce_max\n (self.targets))\n tf.summary.scalar('{}/reward_min'.format(prefix), tf.reduce_min\n (self.targets))\n tf.summary.scalar('{}/reward_mean'.format(prefix), tf.\n reduce_mean(self.targets))\n tf.summary.histogram('{}/reward_targets'.format(prefix), self.\n targets)\n tf.summary.histogram('{}/values'.format(prefix), self.logits)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n 
summaries = [s for s in summary_ops if 'policy_net' in s.name or\n 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"step-2": "<mask token>\n\n\nclass PolicyEstimator:\n\n def __init__(self, num_ouptuts, reuse=False, trainable=True):\n self.num_outputs = num_ouptuts\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n self.actions = tf.placeholder(shape=[None], dtype=tf.int32, name=\n 'actions')\n x = tf.to_float(self.states) / 255.0\n batch_size = tf.shape(self.states)[0]\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('policy_net'):\n self.logits = tf.layers.dense(fc1, num_ouptuts, activation=None)\n self.probs = tf.nn.softmax(self.logits) + 1e-08\n self.predictions = {'logits': self.logits, 'probs': self.probs}\n self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs), \n 1, name='entropy')\n self.entropy_mean = tf.reduce_mean(self.entropy, name=\n 'entropy_mean')\n gather_indices = tf.range(batch_size) * tf.shape(self.probs)[1\n ] + self.actions\n self.picked_action_probs = tf.gather(tf.reshape(self.probs, [-1\n ]), gather_indices)\n self.losses = -(tf.log(self.picked_action_probs) * self.targets +\n 0.01 * self.entropy)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n tf.summary.scalar(self.loss.op.name, self.loss)\n tf.summary.scalar(self.entropy_mean.op.name, self.entropy_mean)\n tf.summary.histogram(self.entropy.op.name, self.entropy)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.\n name 
or 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name\n ]\n self.summaries = tf.summary.merge(summaries)\n\n\nclass ValueEstimator:\n\n def __init__(self, reuse=False, trainable=True):\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n x = tf.to_float(self.states) / 255.0\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('value_net'):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\n 'logits')\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n self.predictions = {'logits': self.logits}\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar('{}/max_value'.format(prefix), tf.reduce_max(\n self.logits))\n tf.summary.scalar('{}/min_value'.format(prefix), tf.reduce_min(\n self.logits))\n tf.summary.scalar('{}/mean_value'.format(prefix), tf.\n reduce_mean(self.logits))\n tf.summary.scalar('{}/reward_max'.format(prefix), tf.reduce_max\n (self.targets))\n tf.summary.scalar('{}/reward_min'.format(prefix), tf.reduce_min\n (self.targets))\n tf.summary.scalar('{}/reward_mean'.format(prefix), tf.\n reduce_mean(self.targets))\n tf.summary.histogram('{}/reward_targets'.format(prefix), self.\n targets)\n tf.summary.histogram('{}/values'.format(prefix), self.logits)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = 
tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.name or\n 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"step-3": "<mask token>\n\n\ndef build_shared_network(x, add_summaries=False):\n conv1 = tf.layers.conv2d(x, 16, 8, 4, activation=tf.nn.relu, name='conv1')\n conv2 = tf.layers.conv2d(conv1, 32, 4, 2, activation=tf.nn.relu, name=\n 'conv2')\n fc1 = tf.layers.dense(tf.layers.flatten(conv2), 256, name='fc1')\n if add_summaries:\n tf.contrib.layers.summarize_activation(conv1)\n tf.contrib.layers.summarize_activation(conv2)\n tf.contrib.layers.summarize_activation(fc1)\n return fc1\n\n\nclass PolicyEstimator:\n\n def __init__(self, num_ouptuts, reuse=False, trainable=True):\n self.num_outputs = num_ouptuts\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n self.actions = tf.placeholder(shape=[None], dtype=tf.int32, name=\n 'actions')\n x = tf.to_float(self.states) / 255.0\n batch_size = tf.shape(self.states)[0]\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('policy_net'):\n self.logits = tf.layers.dense(fc1, num_ouptuts, activation=None)\n self.probs = tf.nn.softmax(self.logits) + 1e-08\n self.predictions = {'logits': self.logits, 'probs': self.probs}\n self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs), \n 1, name='entropy')\n self.entropy_mean = tf.reduce_mean(self.entropy, name=\n 'entropy_mean')\n gather_indices = tf.range(batch_size) * tf.shape(self.probs)[1\n ] + self.actions\n self.picked_action_probs = tf.gather(tf.reshape(self.probs, [-1\n ]), gather_indices)\n self.losses = -(tf.log(self.picked_action_probs) * self.targets +\n 0.01 * self.entropy)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n tf.summary.scalar(self.loss.op.name, self.loss)\n tf.summary.scalar(self.entropy_mean.op.name, self.entropy_mean)\n tf.summary.histogram(self.entropy.op.name, self.entropy)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 
1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.\n name or 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name\n ]\n self.summaries = tf.summary.merge(summaries)\n\n\nclass ValueEstimator:\n\n def __init__(self, reuse=False, trainable=True):\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n x = tf.to_float(self.states) / 255.0\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('value_net'):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\n 'logits')\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n self.predictions = {'logits': self.logits}\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar('{}/max_value'.format(prefix), tf.reduce_max(\n self.logits))\n tf.summary.scalar('{}/min_value'.format(prefix), tf.reduce_min(\n self.logits))\n tf.summary.scalar('{}/mean_value'.format(prefix), tf.\n reduce_mean(self.logits))\n tf.summary.scalar('{}/reward_max'.format(prefix), tf.reduce_max\n (self.targets))\n tf.summary.scalar('{}/reward_min'.format(prefix), tf.reduce_min\n (self.targets))\n tf.summary.scalar('{}/reward_mean'.format(prefix), tf.\n reduce_mean(self.targets))\n tf.summary.histogram('{}/reward_targets'.format(prefix), self.\n targets)\n 
tf.summary.histogram('{}/values'.format(prefix), self.logits)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.name or\n 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"step-4": "import tensorflow as tf\n\n\ndef build_shared_network(x, add_summaries=False):\n conv1 = tf.layers.conv2d(x, 16, 8, 4, activation=tf.nn.relu, name='conv1')\n conv2 = tf.layers.conv2d(conv1, 32, 4, 2, activation=tf.nn.relu, name=\n 'conv2')\n fc1 = tf.layers.dense(tf.layers.flatten(conv2), 256, name='fc1')\n if add_summaries:\n tf.contrib.layers.summarize_activation(conv1)\n tf.contrib.layers.summarize_activation(conv2)\n tf.contrib.layers.summarize_activation(fc1)\n return fc1\n\n\nclass PolicyEstimator:\n\n def __init__(self, num_ouptuts, reuse=False, trainable=True):\n self.num_outputs = num_ouptuts\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n self.actions = tf.placeholder(shape=[None], dtype=tf.int32, name=\n 'actions')\n x = tf.to_float(self.states) / 255.0\n batch_size = tf.shape(self.states)[0]\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('policy_net'):\n self.logits = tf.layers.dense(fc1, num_ouptuts, activation=None)\n self.probs = tf.nn.softmax(self.logits) + 1e-08\n self.predictions = {'logits': self.logits, 'probs': self.probs}\n self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs), \n 1, name='entropy')\n self.entropy_mean = tf.reduce_mean(self.entropy, name=\n 'entropy_mean')\n gather_indices = tf.range(batch_size) * tf.shape(self.probs)[1\n ] + self.actions\n self.picked_action_probs = tf.gather(tf.reshape(self.probs, [-1\n ]), gather_indices)\n self.losses = -(tf.log(self.picked_action_probs) * self.targets +\n 0.01 * self.entropy)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n tf.summary.scalar(self.loss.op.name, self.loss)\n tf.summary.scalar(self.entropy_mean.op.name, self.entropy_mean)\n tf.summary.histogram(self.entropy.op.name, self.entropy)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, 
\n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.\n name or 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name\n ]\n self.summaries = tf.summary.merge(summaries)\n\n\nclass ValueEstimator:\n\n def __init__(self, reuse=False, trainable=True):\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.\n uint8, name='X')\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name='Y')\n x = tf.to_float(self.states) / 255.0\n with tf.variable_scope('shared', reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=not reuse)\n with tf.variable_scope('value_net'):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\n 'logits')\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name='loss')\n self.predictions = {'logits': self.logits}\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar('{}/max_value'.format(prefix), tf.reduce_max(\n self.logits))\n tf.summary.scalar('{}/min_value'.format(prefix), tf.reduce_min(\n self.logits))\n tf.summary.scalar('{}/mean_value'.format(prefix), tf.\n reduce_mean(self.logits))\n tf.summary.scalar('{}/reward_max'.format(prefix), tf.reduce_max\n (self.targets))\n tf.summary.scalar('{}/reward_min'.format(prefix), tf.reduce_min\n (self.targets))\n tf.summary.scalar('{}/reward_mean'.format(prefix), tf.\n reduce_mean(self.targets))\n tf.summary.histogram('{}/reward_targets'.format(prefix), self.\n targets)\n 
tf.summary.histogram('{}/values'.format(prefix), self.logits)\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, \n 0.0, 1e-06)\n self.grads_and_vars = self.optimizer.compute_gradients(self\n .loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.\n grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.\n grads_and_vars, global_step=tf.train.get_global_step())\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if 'policy_net' in s.name or\n 'shared' in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"step-5": "import tensorflow as tf\n\ndef build_shared_network(x, add_summaries=False):\n conv1 = tf.layers.conv2d(x, 16, 8, 4, activation=tf.nn.relu, name=\"conv1\")\n conv2 = tf.layers.conv2d(conv1, 32, 4, 2, activation=tf.nn.relu, name=\"conv2\")\n\n fc1 = tf.layers.dense(tf.layers.flatten(conv2), 256, name=\"fc1\")\n\n if add_summaries:\n tf.contrib.layers.summarize_activation(conv1)\n tf.contrib.layers.summarize_activation(conv2)\n tf.contrib.layers.summarize_activation(fc1)\n\n return fc1\n\nclass PolicyEstimator():\n def __init__(self, num_ouptuts, reuse=False, trainable=True):\n self.num_outputs = num_ouptuts\n\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.uint8, name=\"X\")\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name=\"Y\")\n self.actions = tf.placeholder(shape=[None], dtype=tf.int32, name=\"actions\")\n\n x = tf.to_float(self.states) / 255.0\n batch_size = tf.shape(self.states)[0]\n\n with tf.variable_scope(\"shared\", reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=(not reuse))\n\n with tf.variable_scope(\"policy_net\"):\n self.logits = tf.layers.dense(fc1, num_ouptuts, activation=None)\n self.probs = tf.nn.softmax(self.logits) + 1e-8\n\n self.predictions = {\"logits\": self.logits, \"probs\": self.probs}\n\n self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs), 1, name=\"entropy\")\n self.entropy_mean = tf.reduce_mean(self.entropy, name=\"entropy_mean\")\n\n # 배열을 리스트처럼 만듬 => 각 데이터의 시작 부분(offset) + action값(onehot 아님) = action의 위치\n # 그 후 tf.gather을 이용해 원하는 action에 해당하는 확률값만 뽑아냄\n gather_indices = tf.range(batch_size) * tf.shape(self.probs)[1] + self.actions\n self.picked_action_probs = tf.gather(tf.reshape(self.probs, [-1]), gather_indices)\n\n self.losses = - (tf.log(self.picked_action_probs) * self.targets + 0.01*self.entropy)\n self.loss = tf.reduce_sum(self.losses, name=\"loss\")\n\n tf.summary.scalar(self.loss.op.name, self.loss)\n tf.summary.scalar(self.entropy_mean.op.name, 
self.entropy_mean)\n tf.summary.histogram(self.entropy.op.name, self.entropy)\n\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, 0.0, 1e-6)\n self.grads_and_vars = self.optimizer.compute_gradients(self.loss)\n # grad가 None인 경우 학습이 망가지는 것을 막기 위해서 이렇게 만든 듯 하다.\n self.grads_and_vars = [[grad, var] for grad, var in self.grads_and_vars if grad is not None]\n # 여기 train_op 정작 쓰진 않음. worker에서 apply_gradient를 함. 지워도 될 듯\n self.train_op = self.optimizer.apply_gradients(self.grads_and_vars, global_step=tf.train.get_global_step())\n\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if \"policy_net\" in s.name or \"shared\" in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n\nclass ValueEstimator():\n def __init__(self, reuse=False, trainable=True):\n\n self.states = tf.placeholder(shape=[None, 84, 84, 4], dtype=tf.uint8, name=\"X\")\n self.targets = tf.placeholder(shape=[None], dtype=tf.float32, name=\"Y\")\n\n x = tf.to_float(self.states) / 255.0\n\n with tf.variable_scope(\"shared\", reuse=reuse):\n fc1 = build_shared_network(x, add_summaries=(not reuse))\n\n with tf.variable_scope(\"value_net\"):\n self.logits = tf.layers.dense(fc1, 1, activation=None)\n # squeeze는 1인 차원(행렬)을 날림. 
=> [1, 2, 3] squeeze => [2, 3]\n self.logits = tf.squeeze(self.logits, squeeze_dims=[1], name=\"logits\")\n\n self.losses = tf.squared_difference(self.logits, self.targets)\n self.loss = tf.reduce_sum(self.losses, name=\"loss\")\n\n self.predictions = { \"logits\": self.logits }\n\n prefix = tf.get_variable_scope().name\n tf.summary.scalar(self.loss.name, self.loss)\n tf.summary.scalar(\"{}/max_value\".format(prefix), tf.reduce_max(self.logits))\n tf.summary.scalar(\"{}/min_value\".format(prefix), tf.reduce_min(self.logits))\n tf.summary.scalar(\"{}/mean_value\".format(prefix), tf.reduce_mean(self.logits))\n tf.summary.scalar(\"{}/reward_max\".format(prefix), tf.reduce_max(self.targets))\n tf.summary.scalar(\"{}/reward_min\".format(prefix), tf.reduce_min(self.targets))\n tf.summary.scalar(\"{}/reward_mean\".format(prefix), tf.reduce_mean(self.targets))\n tf.summary.histogram(\"{}/reward_targets\".format(prefix), self.targets)\n tf.summary.histogram(\"{}/values\".format(prefix), self.logits)\n\n if trainable:\n self.optimizer = tf.train.RMSPropOptimizer(0.00025, 0.99, 0.0, 1e-6)\n self.grads_and_vars = self.optimizer.compute_gradients(self.loss)\n self.grads_and_vars = [[grad, var] for grad, var in self.grads_and_vars if grad is not None]\n self.train_op = self.optimizer.apply_gradients(self.grads_and_vars, global_step=tf.train.get_global_step())\n\n var_scope_name = tf.get_variable_scope().name\n summary_ops = tf.get_collection(tf.GraphKeys.SUMMARIES)\n summaries = [s for s in summary_ops if \"policy_net\" in s.name or \"shared\" in s.name]\n summaries = [s for s in summary_ops if var_scope_name in s.name]\n self.summaries = tf.summary.merge(summaries)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
#!/usr/bin/python
# coding:utf-8
#
#这个脚本主要是对apache日志文件的处理分析,过滤出需要的信息
#处理后得到的数据是: 主机IP:192.168.14.44 访问流量:814 K
#使用说明 python 脚本名 文件名; eg:python python.analysis.apachelog.py access.log
#
# by wangdd 2016/02/02
#
import os
import re
import sys
import shelve
#re 模块,利用re模块对apahce日志进行分析
#通过 re.match(……) 和 re.compile(……).match返回
# 该对象有如下方法和属性:
# 方法:
# group( [group1, ...])
# groups( [default])
# groupdict( [default])
# start( [group])
# end( [group])
#
# apache 日志格式: 192.168.47.82 - - [17/Dec/2014:16:41:03 +0800] "GET /application/account/loginIndex.htm HTTP/1.1" 200 56273
#
#基本思路是,利用re模块进行正则匹配,过滤出对应IP的访问字节数,然后把数据保存到apache_log.db数据库中,最后进行数据的格式
# Verbose regex for one line of an Apache access log, e.g.
# 192.168.47.82 - - [17/Dec/2014:16:41:03 +0800] "GET /a.htm HTTP/1.1" 200 56273
# Named groups: remote_host, log_name, login_user, time, status, bytes_sent.
log_line_re = re.compile(r'''
    (?P<remote_host>^\d{1,3}\.(\d{1,3}\.){2}\d{1,3})   # client IP address
    \s+
    (?P<log_name>\S+)                                  # identd name ("-" when absent)
    \s+
    (?P<login_user>\S+)                                # authenticated user ("-" when absent)
    \s+
    (?P<time>\[.*\])                                   # request timestamp, in brackets
    \s+
    ".*"                                               # quoted request line
    \s+
    (?P<status>\d+)                                    # HTTP status code
    \s+
    (?P<bytes_sent>-|\d+)                              # response size; "-" means zero
    ''', re.X)


def logline(line):
    """Parse one access-log line into a dict of the named groups above.

    A "-" byte count is normalised to the string "0".  Lines that do not
    match the pattern now yield a dict with the *same* keys as the matched
    case (all None except bytes_sent == "0"), so callers can index any
    field unconditionally; the original returned only three of the keys.
    """
    m = log_line_re.search(line)
    if m:
        groupdict = m.groupdict()
        if groupdict['bytes_sent'] == '-':
            groupdict['bytes_sent'] = '0'
        return groupdict
    # Unmatched line: keep the schema consistent with the matched case.
    return {
        'remote_host': None,
        'log_name': None,
        'login_user': None,
        'time': None,
        'status': None,
        'bytes_sent': '0',
    }
# Aggregate per-host traffic from the parsed log lines and print a summary.
def log_report(logfile):
    """Print the total bytes sent per remote host, in whole kilobytes.

    `logfile` is any iterable of raw access-log lines.  Lines whose byte
    count cannot be parsed as an integer are skipped; entries whose host
    could not be parsed (remote_host is None) are excluded from the output.
    """
    report_dict = {}
    for line in logfile:
        line_dict = logline(line)
        try:
            bytes_sent = int(line_dict['bytes_sent'])
        except ValueError:
            continue
        report_dict.setdefault(line_dict['remote_host'], []).append(bytes_sent)
    # .items() and the builtin sum() replace the Python-2-only iteritems()
    # and a manual accumulator that shadowed the builtin; // keeps the
    # original integer (floor) division under both Python 2 and 3.
    for host, sizes in report_dict.items():
        if host is not None:
            print ('主机IP:%s\t 访问流量:%s K' % (host, sum(sizes) // 1024))
# Persist the per-host byte totals into a shelve database (apache_log.db).
def store_data(file):
    """Accumulate bytes_sent per remote host into 'apache_log.db'.

    The totals are only built when the database file does not already
    exist.  The caller keeps ownership of `file`; it is not closed here.

    Fixes over the original: the existence guard tested the literal name
    'shelv_file' (never present) and ran *after* shelve.open() had already
    created the database; the loop body referenced the undefined name
    `data_file` (NameError on the first line) and closed both files inside
    the loop.
    """
    db_path = 'apache_log.db'
    # Check before shelve.open(), which would create the file itself.
    if os.path.isfile(db_path):
        return
    shelv_file = shelve.open(db_path)
    try:
        for line in file:
            d_line = logline(line)
            key = d_line['remote_host']
            if key is None:
                # Unparseable line: no host to attribute the bytes to.
                continue
            shelv_file[key] = (shelv_file.setdefault(key, 0)
                               + int(d_line['bytes_sent']))
    finally:
        shelv_file.close()
if __name__ == '__main__':
    # Usage: python python.analysis.apachelog.py <access_log>
    if not len(sys.argv) > 1:
        print (__doc__)
        sys.exit(1)
    infile_name = sys.argv[1]
    try:
        infile = open(infile_name, 'r')
    except IOError:
        print ("please input some file")
        print (__doc__)
        sys.exit(1)
    try:
        log_report(infile)
        # log_report() consumed the file iterator; rewind before re-reading.
        # (The original passed the exhausted file to store_data, which
        # therefore stored nothing.)
        infile.seek(0)
        store_data(infile)
    finally:
        infile.close()
#--------------------------------------------------------------------
|
normal
|
{
"blob_id": "3240a7fb9fbd5cd84165e68f8406e0a146c2b6b6",
"index": 1454,
"step-1": "#!/usr/bin/python\n# coding:utf-8\n#\n#这个脚本主要是对apache日志文件的处理分析,过滤出需要的信息\n#处理后得到的数据是:\t主机IP:192.168.14.44 访问流量:814 K\n#使用说明 python 脚本名 文件名; eg:python python.analysis.apachelog.py access.log\n#\n#\tby wangdd 2016/02/02\n#\nimport os\nimport re\nimport sys\nimport shelve\n\n#re 模块,利用re模块对apahce日志进行分析\n#通过 re.match(……) 和 re.compile(……).match返回\n# 该对象有如下方法和属性:\n# 方法:\n# group( [group1, ...])\n# groups( [default])\n# groupdict( [default])\n# start( [group])\n# end( [group]) \n#\n# apache 日志格式: 192.168.47.82 - - [17/Dec/2014:16:41:03 +0800] \"GET /application/account/loginIndex.htm HTTP/1.1\" 200 56273\n#\n#基本思路是,利用re模块进行正则匹配,过滤出对应IP的访问字节数,然后把数据保存到apache_log.db数据库中,最后进行数据的格式\n\nlog_line_re = re.compile(r'''(?P<remote_host>^\\d{1,3}\\.(\\d{1,3}\\.){2}\\d{1,3})\n\t\t\t \\s+\n\t\t\t (?P<log_name>\\S+)\n\t\t\t \\s+\n\t\t\t (?P<login_user>\\S+)\n\t\t\t \\s+\n\t\t\t (?P<time>\\[.*\\])\n\t\t \\s+\n \t\t\t \".*\"\n\t\t\t \\s+\n\t\t\t (?P<status>\\d+)\n\t\t\t \\s+\n\t\t\t (?P<bytes_sent>-|\\d+)\n\t\t\t''',re.X)\n#利用正则表达过滤出需要的数据,返回一个字典类型的数据\ndef logline(line):\n m = log_line_re.search(line)\n if m:\n\tgroupdict = m.groupdict()\n\tif groupdict['bytes_sent'] == '-':\n\t\tgroupdict['bytes_sent'] = '0'\n\treturn groupdict\n else:\n\treturn {'remote_host':None,'status':None,'bytes_sent':\"0\",}\n#从获取的字典中得到需要的数据\ndef log_report(logfile):\n report_dict ={}\n for line in logfile:\n\tline_dict = logline(line)\n\ttry:\n\t\tbytes_sent = int(line_dict['bytes_sent'])\n\texcept ValueError:\n\t\tcontinue\n\treport_dict.setdefault(line_dict['remote_host'],[]).append(bytes_sent)\n for k,v in report_dict.iteritems():\n\tsum = 0\n\tif k != None:\n\t\tfor data in v:\n\t\t\tsum = sum +data\n \t\tprint '主机IP:%s\\t 访问流量:%s K' % (k,sum/1024)\n\n#这个函数是把处理后的数据保存到data.db文件中,利用了shelv 模块\ndef store_data(file):\n shelv_file = shelve.open('apache_log.db')\n if not os.path.isfile('shelv_file'):\n \tfor line in file:\n\t\td_line = logline(line)\n \tshelv_file[d_line['remote_host']] = \\\n 
\tshelv_file.setdefault(d_line['remote_host'],0) + \\\n \tint (d_line['bytes_sent'])\n\t\tdata_file.close()\n\t\tshelv_file.close() \n\nif __name__ == '__main__':\n if not len(sys.argv) >1:\n\tprint __doc__\n\tsys.exit(1)\n infile_name = sys.argv[1]\n try:\n\tinfile = open(infile_name,'r')\n except IOError:\n\tprint \"please input some file\"\n\tprint __doc__\n\tsys.exit(1)\n log_report(infile)\n store_data(infile)\n infile.close()\n\n#--------------------------------------------------------------------\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class Element(object):
def __init__(self):
self.ndof = 0
self.nn = 0
self.ng = 0
self.element_type = 0
self.coord_position = np.array([])
self.setup()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def shape_function_partial(self):
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Element(object):
def __init__(self):
self.ndof = 0
self.nn = 0
self.ng = 0
self.element_type = 0
self.coord_position = np.array([])
self.setup()
def setup(self):
pass
<|reserved_special_token_0|>
def shape_function_partial(self):
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Element(object):
def __init__(self):
self.ndof = 0
self.nn = 0
self.ng = 0
self.element_type = 0
self.coord_position = np.array([])
self.setup()
def setup(self):
pass
def shape_function_value(self):
pass
def shape_function_partial(self):
pass
<|reserved_special_token_1|>
import numpy as np
class Element(object):
    """Base class for a finite element.

    Concrete elements override :meth:`setup` to fill in the counters and
    provide real shape-function hooks; the base class only establishes the
    attribute layout and then hands control to ``setup()``.
    """

    def __init__(self):
        # Counters stay zero until a concrete subclass configures them.
        self.ndof = 0          # degrees of freedom
        self.nn = 0            # node count
        self.ng = 0            # presumably Gauss-point count -- confirm in subclasses
        self.element_type = 0  # numeric element-type tag
        self.coord_position = np.array([])  # empty coordinate placeholder
        # Let the subclass finish its own configuration.
        self.setup()

    def setup(self):
        """Configuration hook; intentionally a no-op in the base class."""
        pass

    def shape_function_value(self):
        """Shape-function evaluation hook; no-op in the base class."""
        pass

    def shape_function_partial(self):
        """Shape-function derivative hook; no-op in the base class."""
        pass
|
flexible
|
{
"blob_id": "ed2ae166c4881289b27b7e74e212ba2d6164998b",
"index": 2981,
"step-1": "<mask token>\n\n\nclass Element(object):\n\n def __init__(self):\n self.ndof = 0\n self.nn = 0\n self.ng = 0\n self.element_type = 0\n self.coord_position = np.array([])\n self.setup()\n <mask token>\n <mask token>\n\n def shape_function_partial(self):\n pass\n",
"step-2": "<mask token>\n\n\nclass Element(object):\n\n def __init__(self):\n self.ndof = 0\n self.nn = 0\n self.ng = 0\n self.element_type = 0\n self.coord_position = np.array([])\n self.setup()\n\n def setup(self):\n pass\n <mask token>\n\n def shape_function_partial(self):\n pass\n",
"step-3": "<mask token>\n\n\nclass Element(object):\n\n def __init__(self):\n self.ndof = 0\n self.nn = 0\n self.ng = 0\n self.element_type = 0\n self.coord_position = np.array([])\n self.setup()\n\n def setup(self):\n pass\n\n def shape_function_value(self):\n pass\n\n def shape_function_partial(self):\n pass\n",
"step-4": "import numpy as np\n\n\nclass Element(object):\n\n def __init__(self):\n self.ndof = 0\n self.nn = 0\n self.ng = 0\n self.element_type = 0\n self.coord_position = np.array([])\n self.setup()\n\n def setup(self):\n pass\n\n def shape_function_value(self):\n pass\n\n def shape_function_partial(self):\n pass\n",
"step-5": null,
"step-ids": [
3,
4,
5,
6
]
}
|
[
3,
4,
5,
6
] |
import cv2
import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import SeparableConv2D, Conv2D, MaxPooling2D
from keras.layers import BatchNormalization, Activation, Dropout, Flatten, Dense
from keras import backend as K
# Dimensions every input image is resized to.
img_width, img_height = 64,64

# Dataset locations and sizes.  NOTE(review): nothing later in this chunk
# reads these -- presumably a training phase outside this excerpt uses
# them; confirm before removing.
train_data_dir = 'data/train'
validation_data_dir = 'data/test'
nb_train_samples = 25473
nb_validation_samples = 7000
epochs = 50
batch_size = 64

# Keras backends differ in channel placement: channels-first puts the
# 3 colour channels before the spatial dimensions, channels-last after.
if K.image_data_format() == 'channels_first':
    input_shape = (3, img_width, img_height)
else:
    input_shape = (img_width, img_height, 3)
# Sequential CNN.  Each layer object is also bound to its own module-level
# name so it can be handed to layer_to_visualize() below.
model = Sequential()

# Conv stage 1: two convolutions followed by 3x3 max-pooling.
convout1 = Conv2D(32, kernel_size=6, strides=2, input_shape=input_shape)
model.add(convout1)
activ1 = Activation('relu')
model.add(activ1)
convout2 = Conv2D(64, kernel_size=5, strides=1)
model.add(convout2)
activ2 = Activation('relu')
model.add(activ2)
pool1 = MaxPooling2D(pool_size=(3, 3), strides=1)
model.add(pool1)

# Conv stage 2.
convout3 = Conv2D(128, kernel_size=4, strides=2)
model.add(convout3)
activ3 = Activation('relu')
model.add(activ3)
convout4 = Conv2D(128, kernel_size=3, strides=1)
model.add(convout4)
activ4 = Activation('relu')
model.add(activ4)
pool2 = MaxPooling2D(pool_size=2, strides=1)
model.add(pool2)

# Conv stage 3.
convout5 = Conv2D(256, kernel_size=3, strides=1)
model.add(convout5)
activ5 = Activation('relu')
model.add(activ5)
pool3 = MaxPooling2D(pool_size=2, strides=1)
model.add(pool3)

# Classifier head: 184 softmax output classes.
model.add(Flatten())
dense1 = Dense(256)
model.add(dense1)
activ6 = Activation('relu')
model.add(activ6)
batchn = BatchNormalization()
model.add(batchn)
dense2 = Dense(184)
model.add(dense2)
activ7 = Activation('softmax')
model.add(activ7)

model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])
# Load a single probe image, resize to the network's input size and add a
# batch axis.  NOTE(review): cv2 loads BGR and pixels are left in [0, 255]
# -- confirm this matches whatever preprocessing training used.
img = cv2.imread('test.jpg')
img = cv2.resize(img, (64, 64))
img = np.expand_dims(img, axis=0)
# Forward pass on the model as built above (no training has run in this
# chunk); the prediction itself is not used below.
classes = model.predict(img)
def layer_to_visualize(layer):
    """Render every feature map produced by *layer* for the global test image.

    Runs the global ``model`` up to ``layer`` in inference mode, arranges the
    resulting 2-D feature maps on a square grid, and writes the figure to
    ``<layer.name>_features.png``.

    Parameters
    ----------
    layer : keras layer
        A layer of the global ``model`` whose activations to visualize.
    """
    # Backend function mapping (learning_phase, model inputs) -> layer output.
    inputs = [K.learning_phase()] + model.inputs
    _convout1_f = K.function(inputs, [layer.output])

    def convout1_f(X):
        # The [0] is to disable the training phase flag (inference mode).
        return _convout1_f([0] + [X])

    convolutions = convout1_f(img)
    convolutions = np.squeeze(convolutions)

    # Under a channels_last backend the squeezed output is (h, w, filters);
    # move the filter axis first so convolutions[i] is one 2-D feature map.
    # (The original indexed axis 0 unconditionally, which sliced image rows
    # instead of filters whenever the backend was channels_last.)
    if convolutions.ndim == 3 and K.image_data_format() == 'channels_last':
        convolutions = np.moveaxis(convolutions, -1, 0)

    print('Shape of conv:', convolutions.shape)

    # Smallest square grid that fits every feature map.
    n = convolutions.shape[0]
    n = int(np.ceil(np.sqrt(n)))

    # Visualization of each filter of the layer.
    fig = plt.figure(figsize=(12, 8))
    for i in range(len(convolutions)):
        ax = fig.add_subplot(n, n, i + 1)
        ax.imshow(convolutions[i], cmap='gray')
    # The 'agg' backend never displays figures, so save explicitly and close
    # to release memory (the original leaked every figure and wrote nothing).
    fig.savefig('%s_features.png' % layer.name)
    plt.close(fig)
# Visualize the activations of each layer, in forward-pass order.
for _viz_layer in (convout1, activ1, convout2, activ2, pool1,
                   convout3, activ3, convout4, activ4, pool2,
                   convout5, activ5, pool3):
    layer_to_visualize(_viz_layer)
|
normal
|
{
"blob_id": "e47d6b5d46f2dd84569a2341178b2ea5e074603a",
"index": 7361,
"step-1": "<mask token>\n\n\ndef layer_to_visualize(layer):\n inputs = [K.learning_phase()] + model.inputs\n _convout1_f = K.function(inputs, [layer.output])\n\n def convout1_f(X):\n return _convout1_f([0] + [X])\n convolutions = convout1_f(img)\n convolutions = np.squeeze(convolutions)\n print('Shape of conv:', convolutions.shape)\n n = convolutions.shape[0]\n n = int(np.ceil(np.sqrt(n)))\n fig = plt.figure(figsize=(12, 8))\n for i in range(len(convolutions)):\n ax = fig.add_subplot(n, n, i + 1)\n ax.imshow(convolutions[i], cmap='gray')\n\n\n<mask token>\n",
"step-2": "<mask token>\nmatplotlib.use('agg')\n<mask token>\nif K.image_data_format() == 'channels_first':\n input_shape = 3, img_width, img_height\nelse:\n input_shape = img_width, img_height, 3\n<mask token>\nmodel.add(convout1)\n<mask token>\nmodel.add(activ1)\n<mask token>\nmodel.add(convout2)\n<mask token>\nmodel.add(activ2)\n<mask token>\nmodel.add(pool1)\n<mask token>\nmodel.add(convout3)\n<mask token>\nmodel.add(activ3)\n<mask token>\nmodel.add(convout4)\n<mask token>\nmodel.add(activ4)\n<mask token>\nmodel.add(pool2)\n<mask token>\nmodel.add(convout5)\n<mask token>\nmodel.add(activ5)\n<mask token>\nmodel.add(pool3)\nmodel.add(Flatten())\n<mask token>\nmodel.add(dense1)\n<mask token>\nmodel.add(activ6)\n<mask token>\nmodel.add(batchn)\n<mask token>\nmodel.add(dense2)\n<mask token>\nmodel.add(activ7)\nmodel.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics\n =['accuracy'])\n<mask token>\n\n\ndef layer_to_visualize(layer):\n inputs = [K.learning_phase()] + model.inputs\n _convout1_f = K.function(inputs, [layer.output])\n\n def convout1_f(X):\n return _convout1_f([0] + [X])\n convolutions = convout1_f(img)\n convolutions = np.squeeze(convolutions)\n print('Shape of conv:', convolutions.shape)\n n = convolutions.shape[0]\n n = int(np.ceil(np.sqrt(n)))\n fig = plt.figure(figsize=(12, 8))\n for i in range(len(convolutions)):\n ax = fig.add_subplot(n, n, i + 1)\n ax.imshow(convolutions[i], cmap='gray')\n\n\nlayer_to_visualize(convout1)\nlayer_to_visualize(activ1)\nlayer_to_visualize(convout2)\nlayer_to_visualize(activ2)\nlayer_to_visualize(pool1)\nlayer_to_visualize(convout3)\nlayer_to_visualize(activ3)\nlayer_to_visualize(convout4)\nlayer_to_visualize(activ4)\nlayer_to_visualize(pool2)\nlayer_to_visualize(convout5)\nlayer_to_visualize(activ5)\nlayer_to_visualize(pool3)\n",
"step-3": "<mask token>\nmatplotlib.use('agg')\n<mask token>\nimg_width, img_height = 64, 64\ntrain_data_dir = 'data/train'\nvalidation_data_dir = 'data/test'\nnb_train_samples = 25473\nnb_validation_samples = 7000\nepochs = 50\nbatch_size = 64\nif K.image_data_format() == 'channels_first':\n input_shape = 3, img_width, img_height\nelse:\n input_shape = img_width, img_height, 3\nmodel = Sequential()\nconvout1 = Conv2D(32, kernel_size=6, strides=2, input_shape=input_shape)\nmodel.add(convout1)\nactiv1 = Activation('relu')\nmodel.add(activ1)\nconvout2 = Conv2D(64, kernel_size=5, strides=1)\nmodel.add(convout2)\nactiv2 = Activation('relu')\nmodel.add(activ2)\npool1 = MaxPooling2D(pool_size=(3, 3), strides=1)\nmodel.add(pool1)\nconvout3 = Conv2D(128, kernel_size=4, strides=2)\nmodel.add(convout3)\nactiv3 = Activation('relu')\nmodel.add(activ3)\nconvout4 = Conv2D(128, kernel_size=3, strides=1)\nmodel.add(convout4)\nactiv4 = Activation('relu')\nmodel.add(activ4)\npool2 = MaxPooling2D(pool_size=2, strides=1)\nmodel.add(pool2)\nconvout5 = Conv2D(256, kernel_size=3, strides=1)\nmodel.add(convout5)\nactiv5 = Activation('relu')\nmodel.add(activ5)\npool3 = MaxPooling2D(pool_size=2, strides=1)\nmodel.add(pool3)\nmodel.add(Flatten())\ndense1 = Dense(256)\nmodel.add(dense1)\nactiv6 = Activation('relu')\nmodel.add(activ6)\nbatchn = BatchNormalization()\nmodel.add(batchn)\ndense2 = Dense(184)\nmodel.add(dense2)\nactiv7 = Activation('softmax')\nmodel.add(activ7)\nmodel.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics\n =['accuracy'])\nimg = cv2.imread('test.jpg')\nimg = cv2.resize(img, (64, 64))\nimg = np.expand_dims(img, axis=0)\nclasses = model.predict(img)\n\n\ndef layer_to_visualize(layer):\n inputs = [K.learning_phase()] + model.inputs\n _convout1_f = K.function(inputs, [layer.output])\n\n def convout1_f(X):\n return _convout1_f([0] + [X])\n convolutions = convout1_f(img)\n convolutions = np.squeeze(convolutions)\n print('Shape of conv:', 
convolutions.shape)\n n = convolutions.shape[0]\n n = int(np.ceil(np.sqrt(n)))\n fig = plt.figure(figsize=(12, 8))\n for i in range(len(convolutions)):\n ax = fig.add_subplot(n, n, i + 1)\n ax.imshow(convolutions[i], cmap='gray')\n\n\nlayer_to_visualize(convout1)\nlayer_to_visualize(activ1)\nlayer_to_visualize(convout2)\nlayer_to_visualize(activ2)\nlayer_to_visualize(pool1)\nlayer_to_visualize(convout3)\nlayer_to_visualize(activ3)\nlayer_to_visualize(convout4)\nlayer_to_visualize(activ4)\nlayer_to_visualize(pool2)\nlayer_to_visualize(convout5)\nlayer_to_visualize(activ5)\nlayer_to_visualize(pool3)\n",
"step-4": "import cv2\nimport numpy as np\nimport matplotlib\nmatplotlib.use('agg')\nimport matplotlib.pyplot as plt\nfrom keras.preprocessing.image import ImageDataGenerator\nfrom keras.models import Sequential\nfrom keras.layers import SeparableConv2D, Conv2D, MaxPooling2D\nfrom keras.layers import BatchNormalization, Activation, Dropout, Flatten, Dense\nfrom keras import backend as K\nimg_width, img_height = 64, 64\ntrain_data_dir = 'data/train'\nvalidation_data_dir = 'data/test'\nnb_train_samples = 25473\nnb_validation_samples = 7000\nepochs = 50\nbatch_size = 64\nif K.image_data_format() == 'channels_first':\n input_shape = 3, img_width, img_height\nelse:\n input_shape = img_width, img_height, 3\nmodel = Sequential()\nconvout1 = Conv2D(32, kernel_size=6, strides=2, input_shape=input_shape)\nmodel.add(convout1)\nactiv1 = Activation('relu')\nmodel.add(activ1)\nconvout2 = Conv2D(64, kernel_size=5, strides=1)\nmodel.add(convout2)\nactiv2 = Activation('relu')\nmodel.add(activ2)\npool1 = MaxPooling2D(pool_size=(3, 3), strides=1)\nmodel.add(pool1)\nconvout3 = Conv2D(128, kernel_size=4, strides=2)\nmodel.add(convout3)\nactiv3 = Activation('relu')\nmodel.add(activ3)\nconvout4 = Conv2D(128, kernel_size=3, strides=1)\nmodel.add(convout4)\nactiv4 = Activation('relu')\nmodel.add(activ4)\npool2 = MaxPooling2D(pool_size=2, strides=1)\nmodel.add(pool2)\nconvout5 = Conv2D(256, kernel_size=3, strides=1)\nmodel.add(convout5)\nactiv5 = Activation('relu')\nmodel.add(activ5)\npool3 = MaxPooling2D(pool_size=2, strides=1)\nmodel.add(pool3)\nmodel.add(Flatten())\ndense1 = Dense(256)\nmodel.add(dense1)\nactiv6 = Activation('relu')\nmodel.add(activ6)\nbatchn = BatchNormalization()\nmodel.add(batchn)\ndense2 = Dense(184)\nmodel.add(dense2)\nactiv7 = Activation('softmax')\nmodel.add(activ7)\nmodel.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics\n =['accuracy'])\nimg = cv2.imread('test.jpg')\nimg = cv2.resize(img, (64, 64))\nimg = np.expand_dims(img, axis=0)\nclasses 
= model.predict(img)\n\n\ndef layer_to_visualize(layer):\n inputs = [K.learning_phase()] + model.inputs\n _convout1_f = K.function(inputs, [layer.output])\n\n def convout1_f(X):\n return _convout1_f([0] + [X])\n convolutions = convout1_f(img)\n convolutions = np.squeeze(convolutions)\n print('Shape of conv:', convolutions.shape)\n n = convolutions.shape[0]\n n = int(np.ceil(np.sqrt(n)))\n fig = plt.figure(figsize=(12, 8))\n for i in range(len(convolutions)):\n ax = fig.add_subplot(n, n, i + 1)\n ax.imshow(convolutions[i], cmap='gray')\n\n\nlayer_to_visualize(convout1)\nlayer_to_visualize(activ1)\nlayer_to_visualize(convout2)\nlayer_to_visualize(activ2)\nlayer_to_visualize(pool1)\nlayer_to_visualize(convout3)\nlayer_to_visualize(activ3)\nlayer_to_visualize(convout4)\nlayer_to_visualize(activ4)\nlayer_to_visualize(pool2)\nlayer_to_visualize(convout5)\nlayer_to_visualize(activ5)\nlayer_to_visualize(pool3)\n",
"step-5": "import cv2\nimport numpy as np\nimport matplotlib\nmatplotlib.use('agg')\nimport matplotlib.pyplot as plt\nfrom keras.preprocessing.image import ImageDataGenerator\nfrom keras.models import Sequential\nfrom keras.layers import SeparableConv2D, Conv2D, MaxPooling2D\nfrom keras.layers import BatchNormalization, Activation, Dropout, Flatten, Dense\nfrom keras import backend as K\n\n# dimensions of images.\nimg_width, img_height = 64,64 \n\ntrain_data_dir = 'data/train'\nvalidation_data_dir = 'data/test'\nnb_train_samples = 25473\nnb_validation_samples = 7000\nepochs = 50\nbatch_size = 64\n\nif K.image_data_format() == 'channels_first':\n input_shape = (3, img_width, img_height)\nelse:\n input_shape = (img_width, img_height, 3)\nmodel = Sequential()\nconvout1 = Conv2D(32, kernel_size=6, strides=2, input_shape=input_shape)\nmodel.add(convout1)\nactiv1 = Activation('relu')\nmodel.add(activ1)\nconvout2 = Conv2D(64, kernel_size=5, strides=1)\nmodel.add(convout2)\nactiv2 = Activation('relu')\nmodel.add(activ2)\npool1 = MaxPooling2D(pool_size=(3, 3), strides=1)\nmodel.add(pool1)\n\nconvout3 = Conv2D(128, kernel_size=4, strides=2)\nmodel.add(convout3)\nactiv3 = Activation('relu')\nmodel.add(activ3)\nconvout4 = Conv2D(128, kernel_size=3, strides=1)\nmodel.add(convout4)\nactiv4 = Activation('relu')\nmodel.add(activ4)\npool2 = MaxPooling2D(pool_size=2, strides=1)\nmodel.add(pool2)\n\nconvout5 = Conv2D(256, kernel_size=3, strides=1)\nmodel.add(convout5)\nactiv5 = Activation('relu')\nmodel.add(activ5)\npool3 = MaxPooling2D(pool_size=2, strides=1)\nmodel.add(pool3)\n\nmodel.add(Flatten())\ndense1 = Dense(256)\nmodel.add(dense1)\nactiv6 = Activation('relu')\nmodel.add(activ6)\nbatchn = BatchNormalization()\nmodel.add(batchn)\ndense2 = Dense(184)\nmodel.add(dense2)\nactiv7 = Activation('softmax')\nmodel.add(activ7)\n\nmodel.compile(loss='categorical_crossentropy',\n optimizer='rmsprop',\n metrics=['accuracy'])\n\n\nimg = cv2.imread('test.jpg')\nimg = cv2.resize(img, (64, 
64))\nimg = np.expand_dims(img, axis=0)\nclasses = model.predict(img)\n\ndef layer_to_visualize(layer):\n inputs = [K.learning_phase()] + model.inputs\n\n _convout1_f = K.function(inputs, [layer.output])\n def convout1_f(X):\n # The [0] is to disable the training phase flag\n return _convout1_f([0] + [X])\n\n convolutions = convout1_f(img)\n convolutions = np.squeeze(convolutions)\n\n print ('Shape of conv:', convolutions.shape)\n\n n = convolutions.shape[0]\n n = int(np.ceil(np.sqrt(n)))\n\n # Visualization of each filter of the layer\n fig = plt.figure(figsize=(12,8))\n for i in range(len(convolutions)):\n ax = fig.add_subplot(n,n,i+1)\n ax.imshow(convolutions[i], cmap='gray')\n\n# Specify the layer to want to visualize\nlayer_to_visualize(convout1)\nlayer_to_visualize(activ1)\nlayer_to_visualize(convout2)\nlayer_to_visualize(activ2)\nlayer_to_visualize(pool1)\n\nlayer_to_visualize(convout3)\nlayer_to_visualize(activ3)\nlayer_to_visualize(convout4)\nlayer_to_visualize(activ4)\nlayer_to_visualize(pool2)\n\nlayer_to_visualize(convout5)\nlayer_to_visualize(activ5)\nlayer_to_visualize(pool3)\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from msl.equipment.connection import Connection
from msl.equipment.connection_demo import ConnectionDemo
from msl.equipment.record_types import EquipmentRecord
from msl.equipment.resources.picotech.picoscope.picoscope import PicoScope
from msl.equipment.resources.picotech.picoscope.channel import PicoScopeChannel
class MyConnection(Connection):
    """Connection stub whose method docstrings drive :class:`ConnectionDemo`.

    NOTE(review): ConnectionDemo appears to fabricate each method's demo
    return value by parsing the return-type annotations in these docstrings
    (see the assertions in test_return_type_builtin) -- the docstring text is
    load-bearing, so keep it exactly as written.
    """
    def __init__(self, record):
        # Forward the EquipmentRecord straight to the Connection base class.
        super(MyConnection, self).__init__(record)
    # Each method below is an intentionally empty stub: only the docstring
    # matters to the demo-mode machinery.
    def get_none1(self):
        """No return type is specified."""
        pass
    def get_none2(self, channel):
        """This function takes 1 input but returns nothing.
        Parameters
        ----------
        channel : :obj:`str`
            Some channel number
        """
        pass
    def get_bool1(self):
        """:obj:`bool`: A boolean value."""
        pass
    def get_bool2(self):
        """Returns a boolean value.
        Returns
        -------
        :obj:`bool`
            A boolean value.
        """
        pass
    def get_string1(self):
        """:obj:`str`: A string value."""
        pass
    def get_string2(self):
        """Returns a string value.
        Returns
        -------
        :obj:`str`
            A string value.
        """
        pass
    def get_bytes1(self):
        """:obj:`bytes`: A bytes value."""
        pass
    def get_bytes2(self):
        """Returns a bytes value.
        Returns
        -------
        :obj:`bytes`
            A bytes value.
        """
        pass
    def get_int1(self):
        """:obj:`int`: An integer value."""
        pass
    def get_int2(self):
        """Returns an integer value.
        Returns
        -------
        :obj:`int`
            An integer value.
        """
        pass
    def get_float1(self):
        """:obj:`float`: A floating-point value."""
        pass
    def get_float2(self):
        """Returns a floating-point value.
        Returns
        -------
        :obj:`float`
            A floating-point value.
        """
        pass
    def get_list_of_bool1(self):
        """:obj:`list` of :obj:`bool`: A list of boolean values."""
        pass
    def get_list_of_bool2(self):
        """A list of boolean values.
        Returns
        -------
        :obj:`list` of :obj:`bool`
            A list of boolean values.
        """
        pass
    def get_list_of_str1(self):
        """:obj:`list` of :obj:`str`: A list of string values."""
        pass
    def get_list_of_str2(self):
        """A list of string values.
        Returns
        -------
        :obj:`list` of :obj:`str`
            A list of string values.
        """
        pass
    def get_list_of_bytes1(self):
        """:obj:`list` of :obj:`bytes`: A list of bytes values."""
        pass
    def get_list_of_bytes2(self):
        """A list of bytes values.
        Returns
        -------
        :obj:`list` of :obj:`bytes`
            A list of bytes values.
        """
        pass
    def get_list_of_int1(self):
        """:obj:`list` of :obj:`int`: A list of integer values."""
        pass
    def get_list_of_int2(self):
        """A list of integer values.
        Returns
        -------
        :obj:`list` of :obj:`int`
            A list of integer values.
        """
        pass
    def get_list_of_float1(self):
        """:obj:`list` of :obj:`float`: A list of floating-point values."""
        pass
    def get_list_of_float2(self):
        """A list of floating-point values.
        Returns
        -------
        :obj:`list` of :obj:`float`
            A list of floating-point values.
        """
        pass
    def get_dict_of_bool1(self):
        """:obj:`dict` of :obj:`bool`: A dictionary of boolean values."""
        pass
    def get_dict_of_bool2(self):
        """A dictionary of boolean values.
        Returns
        -------
        :obj:`dict` of :obj:`bool`
            A dictionary of boolean values.
        """
        pass
    def get_dict_of_str1(self):
        """:obj:`dict` of :obj:`str`: A dictionary of string values."""
        pass
    def get_dict_of_str2(self):
        """A dictionary of string values.
        Returns
        -------
        :obj:`dict` of :obj:`str`
            A dictionary of string values.
        """
        pass
    def get_dict_of_bytes1(self):
        """:obj:`dict` of :obj:`bytes`: A dictionary of bytes values."""
        pass
    def get_dict_of_bytes2(self):
        """A dictionary of bytes values.
        Returns
        -------
        :obj:`dict` of :obj:`bytes`
            A dictionary of bytes values.
        """
        pass
    def get_dict_of_int1(self):
        """:obj:`dict` of :obj:`int`: A dictionary of integer values."""
        pass
    def get_dict_of_int2(self):
        """A dictionary of integer values.
        Returns
        -------
        :obj:`dict` of :obj:`int`
            A dictionary of integer values.
        """
        pass
    def get_dict_of_float1(self):
        """:obj:`dict` of :obj:`float`: A dictionary of floating-point values."""
        pass
    def get_dict_of_float2(self):
        """A dictionary of floating-point values.
        Returns
        -------
        :obj:`dict` of :obj:`float`
            A dictionary of floating-point values.
        """
        pass
    def get_multiple1(self):
        """Many different data types.
        Returns
        -------
        :obj:`str`
            A string value.
        :obj:`float`
            A floating-point value.
        :obj:`float`
            A floating-point value.
        :obj:`dict` of :obj:`int`
            A dictionary of integer values.
        :obj:`bytes`
            A bytes value.
        """
        pass
def test_return_type_builtin():
    """Every getter must fabricate the builtin type its docstring declares."""
    demo = ConnectionDemo(EquipmentRecord(), MyConnection)

    # Methods documented without a return type yield None.
    assert demo.get_none1() is None
    assert demo.get_none2() is None

    # Scalar return types, documented both inline and via a Returns section.
    for getter, expected in (
        ('get_bool1', bool), ('get_bool2', bool),
        ('get_string1', str), ('get_string2', str),
        ('get_bytes1', bytes), ('get_bytes2', bytes),
        ('get_int1', int), ('get_int2', int),
        ('get_float1', float), ('get_float2', float),
    ):
        assert isinstance(getattr(demo, getter)(), expected)

    # ':obj:`list` of :obj:`X`' docstrings -> list whose elements are X.
    for stem, expected in (('bool', bool), ('str', str), ('bytes', bytes),
                           ('int', int), ('float', float)):
        for suffix in ('1', '2'):
            value = getattr(demo, 'get_list_of_%s%s' % (stem, suffix))()
            assert isinstance(value, list)
            assert isinstance(value[0], expected)

    # ':obj:`dict` of :obj:`X`' docstrings -> dict keyed by 'demo'.
    for stem, expected in (('bool', bool), ('str', str), ('bytes', bytes),
                           ('int', int), ('float', float)):
        for suffix in ('1', '2'):
            value = getattr(demo, 'get_dict_of_%s%s' % (stem, suffix))()
            assert isinstance(value, dict)
            assert isinstance(value['demo'], expected)

    # A multi-value Returns section yields one value per documented type.
    values = demo.get_multiple1()
    assert len(values) == 5
    assert isinstance(values[0], str)
    assert isinstance(values[1], float)
    assert isinstance(values[2], float)
    assert isinstance(values[3], dict)
    assert isinstance(values[3]['demo'], int)
    assert isinstance(values[4], bytes)
def test_return_type_object():
    """A non-builtin return annotation should demo-map to {'demo': <class>}."""
    demo_scope = ConnectionDemo(EquipmentRecord(), PicoScope)
    channel_result = demo_scope.channel()
    assert isinstance(channel_result, dict) and channel_result['demo'] == PicoScopeChannel
|
normal
|
{
"blob_id": "82c3419679a93c7640eae48b543aca75f5ff086d",
"index": 4880,
"step-1": "<mask token>\n\n\nclass MyConnection(Connection):\n <mask token>\n\n def get_none1(self):\n \"\"\"No return type is specified.\"\"\"\n pass\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def get_bytes1(self):\n \"\"\":obj:`bytes`: A bytes value.\"\"\"\n pass\n\n def get_bytes2(self):\n \"\"\"Returns a bytes value.\n\n Returns\n -------\n :obj:`bytes`\n A bytes value.\n \"\"\"\n pass\n\n def get_int1(self):\n \"\"\":obj:`int`: An integer value.\"\"\"\n pass\n\n def get_int2(self):\n \"\"\"Returns an integer value.\n\n Returns\n -------\n :obj:`int`\n An integer value.\n \"\"\"\n pass\n <mask token>\n <mask token>\n\n def get_list_of_bool1(self):\n \"\"\":obj:`list` of :obj:`bool`: A list of boolean values.\"\"\"\n pass\n <mask token>\n\n def get_list_of_str1(self):\n \"\"\":obj:`list` of :obj:`str`: A list of string values.\"\"\"\n pass\n\n def get_list_of_str2(self):\n \"\"\"A list of string values.\n\n Returns\n -------\n :obj:`list` of :obj:`str`\n A list of string values.\n \"\"\"\n pass\n <mask token>\n\n def get_list_of_bytes2(self):\n \"\"\"A list of bytes values.\n\n Returns\n -------\n :obj:`list` of :obj:`bytes`\n A list of bytes values.\n \"\"\"\n pass\n\n def get_list_of_int1(self):\n \"\"\":obj:`list` of :obj:`int`: A list of integer values.\"\"\"\n pass\n\n def get_list_of_int2(self):\n \"\"\"A list of integer values.\n\n Returns\n -------\n :obj:`list` of :obj:`int`\n A list of integer values.\n \"\"\"\n pass\n <mask token>\n <mask token>\n\n def get_dict_of_bool1(self):\n \"\"\":obj:`dict` of :obj:`bool`: A dictionary of boolean values.\"\"\"\n pass\n <mask token>\n\n def get_dict_of_str1(self):\n \"\"\":obj:`dict` of :obj:`str`: A dictionary of string values.\"\"\"\n pass\n\n def get_dict_of_str2(self):\n \"\"\"A dictionary of string values.\n\n Returns\n -------\n :obj:`dict` of :obj:`str`\n A dictionary of string values.\n \"\"\"\n pass\n\n def get_dict_of_bytes1(self):\n \"\"\":obj:`dict` of 
:obj:`bytes`: A dictionary of bytes values.\"\"\"\n pass\n\n def get_dict_of_bytes2(self):\n \"\"\"A dictionary of bytes values.\n\n Returns\n -------\n :obj:`dict` of :obj:`bytes`\n A dictionary of bytes values.\n \"\"\"\n pass\n <mask token>\n\n def get_dict_of_int2(self):\n \"\"\"A dictionary of integer values.\n\n Returns\n -------\n :obj:`dict` of :obj:`int`\n A dictionary of integer values.\n \"\"\"\n pass\n <mask token>\n <mask token>\n\n def get_multiple1(self):\n \"\"\"Many different data types.\n\n Returns\n -------\n :obj:`str`\n A string value.\n :obj:`float`\n A floating-point value.\n :obj:`float`\n A floating-point value.\n :obj:`dict` of :obj:`int`\n A dictionary of integer values.\n :obj:`bytes`\n A bytes value.\n \"\"\"\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MyConnection(Connection):\n <mask token>\n\n def get_none1(self):\n \"\"\"No return type is specified.\"\"\"\n pass\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def get_bytes1(self):\n \"\"\":obj:`bytes`: A bytes value.\"\"\"\n pass\n\n def get_bytes2(self):\n \"\"\"Returns a bytes value.\n\n Returns\n -------\n :obj:`bytes`\n A bytes value.\n \"\"\"\n pass\n\n def get_int1(self):\n \"\"\":obj:`int`: An integer value.\"\"\"\n pass\n\n def get_int2(self):\n \"\"\"Returns an integer value.\n\n Returns\n -------\n :obj:`int`\n An integer value.\n \"\"\"\n pass\n <mask token>\n <mask token>\n\n def get_list_of_bool1(self):\n \"\"\":obj:`list` of :obj:`bool`: A list of boolean values.\"\"\"\n pass\n <mask token>\n\n def get_list_of_str1(self):\n \"\"\":obj:`list` of :obj:`str`: A list of string values.\"\"\"\n pass\n\n def get_list_of_str2(self):\n \"\"\"A list of string values.\n\n Returns\n -------\n :obj:`list` of :obj:`str`\n A list of string values.\n \"\"\"\n pass\n <mask token>\n\n def get_list_of_bytes2(self):\n \"\"\"A list of bytes values.\n\n Returns\n -------\n :obj:`list` of :obj:`bytes`\n A list of bytes values.\n \"\"\"\n pass\n\n def get_list_of_int1(self):\n \"\"\":obj:`list` of :obj:`int`: A list of integer values.\"\"\"\n pass\n\n def get_list_of_int2(self):\n \"\"\"A list of integer values.\n\n Returns\n -------\n :obj:`list` of :obj:`int`\n A list of integer values.\n \"\"\"\n pass\n\n def get_list_of_float1(self):\n \"\"\":obj:`list` of :obj:`float`: A list of floating-point values.\"\"\"\n pass\n <mask token>\n\n def get_dict_of_bool1(self):\n \"\"\":obj:`dict` of :obj:`bool`: A dictionary of boolean values.\"\"\"\n pass\n <mask token>\n\n def get_dict_of_str1(self):\n \"\"\":obj:`dict` of :obj:`str`: A dictionary of string values.\"\"\"\n pass\n\n def get_dict_of_str2(self):\n \"\"\"A dictionary of string values.\n\n Returns\n -------\n :obj:`dict` of :obj:`str`\n A dictionary 
of string values.\n \"\"\"\n pass\n\n def get_dict_of_bytes1(self):\n \"\"\":obj:`dict` of :obj:`bytes`: A dictionary of bytes values.\"\"\"\n pass\n\n def get_dict_of_bytes2(self):\n \"\"\"A dictionary of bytes values.\n\n Returns\n -------\n :obj:`dict` of :obj:`bytes`\n A dictionary of bytes values.\n \"\"\"\n pass\n <mask token>\n\n def get_dict_of_int2(self):\n \"\"\"A dictionary of integer values.\n\n Returns\n -------\n :obj:`dict` of :obj:`int`\n A dictionary of integer values.\n \"\"\"\n pass\n <mask token>\n <mask token>\n\n def get_multiple1(self):\n \"\"\"Many different data types.\n\n Returns\n -------\n :obj:`str`\n A string value.\n :obj:`float`\n A floating-point value.\n :obj:`float`\n A floating-point value.\n :obj:`dict` of :obj:`int`\n A dictionary of integer values.\n :obj:`bytes`\n A bytes value.\n \"\"\"\n pass\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass MyConnection(Connection):\n <mask token>\n\n def get_none1(self):\n \"\"\"No return type is specified.\"\"\"\n pass\n <mask token>\n <mask token>\n <mask token>\n\n def get_string1(self):\n \"\"\":obj:`str`: A string value.\"\"\"\n pass\n <mask token>\n\n def get_bytes1(self):\n \"\"\":obj:`bytes`: A bytes value.\"\"\"\n pass\n\n def get_bytes2(self):\n \"\"\"Returns a bytes value.\n\n Returns\n -------\n :obj:`bytes`\n A bytes value.\n \"\"\"\n pass\n\n def get_int1(self):\n \"\"\":obj:`int`: An integer value.\"\"\"\n pass\n\n def get_int2(self):\n \"\"\"Returns an integer value.\n\n Returns\n -------\n :obj:`int`\n An integer value.\n \"\"\"\n pass\n <mask token>\n <mask token>\n\n def get_list_of_bool1(self):\n \"\"\":obj:`list` of :obj:`bool`: A list of boolean values.\"\"\"\n pass\n <mask token>\n\n def get_list_of_str1(self):\n \"\"\":obj:`list` of :obj:`str`: A list of string values.\"\"\"\n pass\n\n def get_list_of_str2(self):\n \"\"\"A list of string values.\n\n Returns\n -------\n :obj:`list` of :obj:`str`\n A list of string values.\n \"\"\"\n pass\n <mask token>\n\n def get_list_of_bytes2(self):\n \"\"\"A list of bytes values.\n\n Returns\n -------\n :obj:`list` of :obj:`bytes`\n A list of bytes values.\n \"\"\"\n pass\n\n def get_list_of_int1(self):\n \"\"\":obj:`list` of :obj:`int`: A list of integer values.\"\"\"\n pass\n\n def get_list_of_int2(self):\n \"\"\"A list of integer values.\n\n Returns\n -------\n :obj:`list` of :obj:`int`\n A list of integer values.\n \"\"\"\n pass\n\n def get_list_of_float1(self):\n \"\"\":obj:`list` of :obj:`float`: A list of floating-point values.\"\"\"\n pass\n <mask token>\n\n def get_dict_of_bool1(self):\n \"\"\":obj:`dict` of :obj:`bool`: A dictionary of boolean values.\"\"\"\n pass\n <mask token>\n\n def get_dict_of_str1(self):\n \"\"\":obj:`dict` of :obj:`str`: A dictionary of string values.\"\"\"\n pass\n\n def get_dict_of_str2(self):\n \"\"\"A dictionary of string values.\n\n 
Returns\n -------\n :obj:`dict` of :obj:`str`\n A dictionary of string values.\n \"\"\"\n pass\n\n def get_dict_of_bytes1(self):\n \"\"\":obj:`dict` of :obj:`bytes`: A dictionary of bytes values.\"\"\"\n pass\n\n def get_dict_of_bytes2(self):\n \"\"\"A dictionary of bytes values.\n\n Returns\n -------\n :obj:`dict` of :obj:`bytes`\n A dictionary of bytes values.\n \"\"\"\n pass\n <mask token>\n\n def get_dict_of_int2(self):\n \"\"\"A dictionary of integer values.\n\n Returns\n -------\n :obj:`dict` of :obj:`int`\n A dictionary of integer values.\n \"\"\"\n pass\n <mask token>\n <mask token>\n\n def get_multiple1(self):\n \"\"\"Many different data types.\n\n Returns\n -------\n :obj:`str`\n A string value.\n :obj:`float`\n A floating-point value.\n :obj:`float`\n A floating-point value.\n :obj:`dict` of :obj:`int`\n A dictionary of integer values.\n :obj:`bytes`\n A bytes value.\n \"\"\"\n pass\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass MyConnection(Connection):\n\n def __init__(self, record):\n super(MyConnection, self).__init__(record)\n\n def get_none1(self):\n \"\"\"No return type is specified.\"\"\"\n pass\n\n def get_none2(self, channel):\n \"\"\"This function takes 1 input but returns nothing.\n\n Parameters\n ----------\n channel : :obj:`str`\n Some channel number\n \"\"\"\n pass\n\n def get_bool1(self):\n \"\"\":obj:`bool`: A boolean value.\"\"\"\n pass\n\n def get_bool2(self):\n \"\"\"Returns a boolean value.\n\n Returns\n -------\n :obj:`bool`\n A boolean value.\n \"\"\"\n pass\n\n def get_string1(self):\n \"\"\":obj:`str`: A string value.\"\"\"\n pass\n\n def get_string2(self):\n \"\"\"Returns a string value.\n\n Returns\n -------\n :obj:`str`\n A string value.\n \"\"\"\n pass\n\n def get_bytes1(self):\n \"\"\":obj:`bytes`: A bytes value.\"\"\"\n pass\n\n def get_bytes2(self):\n \"\"\"Returns a bytes value.\n\n Returns\n -------\n :obj:`bytes`\n A bytes value.\n \"\"\"\n pass\n\n def get_int1(self):\n \"\"\":obj:`int`: An integer value.\"\"\"\n pass\n\n def get_int2(self):\n \"\"\"Returns an integer value.\n\n Returns\n -------\n :obj:`int`\n An integer value.\n \"\"\"\n pass\n\n def get_float1(self):\n \"\"\":obj:`float`: A floating-point value.\"\"\"\n pass\n\n def get_float2(self):\n \"\"\"Returns a floating-point value.\n\n Returns\n -------\n :obj:`float`\n A floating-point value.\n \"\"\"\n pass\n\n def get_list_of_bool1(self):\n \"\"\":obj:`list` of :obj:`bool`: A list of boolean values.\"\"\"\n pass\n\n def get_list_of_bool2(self):\n \"\"\"A list of boolean values.\n\n Returns\n -------\n :obj:`list` of :obj:`bool`\n A list of boolean values.\n \"\"\"\n pass\n\n def get_list_of_str1(self):\n \"\"\":obj:`list` of :obj:`str`: A list of string values.\"\"\"\n pass\n\n def get_list_of_str2(self):\n \"\"\"A list of string values.\n\n Returns\n -------\n :obj:`list` of :obj:`str`\n A list of string values.\n \"\"\"\n pass\n\n def 
get_list_of_bytes1(self):\n \"\"\":obj:`list` of :obj:`bytes`: A list of bytes values.\"\"\"\n pass\n\n def get_list_of_bytes2(self):\n \"\"\"A list of bytes values.\n\n Returns\n -------\n :obj:`list` of :obj:`bytes`\n A list of bytes values.\n \"\"\"\n pass\n\n def get_list_of_int1(self):\n \"\"\":obj:`list` of :obj:`int`: A list of integer values.\"\"\"\n pass\n\n def get_list_of_int2(self):\n \"\"\"A list of integer values.\n\n Returns\n -------\n :obj:`list` of :obj:`int`\n A list of integer values.\n \"\"\"\n pass\n\n def get_list_of_float1(self):\n \"\"\":obj:`list` of :obj:`float`: A list of floating-point values.\"\"\"\n pass\n\n def get_list_of_float2(self):\n \"\"\"A list of floating-point values.\n\n Returns\n -------\n :obj:`list` of :obj:`float`\n A list of floating-point values.\n \"\"\"\n pass\n\n def get_dict_of_bool1(self):\n \"\"\":obj:`dict` of :obj:`bool`: A dictionary of boolean values.\"\"\"\n pass\n\n def get_dict_of_bool2(self):\n \"\"\"A dictionary of boolean values.\n\n Returns\n -------\n :obj:`dict` of :obj:`bool`\n A dictionary of boolean values.\n \"\"\"\n pass\n\n def get_dict_of_str1(self):\n \"\"\":obj:`dict` of :obj:`str`: A dictionary of string values.\"\"\"\n pass\n\n def get_dict_of_str2(self):\n \"\"\"A dictionary of string values.\n\n Returns\n -------\n :obj:`dict` of :obj:`str`\n A dictionary of string values.\n \"\"\"\n pass\n\n def get_dict_of_bytes1(self):\n \"\"\":obj:`dict` of :obj:`bytes`: A dictionary of bytes values.\"\"\"\n pass\n\n def get_dict_of_bytes2(self):\n \"\"\"A dictionary of bytes values.\n\n Returns\n -------\n :obj:`dict` of :obj:`bytes`\n A dictionary of bytes values.\n \"\"\"\n pass\n\n def get_dict_of_int1(self):\n \"\"\":obj:`dict` of :obj:`int`: A dictionary of integer values.\"\"\"\n pass\n\n def get_dict_of_int2(self):\n \"\"\"A dictionary of integer values.\n\n Returns\n -------\n :obj:`dict` of :obj:`int`\n A dictionary of integer values.\n \"\"\"\n pass\n\n def get_dict_of_float1(self):\n 
\"\"\":obj:`dict` of :obj:`float`: A dictionary of floating-point values.\"\"\"\n pass\n\n def get_dict_of_float2(self):\n \"\"\"A dictionary of floating-point values.\n\n Returns\n -------\n :obj:`dict` of :obj:`float`\n A dictionary of floating-point values.\n \"\"\"\n pass\n\n def get_multiple1(self):\n \"\"\"Many different data types.\n\n Returns\n -------\n :obj:`str`\n A string value.\n :obj:`float`\n A floating-point value.\n :obj:`float`\n A floating-point value.\n :obj:`dict` of :obj:`int`\n A dictionary of integer values.\n :obj:`bytes`\n A bytes value.\n \"\"\"\n pass\n\n\ndef test_return_type_builtin():\n demo = ConnectionDemo(EquipmentRecord(), MyConnection)\n assert demo.get_none1() is None\n assert demo.get_none2() is None\n assert isinstance(demo.get_bool1(), bool)\n assert isinstance(demo.get_bool2(), bool)\n assert isinstance(demo.get_string1(), str)\n assert isinstance(demo.get_string2(), str)\n assert isinstance(demo.get_bytes1(), bytes)\n assert isinstance(demo.get_bytes2(), bytes)\n assert isinstance(demo.get_int1(), int)\n assert isinstance(demo.get_int2(), int)\n assert isinstance(demo.get_float1(), float)\n assert isinstance(demo.get_float2(), float)\n x = demo.get_list_of_bool1()\n assert isinstance(x, list) and isinstance(x[0], bool)\n x = demo.get_list_of_bool2()\n assert isinstance(x, list) and isinstance(x[0], bool)\n x = demo.get_list_of_str1()\n assert isinstance(x, list) and isinstance(x[0], str)\n x = demo.get_list_of_str2()\n assert isinstance(x, list) and isinstance(x[0], str)\n x = demo.get_list_of_bytes1()\n assert isinstance(x, list) and isinstance(x[0], bytes)\n x = demo.get_list_of_bytes2()\n assert isinstance(x, list) and isinstance(x[0], bytes)\n x = demo.get_list_of_int1()\n assert isinstance(x, list) and isinstance(x[0], int)\n x = demo.get_list_of_int2()\n assert isinstance(x, list) and isinstance(x[0], int)\n x = demo.get_list_of_float1()\n assert isinstance(x, list) and isinstance(x[0], float)\n x = 
demo.get_list_of_float2()\n assert isinstance(x, list) and isinstance(x[0], float)\n x = demo.get_dict_of_bool1()\n assert isinstance(x, dict) and isinstance(x['demo'], bool)\n x = demo.get_dict_of_bool2()\n assert isinstance(x, dict) and isinstance(x['demo'], bool)\n x = demo.get_dict_of_str1()\n assert isinstance(x, dict) and isinstance(x['demo'], str)\n x = demo.get_dict_of_str2()\n assert isinstance(x, dict) and isinstance(x['demo'], str)\n x = demo.get_dict_of_bytes1()\n assert isinstance(x, dict) and isinstance(x['demo'], bytes)\n x = demo.get_dict_of_bytes2()\n assert isinstance(x, dict) and isinstance(x['demo'], bytes)\n x = demo.get_dict_of_int1()\n assert isinstance(x, dict) and isinstance(x['demo'], int)\n x = demo.get_dict_of_int2()\n assert isinstance(x, dict) and isinstance(x['demo'], int)\n x = demo.get_dict_of_float1()\n assert isinstance(x, dict) and isinstance(x['demo'], float)\n x = demo.get_dict_of_float2()\n assert isinstance(x, dict) and isinstance(x['demo'], float)\n x = demo.get_multiple1()\n assert len(x) == 5\n assert isinstance(x[0], str)\n assert isinstance(x[1], float)\n assert isinstance(x[2], float)\n assert isinstance(x[3], dict) and isinstance(x[3]['demo'], int)\n assert isinstance(x[4], bytes)\n\n\ndef test_return_type_object():\n scope = ConnectionDemo(EquipmentRecord(), PicoScope)\n x = scope.channel()\n assert isinstance(x, dict) and x['demo'] == PicoScopeChannel\n",
"step-5": "from msl.equipment.connection import Connection\nfrom msl.equipment.connection_demo import ConnectionDemo\nfrom msl.equipment.record_types import EquipmentRecord\nfrom msl.equipment.resources.picotech.picoscope.picoscope import PicoScope\nfrom msl.equipment.resources.picotech.picoscope.channel import PicoScopeChannel\n\n\nclass MyConnection(Connection):\n\n def __init__(self, record):\n super(MyConnection, self).__init__(record)\n\n def get_none1(self):\n \"\"\"No return type is specified.\"\"\"\n pass\n\n def get_none2(self, channel):\n \"\"\"This function takes 1 input but returns nothing.\n\n Parameters\n ----------\n channel : :obj:`str`\n Some channel number\n \"\"\"\n pass\n\n def get_bool1(self):\n \"\"\":obj:`bool`: A boolean value.\"\"\"\n pass\n\n def get_bool2(self):\n \"\"\"Returns a boolean value.\n\n Returns\n -------\n :obj:`bool`\n A boolean value.\n \"\"\"\n pass\n\n def get_string1(self):\n \"\"\":obj:`str`: A string value.\"\"\"\n pass\n\n def get_string2(self):\n \"\"\"Returns a string value.\n\n Returns\n -------\n :obj:`str`\n A string value.\n \"\"\"\n pass\n\n def get_bytes1(self):\n \"\"\":obj:`bytes`: A bytes value.\"\"\"\n pass\n\n def get_bytes2(self):\n \"\"\"Returns a bytes value.\n\n Returns\n -------\n :obj:`bytes`\n A bytes value.\n \"\"\"\n pass\n\n def get_int1(self):\n \"\"\":obj:`int`: An integer value.\"\"\"\n pass\n\n def get_int2(self):\n \"\"\"Returns an integer value.\n\n Returns\n -------\n :obj:`int`\n An integer value.\n \"\"\"\n pass\n\n def get_float1(self):\n \"\"\":obj:`float`: A floating-point value.\"\"\"\n pass\n\n def get_float2(self):\n \"\"\"Returns a floating-point value.\n\n Returns\n -------\n :obj:`float`\n A floating-point value.\n \"\"\"\n pass\n\n def get_list_of_bool1(self):\n \"\"\":obj:`list` of :obj:`bool`: A list of boolean values.\"\"\"\n pass\n\n def get_list_of_bool2(self):\n \"\"\"A list of boolean values.\n\n Returns\n -------\n :obj:`list` of :obj:`bool`\n A list of boolean 
values.\n \"\"\"\n pass\n\n def get_list_of_str1(self):\n \"\"\":obj:`list` of :obj:`str`: A list of string values.\"\"\"\n pass\n\n def get_list_of_str2(self):\n \"\"\"A list of string values.\n\n Returns\n -------\n :obj:`list` of :obj:`str`\n A list of string values.\n \"\"\"\n pass\n\n def get_list_of_bytes1(self):\n \"\"\":obj:`list` of :obj:`bytes`: A list of bytes values.\"\"\"\n pass\n\n def get_list_of_bytes2(self):\n \"\"\"A list of bytes values.\n\n Returns\n -------\n :obj:`list` of :obj:`bytes`\n A list of bytes values.\n \"\"\"\n pass\n\n def get_list_of_int1(self):\n \"\"\":obj:`list` of :obj:`int`: A list of integer values.\"\"\"\n pass\n\n def get_list_of_int2(self):\n \"\"\"A list of integer values.\n\n Returns\n -------\n :obj:`list` of :obj:`int`\n A list of integer values.\n \"\"\"\n pass\n\n def get_list_of_float1(self):\n \"\"\":obj:`list` of :obj:`float`: A list of floating-point values.\"\"\"\n pass\n\n def get_list_of_float2(self):\n \"\"\"A list of floating-point values.\n\n Returns\n -------\n :obj:`list` of :obj:`float`\n A list of floating-point values.\n \"\"\"\n pass\n\n def get_dict_of_bool1(self):\n \"\"\":obj:`dict` of :obj:`bool`: A dictionary of boolean values.\"\"\"\n pass\n\n def get_dict_of_bool2(self):\n \"\"\"A dictionary of boolean values.\n\n Returns\n -------\n :obj:`dict` of :obj:`bool`\n A dictionary of boolean values.\n \"\"\"\n pass\n\n def get_dict_of_str1(self):\n \"\"\":obj:`dict` of :obj:`str`: A dictionary of string values.\"\"\"\n pass\n\n def get_dict_of_str2(self):\n \"\"\"A dictionary of string values.\n\n Returns\n -------\n :obj:`dict` of :obj:`str`\n A dictionary of string values.\n \"\"\"\n pass\n\n def get_dict_of_bytes1(self):\n \"\"\":obj:`dict` of :obj:`bytes`: A dictionary of bytes values.\"\"\"\n pass\n\n def get_dict_of_bytes2(self):\n \"\"\"A dictionary of bytes values.\n\n Returns\n -------\n :obj:`dict` of :obj:`bytes`\n A dictionary of bytes values.\n \"\"\"\n pass\n\n def 
get_dict_of_int1(self):\n \"\"\":obj:`dict` of :obj:`int`: A dictionary of integer values.\"\"\"\n pass\n\n def get_dict_of_int2(self):\n \"\"\"A dictionary of integer values.\n\n Returns\n -------\n :obj:`dict` of :obj:`int`\n A dictionary of integer values.\n \"\"\"\n pass\n\n def get_dict_of_float1(self):\n \"\"\":obj:`dict` of :obj:`float`: A dictionary of floating-point values.\"\"\"\n pass\n\n def get_dict_of_float2(self):\n \"\"\"A dictionary of floating-point values.\n\n Returns\n -------\n :obj:`dict` of :obj:`float`\n A dictionary of floating-point values.\n \"\"\"\n pass\n\n def get_multiple1(self):\n \"\"\"Many different data types.\n\n Returns\n -------\n :obj:`str`\n A string value.\n :obj:`float`\n A floating-point value.\n :obj:`float`\n A floating-point value.\n :obj:`dict` of :obj:`int`\n A dictionary of integer values.\n :obj:`bytes`\n A bytes value.\n \"\"\"\n pass\n\n\ndef test_return_type_builtin():\n demo = ConnectionDemo(EquipmentRecord(), MyConnection)\n\n assert demo.get_none1() is None\n assert demo.get_none2() is None\n\n assert isinstance(demo.get_bool1(), bool)\n assert isinstance(demo.get_bool2(), bool)\n\n assert isinstance(demo.get_string1(), str)\n assert isinstance(demo.get_string2(), str)\n\n assert isinstance(demo.get_bytes1(), bytes)\n assert isinstance(demo.get_bytes2(), bytes)\n\n assert isinstance(demo.get_int1(), int)\n assert isinstance(demo.get_int2(), int)\n\n assert isinstance(demo.get_float1(), float)\n assert isinstance(demo.get_float2(), float)\n\n x = demo.get_list_of_bool1()\n assert isinstance(x, list) and isinstance(x[0], bool)\n\n x = demo.get_list_of_bool2()\n assert isinstance(x, list) and isinstance(x[0], bool)\n\n x = demo.get_list_of_str1()\n assert isinstance(x, list) and isinstance(x[0], str)\n\n x = demo.get_list_of_str2()\n assert isinstance(x, list) and isinstance(x[0], str)\n\n x = demo.get_list_of_bytes1()\n assert isinstance(x, list) and isinstance(x[0], bytes)\n\n x = demo.get_list_of_bytes2()\n 
assert isinstance(x, list) and isinstance(x[0], bytes)\n\n x = demo.get_list_of_int1()\n assert isinstance(x, list) and isinstance(x[0], int)\n\n x = demo.get_list_of_int2()\n assert isinstance(x, list) and isinstance(x[0], int)\n\n x = demo.get_list_of_float1()\n assert isinstance(x, list) and isinstance(x[0], float)\n\n x = demo.get_list_of_float2()\n assert isinstance(x, list) and isinstance(x[0], float)\n\n x = demo.get_dict_of_bool1()\n assert isinstance(x, dict) and isinstance(x['demo'], bool)\n\n x = demo.get_dict_of_bool2()\n assert isinstance(x, dict) and isinstance(x['demo'], bool)\n\n x = demo.get_dict_of_str1()\n assert isinstance(x, dict) and isinstance(x['demo'], str)\n\n x = demo.get_dict_of_str2()\n assert isinstance(x, dict) and isinstance(x['demo'], str)\n\n x = demo.get_dict_of_bytes1()\n assert isinstance(x, dict) and isinstance(x['demo'], bytes)\n\n x = demo.get_dict_of_bytes2()\n assert isinstance(x, dict) and isinstance(x['demo'], bytes)\n\n x = demo.get_dict_of_int1()\n assert isinstance(x, dict) and isinstance(x['demo'], int)\n\n x = demo.get_dict_of_int2()\n assert isinstance(x, dict) and isinstance(x['demo'], int)\n\n x = demo.get_dict_of_float1()\n assert isinstance(x, dict) and isinstance(x['demo'], float)\n\n x = demo.get_dict_of_float2()\n assert isinstance(x, dict) and isinstance(x['demo'], float)\n\n x = demo.get_multiple1()\n assert len(x) == 5\n assert isinstance(x[0], str)\n assert isinstance(x[1], float)\n assert isinstance(x[2], float)\n assert isinstance(x[3], dict) and isinstance(x[3]['demo'], int)\n assert isinstance(x[4], bytes)\n\n\ndef test_return_type_object():\n scope = ConnectionDemo(EquipmentRecord(), PicoScope)\n\n x = scope.channel()\n assert isinstance(x, dict) and x['demo'] == PicoScopeChannel\n",
"step-ids": [
19,
20,
21,
37,
39
]
}
|
[
19,
20,
21,
37,
39
] |
# Generated by Django 2.1.7 on 2019-05-31 18:45
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Re-declare the EventAttendance FK columns and enforce uniqueness."""

    # Must run after the (possibly swapped) auth user model exists and after
    # the previous migration in this app.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('events', '0004_auto_20190526_1436'),
    ]

    operations = [
        # Event FK with an explicit db_column and DO_NOTHING on delete.
        migrations.AlterField(
            model_name='eventattendance',
            name='event_id',
            field=models.ForeignKey(db_column='event_id', on_delete=django.db.models.deletion.DO_NOTHING, to='events.Event'),
        ),
        # User FK, same treatment, pointing at the swappable user model.
        migrations.AlterField(
            model_name='eventattendance',
            name='user_id',
            field=models.ForeignKey(db_column='user_id', on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),
        ),
        # A given user may be recorded at a given event at most once.
        migrations.AlterUniqueTogether(
            name='eventattendance',
            unique_together={('event_id', 'user_id')},
        ),
    ]
|
normal
|
{
"blob_id": "2ec8d3853ea4a99d4e764c6c24d7b5a3afb64f63",
"index": 2830,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [migrations.swappable_dependency(settings.\n AUTH_USER_MODEL), ('events', '0004_auto_20190526_1436')]\n operations = [migrations.AlterField(model_name='eventattendance', name=\n 'event_id', field=models.ForeignKey(db_column='event_id', on_delete\n =django.db.models.deletion.DO_NOTHING, to='events.Event')),\n migrations.AlterField(model_name='eventattendance', name='user_id',\n field=models.ForeignKey(db_column='user_id', on_delete=django.db.\n models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),\n migrations.AlterUniqueTogether(name='eventattendance',\n unique_together={('event_id', 'user_id')})]\n",
"step-4": "from django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [migrations.swappable_dependency(settings.\n AUTH_USER_MODEL), ('events', '0004_auto_20190526_1436')]\n operations = [migrations.AlterField(model_name='eventattendance', name=\n 'event_id', field=models.ForeignKey(db_column='event_id', on_delete\n =django.db.models.deletion.DO_NOTHING, to='events.Event')),\n migrations.AlterField(model_name='eventattendance', name='user_id',\n field=models.ForeignKey(db_column='user_id', on_delete=django.db.\n models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),\n migrations.AlterUniqueTogether(name='eventattendance',\n unique_together={('event_id', 'user_id')})]\n",
"step-5": "# Generated by Django 2.1.7 on 2019-05-31 18:45\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ('events', '0004_auto_20190526_1436'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='eventattendance',\n name='event_id',\n field=models.ForeignKey(db_column='event_id', on_delete=django.db.models.deletion.DO_NOTHING, to='events.Event'),\n ),\n migrations.AlterField(\n model_name='eventattendance',\n name='user_id',\n field=models.ForeignKey(db_column='user_id', on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),\n ),\n migrations.AlterUniqueTogether(\n name='eventattendance',\n unique_together={('event_id', 'user_id')},\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from selenium import webdriver
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
# Shared timeout (seconds) for both page loads and implicit element waits.
SELENIUM_TIMEOUT = 12
def get_browser_driver():
    """Return a webdriver with image loading disabled.

    Tries PhantomJS first and falls back to Chrome when PhantomJS is not
    available. Remember to call ``driver.quit()`` when done, otherwise
    stale browser state can leak between uses.
    """
    try:
        # PhantomJS: disable image downloads via a service argument.
        browser = webdriver.PhantomJS(service_args=['--load-images=no'])
    except WebDriverException:
        # Chrome fallback: disable images through the prefs profile.
        options = webdriver.ChromeOptions()
        options.add_experimental_option(
            "prefs", {"profile.managed_default_content_settings.images": 2})
        browser = webdriver.Chrome(chrome_options=options)
    browser.set_page_load_timeout(SELENIUM_TIMEOUT)
    browser.implicitly_wait(SELENIUM_TIMEOUT)
    return browser
def wait_driver(driver, id, wait_time, watch_step):
    """Wait until the element with the given id is present.

    Polls every ``watch_step`` seconds for up to ``wait_time`` seconds.
    On failure the error is printed and the driver is quit.
    """
    target = (By.ID, id)
    waiter = WebDriverWait(driver, wait_time, watch_step)
    try:
        waiter.until(EC.presence_of_element_located(target))
        print(u"成功访问搜索引擎!")
    except Exception as exc:
        print(exc)
        print(u"搜索引擎未加载成功,浏览器将被退出!")
        driver.quit()
|
normal
|
{
"blob_id": "5ab877ef15cdcd52463b1567c28327dc2eeea2de",
"index": 1204,
"step-1": "<mask token>\n\n\ndef get_browser_driver():\n \"\"\"获取浏览器服务 使用后记得 driver.quit() 否则容易引起状态污染\"\"\"\n try:\n driver = webdriver.PhantomJS(service_args=['--load-images=no'])\n except WebDriverException:\n chrome_options = webdriver.ChromeOptions()\n chrome_profile = {'profile.managed_default_content_settings.images': 2}\n chrome_options.add_experimental_option('prefs', chrome_profile)\n driver = webdriver.Chrome(chrome_options=chrome_options)\n driver.set_page_load_timeout(SELENIUM_TIMEOUT)\n driver.implicitly_wait(SELENIUM_TIMEOUT)\n return driver\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_browser_driver():\n \"\"\"获取浏览器服务 使用后记得 driver.quit() 否则容易引起状态污染\"\"\"\n try:\n driver = webdriver.PhantomJS(service_args=['--load-images=no'])\n except WebDriverException:\n chrome_options = webdriver.ChromeOptions()\n chrome_profile = {'profile.managed_default_content_settings.images': 2}\n chrome_options.add_experimental_option('prefs', chrome_profile)\n driver = webdriver.Chrome(chrome_options=chrome_options)\n driver.set_page_load_timeout(SELENIUM_TIMEOUT)\n driver.implicitly_wait(SELENIUM_TIMEOUT)\n return driver\n\n\ndef wait_driver(driver, id, wait_time, watch_step):\n locator = By.ID, id\n try:\n WebDriverWait(driver, wait_time, watch_step).until(EC.\n presence_of_element_located(locator))\n print(u'成功访问搜索引擎!')\n except Exception as e:\n print(e)\n print(u'搜索引擎未加载成功,浏览器将被退出!')\n driver.quit()\n",
"step-3": "<mask token>\nSELENIUM_TIMEOUT = 12\n\n\ndef get_browser_driver():\n \"\"\"获取浏览器服务 使用后记得 driver.quit() 否则容易引起状态污染\"\"\"\n try:\n driver = webdriver.PhantomJS(service_args=['--load-images=no'])\n except WebDriverException:\n chrome_options = webdriver.ChromeOptions()\n chrome_profile = {'profile.managed_default_content_settings.images': 2}\n chrome_options.add_experimental_option('prefs', chrome_profile)\n driver = webdriver.Chrome(chrome_options=chrome_options)\n driver.set_page_load_timeout(SELENIUM_TIMEOUT)\n driver.implicitly_wait(SELENIUM_TIMEOUT)\n return driver\n\n\ndef wait_driver(driver, id, wait_time, watch_step):\n locator = By.ID, id\n try:\n WebDriverWait(driver, wait_time, watch_step).until(EC.\n presence_of_element_located(locator))\n print(u'成功访问搜索引擎!')\n except Exception as e:\n print(e)\n print(u'搜索引擎未加载成功,浏览器将被退出!')\n driver.quit()\n",
"step-4": "from selenium import webdriver\nfrom selenium.common.exceptions import WebDriverException\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.common.by import By\nSELENIUM_TIMEOUT = 12\n\n\ndef get_browser_driver():\n \"\"\"获取浏览器服务 使用后记得 driver.quit() 否则容易引起状态污染\"\"\"\n try:\n driver = webdriver.PhantomJS(service_args=['--load-images=no'])\n except WebDriverException:\n chrome_options = webdriver.ChromeOptions()\n chrome_profile = {'profile.managed_default_content_settings.images': 2}\n chrome_options.add_experimental_option('prefs', chrome_profile)\n driver = webdriver.Chrome(chrome_options=chrome_options)\n driver.set_page_load_timeout(SELENIUM_TIMEOUT)\n driver.implicitly_wait(SELENIUM_TIMEOUT)\n return driver\n\n\ndef wait_driver(driver, id, wait_time, watch_step):\n locator = By.ID, id\n try:\n WebDriverWait(driver, wait_time, watch_step).until(EC.\n presence_of_element_located(locator))\n print(u'成功访问搜索引擎!')\n except Exception as e:\n print(e)\n print(u'搜索引擎未加载成功,浏览器将被退出!')\n driver.quit()\n",
"step-5": "from selenium import webdriver\nfrom selenium.common.exceptions import WebDriverException\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.common.by import By\n\nSELENIUM_TIMEOUT = 12\n\ndef get_browser_driver():\n \"\"\"获取浏览器服务 使用后记得 driver.quit() 否则容易引起状态污染\"\"\"\n try:\n # PhantomJS 设置不加载图片\n driver = webdriver.PhantomJS(service_args=['--load-images=no'])\n except WebDriverException:\n # chrome 设置不加载图片\n chrome_options = webdriver.ChromeOptions()\n chrome_profile = {\"profile.managed_default_content_settings.images\": 2}\n chrome_options.add_experimental_option(\"prefs\", chrome_profile)\n driver = webdriver.Chrome(chrome_options=chrome_options)\n driver.set_page_load_timeout(SELENIUM_TIMEOUT)\n driver.implicitly_wait(SELENIUM_TIMEOUT)\n return driver\n\ndef wait_driver(driver, id, wait_time, watch_step):\n locator = (By.ID, id)\n try:\n WebDriverWait(driver, wait_time, watch_step).until(EC.presence_of_element_located(locator))\n print(u\"成功访问搜索引擎!\")\n except Exception as e:\n print(e)\n print(u\"搜索引擎未加载成功,浏览器将被退出!\")\n driver.quit()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class Cart(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def save(self):
self.session[settings.CART_SESSION_ID] = self.cart
self.session.modified = True
<|reserved_special_token_0|>
def __iter__(self):
blood_ids = self.cart.keys()
bloods = Blood.objects.filter(id__in=blood_ids)
for blood in bloods:
self.cart[str(blood.id)]['blood'] = blood
for item in self.cart.values():
item['price'] = Decimal(item['price'])
item['total_price'] = item['price'] * item['quantity']
yield item
def __len__(self):
return sum(item['quantity'] for item in self.cart.values())
def get_total_price(self):
return sum(Decimal(item['price']) * item['quantity'] for item in
self.cart.values())
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Cart(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def save(self):
self.session[settings.CART_SESSION_ID] = self.cart
self.session.modified = True
<|reserved_special_token_0|>
def __iter__(self):
blood_ids = self.cart.keys()
bloods = Blood.objects.filter(id__in=blood_ids)
for blood in bloods:
self.cart[str(blood.id)]['blood'] = blood
for item in self.cart.values():
item['price'] = Decimal(item['price'])
item['total_price'] = item['price'] * item['quantity']
yield item
def __len__(self):
return sum(item['quantity'] for item in self.cart.values())
def get_total_price(self):
return sum(Decimal(item['price']) * item['quantity'] for item in
self.cart.values())
def clear(self):
del self.session[settings.CART_SESSION_ID]
self.session.modified = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Cart(object):
<|reserved_special_token_0|>
def add(self, blood, quantity=1, update_quantity=False):
blood_id = str(blood.id)
max_quantity = Blood.objects.get(id=blood.id).stock
if blood_id not in self.cart:
self.cart[blood_id] = {'quantity': 0, 'price': str(blood.price)}
if update_quantity and self.cart[blood_id]['quantity'] <= max_quantity:
self.cart[blood_id]['quantity'] = quantity
elif int(self.cart[blood_id]['quantity'] + quantity) <= max_quantity:
self.cart[blood_id]['quantity'] += quantity
self.save()
def save(self):
self.session[settings.CART_SESSION_ID] = self.cart
self.session.modified = True
def remove(self, blood):
blood_id = str(blood.id)
if blood_id in self.cart:
del self.cart[blood_id]
self.save()
def __iter__(self):
blood_ids = self.cart.keys()
bloods = Blood.objects.filter(id__in=blood_ids)
for blood in bloods:
self.cart[str(blood.id)]['blood'] = blood
for item in self.cart.values():
item['price'] = Decimal(item['price'])
item['total_price'] = item['price'] * item['quantity']
yield item
def __len__(self):
return sum(item['quantity'] for item in self.cart.values())
def get_total_price(self):
return sum(Decimal(item['price']) * item['quantity'] for item in
self.cart.values())
def clear(self):
del self.session[settings.CART_SESSION_ID]
self.session.modified = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Cart(object):
def __init__(self, request):
self.session = request.session
cart = self.session.get(settings.CART_SESSION_ID)
if not cart:
cart = self.session[settings.CART_SESSION_ID] = {}
self.cart = cart
def add(self, blood, quantity=1, update_quantity=False):
blood_id = str(blood.id)
max_quantity = Blood.objects.get(id=blood.id).stock
if blood_id not in self.cart:
self.cart[blood_id] = {'quantity': 0, 'price': str(blood.price)}
if update_quantity and self.cart[blood_id]['quantity'] <= max_quantity:
self.cart[blood_id]['quantity'] = quantity
elif int(self.cart[blood_id]['quantity'] + quantity) <= max_quantity:
self.cart[blood_id]['quantity'] += quantity
self.save()
def save(self):
self.session[settings.CART_SESSION_ID] = self.cart
self.session.modified = True
def remove(self, blood):
blood_id = str(blood.id)
if blood_id in self.cart:
del self.cart[blood_id]
self.save()
def __iter__(self):
blood_ids = self.cart.keys()
bloods = Blood.objects.filter(id__in=blood_ids)
for blood in bloods:
self.cart[str(blood.id)]['blood'] = blood
for item in self.cart.values():
item['price'] = Decimal(item['price'])
item['total_price'] = item['price'] * item['quantity']
yield item
def __len__(self):
return sum(item['quantity'] for item in self.cart.values())
def get_total_price(self):
return sum(Decimal(item['price']) * item['quantity'] for item in
self.cart.values())
def clear(self):
del self.session[settings.CART_SESSION_ID]
self.session.modified = True
<|reserved_special_token_1|>
from decimal import Decimal
from django.conf import settings
from blood.models import Bank, Blood
class Cart(object):
def __init__(self, request):
self.session = request.session
cart = self.session.get(settings.CART_SESSION_ID)
if not cart:
cart = self.session[settings.CART_SESSION_ID] = {}
self.cart = cart
def add(self, blood, quantity=1, update_quantity=False):
blood_id = str(blood.id)
max_quantity = Blood.objects.get(id=blood.id).stock
if blood_id not in self.cart:
self.cart[blood_id] = {
'quantity': 0, 'price': str(blood.price)}
if update_quantity and self.cart[blood_id]['quantity'] <= max_quantity:
self.cart[blood_id]['quantity'] = quantity
elif int(self.cart[blood_id]['quantity']+quantity) <= max_quantity:
self.cart[blood_id]['quantity'] += quantity
self.save()
def save(self):
self.session[settings.CART_SESSION_ID] = self.cart
self.session.modified = True
def remove(self, blood):
blood_id = str(blood.id)
if blood_id in self.cart:
del self.cart[blood_id]
self.save()
def __iter__(self):
blood_ids = self.cart.keys()
bloods = Blood.objects.filter(id__in=blood_ids)
for blood in bloods:
self.cart[str(blood.id)]['blood'] = blood
for item in self.cart.values():
item['price'] = Decimal(item['price'])
item['total_price'] = item['price'] * item['quantity']
yield item
def __len__(self):
return sum(item['quantity'] for item in self.cart.values())
def get_total_price(self):
return sum(Decimal(item['price']) * item['quantity'] for item in self.cart.values())
def clear(self):
del self.session[settings.CART_SESSION_ID]
self.session.modified = True
|
flexible
|
{
"blob_id": "a638504737d0069d4fa40b0fc5026203904563e8",
"index": 5537,
"step-1": "<mask token>\n\n\nclass Cart(object):\n <mask token>\n <mask token>\n\n def save(self):\n self.session[settings.CART_SESSION_ID] = self.cart\n self.session.modified = True\n <mask token>\n\n def __iter__(self):\n blood_ids = self.cart.keys()\n bloods = Blood.objects.filter(id__in=blood_ids)\n for blood in bloods:\n self.cart[str(blood.id)]['blood'] = blood\n for item in self.cart.values():\n item['price'] = Decimal(item['price'])\n item['total_price'] = item['price'] * item['quantity']\n yield item\n\n def __len__(self):\n return sum(item['quantity'] for item in self.cart.values())\n\n def get_total_price(self):\n return sum(Decimal(item['price']) * item['quantity'] for item in\n self.cart.values())\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Cart(object):\n <mask token>\n <mask token>\n\n def save(self):\n self.session[settings.CART_SESSION_ID] = self.cart\n self.session.modified = True\n <mask token>\n\n def __iter__(self):\n blood_ids = self.cart.keys()\n bloods = Blood.objects.filter(id__in=blood_ids)\n for blood in bloods:\n self.cart[str(blood.id)]['blood'] = blood\n for item in self.cart.values():\n item['price'] = Decimal(item['price'])\n item['total_price'] = item['price'] * item['quantity']\n yield item\n\n def __len__(self):\n return sum(item['quantity'] for item in self.cart.values())\n\n def get_total_price(self):\n return sum(Decimal(item['price']) * item['quantity'] for item in\n self.cart.values())\n\n def clear(self):\n del self.session[settings.CART_SESSION_ID]\n self.session.modified = True\n",
"step-3": "<mask token>\n\n\nclass Cart(object):\n <mask token>\n\n def add(self, blood, quantity=1, update_quantity=False):\n blood_id = str(blood.id)\n max_quantity = Blood.objects.get(id=blood.id).stock\n if blood_id not in self.cart:\n self.cart[blood_id] = {'quantity': 0, 'price': str(blood.price)}\n if update_quantity and self.cart[blood_id]['quantity'] <= max_quantity:\n self.cart[blood_id]['quantity'] = quantity\n elif int(self.cart[blood_id]['quantity'] + quantity) <= max_quantity:\n self.cart[blood_id]['quantity'] += quantity\n self.save()\n\n def save(self):\n self.session[settings.CART_SESSION_ID] = self.cart\n self.session.modified = True\n\n def remove(self, blood):\n blood_id = str(blood.id)\n if blood_id in self.cart:\n del self.cart[blood_id]\n self.save()\n\n def __iter__(self):\n blood_ids = self.cart.keys()\n bloods = Blood.objects.filter(id__in=blood_ids)\n for blood in bloods:\n self.cart[str(blood.id)]['blood'] = blood\n for item in self.cart.values():\n item['price'] = Decimal(item['price'])\n item['total_price'] = item['price'] * item['quantity']\n yield item\n\n def __len__(self):\n return sum(item['quantity'] for item in self.cart.values())\n\n def get_total_price(self):\n return sum(Decimal(item['price']) * item['quantity'] for item in\n self.cart.values())\n\n def clear(self):\n del self.session[settings.CART_SESSION_ID]\n self.session.modified = True\n",
"step-4": "<mask token>\n\n\nclass Cart(object):\n\n def __init__(self, request):\n self.session = request.session\n cart = self.session.get(settings.CART_SESSION_ID)\n if not cart:\n cart = self.session[settings.CART_SESSION_ID] = {}\n self.cart = cart\n\n def add(self, blood, quantity=1, update_quantity=False):\n blood_id = str(blood.id)\n max_quantity = Blood.objects.get(id=blood.id).stock\n if blood_id not in self.cart:\n self.cart[blood_id] = {'quantity': 0, 'price': str(blood.price)}\n if update_quantity and self.cart[blood_id]['quantity'] <= max_quantity:\n self.cart[blood_id]['quantity'] = quantity\n elif int(self.cart[blood_id]['quantity'] + quantity) <= max_quantity:\n self.cart[blood_id]['quantity'] += quantity\n self.save()\n\n def save(self):\n self.session[settings.CART_SESSION_ID] = self.cart\n self.session.modified = True\n\n def remove(self, blood):\n blood_id = str(blood.id)\n if blood_id in self.cart:\n del self.cart[blood_id]\n self.save()\n\n def __iter__(self):\n blood_ids = self.cart.keys()\n bloods = Blood.objects.filter(id__in=blood_ids)\n for blood in bloods:\n self.cart[str(blood.id)]['blood'] = blood\n for item in self.cart.values():\n item['price'] = Decimal(item['price'])\n item['total_price'] = item['price'] * item['quantity']\n yield item\n\n def __len__(self):\n return sum(item['quantity'] for item in self.cart.values())\n\n def get_total_price(self):\n return sum(Decimal(item['price']) * item['quantity'] for item in\n self.cart.values())\n\n def clear(self):\n del self.session[settings.CART_SESSION_ID]\n self.session.modified = True\n",
"step-5": "from decimal import Decimal\nfrom django.conf import settings\nfrom blood.models import Bank, Blood\n\n\nclass Cart(object):\n def __init__(self, request):\n self.session = request.session\n cart = self.session.get(settings.CART_SESSION_ID)\n if not cart:\n cart = self.session[settings.CART_SESSION_ID] = {}\n self.cart = cart\n\n def add(self, blood, quantity=1, update_quantity=False):\n blood_id = str(blood.id)\n max_quantity = Blood.objects.get(id=blood.id).stock\n if blood_id not in self.cart:\n self.cart[blood_id] = {\n 'quantity': 0, 'price': str(blood.price)}\n if update_quantity and self.cart[blood_id]['quantity'] <= max_quantity:\n self.cart[blood_id]['quantity'] = quantity\n elif int(self.cart[blood_id]['quantity']+quantity) <= max_quantity:\n self.cart[blood_id]['quantity'] += quantity\n self.save()\n\n def save(self):\n self.session[settings.CART_SESSION_ID] = self.cart\n self.session.modified = True\n\n def remove(self, blood):\n blood_id = str(blood.id)\n if blood_id in self.cart:\n del self.cart[blood_id]\n self.save()\n\n def __iter__(self):\n blood_ids = self.cart.keys()\n bloods = Blood.objects.filter(id__in=blood_ids)\n for blood in bloods:\n self.cart[str(blood.id)]['blood'] = blood\n\n for item in self.cart.values():\n item['price'] = Decimal(item['price'])\n item['total_price'] = item['price'] * item['quantity']\n yield item\n\n def __len__(self):\n return sum(item['quantity'] for item in self.cart.values())\n\n def get_total_price(self):\n return sum(Decimal(item['price']) * item['quantity'] for item in self.cart.values())\n\n def clear(self):\n del self.session[settings.CART_SESSION_ID]\n self.session.modified = True\n",
"step-ids": [
5,
6,
8,
9,
11
]
}
|
[
5,
6,
8,
9,
11
] |
STATUS_DISCONNECT = 0
STATUS_CONNECTED = 1
STATUS_OPEN_CH_REQUEST = 2
STATUS_OPENED = 3
STATUS_EXITING = 4
STATUS_EXITTED = 5
CONTENT_TYPE_IMAGE = 0
CONTENT_TYPE_VIDEO = 1
STATUS_OK = 0
STATUS_ERROR = 1
class Point(object):
def __init__(self, x = 0, y = 0):
self.x = x
self.y = y
class ObjectDetectionResult(object):
def __init__(self, ltx = 0, lty = 0, rbx = 0, rby = 0, text = None):
self.object_class = 0
self.confidence = 0
self.lt = Point(ltx, lty)
self.rb = Point(rbx, rby)
self.result_text = text
def IsRectInvalid(self):
return ((self.lt.x < 0) or \
(self.lt.y < 0) or \
(self.rb.x < 0) or \
(self.rb.y < 0) or \
(self.lt.x > self.rb.x) or \
(self.lt.y > self.rb.y))
|
normal
|
{
"blob_id": "0ceb9eac46e3182821e65a1ae3a69d842db51e62",
"index": 7879,
"step-1": "<mask token>\n\n\nclass ObjectDetectionResult(object):\n\n def __init__(self, ltx=0, lty=0, rbx=0, rby=0, text=None):\n self.object_class = 0\n self.confidence = 0\n self.lt = Point(ltx, lty)\n self.rb = Point(rbx, rby)\n self.result_text = text\n\n def IsRectInvalid(self):\n return (self.lt.x < 0 or self.lt.y < 0 or self.rb.x < 0 or self.rb.\n y < 0 or self.lt.x > self.rb.x or self.lt.y > self.rb.y)\n",
"step-2": "<mask token>\n\n\nclass Point(object):\n <mask token>\n\n\nclass ObjectDetectionResult(object):\n\n def __init__(self, ltx=0, lty=0, rbx=0, rby=0, text=None):\n self.object_class = 0\n self.confidence = 0\n self.lt = Point(ltx, lty)\n self.rb = Point(rbx, rby)\n self.result_text = text\n\n def IsRectInvalid(self):\n return (self.lt.x < 0 or self.lt.y < 0 or self.rb.x < 0 or self.rb.\n y < 0 or self.lt.x > self.rb.x or self.lt.y > self.rb.y)\n",
"step-3": "<mask token>\n\n\nclass Point(object):\n\n def __init__(self, x=0, y=0):\n self.x = x\n self.y = y\n\n\nclass ObjectDetectionResult(object):\n\n def __init__(self, ltx=0, lty=0, rbx=0, rby=0, text=None):\n self.object_class = 0\n self.confidence = 0\n self.lt = Point(ltx, lty)\n self.rb = Point(rbx, rby)\n self.result_text = text\n\n def IsRectInvalid(self):\n return (self.lt.x < 0 or self.lt.y < 0 or self.rb.x < 0 or self.rb.\n y < 0 or self.lt.x > self.rb.x or self.lt.y > self.rb.y)\n",
"step-4": "STATUS_DISCONNECT = 0\nSTATUS_CONNECTED = 1\nSTATUS_OPEN_CH_REQUEST = 2\nSTATUS_OPENED = 3\nSTATUS_EXITING = 4\nSTATUS_EXITTED = 5\nCONTENT_TYPE_IMAGE = 0\nCONTENT_TYPE_VIDEO = 1\nSTATUS_OK = 0\nSTATUS_ERROR = 1\n\n\nclass Point(object):\n\n def __init__(self, x=0, y=0):\n self.x = x\n self.y = y\n\n\nclass ObjectDetectionResult(object):\n\n def __init__(self, ltx=0, lty=0, rbx=0, rby=0, text=None):\n self.object_class = 0\n self.confidence = 0\n self.lt = Point(ltx, lty)\n self.rb = Point(rbx, rby)\n self.result_text = text\n\n def IsRectInvalid(self):\n return (self.lt.x < 0 or self.lt.y < 0 or self.rb.x < 0 or self.rb.\n y < 0 or self.lt.x > self.rb.x or self.lt.y > self.rb.y)\n",
"step-5": "\nSTATUS_DISCONNECT = 0\nSTATUS_CONNECTED = 1\nSTATUS_OPEN_CH_REQUEST = 2\nSTATUS_OPENED = 3\nSTATUS_EXITING = 4\nSTATUS_EXITTED = 5\n\nCONTENT_TYPE_IMAGE = 0\nCONTENT_TYPE_VIDEO = 1\n\nSTATUS_OK = 0\nSTATUS_ERROR = 1\n\nclass Point(object):\n def __init__(self, x = 0, y = 0):\n self.x = x\n self.y = y\n\n\nclass ObjectDetectionResult(object):\n def __init__(self, ltx = 0, lty = 0, rbx = 0, rby = 0, text = None):\n self.object_class = 0\n self.confidence = 0\n self.lt = Point(ltx, lty)\n self.rb = Point(rbx, rby)\n self.result_text = text\n \n def IsRectInvalid(self):\n return ((self.lt.x < 0) or \\\n (self.lt.y < 0) or \\\n (self.rb.x < 0) or \\\n (self.rb.y < 0) or \\\n (self.lt.x > self.rb.x) or \\\n (self.lt.y > self.rb.y))\n\n\n\n\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def remove_element(self, nums: list[int], val: int) ->int:
last_position = 0
for num in nums:
if num != val:
nums[last_position] = num
last_position += 1
return last_position
<|reserved_special_token_0|>
<|reserved_special_token_1|>
"""
Given an array nums and a value val, remove all instances of
that value in-place and return the new length.
Do not allocate extra space for another array, you must do
this by modifying the input array in-place with O(1) extra memory.
The order of elements can be changed. It doesn't matter
what you leave beyond the new length.
"""
class Solution:
def remove_element(self, nums: list[int], val: int) -> int:
last_position = 0
for num in nums:
if num != val:
nums[last_position] = num
last_position += 1
return last_position
"""
Complexity: Time : O(n) | Space: O(1)
"""
|
flexible
|
{
"blob_id": "8be4bf5c1a5a7b841edc915793571686ee0bffe6",
"index": 113,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def remove_element(self, nums: list[int], val: int) ->int:\n last_position = 0\n for num in nums:\n if num != val:\n nums[last_position] = num\n last_position += 1\n return last_position\n\n\n<mask token>\n",
"step-4": "\"\"\"\n Given an array nums and a value val, remove all instances of\n that value in-place and return the new length.\n\n Do not allocate extra space for another array, you must do\n this by modifying the input array in-place with O(1) extra memory.\n\n The order of elements can be changed. It doesn't matter\n what you leave beyond the new length.\n\"\"\"\nclass Solution:\n def remove_element(self, nums: list[int], val: int) -> int:\n last_position = 0\n\n for num in nums:\n if num != val:\n nums[last_position] = num\n last_position += 1\n\n return last_position\n\n\"\"\"\n Complexity: Time : O(n) | Space: O(1)\n\"\"\"",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding:utf-8 -*-
import requests
import json
def fun1():
s_cut = [('72af8ecf3609a546bac3150c20f70455', ['老凤祥', '六福珠宝', '周生生', '亚一珠宝', '亚一金店']),
('3e78397f7dbb88ffbd78ba52d0e925fa', ['老庙', '谢瑞麟', '中国黄金', '明牌珠宝']), # yh
('6bee32b2f0719ea45cc194847efd8917', ['周大福', '潮宏基', '东华美钻', '周大生']), # zyy
]
num = 1
city_code = ['上海']
for s_key, store_names in s_cut:
for store in store_names:
for code in city_code:
params = {'keywords': store,
'types': '购物服务',
'city': code,
'citylimit': 'True',
'output': 'json',
'key': s_key,
'offset': 20,
'page': num}
response = requests.get('https://restapi.amap.com/v3/place/text', params=params)
map_results = json.loads(response.text)
print(map_results)
return map_results
json_text = fun1()
print(json_text['pois'])
print(len(json_text['pois']))
|
normal
|
{
"blob_id": "66f8fa5fc12dc80b8f46684c39781c2e4634de4a",
"index": 3479,
"step-1": "<mask token>\n\n\ndef fun1():\n s_cut = [('72af8ecf3609a546bac3150c20f70455', ['老凤祥', '六福珠宝', '周生生',\n '亚一珠宝', '亚一金店']), ('3e78397f7dbb88ffbd78ba52d0e925fa', ['老庙', '谢瑞麟',\n '中国黄金', '明牌珠宝']), ('6bee32b2f0719ea45cc194847efd8917', ['周大福',\n '潮宏基', '东华美钻', '周大生'])]\n num = 1\n city_code = ['上海']\n for s_key, store_names in s_cut:\n for store in store_names:\n for code in city_code:\n params = {'keywords': store, 'types': '购物服务', 'city': code,\n 'citylimit': 'True', 'output': 'json', 'key': s_key,\n 'offset': 20, 'page': num}\n response = requests.get(\n 'https://restapi.amap.com/v3/place/text', params=params)\n map_results = json.loads(response.text)\n print(map_results)\n return map_results\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef fun1():\n s_cut = [('72af8ecf3609a546bac3150c20f70455', ['老凤祥', '六福珠宝', '周生生',\n '亚一珠宝', '亚一金店']), ('3e78397f7dbb88ffbd78ba52d0e925fa', ['老庙', '谢瑞麟',\n '中国黄金', '明牌珠宝']), ('6bee32b2f0719ea45cc194847efd8917', ['周大福',\n '潮宏基', '东华美钻', '周大生'])]\n num = 1\n city_code = ['上海']\n for s_key, store_names in s_cut:\n for store in store_names:\n for code in city_code:\n params = {'keywords': store, 'types': '购物服务', 'city': code,\n 'citylimit': 'True', 'output': 'json', 'key': s_key,\n 'offset': 20, 'page': num}\n response = requests.get(\n 'https://restapi.amap.com/v3/place/text', params=params)\n map_results = json.loads(response.text)\n print(map_results)\n return map_results\n\n\n<mask token>\nprint(json_text['pois'])\nprint(len(json_text['pois']))\n",
"step-3": "<mask token>\n\n\ndef fun1():\n s_cut = [('72af8ecf3609a546bac3150c20f70455', ['老凤祥', '六福珠宝', '周生生',\n '亚一珠宝', '亚一金店']), ('3e78397f7dbb88ffbd78ba52d0e925fa', ['老庙', '谢瑞麟',\n '中国黄金', '明牌珠宝']), ('6bee32b2f0719ea45cc194847efd8917', ['周大福',\n '潮宏基', '东华美钻', '周大生'])]\n num = 1\n city_code = ['上海']\n for s_key, store_names in s_cut:\n for store in store_names:\n for code in city_code:\n params = {'keywords': store, 'types': '购物服务', 'city': code,\n 'citylimit': 'True', 'output': 'json', 'key': s_key,\n 'offset': 20, 'page': num}\n response = requests.get(\n 'https://restapi.amap.com/v3/place/text', params=params)\n map_results = json.loads(response.text)\n print(map_results)\n return map_results\n\n\njson_text = fun1()\nprint(json_text['pois'])\nprint(len(json_text['pois']))\n",
"step-4": "import requests\nimport json\n\n\ndef fun1():\n s_cut = [('72af8ecf3609a546bac3150c20f70455', ['老凤祥', '六福珠宝', '周生生',\n '亚一珠宝', '亚一金店']), ('3e78397f7dbb88ffbd78ba52d0e925fa', ['老庙', '谢瑞麟',\n '中国黄金', '明牌珠宝']), ('6bee32b2f0719ea45cc194847efd8917', ['周大福',\n '潮宏基', '东华美钻', '周大生'])]\n num = 1\n city_code = ['上海']\n for s_key, store_names in s_cut:\n for store in store_names:\n for code in city_code:\n params = {'keywords': store, 'types': '购物服务', 'city': code,\n 'citylimit': 'True', 'output': 'json', 'key': s_key,\n 'offset': 20, 'page': num}\n response = requests.get(\n 'https://restapi.amap.com/v3/place/text', params=params)\n map_results = json.loads(response.text)\n print(map_results)\n return map_results\n\n\njson_text = fun1()\nprint(json_text['pois'])\nprint(len(json_text['pois']))\n",
"step-5": "# -*- coding:utf-8 -*-\n\nimport requests\nimport json\n\n\ndef fun1():\n s_cut = [('72af8ecf3609a546bac3150c20f70455', ['老凤祥', '六福珠宝', '周生生', '亚一珠宝', '亚一金店']),\n ('3e78397f7dbb88ffbd78ba52d0e925fa', ['老庙', '谢瑞麟', '中国黄金', '明牌珠宝']), # yh\n ('6bee32b2f0719ea45cc194847efd8917', ['周大福', '潮宏基', '东华美钻', '周大生']), # zyy\n ]\n num = 1\n city_code = ['上海']\n for s_key, store_names in s_cut:\n for store in store_names:\n for code in city_code:\n params = {'keywords': store,\n 'types': '购物服务',\n 'city': code,\n 'citylimit': 'True',\n 'output': 'json',\n 'key': s_key,\n 'offset': 20,\n 'page': num}\n response = requests.get('https://restapi.amap.com/v3/place/text', params=params)\n map_results = json.loads(response.text)\n print(map_results)\n return map_results\n\n\njson_text = fun1()\nprint(json_text['pois'])\n\nprint(len(json_text['pois']))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
@app.route('/', methods=['GET', 'POST'])
def inicio():
nombre = 'jose'
return render_template('inicio.html', nombre=nombre)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.route('/', methods=['GET', 'POST'])
def inicio():
nombre = 'jose'
return render_template('inicio.html', nombre=nombre)
app.run(debug=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def inicio():
nombre = 'jose'
return render_template('inicio.html', nombre=nombre)
app.run(debug=True)
<|reserved_special_token_1|>
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def inicio():
nombre = 'jose'
return render_template('inicio.html', nombre=nombre)
app.run(debug=True)
<|reserved_special_token_1|>
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/',methods=["GET","POST"])
def inicio():
nombre = "jose"
return render_template("inicio.html",nombre=nombre)
app.run(debug=True)
|
flexible
|
{
"blob_id": "caa28bd64141c8d2f3212b5e4e77129d81d24c71",
"index": 2290,
"step-1": "<mask token>\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef inicio():\n nombre = 'jose'\n return render_template('inicio.html', nombre=nombre)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef inicio():\n nombre = 'jose'\n return render_template('inicio.html', nombre=nombre)\n\n\napp.run(debug=True)\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef inicio():\n nombre = 'jose'\n return render_template('inicio.html', nombre=nombre)\n\n\napp.run(debug=True)\n",
"step-4": "from flask import Flask, render_template\napp = Flask(__name__)\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef inicio():\n nombre = 'jose'\n return render_template('inicio.html', nombre=nombre)\n\n\napp.run(debug=True)\n",
"step-5": "from flask import Flask, render_template\napp = Flask(__name__)\t\n\n@app.route('/',methods=[\"GET\",\"POST\"])\ndef inicio():\n\tnombre = \"jose\"\n\treturn render_template(\"inicio.html\",nombre=nombre)\n\napp.run(debug=True)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
class LinearQueue:
<|reserved_special_token_0|>
def enqueue(self, *args):
for i in args:
if not self.isFull():
self._tail += 1
self._queue[self._tail] = i
else:
break
def dequeue(self):
if self.isEmpty() or self._tail == self._head:
return None
else:
self._head += 1
self._dequeueValue = self._queue[self._head]
return self._dequeueValue
def isFull(self):
return self._tail == self._length - 1
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class LinearQueue:
def __init__(self, length):
self._length = length
self._head = self._tail = -1
self._queue = [None] * self._length
def enqueue(self, *args):
for i in args:
if not self.isFull():
self._tail += 1
self._queue[self._tail] = i
else:
break
def dequeue(self):
if self.isEmpty() or self._tail == self._head:
return None
else:
self._head += 1
self._dequeueValue = self._queue[self._head]
return self._dequeueValue
def isFull(self):
return self._tail == self._length - 1
def isEmpty(self):
return self._tail == self._head == -1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class LinearQueue:
def __init__(self, length):
self._length = length
self._head = self._tail = -1
self._queue = [None] * self._length
def enqueue(self, *args):
for i in args:
if not self.isFull():
self._tail += 1
self._queue[self._tail] = i
else:
break
def dequeue(self):
if self.isEmpty() or self._tail == self._head:
return None
else:
self._head += 1
self._dequeueValue = self._queue[self._head]
return self._dequeueValue
def isFull(self):
return self._tail == self._length - 1
def isEmpty(self):
return self._tail == self._head == -1
print('Creating Linear Queue of 5 with No Values')
<|reserved_special_token_0|>
print('empty', q.isEmpty())
print('Enqueuing 1, 2, 3')
q.enqueue(1, 2, 3)
print('full', q.isFull())
print('empty', q.isEmpty())
print('dequeuing 1, 2, 3')
for i in range(0, 3):
print('dequeuing', q.dequeue())
print('empty', q.isEmpty())
print('Enqueuing 4, 5')
q.enqueue(4, 5)
print('full', q.isFull())
print('empty', q.isEmpty())
print('dequeuing all')
for i in range(0, 2):
print('dequeuing', q.dequeue())
print('full', q.isFull())
print('empty', q.isEmpty())
print('dequeuing extra value (should return None)')
print('dequeuing', q.dequeue())
<|reserved_special_token_1|>
class LinearQueue:
def __init__(self, length):
self._length = length
self._head = self._tail = -1
self._queue = [None] * self._length
def enqueue(self, *args):
for i in args:
if not self.isFull():
self._tail += 1
self._queue[self._tail] = i
else:
break
def dequeue(self):
if self.isEmpty() or self._tail == self._head:
return None
else:
self._head += 1
self._dequeueValue = self._queue[self._head]
return self._dequeueValue
def isFull(self):
return self._tail == self._length - 1
def isEmpty(self):
return self._tail == self._head == -1
print('Creating Linear Queue of 5 with No Values')
q = LinearQueue(5)
print('empty', q.isEmpty())
print('Enqueuing 1, 2, 3')
q.enqueue(1, 2, 3)
print('full', q.isFull())
print('empty', q.isEmpty())
print('dequeuing 1, 2, 3')
for i in range(0, 3):
print('dequeuing', q.dequeue())
print('empty', q.isEmpty())
print('Enqueuing 4, 5')
q.enqueue(4, 5)
print('full', q.isFull())
print('empty', q.isEmpty())
print('dequeuing all')
for i in range(0, 2):
print('dequeuing', q.dequeue())
print('full', q.isFull())
print('empty', q.isEmpty())
print('dequeuing extra value (should return None)')
print('dequeuing', q.dequeue())
<|reserved_special_token_1|>
##Linear Queue Data Structure
#Main Queue Class
class LinearQueue():
def __init__(self, length):
#When initiating, user defines the length.
#The head and tail pointers are set at -1 (i.e. not pointing to anything, index beginning at zero)
#The queue is set as a series of None objects in a list the length the user gave
self._length = length
self._head = self._tail = -1
self._queue = [None]*self._length
def enqueue(self, *args):
#Enqueue - Adds value to Queue (First-In)
#Arguments are taken as a tuple of any length and are processed one at a time
for i in args:
if not self.isFull():
#The queue is checked if it is full. If it isn't, the value is added to the end of the queue and the tail is updated.
self._tail += 1
self._queue[self._tail] = i
else:
#Otherwise, if the list is full, the loop breaks and no more values are taken from the arguments.
break
def dequeue(self):
#Dequeue - Take value from Queue (First Out)
if self.isEmpty() or (self._tail == self._head):
#If the queue is empty or the head and tail point at the same position, None is returned
return None
else:
#If the queue is not empty, the value being pointed to by the head pointer is returned and the head pointer shifts up one
#To emulate a real Queue, this value is not removed, however it is ignored
self._head += 1
self._dequeueValue = self._queue[self._head]
return self._dequeueValue
def isFull(self):
return self._tail == (self._length-1) #If the tail pointer is the same as the length (minus one) of the queue then it is full. If not, it isn't full.
def isEmpty(self):
return self._tail == self._head == -1 #If the head and tail pointers are both -1, then the queue is empty. If not, it isn't empty.
# Demonstration run: exercise a LinearQueue of capacity 5, showing the
# full/empty flags before and after each enqueue/dequeue phase.
print("Creating Linear Queue of 5 with No Values")
queue = LinearQueue(5)
print("empty", queue.isEmpty())
print("Enqueuing 1, 2, 3")
queue.enqueue(1, 2, 3)
print("full", queue.isFull())
print("empty", queue.isEmpty())
print("dequeuing 1, 2, 3")
for _ in range(3):
    print("dequeuing", queue.dequeue())
print("empty", queue.isEmpty())
print("Enqueuing 4, 5")
queue.enqueue(4, 5)
print("full", queue.isFull())
print("empty", queue.isEmpty())
print("dequeuing all")
for _ in range(2):
    print("dequeuing", queue.dequeue())
print("full", queue.isFull())
print("empty", queue.isEmpty())
print("dequeuing extra value (should return None)")
print("dequeuing", queue.dequeue())
|
flexible
|
{
"blob_id": "0efac7d9d1a9180eafa8c9c4e3a42b4c68e718a2",
"index": 4597,
"step-1": "class LinearQueue:\n <mask token>\n\n def enqueue(self, *args):\n for i in args:\n if not self.isFull():\n self._tail += 1\n self._queue[self._tail] = i\n else:\n break\n\n def dequeue(self):\n if self.isEmpty() or self._tail == self._head:\n return None\n else:\n self._head += 1\n self._dequeueValue = self._queue[self._head]\n return self._dequeueValue\n\n def isFull(self):\n return self._tail == self._length - 1\n <mask token>\n\n\n<mask token>\n",
"step-2": "class LinearQueue:\n\n def __init__(self, length):\n self._length = length\n self._head = self._tail = -1\n self._queue = [None] * self._length\n\n def enqueue(self, *args):\n for i in args:\n if not self.isFull():\n self._tail += 1\n self._queue[self._tail] = i\n else:\n break\n\n def dequeue(self):\n if self.isEmpty() or self._tail == self._head:\n return None\n else:\n self._head += 1\n self._dequeueValue = self._queue[self._head]\n return self._dequeueValue\n\n def isFull(self):\n return self._tail == self._length - 1\n\n def isEmpty(self):\n return self._tail == self._head == -1\n\n\n<mask token>\n",
"step-3": "class LinearQueue:\n\n def __init__(self, length):\n self._length = length\n self._head = self._tail = -1\n self._queue = [None] * self._length\n\n def enqueue(self, *args):\n for i in args:\n if not self.isFull():\n self._tail += 1\n self._queue[self._tail] = i\n else:\n break\n\n def dequeue(self):\n if self.isEmpty() or self._tail == self._head:\n return None\n else:\n self._head += 1\n self._dequeueValue = self._queue[self._head]\n return self._dequeueValue\n\n def isFull(self):\n return self._tail == self._length - 1\n\n def isEmpty(self):\n return self._tail == self._head == -1\n\n\nprint('Creating Linear Queue of 5 with No Values')\n<mask token>\nprint('empty', q.isEmpty())\nprint('Enqueuing 1, 2, 3')\nq.enqueue(1, 2, 3)\nprint('full', q.isFull())\nprint('empty', q.isEmpty())\nprint('dequeuing 1, 2, 3')\nfor i in range(0, 3):\n print('dequeuing', q.dequeue())\nprint('empty', q.isEmpty())\nprint('Enqueuing 4, 5')\nq.enqueue(4, 5)\nprint('full', q.isFull())\nprint('empty', q.isEmpty())\nprint('dequeuing all')\nfor i in range(0, 2):\n print('dequeuing', q.dequeue())\nprint('full', q.isFull())\nprint('empty', q.isEmpty())\nprint('dequeuing extra value (should return None)')\nprint('dequeuing', q.dequeue())\n",
"step-4": "class LinearQueue:\n\n def __init__(self, length):\n self._length = length\n self._head = self._tail = -1\n self._queue = [None] * self._length\n\n def enqueue(self, *args):\n for i in args:\n if not self.isFull():\n self._tail += 1\n self._queue[self._tail] = i\n else:\n break\n\n def dequeue(self):\n if self.isEmpty() or self._tail == self._head:\n return None\n else:\n self._head += 1\n self._dequeueValue = self._queue[self._head]\n return self._dequeueValue\n\n def isFull(self):\n return self._tail == self._length - 1\n\n def isEmpty(self):\n return self._tail == self._head == -1\n\n\nprint('Creating Linear Queue of 5 with No Values')\nq = LinearQueue(5)\nprint('empty', q.isEmpty())\nprint('Enqueuing 1, 2, 3')\nq.enqueue(1, 2, 3)\nprint('full', q.isFull())\nprint('empty', q.isEmpty())\nprint('dequeuing 1, 2, 3')\nfor i in range(0, 3):\n print('dequeuing', q.dequeue())\nprint('empty', q.isEmpty())\nprint('Enqueuing 4, 5')\nq.enqueue(4, 5)\nprint('full', q.isFull())\nprint('empty', q.isEmpty())\nprint('dequeuing all')\nfor i in range(0, 2):\n print('dequeuing', q.dequeue())\nprint('full', q.isFull())\nprint('empty', q.isEmpty())\nprint('dequeuing extra value (should return None)')\nprint('dequeuing', q.dequeue())\n",
"step-5": "##Linear Queue Data Structure\n\n#Main Queue Class\nclass LinearQueue():\n def __init__(self, length):\n #When initiating, user defines the length.\n #The head and tail pointers are set at -1 (i.e. not pointing to anything, index beginning at zero)\n #The queue is set as a series of None objects in a list the length the user gave\n self._length = length\n self._head = self._tail = -1\n self._queue = [None]*self._length\n def enqueue(self, *args):\n #Enqueue - Adds value to Queue (First-In)\n #Arguments are taken as a tuple of any length and are processed one at a time\n for i in args:\n if not self.isFull():\n #The queue is checked if it is full. If it isn't, the value is added to the end of the queue and the tail is updated.\n self._tail += 1\n self._queue[self._tail] = i\n else:\n #Otherwise, if the list is full, the loop breaks and no more values are taken from the arguments.\n break\n def dequeue(self):\n #Dequeue - Take value from Queue (First Out)\n if self.isEmpty() or (self._tail == self._head):\n #If the queue is empty or the head and tail point at the same position, None is returned\n return None\n else:\n #If the queue is not empty, the value being pointed to by the head pointer is returned and the head pointer shifts up one\n #To emulate a real Queue, this value is not removed, however it is ignored\n self._head += 1\n self._dequeueValue = self._queue[self._head]\n return self._dequeueValue\n def isFull(self):\n return self._tail == (self._length-1) #If the tail pointer is the same as the length (minus one) of the queue then it is full. If not, it isn't full.\n def isEmpty(self):\n return self._tail == self._head == -1 #If the head and tail pointers are both -1, then the queue is empty. 
If not, it isn't empty.\n\n#Test with a Queue of Length 5 named 'q'\nprint(\"Creating Linear Queue of 5 with No Values\")\nq = LinearQueue(5)\nprint(\"empty\",q.isEmpty())\nprint(\"Enqueuing 1, 2, 3\")\nq.enqueue(1, 2, 3)\nprint(\"full\",q.isFull())\nprint(\"empty\",q.isEmpty())\nprint(\"dequeuing 1, 2, 3\")\nfor i in range(0,3):\n print(\"dequeuing\",q.dequeue())\nprint(\"empty\",q.isEmpty())\nprint(\"Enqueuing 4, 5\")\nq.enqueue(4, 5)\nprint(\"full\",q.isFull())\nprint(\"empty\",q.isEmpty())\nprint(\"dequeuing all\")\nfor i in range(0,2):\n print(\"dequeuing\",q.dequeue())\nprint(\"full\",q.isFull())\nprint(\"empty\",q.isEmpty())\nprint(\"dequeuing extra value (should return None)\")\nprint(\"dequeuing\",q.dequeue())\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
EXP_GROUPS = {}
EXP_GROUPS['starter_issam'] = hu.cartesian_exp_group({'batch_size': 32,
'opt': {'name': 'adamW', 'lr': 0.0001, 'wd': 1e-06}, 'model': {'name':
'resnext50_32x4d_ssl'}, 'loss_func': {'name': 'cross_entropy'},
'max_epoch': [50]})
EXP_GROUPS['clip'] = hu.cartesian_exp_group({'batch_size': 32, 'model': {
'name': 'clip'}, 'max_epoch': [30]})
<|reserved_special_token_1|>
from haven import haven_utils as hu
import itertools, copy
EXP_GROUPS = {}
EXP_GROUPS['starter_issam'] = hu.cartesian_exp_group({'batch_size': 32,
'opt': {'name': 'adamW', 'lr': 0.0001, 'wd': 1e-06}, 'model': {'name':
'resnext50_32x4d_ssl'}, 'loss_func': {'name': 'cross_entropy'},
'max_epoch': [50]})
EXP_GROUPS['clip'] = hu.cartesian_exp_group({'batch_size': 32, 'model': {
'name': 'clip'}, 'max_epoch': [30]})
<|reserved_special_token_1|>
from haven import haven_utils as hu
import itertools, copy
# Registry of experiment groups, keyed by group name.  Each entry is the
# cartesian product of the listed hyper-parameter values.
EXP_GROUPS = {}

# ResNeXt-50 (semi-supervised weights) baseline trained with AdamW.
EXP_GROUPS['starter_issam'] = hu.cartesian_exp_group(
    {
        'batch_size': 32,
        'opt': {'name': 'adamW', 'lr': 0.0001, 'wd': 1e-6},
        'model': {'name': 'resnext50_32x4d_ssl'},
        'loss_func': {'name': 'cross_entropy'},
        'max_epoch': [50],
    }
)

# CLIP model group (shorter schedule, default optimizer/loss).
EXP_GROUPS['clip'] = hu.cartesian_exp_group(
    {
        'batch_size': 32,
        'model': {'name': 'clip'},
        'max_epoch': [30],
    }
)
|
flexible
|
{
"blob_id": "dafefc65335a0d7e27057f51b43e52b286f5bc6b",
"index": 6067,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nEXP_GROUPS = {}\nEXP_GROUPS['starter_issam'] = hu.cartesian_exp_group({'batch_size': 32,\n 'opt': {'name': 'adamW', 'lr': 0.0001, 'wd': 1e-06}, 'model': {'name':\n 'resnext50_32x4d_ssl'}, 'loss_func': {'name': 'cross_entropy'},\n 'max_epoch': [50]})\nEXP_GROUPS['clip'] = hu.cartesian_exp_group({'batch_size': 32, 'model': {\n 'name': 'clip'}, 'max_epoch': [30]})\n",
"step-3": "from haven import haven_utils as hu\nimport itertools, copy\nEXP_GROUPS = {}\nEXP_GROUPS['starter_issam'] = hu.cartesian_exp_group({'batch_size': 32,\n 'opt': {'name': 'adamW', 'lr': 0.0001, 'wd': 1e-06}, 'model': {'name':\n 'resnext50_32x4d_ssl'}, 'loss_func': {'name': 'cross_entropy'},\n 'max_epoch': [50]})\nEXP_GROUPS['clip'] = hu.cartesian_exp_group({'batch_size': 32, 'model': {\n 'name': 'clip'}, 'max_epoch': [30]})\n",
"step-4": "from haven import haven_utils as hu\nimport itertools, copy\n\nEXP_GROUPS = {}\n\n\nEXP_GROUPS['starter_issam'] = hu.cartesian_exp_group({\n 'batch_size': 32,\n 'opt': {'name': 'adamW', 'lr': 0.0001, 'wd': 1e-6},\n 'model': {'name': 'resnext50_32x4d_ssl'},\n 'loss_func': {'name': 'cross_entropy'},\n 'max_epoch': [50]\n })\n\nEXP_GROUPS['clip'] = hu.cartesian_exp_group({\n 'batch_size': 32,\n 'model': {'name': 'clip'},\n 'max_epoch': [30],\n })",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import scipy.io as sio
import glob
import numpy as np
import matplotlib.pyplot as plt
import math
import os,sys
BIN = os.path.expanduser("../tools/")
sys.path.append(BIN)
import myfilemanager as mfm
import mystyle as ms
import propsort as ps
from functools import partial
from scipy.ndimage import gaussian_filter1d
from scipy.constants import c as clight
plt.close('all')
# Scan Parameters
fraction_device_quad_vect = [0.07, 0.16, 0.26]
n_slices_vect = np.array([250., 500., 750., 1000.])
betax_vect = [50, 100, 150, 200, 300, 400, 500, 600]
# Simulations Parameters
PyPICmode_tag = 'Tblocked'
# If you want to save the figures with all the scan parameters choose: savefigures = True and mode = 'auto'
savefigure = True
mode = 'auto'
#~ # Comment this part if you want to save the plots. You can choose only some scan parameters
#~ savefigure = False
#~ fraction_device_quad_vect = [0.26]
#~ n_slices_vect = np.array([1000.,])
#~ betax_vect = [100]
#~ mode = 'manual'
#~ turn_obs = 350
betay_vect = betax_vect
folder_plot = 'intrabunch_modes/'
if not os.path.exists(folder_plot) and savefigure:
os.makedirs(folder_plot)
# choice of the window of turns
# import the dictionary elements
dic = sio.loadmat('tt_complete.mat')
tt = np.squeeze(dic['tt_first'])
smooth = partial(gaussian_filter1d, sigma=2, mode='nearest')
n_turns_window = 20
n_sigmaz_sim = 10. #we are simulating 10 long sigmas
i_want_to_count_over = 4.
flag_weighted = True
#Figure parameters
ii_fig = 0
tick_size = 20
axis_font = {'fontname':'Arial', 'size':'24'}
fig_size = (15, 5)
line_width = 3.5
ms.mystyle_arial(16)
# calculate intra-bunch modes
for fraction_device_quad in fraction_device_quad_vect:
kk = np.argmin(np.abs(dic['fraction_device_quad_vect']-fraction_device_quad))
for betax, betay in zip(betax_vect, betay_vect):
jj = np.argmin(np.abs(dic['betax_vect']-betax))
subfolder_plot = folder_plot + 'betaxy_%d_length_%.2f/'%(betax,fraction_device_quad)
if not os.path.exists(subfolder_plot) and savefigure:
os.makedirs(subfolder_plot)
for n_slices in n_slices_vect:
ii = np.argmin(np.abs(dic['n_slices_vect']-n_slices))
if not math.isnan(tt[ii,jj,kk]):
if mode == 'auto':
wind_center = int(tt[ii,jj,kk])
elif mode == 'manual':
wind_center = turn_obs
else:
raise ValueError("I don't understand!?")
start = [wind_center + n_turns_window/2]
if int(tt[ii,jj,kk]) - n_turns_window/2 < 0:
window_min = 1
window = [np.s_[1:s] for s in start]
else:
window_min = wind_center - n_turns_window/2
window = [np.s_[s-n_turns_window:s] for s in start]
window_max = wind_center + n_turns_window/2
folder_curr_sim = '../simulations_PyPARIS/transverse_grid_%s_betaxy_%.0fm_length%.2f_slices_%d'%(PyPICmode_tag, betax,fraction_device_quad,n_slices)
sim_curr_list = ps.sort_properly(glob.glob(folder_curr_sim+'/slice_evolution_*.h5'))
print sim_curr_list[0]
try:
data = mfm.monitorh5list_to_obj(sim_curr_list, key='Slices', flag_transpose=True)
if flag_weighted:
bpm_x = data.mean_x * data.n_macroparticles_per_slice
bpm_y = data.mean_y * data.n_macroparticles_per_slice
else:
bpm_x = data.mean_x
bpm_y = data.mean_y
xfft = np.fft.rfft(bpm_x, axis=0)
yfft = np.fft.rfft(bpm_y, axis=0)
xfft = np.abs(xfft)**2 #Power
yfft = np.abs(yfft)**2 #Power
for wd in window:
print wd
n_slices, n_turns = data.mean_z.shape
zz = np.linspace(-2.5e-9*clight/2, 2.5e-9*clight/2, n_slices)
xx, yy = bpm_x, bpm_y
# Setting to plot the fft
xftt_to_plot = np.log10(xfft.T)
yftt_to_plot = np.log10(yfft.T)
minval_x = np.max([xftt_to_plot])-3
minval_y = np.max([yftt_to_plot])-3
xftt_to_plot[xftt_to_plot<minval_x] = minval_x
yftt_to_plot[yftt_to_plot<minval_y] = minval_y
YY_to_plot, XX_to_plot = xftt_to_plot.shape
XX_to_plot = np.arange(XX_to_plot)
YY_to_plot = np.arange(YY_to_plot)
fig, ((ax1, ax2)) = plt.subplots(1, 2, figsize=fig_size)
fig.patch.set_facecolor('w')
fig.subplots_adjust(left=0.05, right=0.95, wspace=0.3)
xmin, xmax = wd.start, wd.stop
col = plt.cm.rainbow_r(np.linspace(0, 1, xmax-xmin))
for i, t in enumerate(range(n_turns)[wd]):
ax1.plot(zz, smooth(bpm_x[:, t]), c=col[i], linewidth=line_width)
ax2.plot(zz, smooth(bpm_y[:, t]), c=col[i], linewidth=line_width)
ax1.set_xlabel('z [m]')
ax2.set_xlabel('z [m]')
ax1.set_title('Turns %.0f - %.0f'%(window_min, window_max))
ax2.set_title('Turns %.0f - %.0f'%(window_min, window_max))
if flag_weighted:
ax1.set_xlim(-2.5e-9*c/2, 2.5e-9*c/2)
ax2.set_xlim(-2.5e-9*c/2, 2.5e-9*c/2)
ax1.set_ylabel('Charge weighted\nhorizontal signal')
ax2.set_ylabel('Charge weighted\nvertical signal')
else:
ax1.set_xlim(-0.30, 0.30)
ax2.set_xlim(-0.30, 0.30)
#~ ax1.set_ylim(-.0001,.0001)
#~ ax2.set_ylim(-.0001,.0001)
ax1.set_ylabel('Horizontal signal')
ax2.set_ylabel('Vertical signal')
title = fig.suptitle('Beta_xy = %.0f Fraction Device = %.3f Slices = %.0f\n'%(betax, fraction_device_quad, n_slices))
if flag_weighted and savefigure:
plt.savefig(subfolder_plot + 'charge_weighted_betaxy_%d_length_%.2f_slices_%.0f.png'%(betax, fraction_device_quad, n_slices), dpi=300, bbox_inches='tight')
except IOError as goterror:
print 'Skipped. Got:', goterror
plt.show()
|
normal
|
{
"blob_id": "a4f56b1f93f62d80707367eaba0bba7ef4b2caca",
"index": 4749,
"step-1": "import scipy.io as sio\nimport glob\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport math\n\nimport os,sys\nBIN = os.path.expanduser(\"../tools/\")\nsys.path.append(BIN)\nimport myfilemanager as mfm\nimport mystyle as ms \nimport propsort as ps\n\nfrom functools import partial\nfrom scipy.ndimage import gaussian_filter1d\n\nfrom scipy.constants import c as clight\n\n\nplt.close('all')\n\n\n# Scan Parameters\nfraction_device_quad_vect = [0.07, 0.16, 0.26]\nn_slices_vect = np.array([250., 500., 750., 1000.])\nbetax_vect = [50, 100, 150, 200, 300, 400, 500, 600]\n\n# Simulations Parameters\nPyPICmode_tag = 'Tblocked'\n\n\n# If you want to save the figures with all the scan parameters choose: savefigures = True and mode = 'auto'\nsavefigure = True\nmode = 'auto'\n#~ # Comment this part if you want to save the plots. You can choose only some scan parameters\n#~ savefigure = False \n#~ fraction_device_quad_vect = [0.26]\n#~ n_slices_vect = np.array([1000.,])\n#~ betax_vect = [100]\n#~ mode = 'manual'\n#~ turn_obs = 350\n\nbetay_vect = betax_vect\nfolder_plot = 'intrabunch_modes/'\nif not os.path.exists(folder_plot) and savefigure:\n os.makedirs(folder_plot)\n\n\n# choice of the window of turns\n# import the dictionary elements\ndic = sio.loadmat('tt_complete.mat')\ntt = np.squeeze(dic['tt_first'])\n\nsmooth = partial(gaussian_filter1d, sigma=2, mode='nearest')\n\nn_turns_window = 20\nn_sigmaz_sim = 10. 
#we are simulating 10 long sigmas\ni_want_to_count_over = 4.\nflag_weighted = True\n\n\n#Figure parameters\nii_fig = 0\ntick_size = 20\naxis_font = {'fontname':'Arial', 'size':'24'}\nfig_size = (15, 5)\nline_width = 3.5\n\nms.mystyle_arial(16)\n\n\n# calculate intra-bunch modes\nfor fraction_device_quad in fraction_device_quad_vect:\n \n kk = np.argmin(np.abs(dic['fraction_device_quad_vect']-fraction_device_quad))\n for betax, betay in zip(betax_vect, betay_vect):\n jj = np.argmin(np.abs(dic['betax_vect']-betax)) \n subfolder_plot = folder_plot + 'betaxy_%d_length_%.2f/'%(betax,fraction_device_quad)\n if not os.path.exists(subfolder_plot) and savefigure:\n os.makedirs(subfolder_plot)\n \n for n_slices in n_slices_vect:\n ii = np.argmin(np.abs(dic['n_slices_vect']-n_slices)) \n if not math.isnan(tt[ii,jj,kk]):\n\n if mode == 'auto':\n wind_center = int(tt[ii,jj,kk])\n elif mode == 'manual':\n wind_center = turn_obs\n else:\n raise ValueError(\"I don't understand!?\")\n\n start = [wind_center + n_turns_window/2]\n \n if int(tt[ii,jj,kk]) - n_turns_window/2 < 0:\n window_min = 1\n window = [np.s_[1:s] for s in start]\n else:\n window_min = wind_center - n_turns_window/2\n window = [np.s_[s-n_turns_window:s] for s in start]\n \n window_max = wind_center + n_turns_window/2\n \n folder_curr_sim = '../simulations_PyPARIS/transverse_grid_%s_betaxy_%.0fm_length%.2f_slices_%d'%(PyPICmode_tag, betax,fraction_device_quad,n_slices) \n \n sim_curr_list = ps.sort_properly(glob.glob(folder_curr_sim+'/slice_evolution_*.h5'))\n\n \n print sim_curr_list[0]\n\n try:\n data = mfm.monitorh5list_to_obj(sim_curr_list, key='Slices', flag_transpose=True)\n\n if flag_weighted:\n bpm_x = data.mean_x * data.n_macroparticles_per_slice\n bpm_y = data.mean_y * data.n_macroparticles_per_slice\n else:\n bpm_x = data.mean_x \n bpm_y = data.mean_y \n\n xfft = np.fft.rfft(bpm_x, axis=0)\n yfft = np.fft.rfft(bpm_y, axis=0)\n xfft = np.abs(xfft)**2 #Power\n yfft = np.abs(yfft)**2 #Power\n\n\n for wd in 
window:\n print wd\n\n n_slices, n_turns = data.mean_z.shape\n zz = np.linspace(-2.5e-9*clight/2, 2.5e-9*clight/2, n_slices)\n xx, yy = bpm_x, bpm_y \n\n # Setting to plot the fft\n xftt_to_plot = np.log10(xfft.T)\n yftt_to_plot = np.log10(yfft.T)\n minval_x = np.max([xftt_to_plot])-3\n minval_y = np.max([yftt_to_plot])-3\n xftt_to_plot[xftt_to_plot<minval_x] = minval_x\n yftt_to_plot[yftt_to_plot<minval_y] = minval_y\n \n YY_to_plot, XX_to_plot = xftt_to_plot.shape\n XX_to_plot = np.arange(XX_to_plot)\n YY_to_plot = np.arange(YY_to_plot)\n\n fig, ((ax1, ax2)) = plt.subplots(1, 2, figsize=fig_size)\n fig.patch.set_facecolor('w')\n fig.subplots_adjust(left=0.05, right=0.95, wspace=0.3)\n \n xmin, xmax = wd.start, wd.stop\n col = plt.cm.rainbow_r(np.linspace(0, 1, xmax-xmin))\n for i, t in enumerate(range(n_turns)[wd]):\n ax1.plot(zz, smooth(bpm_x[:, t]), c=col[i], linewidth=line_width)\n ax2.plot(zz, smooth(bpm_y[:, t]), c=col[i], linewidth=line_width)\n\n\n ax1.set_xlabel('z [m]')\n ax2.set_xlabel('z [m]')\n\n ax1.set_title('Turns %.0f - %.0f'%(window_min, window_max))\n ax2.set_title('Turns %.0f - %.0f'%(window_min, window_max))\n\n if flag_weighted:\n ax1.set_xlim(-2.5e-9*c/2, 2.5e-9*c/2)\n ax2.set_xlim(-2.5e-9*c/2, 2.5e-9*c/2)\n ax1.set_ylabel('Charge weighted\\nhorizontal signal')\n ax2.set_ylabel('Charge weighted\\nvertical signal')\n\n else:\n ax1.set_xlim(-0.30, 0.30)\n ax2.set_xlim(-0.30, 0.30)\n #~ ax1.set_ylim(-.0001,.0001)\n #~ ax2.set_ylim(-.0001,.0001)\n ax1.set_ylabel('Horizontal signal')\n ax2.set_ylabel('Vertical signal')\n\n title = fig.suptitle('Beta_xy = %.0f Fraction Device = %.3f Slices = %.0f\\n'%(betax, fraction_device_quad, n_slices))\n\n if flag_weighted and savefigure:\n plt.savefig(subfolder_plot + 'charge_weighted_betaxy_%d_length_%.2f_slices_%.0f.png'%(betax, fraction_device_quad, n_slices), dpi=300, bbox_inches='tight')\n \n except IOError as goterror:\n print 'Skipped. Got:', goterror\n \nplt.show()\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
# Read two integers from the user and divide them.  The original bare
# ``except`` swallowed every exception (including KeyboardInterrupt and
# a ValueError from int() on non-numeric input) while always reporting
# division by zero; catch the two expected failures explicitly instead.
try:
    a = int(input('Enter a:'))
    b = int(input('Enter b:'))
    c = a / b
except ZeroDivisionError:
    print("Can't divide with zero")
except ValueError:
    # int() failed: the user typed something that is not an integer.
    print("Inputs must be whole numbers")
<|reserved_special_token_1|>
# Read two integers from the user and divide them.
# NOTE(review): the bare ``except`` also catches ValueError from int()
# (non-numeric input) and even KeyboardInterrupt, yet always reports a
# division-by-zero message -- consider catching ZeroDivisionError (and
# ValueError separately) instead.
try:
    a = int(input("Enter a:"))
    b = int(input("Enter b:"))
    c = a/b
except:
    print("Can't divide with zero")
|
flexible
|
{
"blob_id": "143f6ee38413a0713c18281e9737c09d9947a61a",
"index": 2805,
"step-1": "<mask token>\n",
"step-2": "try:\n a = int(input('Enter a:'))\n b = int(input('Enter b:'))\n c = a / b\nexcept:\n print(\"Can't divide with zero\")\n",
"step-3": "try:\n a = int(input(\"Enter a:\"))\n b = int(input(\"Enter b:\"))\n c = a/b\nexcept:\n print(\"Can't divide with zero\")",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from flask import Flask,render_template, redirect, url_for,request, jsonify, abort,request
from flask_sqlalchemy import SQLAlchemy
from src.flaskbasic import *
from src.flaskbasic.form import StudentForm
from src.flaskbasic.models import Student
import sys
import logging
# logging.basicConfig(filename='app.log', filemode='w', format='%(asctime)s - %(levelname)s - %(message)s',datefmt='%d-%b-%y %H:%M:%S')
# One logger per CRUD operation so log lines can be filtered by action.
# NOTE(review): the conventional choice is logging.getLogger(__name__);
# renaming these would change existing log output, so they are left as-is.
_logger_adding = logging.getLogger('Adding results')
_logger_getting = logging.getLogger('Get results')
_logger_update = logging.getLogger('Update results')
_logger_delete = logging.getLogger('Delete results')

# Superseded by src.flaskbasic.models.Student (imported above); kept only
# for reference.
# class Student(db.Model):
#     id = db.Column(db.Integer, primary_key=True)
#     name = db.Column(db.String(50), nullable= False)
#     physics = db.Column(db.Integer)
#     maths = db.Column(db.Integer)
#     chemistry = db.Column(db.Integer)
@application.route('/', methods=['GET','POST'])
def add_results():
    """Render the results entry form and persist a new Student on valid POST.

    GET (or an invalid submission) re-renders the form; a valid
    submission stores the record and redirects back here
    (Post/Redirect/Get pattern, so a refresh cannot double-submit).
    """
    form = StudentForm()
    _logger_adding.warning("Inside Add Results function")
    _logger_adding.warning("Student form waiting for Input")
    if form.validate_on_submit():
        _logger_adding.warning("When form is submitted with data")
        student = Student(name=form.name.data, physics=form.physics.data,
                          maths=form.maths.data, chemistry=form.chemistry.data)
        # Lazy %-style arguments: the message is only formatted if the
        # record is actually emitted (was an eager str.format call).
        _logger_adding.warning(
            "Student: %s , physics: %s , maths: %s, chemistry: %s",
            form.name.data, form.physics.data, form.maths.data,
            form.chemistry.data)
        db.session.add(student)
        _logger_adding.warning('student results was added to database')
        db.session.commit()
        _logger_adding.warning("database commit")
        return redirect(url_for("add_results"))
    else:
        return render_template('home.html', form=form)
@application.route('/results', methods=['GET','POST'])
def get_results():
    """Render the listing page with every stored student result."""
    _logger_getting.warning('retrieving all student results')
    students = Student.query.all()
    _logger_getting.warning('the students results have been collected for {}'.format(students))
    # The template expects the records under the name 'data'.
    return render_template('results.html', data=students)
@application.route('/edit_results/<int:student_id>', methods=['GET','POST'])
def edit_student(student_id):
    """Show the edit page for one student; aborts with 404 for unknown ids.

    The unused ``StudentForm()`` instantiation was removed: the template
    only receives the fetched record.
    """
    data = Student.query.get_or_404(student_id)
    return render_template('edit_results.html', data=data)
@application.route('/edit_results/<int:student_id>/update_results',methods=['GET','PUT','POST'])
def update_results(student_id):
    """Edit one student's marks.

    Valid POST: copy the form fields onto the record, commit, and
    redirect to the edit page.  GET: pre-fill the form from the stored
    record.  Either a GET or an invalid submission falls through to
    rendering the update page.
    """
    record = Student.query.get_or_404(student_id)
    form = StudentForm()

    if form.validate_on_submit():
        # Submitted and valid: persist the new marks.
        record.name = form.name.data
        record.physics = form.physics.data
        record.maths = form.maths.data
        record.chemistry = form.chemistry.data
        db.session.commit()
        return redirect(url_for('edit_student', student_id=record.id))

    if request.method == 'GET':
        # Pre-populate the form with the current values.
        form.name.data = record.name
        form.physics.data = record.physics
        form.maths.data = record.maths
        form.chemistry.data = record.chemistry

    return render_template('update_page.html', form=form)
@application.route("/edit_results/<int:student_id>/delete", methods=['GET'])
def delete_post(student_id):
if request.method == 'GET':
student_results = Student.query.get_or_404(student_id)
db.session.delete(student_results)
db.session.commit()
return redirect(url_for('get_results'))
# @application.route('/results/<int:indexId>/update_results', methods=['PUT'])
# def update_results(indexId):
# _logger_update.warning("Inside Update function")
# student = Student.query.filter_by(id = indexId).first()
# if not student:
# _logger_update.warning("No Students in database")
# return render_template('home.html',form=form)
# student.name = request.json['name']
# student.physics = request.json.get('physics', "")
# student.maths = request.json.get('maths', "")
# student.chemistry = request.json.get('chemistry', "")
# _logger_update.warning("The updated results are Student Name: {}, Physics: {}, Maths: {}, Chemistry: {}".format(student.name,student.physics,student.maths,student.chemistry))
# db.session.commit()
# return jsonify({'student':'Pass'})
@application.route('/results/<int:indexId>', methods=['DELETE'])
def delete_student(indexId):
    """Delete the student with the given id via the JSON API.

    Returns a JSON message in both the found and not-found cases; the
    payloads are unchanged so existing API clients keep working.
    """
    _logger_delete.warning("Inside Delete function")
    student = Student.query.filter_by(id=indexId).first()
    if not student:
        _logger_delete.warning("No Students in database")
        return jsonify({'message': 'No user found'})
    db.session.delete(student)
    # Lazy %-style argument: formatted only if the record is emitted
    # (was an eager str.format call).
    _logger_delete.warning("Deleted Student %s and commit to database", student)
    db.session.commit()
    return jsonify({'message': 'Student found and Deleted'})
|
normal
|
{
"blob_id": "18f9e55b62b30ce8c9d4a57cd9c159543a738770",
"index": 4709,
"step-1": "<mask token>\n\n\n@application.route('/results', methods=['GET', 'POST'])\ndef get_results():\n _logger_getting.warning('retrieving all student results')\n data = Student.query.all()\n _logger_getting.warning('the students results have been collected for {}'\n .format(data))\n return render_template('results.html', data=data)\n\n\n@application.route('/edit_results/<int:student_id>', methods=['GET', 'POST'])\ndef edit_student(student_id):\n form = StudentForm()\n data = Student.query.get_or_404(student_id)\n return render_template('edit_results.html', data=data)\n\n\n@application.route('/edit_results/<int:student_id>/update_results', methods\n =['GET', 'PUT', 'POST'])\ndef update_results(student_id):\n student_data = Student.query.get_or_404(student_id)\n form = StudentForm()\n if form.validate_on_submit():\n student_data.name = form.name.data\n student_data.physics = form.physics.data\n student_data.maths = form.maths.data\n student_data.chemistry = form.chemistry.data\n db.session.commit()\n return redirect(url_for('edit_student', student_id=student_data.id))\n elif request.method == 'GET':\n form.name.data = student_data.name\n form.physics.data = student_data.physics\n form.maths.data = student_data.maths\n form.chemistry.data = student_data.chemistry\n return render_template('update_page.html', form=form)\n\n\n<mask token>\n\n\n@application.route('/results/<int:indexId>', methods=['DELETE'])\ndef delete_student(indexId):\n _logger_delete.warning('Inside Delete function')\n student = Student.query.filter_by(id=indexId).first()\n if not student:\n _logger_delete.warning('No Students in database')\n return jsonify({'message': 'No user found'})\n db.session.delete(student)\n _logger_delete.warning('Deleted Student {} and commit to database'.\n format(student))\n db.session.commit()\n return jsonify({'message': 'Student found and Deleted'})\n",
"step-2": "<mask token>\n\n\n@application.route('/', methods=['GET', 'POST'])\ndef add_results():\n form = StudentForm()\n _logger_adding.warning('Inside Add Results function')\n _logger_adding.warning('Student form waiting for Input')\n if form.validate_on_submit():\n _logger_adding.warning('When form is submitted with data')\n student = Student(name=form.name.data, physics=form.physics.data,\n maths=form.maths.data, chemistry=form.chemistry.data)\n _logger_adding.warning(\n 'Student: {} , physics: {} , maths: {}, chemistry: {}'.format(\n form.name.data, form.physics.data, form.maths.data, form.\n chemistry.data))\n db.session.add(student)\n _logger_adding.warning('student results was added to database')\n db.session.commit()\n _logger_adding.warning('database commit')\n return redirect(url_for('add_results'))\n else:\n return render_template('home.html', form=form)\n\n\n@application.route('/results', methods=['GET', 'POST'])\ndef get_results():\n _logger_getting.warning('retrieving all student results')\n data = Student.query.all()\n _logger_getting.warning('the students results have been collected for {}'\n .format(data))\n return render_template('results.html', data=data)\n\n\n@application.route('/edit_results/<int:student_id>', methods=['GET', 'POST'])\ndef edit_student(student_id):\n form = StudentForm()\n data = Student.query.get_or_404(student_id)\n return render_template('edit_results.html', data=data)\n\n\n@application.route('/edit_results/<int:student_id>/update_results', methods\n =['GET', 'PUT', 'POST'])\ndef update_results(student_id):\n student_data = Student.query.get_or_404(student_id)\n form = StudentForm()\n if form.validate_on_submit():\n student_data.name = form.name.data\n student_data.physics = form.physics.data\n student_data.maths = form.maths.data\n student_data.chemistry = form.chemistry.data\n db.session.commit()\n return redirect(url_for('edit_student', student_id=student_data.id))\n elif request.method == 'GET':\n form.name.data = 
student_data.name\n form.physics.data = student_data.physics\n form.maths.data = student_data.maths\n form.chemistry.data = student_data.chemistry\n return render_template('update_page.html', form=form)\n\n\n@application.route('/edit_results/<int:student_id>/delete', methods=['GET'])\ndef delete_post(student_id):\n if request.method == 'GET':\n student_results = Student.query.get_or_404(student_id)\n db.session.delete(student_results)\n db.session.commit()\n return redirect(url_for('get_results'))\n\n\n@application.route('/results/<int:indexId>', methods=['DELETE'])\ndef delete_student(indexId):\n _logger_delete.warning('Inside Delete function')\n student = Student.query.filter_by(id=indexId).first()\n if not student:\n _logger_delete.warning('No Students in database')\n return jsonify({'message': 'No user found'})\n db.session.delete(student)\n _logger_delete.warning('Deleted Student {} and commit to database'.\n format(student))\n db.session.commit()\n return jsonify({'message': 'Student found and Deleted'})\n",
"step-3": "<mask token>\n_logger_adding = logging.getLogger('Adding results')\n_logger_getting = logging.getLogger('Get results')\n_logger_update = logging.getLogger('Update results')\n_logger_delete = logging.getLogger('Delete results')\n\n\n@application.route('/', methods=['GET', 'POST'])\ndef add_results():\n form = StudentForm()\n _logger_adding.warning('Inside Add Results function')\n _logger_adding.warning('Student form waiting for Input')\n if form.validate_on_submit():\n _logger_adding.warning('When form is submitted with data')\n student = Student(name=form.name.data, physics=form.physics.data,\n maths=form.maths.data, chemistry=form.chemistry.data)\n _logger_adding.warning(\n 'Student: {} , physics: {} , maths: {}, chemistry: {}'.format(\n form.name.data, form.physics.data, form.maths.data, form.\n chemistry.data))\n db.session.add(student)\n _logger_adding.warning('student results was added to database')\n db.session.commit()\n _logger_adding.warning('database commit')\n return redirect(url_for('add_results'))\n else:\n return render_template('home.html', form=form)\n\n\n@application.route('/results', methods=['GET', 'POST'])\ndef get_results():\n _logger_getting.warning('retrieving all student results')\n data = Student.query.all()\n _logger_getting.warning('the students results have been collected for {}'\n .format(data))\n return render_template('results.html', data=data)\n\n\n@application.route('/edit_results/<int:student_id>', methods=['GET', 'POST'])\ndef edit_student(student_id):\n form = StudentForm()\n data = Student.query.get_or_404(student_id)\n return render_template('edit_results.html', data=data)\n\n\n@application.route('/edit_results/<int:student_id>/update_results', methods\n =['GET', 'PUT', 'POST'])\ndef update_results(student_id):\n student_data = Student.query.get_or_404(student_id)\n form = StudentForm()\n if form.validate_on_submit():\n student_data.name = form.name.data\n student_data.physics = form.physics.data\n student_data.maths 
= form.maths.data\n student_data.chemistry = form.chemistry.data\n db.session.commit()\n return redirect(url_for('edit_student', student_id=student_data.id))\n elif request.method == 'GET':\n form.name.data = student_data.name\n form.physics.data = student_data.physics\n form.maths.data = student_data.maths\n form.chemistry.data = student_data.chemistry\n return render_template('update_page.html', form=form)\n\n\n@application.route('/edit_results/<int:student_id>/delete', methods=['GET'])\ndef delete_post(student_id):\n if request.method == 'GET':\n student_results = Student.query.get_or_404(student_id)\n db.session.delete(student_results)\n db.session.commit()\n return redirect(url_for('get_results'))\n\n\n@application.route('/results/<int:indexId>', methods=['DELETE'])\ndef delete_student(indexId):\n _logger_delete.warning('Inside Delete function')\n student = Student.query.filter_by(id=indexId).first()\n if not student:\n _logger_delete.warning('No Students in database')\n return jsonify({'message': 'No user found'})\n db.session.delete(student)\n _logger_delete.warning('Deleted Student {} and commit to database'.\n format(student))\n db.session.commit()\n return jsonify({'message': 'Student found and Deleted'})\n",
"step-4": "from flask import Flask, render_template, redirect, url_for, request, jsonify, abort, request\nfrom flask_sqlalchemy import SQLAlchemy\nfrom src.flaskbasic import *\nfrom src.flaskbasic.form import StudentForm\nfrom src.flaskbasic.models import Student\nimport sys\nimport logging\n_logger_adding = logging.getLogger('Adding results')\n_logger_getting = logging.getLogger('Get results')\n_logger_update = logging.getLogger('Update results')\n_logger_delete = logging.getLogger('Delete results')\n\n\n@application.route('/', methods=['GET', 'POST'])\ndef add_results():\n form = StudentForm()\n _logger_adding.warning('Inside Add Results function')\n _logger_adding.warning('Student form waiting for Input')\n if form.validate_on_submit():\n _logger_adding.warning('When form is submitted with data')\n student = Student(name=form.name.data, physics=form.physics.data,\n maths=form.maths.data, chemistry=form.chemistry.data)\n _logger_adding.warning(\n 'Student: {} , physics: {} , maths: {}, chemistry: {}'.format(\n form.name.data, form.physics.data, form.maths.data, form.\n chemistry.data))\n db.session.add(student)\n _logger_adding.warning('student results was added to database')\n db.session.commit()\n _logger_adding.warning('database commit')\n return redirect(url_for('add_results'))\n else:\n return render_template('home.html', form=form)\n\n\n@application.route('/results', methods=['GET', 'POST'])\ndef get_results():\n _logger_getting.warning('retrieving all student results')\n data = Student.query.all()\n _logger_getting.warning('the students results have been collected for {}'\n .format(data))\n return render_template('results.html', data=data)\n\n\n@application.route('/edit_results/<int:student_id>', methods=['GET', 'POST'])\ndef edit_student(student_id):\n form = StudentForm()\n data = Student.query.get_or_404(student_id)\n return render_template('edit_results.html', data=data)\n\n\n@application.route('/edit_results/<int:student_id>/update_results', methods\n 
=['GET', 'PUT', 'POST'])\ndef update_results(student_id):\n student_data = Student.query.get_or_404(student_id)\n form = StudentForm()\n if form.validate_on_submit():\n student_data.name = form.name.data\n student_data.physics = form.physics.data\n student_data.maths = form.maths.data\n student_data.chemistry = form.chemistry.data\n db.session.commit()\n return redirect(url_for('edit_student', student_id=student_data.id))\n elif request.method == 'GET':\n form.name.data = student_data.name\n form.physics.data = student_data.physics\n form.maths.data = student_data.maths\n form.chemistry.data = student_data.chemistry\n return render_template('update_page.html', form=form)\n\n\n@application.route('/edit_results/<int:student_id>/delete', methods=['GET'])\ndef delete_post(student_id):\n if request.method == 'GET':\n student_results = Student.query.get_or_404(student_id)\n db.session.delete(student_results)\n db.session.commit()\n return redirect(url_for('get_results'))\n\n\n@application.route('/results/<int:indexId>', methods=['DELETE'])\ndef delete_student(indexId):\n _logger_delete.warning('Inside Delete function')\n student = Student.query.filter_by(id=indexId).first()\n if not student:\n _logger_delete.warning('No Students in database')\n return jsonify({'message': 'No user found'})\n db.session.delete(student)\n _logger_delete.warning('Deleted Student {} and commit to database'.\n format(student))\n db.session.commit()\n return jsonify({'message': 'Student found and Deleted'})\n",
"step-5": "from flask import Flask,render_template, redirect, url_for,request, jsonify, abort,request\r\nfrom flask_sqlalchemy import SQLAlchemy\r\nfrom src.flaskbasic import *\r\nfrom src.flaskbasic.form import StudentForm\r\nfrom src.flaskbasic.models import Student\r\nimport sys\r\nimport logging\r\n\r\n# logging.basicConfig(filename='app.log', filemode='w', format='%(asctime)s - %(levelname)s - %(message)s',datefmt='%d-%b-%y %H:%M:%S')\r\n_logger_adding = logging.getLogger('Adding results')\r\n_logger_getting = logging.getLogger('Get results')\r\n_logger_update = logging.getLogger('Update results')\r\n_logger_delete = logging.getLogger('Delete results')\r\n\r\n# class Student(db.Model):\r\n# id = db.Column(db.Integer, primary_key=True)\r\n# name = db.Column(db.String(50), nullable= False)\r\n# physics = db.Column(db.Integer)\r\n# maths = db.Column(db.Integer)\r\n# chemistry = db.Column(db.Integer)\r\n\r\n@application.route('/', methods=['GET','POST'])\r\ndef add_results():\r\n form = StudentForm()\r\n _logger_adding.warning(\"Inside Add Results function\")\r\n _logger_adding.warning(\"Student form waiting for Input\")\r\n if form.validate_on_submit():\r\n _logger_adding.warning(\"When form is submitted with data\")\r\n student = Student(name=form.name.data, physics=form.physics.data, maths=form.maths.data,chemistry=form.chemistry.data,)\r\n _logger_adding.warning(\"Student: {} , physics: {} , maths: {}, chemistry: {}\".format(form.name.data,form.physics.data,form.maths.data,form.chemistry.data))\r\n db.session.add(student)\r\n _logger_adding.warning('student results was added to database')\r\n db.session.commit()\r\n _logger_adding.warning(\"database commit\")\r\n return redirect(url_for(\"add_results\"))\r\n else:\r\n return render_template('home.html', form=form)\r\n\r\n@application.route('/results', methods=['GET','POST'])\r\ndef get_results():\r\n _logger_getting.warning('retrieving all student results')\r\n data = Student.query.all()\r\n 
_logger_getting.warning('the students results have been collected for {}'.format(data))\r\n return render_template('results.html', data = data)\r\n\r\n@application.route('/edit_results/<int:student_id>', methods=['GET','POST'])\r\ndef edit_student(student_id):\r\n form = StudentForm()\r\n data = Student.query.get_or_404(student_id)\r\n return render_template('edit_results.html',data=data)\r\n\r\n@application.route('/edit_results/<int:student_id>/update_results',methods=['GET','PUT','POST'])\r\ndef update_results(student_id):\r\n student_data = Student.query.get_or_404(student_id)\r\n form = StudentForm()\r\n if form.validate_on_submit():\r\n student_data.name = form.name.data\r\n student_data.physics = form.physics.data\r\n student_data.maths = form.maths.data\r\n student_data.chemistry = form.chemistry.data\r\n db.session.commit()\r\n return redirect(url_for('edit_student', student_id=student_data.id))\r\n elif request.method == 'GET':\r\n form.name.data = student_data.name\r\n form.physics.data = student_data.physics\r\n form.maths.data = student_data.maths\r\n form.chemistry.data = student_data.chemistry\r\n # return render_template('edit_results.html', student_data=student_data)\r\n return render_template('update_page.html',form=form)\r\n\r\n@application.route(\"/edit_results/<int:student_id>/delete\", methods=['GET'])\r\ndef delete_post(student_id):\r\n if request.method == 'GET':\r\n student_results = Student.query.get_or_404(student_id)\r\n db.session.delete(student_results)\r\n db.session.commit()\r\n return redirect(url_for('get_results'))\r\n\r\n# @application.route('/results/<int:indexId>/update_results', methods=['PUT'])\r\n# def update_results(indexId):\r\n# _logger_update.warning(\"Inside Update function\")\r\n# student = Student.query.filter_by(id = indexId).first()\r\n\r\n# if not student:\r\n# _logger_update.warning(\"No Students in database\")\r\n# return render_template('home.html',form=form)\r\n\r\n# student.name = request.json['name']\r\n# 
student.physics = request.json.get('physics', \"\")\r\n# student.maths = request.json.get('maths', \"\")\r\n# student.chemistry = request.json.get('chemistry', \"\")\r\n# _logger_update.warning(\"The updated results are Student Name: {}, Physics: {}, Maths: {}, Chemistry: {}\".format(student.name,student.physics,student.maths,student.chemistry)) \r\n# db.session.commit()\r\n \r\n# return jsonify({'student':'Pass'})\r\n\r\n@application.route('/results/<int:indexId>', methods=['DELETE'])\r\ndef delete_student(indexId):\r\n _logger_delete.warning(\"Inside Delete function\")\r\n student = Student.query.filter_by(id = indexId).first()\r\n\r\n if not student:\r\n _logger_delete.warning(\"No Students in database\")\r\n return jsonify({'message':'No user found'})\r\n\r\n db.session.delete(student)\r\n _logger_delete.warning(\"Deleted Student {} and commit to database\".format(student))\r\n db.session.commit()\r\n\r\n return jsonify({'message':'Student found and Deleted'})\r\n\r\n\r\n\r\n\r\n\r\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
import subprocess
from collections import namedtuple
from os.path import basename, splitext
def hdfs_get_filelist(blob_path, delimiter="_"):
    """Lists hdfs dir and returns named tuples with information of file based on its filename.

    Returns a ``(kpis, count)`` pair.  Each KPI is a namedtuple with fields
    filepath/filename/kpi_name/initial_date/final_date/key/extension parsed
    from the file name by right-splitting on *delimiter* (3 splits for
    ``.json`` files, 1 split otherwise).

    NOTE(review): filenames with fewer than the expected number of delimiter
    segments raise IndexError -- confirm the naming convention upstream.
    """

    def hdfs_listdir(path):
        # ``universal_newlines=True`` makes stdout a str: the original read
        # raw bytes and then crashed on ``rstrip("\n")`` under Python 3.
        # ``communicate()`` drains the pipe before the process is reaped,
        # avoiding the deadlock that ``wait()`` + a full pipe buffer causes.
        proc = subprocess.Popen(
            'hdfs dfs -ls ' + path,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
        )
        out, _ = proc.communicate()
        # Keep the last whitespace-separated token of each non-blank line
        # (the file path column of ``hdfs dfs -ls``).
        files = [line.split()[-1] for line in out.splitlines() if line.strip()]
        if len(files) > 0:
            files.pop(0)  # remove summary from ls: "found n items".
        return files, len(files)

    files, qty_files = hdfs_listdir(blob_path)
    kpis = []
    # If there are items in dir.
    if qty_files > 0:
        KPI = namedtuple('KPI', ["filepath", "filename", "kpi_name",
                                 "initial_date", "final_date", "key", "extension"])
        for file in files:
            filename, ext = basename(file), splitext(basename(file))[1]
            if ext == ".json":
                # name_initial_final_key.json -> four segments.
                parts = filename.rsplit(delimiter, 3)
                kpi = KPI(filepath=file,
                          filename=filename,
                          kpi_name=parts[0],
                          initial_date=parts[1],
                          final_date=parts[2],
                          key=splitext(parts[3])[0],
                          extension=ext)
            else:  # non-json files carry no date range.
                parts = filename.rsplit(delimiter, 1)
                kpi = KPI(filepath=file,
                          filename=filename,
                          kpi_name=parts[0],
                          initial_date=None,
                          final_date=None,
                          key=splitext(parts[1])[0],
                          extension=ext)
            kpis.append(kpi)
    return kpis, len(kpis)
kpis, files = hdfs_get_filelist("wasbs://hdiprojsupplydatalake-2018-07-12t15-58-09-078z@hdiprojsupplydatalake.blob.core.windows.net/estrutura_final/")
|
normal
|
{
"blob_id": "6909e70db4f907e26ad604f95c79a405010907bd",
"index": 2086,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef hdfs_get_filelist(blob_path, delimiter='_'):\n \"\"\" Lists hdfs dir and returns named tuples with information of file based on its filename. \"\"\"\n\n def hdfs_listdir(blob_path):\n command = 'hdfs dfs -ls ' + blob_path\n p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n p.wait()\n files = [item.rstrip('\\n').split()[-1] for item in p.stdout.readlines()\n ]\n if len(files) > 0:\n files.pop(0)\n qty_files = len(files)\n return files, qty_files\n files, qty_files = hdfs_listdir(blob_path)\n kpis = []\n if qty_files > 0:\n KPI = namedtuple('KPI', ['filepath', 'filename', 'kpi_name',\n 'initial_date', 'final_date', 'key', 'extension'])\n for file in files:\n filename, ext = basename(file), splitext(basename(file))[1]\n if ext == '.json':\n splits = 3\n kpi = KPI(filepath=file, filename=filename, kpi_name=\n filename.rsplit(delimiter, splits)[0], initial_date=\n filename.rsplit(delimiter, splits)[1], final_date=\n filename.rsplit(delimiter, splits)[2], key=splitext(\n filename.rsplit(delimiter, splits)[3])[0], extension=ext)\n else:\n splits = 1\n kpi = KPI(filepath=file, filename=filename, kpi_name=\n filename.rsplit(delimiter, splits)[0], initial_date=\n None, final_date=None, key=splitext(filename.rsplit(\n delimiter, splits)[1])[0], extension=ext)\n kpis.append(kpi)\n return kpis, len(kpis)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef hdfs_get_filelist(blob_path, delimiter='_'):\n \"\"\" Lists hdfs dir and returns named tuples with information of file based on its filename. \"\"\"\n\n def hdfs_listdir(blob_path):\n command = 'hdfs dfs -ls ' + blob_path\n p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n p.wait()\n files = [item.rstrip('\\n').split()[-1] for item in p.stdout.readlines()\n ]\n if len(files) > 0:\n files.pop(0)\n qty_files = len(files)\n return files, qty_files\n files, qty_files = hdfs_listdir(blob_path)\n kpis = []\n if qty_files > 0:\n KPI = namedtuple('KPI', ['filepath', 'filename', 'kpi_name',\n 'initial_date', 'final_date', 'key', 'extension'])\n for file in files:\n filename, ext = basename(file), splitext(basename(file))[1]\n if ext == '.json':\n splits = 3\n kpi = KPI(filepath=file, filename=filename, kpi_name=\n filename.rsplit(delimiter, splits)[0], initial_date=\n filename.rsplit(delimiter, splits)[1], final_date=\n filename.rsplit(delimiter, splits)[2], key=splitext(\n filename.rsplit(delimiter, splits)[3])[0], extension=ext)\n else:\n splits = 1\n kpi = KPI(filepath=file, filename=filename, kpi_name=\n filename.rsplit(delimiter, splits)[0], initial_date=\n None, final_date=None, key=splitext(filename.rsplit(\n delimiter, splits)[1])[0], extension=ext)\n kpis.append(kpi)\n return kpis, len(kpis)\n\n\nkpis, files = hdfs_get_filelist(\n 'wasbs://hdiprojsupplydatalake-2018-07-12t15-58-09-078z@hdiprojsupplydatalake.blob.core.windows.net/estrutura_final/'\n )\n",
"step-4": "import subprocess\nfrom collections import namedtuple\nfrom os.path import basename, splitext\n\n\ndef hdfs_get_filelist(blob_path, delimiter='_'):\n \"\"\" Lists hdfs dir and returns named tuples with information of file based on its filename. \"\"\"\n\n def hdfs_listdir(blob_path):\n command = 'hdfs dfs -ls ' + blob_path\n p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n p.wait()\n files = [item.rstrip('\\n').split()[-1] for item in p.stdout.readlines()\n ]\n if len(files) > 0:\n files.pop(0)\n qty_files = len(files)\n return files, qty_files\n files, qty_files = hdfs_listdir(blob_path)\n kpis = []\n if qty_files > 0:\n KPI = namedtuple('KPI', ['filepath', 'filename', 'kpi_name',\n 'initial_date', 'final_date', 'key', 'extension'])\n for file in files:\n filename, ext = basename(file), splitext(basename(file))[1]\n if ext == '.json':\n splits = 3\n kpi = KPI(filepath=file, filename=filename, kpi_name=\n filename.rsplit(delimiter, splits)[0], initial_date=\n filename.rsplit(delimiter, splits)[1], final_date=\n filename.rsplit(delimiter, splits)[2], key=splitext(\n filename.rsplit(delimiter, splits)[3])[0], extension=ext)\n else:\n splits = 1\n kpi = KPI(filepath=file, filename=filename, kpi_name=\n filename.rsplit(delimiter, splits)[0], initial_date=\n None, final_date=None, key=splitext(filename.rsplit(\n delimiter, splits)[1])[0], extension=ext)\n kpis.append(kpi)\n return kpis, len(kpis)\n\n\nkpis, files = hdfs_get_filelist(\n 'wasbs://hdiprojsupplydatalake-2018-07-12t15-58-09-078z@hdiprojsupplydatalake.blob.core.windows.net/estrutura_final/'\n )\n",
"step-5": "import subprocess\nfrom collections import namedtuple\nfrom os.path import basename, splitext\n\n\ndef hdfs_get_filelist(blob_path, delimiter=\"_\"):\n \"\"\" Lists hdfs dir and returns named tuples with information of file based on its filename. \"\"\"\n\n def hdfs_listdir(blob_path):\n command = 'hdfs dfs -ls ' + blob_path\n p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n p.wait()\n files = [item.rstrip(\"\\n\").split()[-1] for item in p.stdout.readlines()]\n if len(files) > 0:\n files.pop(0) # remove summary from ls: \"found n items\".\n qty_files = len(files)\n return files, qty_files\n\n files, qty_files = hdfs_listdir(blob_path)\n kpis = []\n # If there are items in dir.\n if qty_files > 0:\n KPI = namedtuple('KPI', [\"filepath\", \"filename\", \"kpi_name\", \"initial_date\", \"final_date\", \"key\", \"extension\"])\n for file in files:\n filename, ext = basename(file), splitext(basename(file))[1]\n if ext == \".json\":\n splits = 3\n kpi = KPI(\n filepath=file\n , filename=filename\n , kpi_name=filename.rsplit(delimiter, splits)[0]\n , initial_date=filename.rsplit(delimiter, splits)[1]\n , final_date=filename.rsplit(delimiter, splits)[2]\n , key=splitext(filename.rsplit(delimiter, splits)[3])[0]\n , extension=ext\n )\n else: # ext != \".json\":\n splits = 1\n kpi = KPI(\n filepath=file\n , filename=filename\n , kpi_name=filename.rsplit(delimiter, splits)[0]\n , initial_date=None\n , final_date=None\n , key=splitext(filename.rsplit(delimiter, splits)[1])[0]\n , extension=ext\n )\n kpis.append(kpi)\n return kpis, len(kpis)\n\n\nkpis, files = hdfs_get_filelist(\"wasbs://hdiprojsupplydatalake-2018-07-12t15-58-09-078z@hdiprojsupplydatalake.blob.core.windows.net/estrutura_final/\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.urls import path
from . import views
# URL namespace: reverse these routes as 'adverts:list', 'adverts:adverts-create', ...
app_name = 'adverts'
urlpatterns = [
    # Declaration order is load-bearing: 'create/' must precede the
    # '<str:category>/' catch-all, or it would be matched as a category name.
    path('', views.AdvertListView.as_view(), name="list"),
    path('create/', views.AdvertFormView.as_view(), name='adverts-create'),
    path('<str:category>/', views.AdvertListView.as_view(), name="adverts-list-categories"),
]
|
normal
|
{
"blob_id": "8c1718f56a73fdd962154abfaedc7c0c3cb0d9ba",
"index": 6626,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'adverts'\nurlpatterns = [path('', views.AdvertListView.as_view(), name='list'), path(\n 'create/', views.AdvertFormView.as_view(), name='adverts-create'), path\n ('<str:category>/', views.AdvertListView.as_view(), name=\n 'adverts-list-categories')]\n",
"step-3": "from django.urls import path\nfrom . import views\napp_name = 'adverts'\nurlpatterns = [path('', views.AdvertListView.as_view(), name='list'), path(\n 'create/', views.AdvertFormView.as_view(), name='adverts-create'), path\n ('<str:category>/', views.AdvertListView.as_view(), name=\n 'adverts-list-categories')]\n",
"step-4": "from django.urls import path\n\nfrom . import views\n\napp_name = 'adverts'\n\nurlpatterns = [\n path('', views.AdvertListView.as_view(), name=\"list\"),\n path('create/', views.AdvertFormView.as_view(), name='adverts-create'),\n path('<str:category>/', views.AdvertListView.as_view(), name=\"adverts-list-categories\"),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import unittest
from unflatten import _path_tuples_with_values_to_dict_tree, dot_colon_join, dot_colon_split
from unflatten import _recognize_lists
from unflatten import _tree_to_path_tuples_with_values
from unflatten import brackets_join
from unflatten import flatten
from unflatten import unflatten
class BracketsReduceTestCase(unittest.TestCase):
    """brackets_join renders a key path as ``root[seg][seg]...``."""

    def test_one_element(self):
        # A lone segment needs no brackets.
        result = brackets_join(['aa'])
        self.assertEqual(result, 'aa')

    def test_simple(self):
        # Every segment after the first is wrapped in square brackets.
        result = brackets_join(['aa', 1, 'bb', 2])
        self.assertEqual(result, 'aa[1][bb][2]')
class TreeToPathTuplesWithValuesTestCase(unittest.TestCase):
    """_tree_to_path_tuples_with_values flattens a tree into (path, value) pairs."""

    def test_simple(self):
        tree = {'a': ['b', {'e': 1}]}
        expected = [(('a', 0), 'b'), (('a', 1, 'e'), 1)]
        self.assertSequenceEqual(
            list(_tree_to_path_tuples_with_values(tree)), expected)
class PathTuplesWithValuesToDictTreeTestCase(unittest.TestCase):
    """_path_tuples_with_values_to_dict_tree rebuilds nested dicts from (path, value) pairs."""

    def test_simple(self):
        pairs = [(('a', 0), 'b'), (('a', 1, 'e'), 1)]
        expected = {'a': {0: 'b', 1: {'e': 1}}}
        self.assertDictEqual(
            _path_tuples_with_values_to_dict_tree(pairs), expected)
class RecognizeListsTestCase(unittest.TestCase):
    """_recognize_lists turns dicts keyed 0..n-1 back into real lists."""

    def test_simple(self):
        mapping = {0: 'a', 1: {'b': -1, 'c': {0: 'd', 1: -2}}}
        expected = ['a', {'b': -1, 'c': ['d', -2]}]
        self.assertListEqual(_recognize_lists(mapping), expected)

    def test_again(self):
        # unflatten applies the same list recognition recursively.
        nested_dicts = {'a': 1,
                        'b': {0: 'c', 1: {0: 'd', 1: {'e': {'f': -1, 'g': 'h'}}}}}
        expected = {'a': 1,
                    'b': ['c', ['d', {'e': {'f': -1, 'g': 'h'}}]]}
        self.assertDictEqual(unflatten(nested_dicts), expected)
class FlattenTestCase(unittest.TestCase):
    """flatten collapses nested dicts/lists into single-level key paths."""

    def test_simple(self):
        nested = {'a': 1, 'b': ['c', ['d', {'e': {'f': -1, 'g': 'h'}}]]}
        flat = {'a': 1,
                'b[0]': 'c',
                'b[1][0]': 'd',
                'b[1][1][e][f]': -1,
                'b[1][1][e][g]': 'h'}
        self.assertDictEqual(flatten(nested), flat)

    def test_dot_colon(self):
        # Alternative joiner: ':' before list indices, '.' before dict keys.
        nested = {'a': 1, 'b': ['c', ['d', {'e': {'f': -1, 'g': 'h'}}]]}
        flat = {'a': 1,
                'b:0': 'c',
                'b:1:0': 'd',
                'b:1:1.e.f': -1,
                'b:1:1.e.g': 'h'}
        self.assertDictEqual(flatten(nested, join=dot_colon_join), flat)
class UnflattenTestCase(unittest.TestCase):
    """unflatten rebuilds the nested structure from single-level key paths."""

    def test_simple(self):
        flat = {'a': 1,
                'b[0]': 'c',
                'b[1][0]': 'd',
                'b[1][1][e][f]': -1,
                'b[1][1][e][g]': 'h'}
        nested = {'a': 1, 'b': ['c', ['d', {'e': {'f': -1, 'g': 'h'}}]]}
        self.assertDictEqual(unflatten(flat), nested)

    def test_dot_colon(self):
        # Alternative splitter matching dot_colon_join's output format.
        flat = {'a': 1,
                'b:0': 'c',
                'b:1:0': 'd',
                'b:1:1.e.f': -1,
                'b:1:1.e.g': 'h'}
        nested = {'a': 1, 'b': ['c', ['d', {'e': {'f': -1, 'g': 'h'}}]]}
        self.assertDictEqual(unflatten(flat, split=dot_colon_split), nested)
class DotColonJoinTestCase(unittest.TestCase):
    """dot_colon_join formats path tuples: ':' before ints, '.' before keys."""

    def test_simple(self):
        # Table-driven: (path tuple, expected joined string).
        cases = [
            (('a',), 'a'),
            (('b', 0), 'b:0'),
            (('b', 1), 'b:1'),
            (('b', 2, 'e'), 'b:2.e'),
            (('b', 2, 'f'), 'b:2.f'),
        ]
        for path, joined in cases:
            self.assertSequenceEqual(dot_colon_join(path), joined)
class DotColonSplitTestCase(unittest.TestCase):
    """dot_colon_split parses the joined form back into a path tuple."""

    def test_simple(self):
        # Table-driven: (joined string, expected path tuple).
        cases = [
            ('a', ('a',)),
            ('b:0', ('b', 0)),
            ('b:1', ('b', 1)),
            ('b:2.e', ('b', 2, 'e')),
            ('b:2.f', ('b', 2, 'f')),
        ]
        for joined, path in cases:
            self.assertTupleEqual(dot_colon_split(joined), path)
|
normal
|
{
"blob_id": "5119b1b6817e002c870b4d6a19fe9aee661fff7e",
"index": 8425,
"step-1": "<mask token>\n\n\nclass RecognizeListsTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertListEqual(_recognize_lists({(0): 'a', (1): {'b': -1, 'c':\n {(0): 'd', (1): -2}}}), ['a', {'b': -1, 'c': ['d', -2]}])\n\n def test_again(self):\n self.assertDictEqual(unflatten({'a': 1, 'b': {(0): 'c', (1): {(0):\n 'd', (1): {'e': {'f': -1, 'g': 'h'}}}}}), {'a': 1, 'b': ['c', [\n 'd', {'e': {'f': -1, 'g': 'h'}}]]})\n\n\nclass FlattenTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertDictEqual(flatten({'a': 1, 'b': ['c', ['d', {'e': {'f': \n -1, 'g': 'h'}}]]}), {'a': 1, 'b[0]': 'c', 'b[1][0]': 'd',\n 'b[1][1][e][f]': -1, 'b[1][1][e][g]': 'h'})\n\n def test_dot_colon(self):\n self.assertDictEqual(flatten({'a': 1, 'b': ['c', ['d', {'e': {'f': \n -1, 'g': 'h'}}]]}, join=dot_colon_join), {'a': 1, 'b:0': 'c',\n 'b:1:0': 'd', 'b:1:1.e.f': -1, 'b:1:1.e.g': 'h'})\n\n\nclass UnflattenTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertDictEqual(unflatten({'a': 1, 'b[0]': 'c', 'b[1][0]': 'd',\n 'b[1][1][e][f]': -1, 'b[1][1][e][g]': 'h'}), {'a': 1, 'b': ['c',\n ['d', {'e': {'f': -1, 'g': 'h'}}]]})\n\n def test_dot_colon(self):\n self.assertDictEqual(unflatten({'a': 1, 'b:0': 'c', 'b:1:0': 'd',\n 'b:1:1.e.f': -1, 'b:1:1.e.g': 'h'}, split=dot_colon_split), {\n 'a': 1, 'b': ['c', ['d', {'e': {'f': -1, 'g': 'h'}}]]})\n\n\nclass DotColonJoinTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertSequenceEqual(dot_colon_join(('a',)), 'a')\n self.assertSequenceEqual(dot_colon_join(('b', 0)), 'b:0')\n self.assertSequenceEqual(dot_colon_join(('b', 1)), 'b:1')\n self.assertSequenceEqual(dot_colon_join(('b', 2, 'e')), 'b:2.e')\n self.assertSequenceEqual(dot_colon_join(('b', 2, 'f')), 'b:2.f')\n\n\nclass DotColonSplitTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertTupleEqual(dot_colon_split('a'), ('a',))\n self.assertTupleEqual(dot_colon_split('b:0'), ('b', 0))\n 
self.assertTupleEqual(dot_colon_split('b:1'), ('b', 1))\n self.assertTupleEqual(dot_colon_split('b:2.e'), ('b', 2, 'e'))\n self.assertTupleEqual(dot_colon_split('b:2.f'), ('b', 2, 'f'))\n",
"step-2": "<mask token>\n\n\nclass TreeToPathTuplesWithValuesTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertSequenceEqual(list(_tree_to_path_tuples_with_values({'a':\n ['b', {'e': 1}]})), [(('a', 0), 'b'), (('a', 1, 'e'), 1)])\n\n\nclass PathTuplesWithValuesToDictTreeTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertDictEqual(_path_tuples_with_values_to_dict_tree([(('a', \n 0), 'b'), (('a', 1, 'e'), 1)]), {'a': {(0): 'b', (1): {'e': 1}}})\n\n\nclass RecognizeListsTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertListEqual(_recognize_lists({(0): 'a', (1): {'b': -1, 'c':\n {(0): 'd', (1): -2}}}), ['a', {'b': -1, 'c': ['d', -2]}])\n\n def test_again(self):\n self.assertDictEqual(unflatten({'a': 1, 'b': {(0): 'c', (1): {(0):\n 'd', (1): {'e': {'f': -1, 'g': 'h'}}}}}), {'a': 1, 'b': ['c', [\n 'd', {'e': {'f': -1, 'g': 'h'}}]]})\n\n\nclass FlattenTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertDictEqual(flatten({'a': 1, 'b': ['c', ['d', {'e': {'f': \n -1, 'g': 'h'}}]]}), {'a': 1, 'b[0]': 'c', 'b[1][0]': 'd',\n 'b[1][1][e][f]': -1, 'b[1][1][e][g]': 'h'})\n\n def test_dot_colon(self):\n self.assertDictEqual(flatten({'a': 1, 'b': ['c', ['d', {'e': {'f': \n -1, 'g': 'h'}}]]}, join=dot_colon_join), {'a': 1, 'b:0': 'c',\n 'b:1:0': 'd', 'b:1:1.e.f': -1, 'b:1:1.e.g': 'h'})\n\n\nclass UnflattenTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertDictEqual(unflatten({'a': 1, 'b[0]': 'c', 'b[1][0]': 'd',\n 'b[1][1][e][f]': -1, 'b[1][1][e][g]': 'h'}), {'a': 1, 'b': ['c',\n ['d', {'e': {'f': -1, 'g': 'h'}}]]})\n\n def test_dot_colon(self):\n self.assertDictEqual(unflatten({'a': 1, 'b:0': 'c', 'b:1:0': 'd',\n 'b:1:1.e.f': -1, 'b:1:1.e.g': 'h'}, split=dot_colon_split), {\n 'a': 1, 'b': ['c', ['d', {'e': {'f': -1, 'g': 'h'}}]]})\n\n\nclass DotColonJoinTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertSequenceEqual(dot_colon_join(('a',)), 'a')\n 
self.assertSequenceEqual(dot_colon_join(('b', 0)), 'b:0')\n self.assertSequenceEqual(dot_colon_join(('b', 1)), 'b:1')\n self.assertSequenceEqual(dot_colon_join(('b', 2, 'e')), 'b:2.e')\n self.assertSequenceEqual(dot_colon_join(('b', 2, 'f')), 'b:2.f')\n\n\nclass DotColonSplitTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertTupleEqual(dot_colon_split('a'), ('a',))\n self.assertTupleEqual(dot_colon_split('b:0'), ('b', 0))\n self.assertTupleEqual(dot_colon_split('b:1'), ('b', 1))\n self.assertTupleEqual(dot_colon_split('b:2.e'), ('b', 2, 'e'))\n self.assertTupleEqual(dot_colon_split('b:2.f'), ('b', 2, 'f'))\n",
"step-3": "<mask token>\n\n\nclass BracketsReduceTestCase(unittest.TestCase):\n\n def test_one_element(self):\n self.assertEqual(brackets_join(['aa']), 'aa')\n\n def test_simple(self):\n self.assertEqual(brackets_join(['aa', 1, 'bb', 2]), 'aa[1][bb][2]')\n\n\nclass TreeToPathTuplesWithValuesTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertSequenceEqual(list(_tree_to_path_tuples_with_values({'a':\n ['b', {'e': 1}]})), [(('a', 0), 'b'), (('a', 1, 'e'), 1)])\n\n\nclass PathTuplesWithValuesToDictTreeTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertDictEqual(_path_tuples_with_values_to_dict_tree([(('a', \n 0), 'b'), (('a', 1, 'e'), 1)]), {'a': {(0): 'b', (1): {'e': 1}}})\n\n\nclass RecognizeListsTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertListEqual(_recognize_lists({(0): 'a', (1): {'b': -1, 'c':\n {(0): 'd', (1): -2}}}), ['a', {'b': -1, 'c': ['d', -2]}])\n\n def test_again(self):\n self.assertDictEqual(unflatten({'a': 1, 'b': {(0): 'c', (1): {(0):\n 'd', (1): {'e': {'f': -1, 'g': 'h'}}}}}), {'a': 1, 'b': ['c', [\n 'd', {'e': {'f': -1, 'g': 'h'}}]]})\n\n\nclass FlattenTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertDictEqual(flatten({'a': 1, 'b': ['c', ['d', {'e': {'f': \n -1, 'g': 'h'}}]]}), {'a': 1, 'b[0]': 'c', 'b[1][0]': 'd',\n 'b[1][1][e][f]': -1, 'b[1][1][e][g]': 'h'})\n\n def test_dot_colon(self):\n self.assertDictEqual(flatten({'a': 1, 'b': ['c', ['d', {'e': {'f': \n -1, 'g': 'h'}}]]}, join=dot_colon_join), {'a': 1, 'b:0': 'c',\n 'b:1:0': 'd', 'b:1:1.e.f': -1, 'b:1:1.e.g': 'h'})\n\n\nclass UnflattenTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertDictEqual(unflatten({'a': 1, 'b[0]': 'c', 'b[1][0]': 'd',\n 'b[1][1][e][f]': -1, 'b[1][1][e][g]': 'h'}), {'a': 1, 'b': ['c',\n ['d', {'e': {'f': -1, 'g': 'h'}}]]})\n\n def test_dot_colon(self):\n self.assertDictEqual(unflatten({'a': 1, 'b:0': 'c', 'b:1:0': 'd',\n 'b:1:1.e.f': -1, 'b:1:1.e.g': 'h'}, 
split=dot_colon_split), {\n 'a': 1, 'b': ['c', ['d', {'e': {'f': -1, 'g': 'h'}}]]})\n\n\nclass DotColonJoinTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertSequenceEqual(dot_colon_join(('a',)), 'a')\n self.assertSequenceEqual(dot_colon_join(('b', 0)), 'b:0')\n self.assertSequenceEqual(dot_colon_join(('b', 1)), 'b:1')\n self.assertSequenceEqual(dot_colon_join(('b', 2, 'e')), 'b:2.e')\n self.assertSequenceEqual(dot_colon_join(('b', 2, 'f')), 'b:2.f')\n\n\nclass DotColonSplitTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertTupleEqual(dot_colon_split('a'), ('a',))\n self.assertTupleEqual(dot_colon_split('b:0'), ('b', 0))\n self.assertTupleEqual(dot_colon_split('b:1'), ('b', 1))\n self.assertTupleEqual(dot_colon_split('b:2.e'), ('b', 2, 'e'))\n self.assertTupleEqual(dot_colon_split('b:2.f'), ('b', 2, 'f'))\n",
"step-4": "import unittest\nfrom unflatten import _path_tuples_with_values_to_dict_tree, dot_colon_join, dot_colon_split\nfrom unflatten import _recognize_lists\nfrom unflatten import _tree_to_path_tuples_with_values\nfrom unflatten import brackets_join\nfrom unflatten import flatten\nfrom unflatten import unflatten\n\n\nclass BracketsReduceTestCase(unittest.TestCase):\n\n def test_one_element(self):\n self.assertEqual(brackets_join(['aa']), 'aa')\n\n def test_simple(self):\n self.assertEqual(brackets_join(['aa', 1, 'bb', 2]), 'aa[1][bb][2]')\n\n\nclass TreeToPathTuplesWithValuesTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertSequenceEqual(list(_tree_to_path_tuples_with_values({'a':\n ['b', {'e': 1}]})), [(('a', 0), 'b'), (('a', 1, 'e'), 1)])\n\n\nclass PathTuplesWithValuesToDictTreeTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertDictEqual(_path_tuples_with_values_to_dict_tree([(('a', \n 0), 'b'), (('a', 1, 'e'), 1)]), {'a': {(0): 'b', (1): {'e': 1}}})\n\n\nclass RecognizeListsTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertListEqual(_recognize_lists({(0): 'a', (1): {'b': -1, 'c':\n {(0): 'd', (1): -2}}}), ['a', {'b': -1, 'c': ['d', -2]}])\n\n def test_again(self):\n self.assertDictEqual(unflatten({'a': 1, 'b': {(0): 'c', (1): {(0):\n 'd', (1): {'e': {'f': -1, 'g': 'h'}}}}}), {'a': 1, 'b': ['c', [\n 'd', {'e': {'f': -1, 'g': 'h'}}]]})\n\n\nclass FlattenTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertDictEqual(flatten({'a': 1, 'b': ['c', ['d', {'e': {'f': \n -1, 'g': 'h'}}]]}), {'a': 1, 'b[0]': 'c', 'b[1][0]': 'd',\n 'b[1][1][e][f]': -1, 'b[1][1][e][g]': 'h'})\n\n def test_dot_colon(self):\n self.assertDictEqual(flatten({'a': 1, 'b': ['c', ['d', {'e': {'f': \n -1, 'g': 'h'}}]]}, join=dot_colon_join), {'a': 1, 'b:0': 'c',\n 'b:1:0': 'd', 'b:1:1.e.f': -1, 'b:1:1.e.g': 'h'})\n\n\nclass UnflattenTestCase(unittest.TestCase):\n\n def test_simple(self):\n 
self.assertDictEqual(unflatten({'a': 1, 'b[0]': 'c', 'b[1][0]': 'd',\n 'b[1][1][e][f]': -1, 'b[1][1][e][g]': 'h'}), {'a': 1, 'b': ['c',\n ['d', {'e': {'f': -1, 'g': 'h'}}]]})\n\n def test_dot_colon(self):\n self.assertDictEqual(unflatten({'a': 1, 'b:0': 'c', 'b:1:0': 'd',\n 'b:1:1.e.f': -1, 'b:1:1.e.g': 'h'}, split=dot_colon_split), {\n 'a': 1, 'b': ['c', ['d', {'e': {'f': -1, 'g': 'h'}}]]})\n\n\nclass DotColonJoinTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertSequenceEqual(dot_colon_join(('a',)), 'a')\n self.assertSequenceEqual(dot_colon_join(('b', 0)), 'b:0')\n self.assertSequenceEqual(dot_colon_join(('b', 1)), 'b:1')\n self.assertSequenceEqual(dot_colon_join(('b', 2, 'e')), 'b:2.e')\n self.assertSequenceEqual(dot_colon_join(('b', 2, 'f')), 'b:2.f')\n\n\nclass DotColonSplitTestCase(unittest.TestCase):\n\n def test_simple(self):\n self.assertTupleEqual(dot_colon_split('a'), ('a',))\n self.assertTupleEqual(dot_colon_split('b:0'), ('b', 0))\n self.assertTupleEqual(dot_colon_split('b:1'), ('b', 1))\n self.assertTupleEqual(dot_colon_split('b:2.e'), ('b', 2, 'e'))\n self.assertTupleEqual(dot_colon_split('b:2.f'), ('b', 2, 'f'))\n",
"step-5": "import unittest\n\nfrom unflatten import _path_tuples_with_values_to_dict_tree, dot_colon_join, dot_colon_split\nfrom unflatten import _recognize_lists\nfrom unflatten import _tree_to_path_tuples_with_values\nfrom unflatten import brackets_join\nfrom unflatten import flatten\nfrom unflatten import unflatten\n\n\nclass BracketsReduceTestCase(unittest.TestCase):\n def test_one_element(self):\n self.assertEqual(brackets_join(['aa']), 'aa')\n\n def test_simple(self):\n self.assertEqual(brackets_join(['aa', 1, 'bb', 2]), 'aa[1][bb][2]')\n\n\nclass TreeToPathTuplesWithValuesTestCase(unittest.TestCase):\n def test_simple(self):\n self.assertSequenceEqual(\n list(_tree_to_path_tuples_with_values(\n {'a': ['b',\n {'e': 1}]})),\n [(('a', 0), 'b'),\n (('a', 1, 'e'), 1)])\n\n\nclass PathTuplesWithValuesToDictTreeTestCase(unittest.TestCase):\n def test_simple(self):\n self.assertDictEqual(\n _path_tuples_with_values_to_dict_tree(\n [(('a', 0), 'b'),\n (('a', 1, 'e'), 1)]),\n {'a': {0: 'b',\n 1: {'e': 1}}})\n\n\nclass RecognizeListsTestCase(unittest.TestCase):\n def test_simple(self):\n self.assertListEqual(\n _recognize_lists(\n {0: 'a',\n 1: {'b': -1,\n 'c': {0: 'd',\n 1: -2}}}),\n ['a',\n {'b': -1,\n 'c': ['d',\n -2]}])\n\n def test_again(self):\n self.assertDictEqual(\n unflatten(\n {'a': 1,\n 'b': {0: 'c',\n 1: {0: 'd',\n 1: {'e': {'f': -1,\n 'g': 'h'}}}}}),\n {'a': 1,\n 'b': ['c',\n ['d',\n {'e': {'f': -1,\n 'g': 'h'}}]]})\n\n\nclass FlattenTestCase(unittest.TestCase):\n def test_simple(self):\n self.assertDictEqual(\n flatten(\n {'a': 1,\n 'b': ['c',\n ['d',\n {'e': {'f': -1,\n 'g': 'h'}}]]}),\n {'a': 1,\n 'b[0]': 'c',\n 'b[1][0]': 'd',\n 'b[1][1][e][f]': -1,\n 'b[1][1][e][g]': 'h'})\n\n def test_dot_colon(self):\n self.assertDictEqual(\n flatten(\n {'a': 1,\n 'b': ['c',\n ['d',\n {'e': {'f': -1,\n 'g': 'h'}}]]},\n join=dot_colon_join),\n {'a': 1,\n 'b:0': 'c',\n 'b:1:0': 'd',\n 'b:1:1.e.f': -1,\n 'b:1:1.e.g': 'h'})\n\n\nclass 
UnflattenTestCase(unittest.TestCase):\n def test_simple(self):\n self.assertDictEqual(\n unflatten(\n {'a': 1,\n 'b[0]': 'c',\n 'b[1][0]': 'd',\n 'b[1][1][e][f]': -1,\n 'b[1][1][e][g]': 'h'}),\n {'a': 1,\n 'b': ['c',\n ['d',\n {'e': {'f': -1,\n 'g': 'h'}}]]})\n\n def test_dot_colon(self):\n self.assertDictEqual(\n unflatten(\n {'a': 1,\n 'b:0': 'c',\n 'b:1:0': 'd',\n 'b:1:1.e.f': -1,\n 'b:1:1.e.g': 'h'},\n split=dot_colon_split),\n {'a': 1,\n 'b': ['c',\n ['d',\n {'e': {'f': -1,\n 'g': 'h'}}]]})\n\n\nclass DotColonJoinTestCase(unittest.TestCase):\n def test_simple(self):\n self.assertSequenceEqual(dot_colon_join(('a',)), 'a')\n self.assertSequenceEqual(dot_colon_join(('b', 0)), 'b:0')\n self.assertSequenceEqual(dot_colon_join(('b', 1)), 'b:1')\n self.assertSequenceEqual(dot_colon_join(('b', 2, 'e')), 'b:2.e')\n self.assertSequenceEqual(dot_colon_join(('b', 2, 'f')), 'b:2.f')\n\n\nclass DotColonSplitTestCase(unittest.TestCase):\n def test_simple(self):\n self.assertTupleEqual(dot_colon_split('a'), ('a',))\n self.assertTupleEqual(dot_colon_split('b:0'), ('b', 0))\n self.assertTupleEqual(dot_colon_split('b:1'), ('b', 1))\n self.assertTupleEqual(dot_colon_split('b:2.e'), ('b', 2, 'e'))\n self.assertTupleEqual(dot_colon_split('b:2.f'), ('b', 2, 'f'))\n",
"step-ids": [
13,
17,
20,
21,
22
]
}
|
[
13,
17,
20,
21,
22
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if __name__ == '__main__':
setLogLevel('info')
network = TreeContainerNet(depth=2, fanout=100, switch=OVSSwitch)
network.run(CLI, network)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from mininet.cli import CLI
from mininet.log import setLogLevel
from mininet.node import OVSSwitch
from mininet.topolib import TreeContainerNet
if __name__ == '__main__':
setLogLevel('info')
network = TreeContainerNet(depth=2, fanout=100, switch=OVSSwitch)
network.run(CLI, network)
<|reserved_special_token_1|>
#!/usr/bin/python
"""
Create a 1024-host network, and run the CLI on it.
If this fails because of kernel limits, you may have
to adjust them, e.g. by adding entries to /etc/sysctl.conf
and running sysctl -p. Check util/sysctl_addon.
This is a copy of tree1024.py that is using the Containernet
constructor. Containernet overrides the buildFromTopo
functionality and adds Docker hosts instead.
"""
from mininet.cli import CLI
from mininet.log import setLogLevel
from mininet.node import OVSSwitch
from mininet.topolib import TreeContainerNet
if __name__ == '__main__':
    # Verbose Mininet logging so host/switch creation progress is visible.
    setLogLevel('info')
    # Depth-2 tree with fanout 100: one root switch, 100 leaf switches,
    # 100 Docker hosts each (TreeContainerNet builds Docker hosts).
    net = TreeContainerNet(depth=2, fanout=100, switch=OVSSwitch)
    net.run(CLI, net)
|
flexible
|
{
"blob_id": "9c3ca2fa43c6a34d7fe06517812a6d0bf5d6dbe1",
"index": 4029,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n setLogLevel('info')\n network = TreeContainerNet(depth=2, fanout=100, switch=OVSSwitch)\n network.run(CLI, network)\n",
"step-3": "<mask token>\nfrom mininet.cli import CLI\nfrom mininet.log import setLogLevel\nfrom mininet.node import OVSSwitch\nfrom mininet.topolib import TreeContainerNet\nif __name__ == '__main__':\n setLogLevel('info')\n network = TreeContainerNet(depth=2, fanout=100, switch=OVSSwitch)\n network.run(CLI, network)\n",
"step-4": "#!/usr/bin/python\n\n\"\"\"\nCreate a 1024-host network, and run the CLI on it.\nIf this fails because of kernel limits, you may have\nto adjust them, e.g. by adding entries to /etc/sysctl.conf\nand running sysctl -p. Check util/sysctl_addon.\nThis is a copy of tree1024.py that is using the Containernet\nconstructor. Containernet overrides the buildFromTopo\nfunctionality and adds Docker hosts instead.\n\"\"\"\n\nfrom mininet.cli import CLI\nfrom mininet.log import setLogLevel\nfrom mininet.node import OVSSwitch\nfrom mininet.topolib import TreeContainerNet\n\nif __name__ == '__main__':\n setLogLevel( 'info' )\n network = TreeContainerNet( depth=2, fanout=100, switch=OVSSwitch )\n network.run( CLI, network )\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
@app.errorhandler(500)
def internal_server_error(e):
return render_template('500.html', error=str(e))
<|reserved_special_token_0|>
@app.route('/pools', methods=['GET'])
def pools():
return render_template('pools.html', pools=Pool.query.all())
@app.route('/pool/<pool>', methods=['GET'])
def pool(pool):
db_pool = Pool.query.get(pool)
if db_pool is None:
abort(404, description='Pool with ID {} could not be found.'.format
(pool))
return render_template('pool.html', pool=db_pool)
<|reserved_special_token_0|>
@app.route('/about', methods=['GET'])
def about():
return render_template('about.html', version=app_version)
@app.route('/partitions/<part>', methods=['GET'])
def partitions(part):
try:
drive = driveMan.get_drive_by_id(int(part))
except ValueError:
abort(500, description='Expected int, but got {}.'.format(part))
except LookupError:
abort(500, description='Invalid drive id {}'.format(part))
return render_template('partitions.html', parts=drive.partitions)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html', error=str(e))
@app.errorhandler(500)
def internal_server_error(e):
return render_template('500.html', error=str(e))
<|reserved_special_token_0|>
@app.route('/drives', methods=['GET'])
def drives():
delete = request.args.get('delete')
if delete is not None:
try:
drive = driveMan.get_drive_by_id(int(delete))
driveMan.delete_drive(drive)
except ValueError:
abort(500, description='Expected int, but got {}.'.format(delete))
except LookupError:
abort(500, description='Invalid drive id {}'.format(delete))
return redirect(url_for('drives'))
return render_template('drives.html', drives=Drive.query.all())
@app.route('/pools', methods=['GET'])
def pools():
return render_template('pools.html', pools=Pool.query.all())
@app.route('/pool/<pool>', methods=['GET'])
def pool(pool):
db_pool = Pool.query.get(pool)
if db_pool is None:
abort(404, description='Pool with ID {} could not be found.'.format
(pool))
return render_template('pool.html', pool=db_pool)
@app.route('/pools/add', methods=['GET', 'POST'])
def add_pool():
form = PoolAddForm(request.form)
form.drives.choices = viewManager.get_empty_drives()
if request.method == 'POST' and form.validate():
try:
viewManager.create_btrfs_pool(form)
except subprocess.CalledProcessError as e:
abort(500, description=
'While creating a pool, the following exception occured: {}'
.format(e))
except subprocess.TimeoutExpired as e:
abort(500, description='Pool creation took too long: {}'.format(e))
scheduler.get_job('refresh_disks').modify(next_run_time=datetime.
datetime.now())
sleep(1)
return redirect(url_for('pools'))
return render_template('pool_add.html', form=form)
@app.route('/about', methods=['GET'])
def about():
return render_template('about.html', version=app_version)
@app.route('/partitions/<part>', methods=['GET'])
def partitions(part):
try:
drive = driveMan.get_drive_by_id(int(part))
except ValueError:
abort(500, description='Expected int, but got {}.'.format(part))
except LookupError:
abort(500, description='Invalid drive id {}'.format(part))
return render_template('partitions.html', parts=drive.partitions)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html', error=str(e))
@app.errorhandler(500)
def internal_server_error(e):
return render_template('500.html', error=str(e))
<|reserved_special_token_0|>
@app.route('/drives', methods=['GET'])
def drives():
delete = request.args.get('delete')
if delete is not None:
try:
drive = driveMan.get_drive_by_id(int(delete))
driveMan.delete_drive(drive)
except ValueError:
abort(500, description='Expected int, but got {}.'.format(delete))
except LookupError:
abort(500, description='Invalid drive id {}'.format(delete))
return redirect(url_for('drives'))
return render_template('drives.html', drives=Drive.query.all())
@app.route('/pools', methods=['GET'])
def pools():
return render_template('pools.html', pools=Pool.query.all())
@app.route('/pool/<pool>', methods=['GET'])
def pool(pool):
db_pool = Pool.query.get(pool)
if db_pool is None:
abort(404, description='Pool with ID {} could not be found.'.format
(pool))
return render_template('pool.html', pool=db_pool)
@app.route('/pools/add', methods=['GET', 'POST'])
def add_pool():
form = PoolAddForm(request.form)
form.drives.choices = viewManager.get_empty_drives()
if request.method == 'POST' and form.validate():
try:
viewManager.create_btrfs_pool(form)
except subprocess.CalledProcessError as e:
abort(500, description=
'While creating a pool, the following exception occured: {}'
.format(e))
except subprocess.TimeoutExpired as e:
abort(500, description='Pool creation took too long: {}'.format(e))
scheduler.get_job('refresh_disks').modify(next_run_time=datetime.
datetime.now())
sleep(1)
return redirect(url_for('pools'))
return render_template('pool_add.html', form=form)
@app.route('/about', methods=['GET'])
def about():
return render_template('about.html', version=app_version)
@app.route('/partitions/<part>', methods=['GET'])
def partitions(part):
try:
drive = driveMan.get_drive_by_id(int(part))
except ValueError:
abort(500, description='Expected int, but got {}.'.format(part))
except LookupError:
abort(500, description='Invalid drive id {}'.format(part))
return render_template('partitions.html', parts=drive.partitions)
<|reserved_special_token_0|>
@app.route('/add_cleaning', methods=['GET', 'POST'])
def add_cleaning():
form = CleaningForm(request.form)
if request.method == 'POST' and form.validate():
viewManager.create_cleaning_obj(form.jobname.data, form.path.data,
form.active.data)
return redirect(url_for('cleaning'))
return render_template('add_cleaning.html', form=form)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html', error=str(e))
@app.errorhandler(500)
def internal_server_error(e):
return render_template('500.html', error=str(e))
@app.route('/', methods=['GET'])
def index():
return render_template('index.html', service=servMan.service_state())
@app.route('/drives', methods=['GET'])
def drives():
delete = request.args.get('delete')
if delete is not None:
try:
drive = driveMan.get_drive_by_id(int(delete))
driveMan.delete_drive(drive)
except ValueError:
abort(500, description='Expected int, but got {}.'.format(delete))
except LookupError:
abort(500, description='Invalid drive id {}'.format(delete))
return redirect(url_for('drives'))
return render_template('drives.html', drives=Drive.query.all())
@app.route('/pools', methods=['GET'])
def pools():
return render_template('pools.html', pools=Pool.query.all())
@app.route('/pool/<pool>', methods=['GET'])
def pool(pool):
db_pool = Pool.query.get(pool)
if db_pool is None:
abort(404, description='Pool with ID {} could not be found.'.format
(pool))
return render_template('pool.html', pool=db_pool)
@app.route('/pools/add', methods=['GET', 'POST'])
def add_pool():
form = PoolAddForm(request.form)
form.drives.choices = viewManager.get_empty_drives()
if request.method == 'POST' and form.validate():
try:
viewManager.create_btrfs_pool(form)
except subprocess.CalledProcessError as e:
abort(500, description=
'While creating a pool, the following exception occured: {}'
.format(e))
except subprocess.TimeoutExpired as e:
abort(500, description='Pool creation took too long: {}'.format(e))
scheduler.get_job('refresh_disks').modify(next_run_time=datetime.
datetime.now())
sleep(1)
return redirect(url_for('pools'))
return render_template('pool_add.html', form=form)
@app.route('/about', methods=['GET'])
def about():
return render_template('about.html', version=app_version)
@app.route('/partitions/<part>', methods=['GET'])
def partitions(part):
try:
drive = driveMan.get_drive_by_id(int(part))
except ValueError:
abort(500, description='Expected int, but got {}.'.format(part))
except LookupError:
abort(500, description='Invalid drive id {}'.format(part))
return render_template('partitions.html', parts=drive.partitions)
@app.route('/cleaning', methods=['GET'])
def cleaning():
remove = request.args.get('remove')
changestate = request.args.get('changestate')
service = request.args.get('service')
if not (remove is not None and changestate is not None):
if remove is not None:
try:
remove = int(remove)
Cleaning.query.filter(Cleaning.id == remove).delete()
db.session.commit()
return redirect(request.path, code=302)
except ValueError:
flash('Value Error: remove')
elif changestate is not None:
try:
changestate = int(changestate)
job = Cleaning.query.get(changestate)
if job.state == 0:
job.state = 1
else:
job.state = 0
db.session.commit()
return redirect(request.path, code=302)
except ValueError:
flash('Value Error: changestate')
else:
flash('Value Error: remove and changestate set')
if service is not None:
try:
service = str(service)
if service == 'start':
if not cleaningMan.running():
cleaningMan.enable()
return redirect(request.path, code=302)
else:
flash('Error: Cleaning Service already running.')
elif service == 'pause':
if cleaningMan.running():
cleaningMan.disable()
return redirect(request.path, code=302)
else:
flash('Error: Cleaning Service already paused.')
else:
raise ValueError
except ValueError:
flash('Value Error: service')
elements = Cleaning.query.order_by(db.asc(db.collate(Cleaning.name,
'NOCASE'))).all()
return render_template('cleaning.html', elements=elements, task_running
=cleaningMan.running())
@app.route('/add_cleaning', methods=['GET', 'POST'])
def add_cleaning():
form = CleaningForm(request.form)
if request.method == 'POST' and form.validate():
viewManager.create_cleaning_obj(form.jobname.data, form.path.data,
form.active.data)
return redirect(url_for('cleaning'))
return render_template('add_cleaning.html', form=form)
<|reserved_special_token_1|>
import datetime
import subprocess
from time import sleep
from flask import render_template, redirect, request, url_for, flash, abort
from dirkules import app, db, scheduler, app_version
import dirkules.manager.serviceManager as servMan
import dirkules.manager.driveManager as driveMan
import dirkules.manager.cleaning as cleaningMan
from dirkules.models import Drive, Cleaning, Pool
import dirkules.manager.viewManager as viewManager
from dirkules.validation.validators import CleaningForm, PoolAddForm
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page with the error's description."""
    message = str(e)
    return render_template('404.html', error=message)
@app.errorhandler(500)
def internal_server_error(e):
    """Render the custom 500 page with the error's description."""
    message = str(e)
    return render_template('500.html', error=message)
@app.route('/', methods=['GET'])
def index():
    """Landing page showing the current service state."""
    state = servMan.service_state()
    return render_template('index.html', service=state)
@app.route('/drives', methods=['GET'])
def drives():
    """List all drives; with ?delete=<id>, remove that drive first.

    A malformed or unknown id aborts with HTTP 500; a successful delete
    redirects back to this page so the list is re-rendered.
    """
    delete = request.args.get('delete')
    if delete is None:
        return render_template('drives.html', drives=Drive.query.all())
    try:
        doomed = driveMan.get_drive_by_id(int(delete))
        driveMan.delete_drive(doomed)
    except ValueError:
        abort(500, description="Expected int, but got {}.".format(delete))
    except LookupError:
        abort(500, description="Invalid drive id {}".format(delete))
    return redirect(url_for('drives'))
@app.route('/pools', methods=['GET'])
def pools():
    """List every known storage pool."""
    all_pools = Pool.query.all()
    return render_template('pools.html', pools=all_pools)
@app.route('/pool/<pool>', methods=['GET'])
def pool(pool):
    """Show the detail page for one pool; 404 if the id is unknown."""
    db_pool = Pool.query.get(pool)
    if db_pool is not None:
        return render_template('pool.html', pool=db_pool)
    abort(404, description="Pool with ID {} could not be found.".format(pool))
@app.route('/pools/add', methods=['GET', 'POST'])
def add_pool():
    """Render the pool-creation form; on valid POST, create a btrfs pool.

    Pool creation failures (subprocess error or timeout) abort with HTTP 500.
    On success the drive-refresh job is triggered immediately so the new
    pool becomes visible before redirecting to the pool list.
    """
    form = PoolAddForm(request.form)
    # Only drives reported as empty are offered as pool members.
    form.drives.choices = viewManager.get_empty_drives()
    if request.method == 'POST' and form.validate():
        try:
            viewManager.create_btrfs_pool(form)
        except subprocess.CalledProcessError as e:
            abort(500, description="While creating a pool, the following exception occured: {}".format(e))
        except subprocess.TimeoutExpired as e:
            abort(500, description="Pool creation took too long: {}".format(e))
        # Run the scheduled disk rescan right now instead of waiting for
        # its next regular slot.
        scheduler.get_job("refresh_disks").modify(next_run_time=datetime.datetime.now())
        # NOTE(review): fixed 1s sleep presumably lets the rescan finish
        # before the redirect renders the pool list -- confirm this is enough.
        sleep(1)
        return redirect(url_for('pools'))
    return render_template('pool_add.html', form=form)
@app.route('/about', methods=['GET'])
def about():
    """About page showing the application version."""
    return render_template('about.html',
                           version=app_version)
@app.route('/partitions/<part>', methods=['GET'])
def partitions(part):
    """List the partitions of one drive; bad or unknown ids abort with 500."""
    try:
        selected = driveMan.get_drive_by_id(int(part))
    except ValueError:
        abort(500, description="Expected int, but got {}.".format(part))
    except LookupError:
        abort(500, description="Invalid drive id {}".format(part))
    return render_template('partitions.html', parts=selected.partitions)
@app.route('/cleaning', methods=['GET'])
def cleaning():
    """Cleaning-job overview with inline actions driven by query params.

    Supported query parameters (each successful action redirects back here):
      remove=<id>       delete that cleaning job
      changestate=<id>  toggle that job's state between 0 and 1
      service=start     enable the cleaning service (if not running)
      service=pause     disable the cleaning service (if running)
    remove and changestate are mutually exclusive; passing both only
    flashes an error. Failed actions flash a message and fall through to
    rendering the job list.
    """
    remove = request.args.get('remove')
    changestate = request.args.get('changestate')
    service = request.args.get('service')
    # Only one of remove/changestate may be set per request.
    if not (remove is not None and changestate is not None):
        if remove is not None:
            try:
                remove = int(remove)
                Cleaning.query.filter(Cleaning.id == remove).delete()
                db.session.commit()
                return redirect(request.path, code=302)
            except ValueError:
                flash("Value Error: remove")
        elif changestate is not None:
            try:
                changestate = int(changestate)
                job = Cleaning.query.get(changestate)
                # Toggle between the two states 0 and 1.
                if job.state == 0:
                    job.state = 1
                else:
                    job.state = 0
                db.session.commit()
                return redirect(request.path, code=302)
            except ValueError:
                flash("Value Error: changestate")
    else:
        flash("Value Error: remove and changestate set")
    if service is not None:
        try:
            service = str(service)
            if service == "start":
                if not cleaningMan.running():
                    cleaningMan.enable()
                    return redirect(request.path, code=302)
                else:
                    flash("Error: Cleaning Service already running.")
            elif service == "pause":
                if cleaningMan.running():
                    cleaningMan.disable()
                    return redirect(request.path, code=302)
                else:
                    flash("Error: Cleaning Service already paused.")
            else:
                # Any other value is rejected via the shared handler below.
                raise ValueError
        except ValueError:
            flash("Value Error: service")
    # Case-insensitive alphabetical listing of all cleaning jobs.
    elements = Cleaning.query.order_by(db.asc(db.collate(Cleaning.name, 'NOCASE'))).all()
    return render_template('cleaning.html', elements=elements, task_running=cleaningMan.running())
@app.route('/add_cleaning', methods=['GET', 'POST'])
def add_cleaning():
    """Form to create a cleaning job; valid POST redirects to the job list."""
    form = CleaningForm(request.form)
    if request.method != 'POST' or not form.validate():
        return render_template('add_cleaning.html', form=form)
    viewManager.create_cleaning_obj(form.jobname.data,
                                    form.path.data,
                                    form.active.data)
    return redirect(url_for('cleaning'))
|
flexible
|
{
"blob_id": "ab27780b19db6854855af51eea063f07d9eb7302",
"index": 3553,
"step-1": "<mask token>\n\n\n@app.errorhandler(500)\ndef internal_server_error(e):\n return render_template('500.html', error=str(e))\n\n\n<mask token>\n\n\n@app.route('/pools', methods=['GET'])\ndef pools():\n return render_template('pools.html', pools=Pool.query.all())\n\n\n@app.route('/pool/<pool>', methods=['GET'])\ndef pool(pool):\n db_pool = Pool.query.get(pool)\n if db_pool is None:\n abort(404, description='Pool with ID {} could not be found.'.format\n (pool))\n return render_template('pool.html', pool=db_pool)\n\n\n<mask token>\n\n\n@app.route('/about', methods=['GET'])\ndef about():\n return render_template('about.html', version=app_version)\n\n\n@app.route('/partitions/<part>', methods=['GET'])\ndef partitions(part):\n try:\n drive = driveMan.get_drive_by_id(int(part))\n except ValueError:\n abort(500, description='Expected int, but got {}.'.format(part))\n except LookupError:\n abort(500, description='Invalid drive id {}'.format(part))\n return render_template('partitions.html', parts=drive.partitions)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.errorhandler(404)\ndef page_not_found(e):\n return render_template('404.html', error=str(e))\n\n\n@app.errorhandler(500)\ndef internal_server_error(e):\n return render_template('500.html', error=str(e))\n\n\n<mask token>\n\n\n@app.route('/drives', methods=['GET'])\ndef drives():\n delete = request.args.get('delete')\n if delete is not None:\n try:\n drive = driveMan.get_drive_by_id(int(delete))\n driveMan.delete_drive(drive)\n except ValueError:\n abort(500, description='Expected int, but got {}.'.format(delete))\n except LookupError:\n abort(500, description='Invalid drive id {}'.format(delete))\n return redirect(url_for('drives'))\n return render_template('drives.html', drives=Drive.query.all())\n\n\n@app.route('/pools', methods=['GET'])\ndef pools():\n return render_template('pools.html', pools=Pool.query.all())\n\n\n@app.route('/pool/<pool>', methods=['GET'])\ndef pool(pool):\n db_pool = Pool.query.get(pool)\n if db_pool is None:\n abort(404, description='Pool with ID {} could not be found.'.format\n (pool))\n return render_template('pool.html', pool=db_pool)\n\n\n@app.route('/pools/add', methods=['GET', 'POST'])\ndef add_pool():\n form = PoolAddForm(request.form)\n form.drives.choices = viewManager.get_empty_drives()\n if request.method == 'POST' and form.validate():\n try:\n viewManager.create_btrfs_pool(form)\n except subprocess.CalledProcessError as e:\n abort(500, description=\n 'While creating a pool, the following exception occured: {}'\n .format(e))\n except subprocess.TimeoutExpired as e:\n abort(500, description='Pool creation took too long: {}'.format(e))\n scheduler.get_job('refresh_disks').modify(next_run_time=datetime.\n datetime.now())\n sleep(1)\n return redirect(url_for('pools'))\n return render_template('pool_add.html', form=form)\n\n\n@app.route('/about', methods=['GET'])\ndef about():\n return render_template('about.html', version=app_version)\n\n\n@app.route('/partitions/<part>', methods=['GET'])\ndef 
partitions(part):\n try:\n drive = driveMan.get_drive_by_id(int(part))\n except ValueError:\n abort(500, description='Expected int, but got {}.'.format(part))\n except LookupError:\n abort(500, description='Invalid drive id {}'.format(part))\n return render_template('partitions.html', parts=drive.partitions)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@app.errorhandler(404)\ndef page_not_found(e):\n return render_template('404.html', error=str(e))\n\n\n@app.errorhandler(500)\ndef internal_server_error(e):\n return render_template('500.html', error=str(e))\n\n\n<mask token>\n\n\n@app.route('/drives', methods=['GET'])\ndef drives():\n delete = request.args.get('delete')\n if delete is not None:\n try:\n drive = driveMan.get_drive_by_id(int(delete))\n driveMan.delete_drive(drive)\n except ValueError:\n abort(500, description='Expected int, but got {}.'.format(delete))\n except LookupError:\n abort(500, description='Invalid drive id {}'.format(delete))\n return redirect(url_for('drives'))\n return render_template('drives.html', drives=Drive.query.all())\n\n\n@app.route('/pools', methods=['GET'])\ndef pools():\n return render_template('pools.html', pools=Pool.query.all())\n\n\n@app.route('/pool/<pool>', methods=['GET'])\ndef pool(pool):\n db_pool = Pool.query.get(pool)\n if db_pool is None:\n abort(404, description='Pool with ID {} could not be found.'.format\n (pool))\n return render_template('pool.html', pool=db_pool)\n\n\n@app.route('/pools/add', methods=['GET', 'POST'])\ndef add_pool():\n form = PoolAddForm(request.form)\n form.drives.choices = viewManager.get_empty_drives()\n if request.method == 'POST' and form.validate():\n try:\n viewManager.create_btrfs_pool(form)\n except subprocess.CalledProcessError as e:\n abort(500, description=\n 'While creating a pool, the following exception occured: {}'\n .format(e))\n except subprocess.TimeoutExpired as e:\n abort(500, description='Pool creation took too long: {}'.format(e))\n scheduler.get_job('refresh_disks').modify(next_run_time=datetime.\n datetime.now())\n sleep(1)\n return redirect(url_for('pools'))\n return render_template('pool_add.html', form=form)\n\n\n@app.route('/about', methods=['GET'])\ndef about():\n return render_template('about.html', version=app_version)\n\n\n@app.route('/partitions/<part>', methods=['GET'])\ndef 
partitions(part):\n try:\n drive = driveMan.get_drive_by_id(int(part))\n except ValueError:\n abort(500, description='Expected int, but got {}.'.format(part))\n except LookupError:\n abort(500, description='Invalid drive id {}'.format(part))\n return render_template('partitions.html', parts=drive.partitions)\n\n\n<mask token>\n\n\n@app.route('/add_cleaning', methods=['GET', 'POST'])\ndef add_cleaning():\n form = CleaningForm(request.form)\n if request.method == 'POST' and form.validate():\n viewManager.create_cleaning_obj(form.jobname.data, form.path.data,\n form.active.data)\n return redirect(url_for('cleaning'))\n return render_template('add_cleaning.html', form=form)\n",
"step-4": "<mask token>\n\n\n@app.errorhandler(404)\ndef page_not_found(e):\n return render_template('404.html', error=str(e))\n\n\n@app.errorhandler(500)\ndef internal_server_error(e):\n return render_template('500.html', error=str(e))\n\n\n@app.route('/', methods=['GET'])\ndef index():\n return render_template('index.html', service=servMan.service_state())\n\n\n@app.route('/drives', methods=['GET'])\ndef drives():\n delete = request.args.get('delete')\n if delete is not None:\n try:\n drive = driveMan.get_drive_by_id(int(delete))\n driveMan.delete_drive(drive)\n except ValueError:\n abort(500, description='Expected int, but got {}.'.format(delete))\n except LookupError:\n abort(500, description='Invalid drive id {}'.format(delete))\n return redirect(url_for('drives'))\n return render_template('drives.html', drives=Drive.query.all())\n\n\n@app.route('/pools', methods=['GET'])\ndef pools():\n return render_template('pools.html', pools=Pool.query.all())\n\n\n@app.route('/pool/<pool>', methods=['GET'])\ndef pool(pool):\n db_pool = Pool.query.get(pool)\n if db_pool is None:\n abort(404, description='Pool with ID {} could not be found.'.format\n (pool))\n return render_template('pool.html', pool=db_pool)\n\n\n@app.route('/pools/add', methods=['GET', 'POST'])\ndef add_pool():\n form = PoolAddForm(request.form)\n form.drives.choices = viewManager.get_empty_drives()\n if request.method == 'POST' and form.validate():\n try:\n viewManager.create_btrfs_pool(form)\n except subprocess.CalledProcessError as e:\n abort(500, description=\n 'While creating a pool, the following exception occured: {}'\n .format(e))\n except subprocess.TimeoutExpired as e:\n abort(500, description='Pool creation took too long: {}'.format(e))\n scheduler.get_job('refresh_disks').modify(next_run_time=datetime.\n datetime.now())\n sleep(1)\n return redirect(url_for('pools'))\n return render_template('pool_add.html', form=form)\n\n\n@app.route('/about', methods=['GET'])\ndef about():\n return 
render_template('about.html', version=app_version)\n\n\n@app.route('/partitions/<part>', methods=['GET'])\ndef partitions(part):\n try:\n drive = driveMan.get_drive_by_id(int(part))\n except ValueError:\n abort(500, description='Expected int, but got {}.'.format(part))\n except LookupError:\n abort(500, description='Invalid drive id {}'.format(part))\n return render_template('partitions.html', parts=drive.partitions)\n\n\n@app.route('/cleaning', methods=['GET'])\ndef cleaning():\n remove = request.args.get('remove')\n changestate = request.args.get('changestate')\n service = request.args.get('service')\n if not (remove is not None and changestate is not None):\n if remove is not None:\n try:\n remove = int(remove)\n Cleaning.query.filter(Cleaning.id == remove).delete()\n db.session.commit()\n return redirect(request.path, code=302)\n except ValueError:\n flash('Value Error: remove')\n elif changestate is not None:\n try:\n changestate = int(changestate)\n job = Cleaning.query.get(changestate)\n if job.state == 0:\n job.state = 1\n else:\n job.state = 0\n db.session.commit()\n return redirect(request.path, code=302)\n except ValueError:\n flash('Value Error: changestate')\n else:\n flash('Value Error: remove and changestate set')\n if service is not None:\n try:\n service = str(service)\n if service == 'start':\n if not cleaningMan.running():\n cleaningMan.enable()\n return redirect(request.path, code=302)\n else:\n flash('Error: Cleaning Service already running.')\n elif service == 'pause':\n if cleaningMan.running():\n cleaningMan.disable()\n return redirect(request.path, code=302)\n else:\n flash('Error: Cleaning Service already paused.')\n else:\n raise ValueError\n except ValueError:\n flash('Value Error: service')\n elements = Cleaning.query.order_by(db.asc(db.collate(Cleaning.name,\n 'NOCASE'))).all()\n return render_template('cleaning.html', elements=elements, task_running\n =cleaningMan.running())\n\n\n@app.route('/add_cleaning', methods=['GET', 
'POST'])\ndef add_cleaning():\n form = CleaningForm(request.form)\n if request.method == 'POST' and form.validate():\n viewManager.create_cleaning_obj(form.jobname.data, form.path.data,\n form.active.data)\n return redirect(url_for('cleaning'))\n return render_template('add_cleaning.html', form=form)\n",
"step-5": "import datetime\nimport subprocess\nfrom time import sleep\nfrom flask import render_template, redirect, request, url_for, flash, abort\nfrom dirkules import app, db, scheduler, app_version\nimport dirkules.manager.serviceManager as servMan\nimport dirkules.manager.driveManager as driveMan\nimport dirkules.manager.cleaning as cleaningMan\nfrom dirkules.models import Drive, Cleaning, Pool\nimport dirkules.manager.viewManager as viewManager\nfrom dirkules.validation.validators import CleaningForm, PoolAddForm\n\n\n@app.errorhandler(404)\ndef page_not_found(e):\n return render_template('404.html', error=str(e))\n\n\n@app.errorhandler(500)\ndef internal_server_error(e):\n return render_template('500.html', error=str(e))\n\n\n@app.route('/', methods=['GET'])\ndef index():\n return render_template('index.html', service=servMan.service_state())\n\n\n@app.route('/drives', methods=['GET'])\ndef drives():\n delete = request.args.get('delete')\n if delete is not None:\n try:\n drive = driveMan.get_drive_by_id(int(delete))\n driveMan.delete_drive(drive)\n except ValueError:\n abort(500, description=\"Expected int, but got {}.\".format(delete))\n except LookupError:\n abort(500, description=\"Invalid drive id {}\".format(delete))\n return redirect(url_for('drives'))\n return render_template('drives.html', drives=Drive.query.all())\n\n\n@app.route('/pools', methods=['GET'])\ndef pools():\n return render_template('pools.html', pools=Pool.query.all())\n\n\n@app.route('/pool/<pool>', methods=['GET'])\ndef pool(pool):\n db_pool = Pool.query.get(pool)\n if db_pool is None:\n abort(404, description=\"Pool with ID {} could not be found.\".format(pool))\n return render_template('pool.html', pool=db_pool)\n\n\n@app.route('/pools/add', methods=['GET', 'POST'])\ndef add_pool():\n form = PoolAddForm(request.form)\n form.drives.choices = viewManager.get_empty_drives()\n if request.method == 'POST' and form.validate():\n try:\n viewManager.create_btrfs_pool(form)\n except 
subprocess.CalledProcessError as e:\n abort(500, description=\"While creating a pool, the following exception occured: {}\".format(e))\n except subprocess.TimeoutExpired as e:\n abort(500, description=\"Pool creation took too long: {}\".format(e))\n scheduler.get_job(\"refresh_disks\").modify(next_run_time=datetime.datetime.now())\n sleep(1)\n return redirect(url_for('pools'))\n return render_template('pool_add.html', form=form)\n\n\n@app.route('/about', methods=['GET'])\ndef about():\n return render_template('about.html', version=app_version)\n\n\n@app.route('/partitions/<part>', methods=['GET'])\ndef partitions(part):\n try:\n drive = driveMan.get_drive_by_id(int(part))\n except ValueError:\n abort(500, description=\"Expected int, but got {}.\".format(part))\n except LookupError:\n abort(500, description=\"Invalid drive id {}\".format(part))\n return render_template('partitions.html', parts=drive.partitions)\n\n\n@app.route('/cleaning', methods=['GET'])\ndef cleaning():\n remove = request.args.get('remove')\n changestate = request.args.get('changestate')\n service = request.args.get('service')\n if not (remove is not None and changestate is not None):\n if remove is not None:\n try:\n remove = int(remove)\n Cleaning.query.filter(Cleaning.id == remove).delete()\n db.session.commit()\n return redirect(request.path, code=302)\n except ValueError:\n flash(\"Value Error: remove\")\n elif changestate is not None:\n try:\n changestate = int(changestate)\n job = Cleaning.query.get(changestate)\n if job.state == 0:\n job.state = 1\n else:\n job.state = 0\n db.session.commit()\n return redirect(request.path, code=302)\n except ValueError:\n flash(\"Value Error: changestate\")\n else:\n flash(\"Value Error: remove and changestate set\")\n if service is not None:\n try:\n service = str(service)\n if service == \"start\":\n if not cleaningMan.running():\n cleaningMan.enable()\n return redirect(request.path, code=302)\n else:\n flash(\"Error: Cleaning Service already 
running.\")\n elif service == \"pause\":\n if cleaningMan.running():\n cleaningMan.disable()\n return redirect(request.path, code=302)\n else:\n flash(\"Error: Cleaning Service already paused.\")\n else:\n raise ValueError\n except ValueError:\n flash(\"Value Error: service\")\n elements = Cleaning.query.order_by(db.asc(db.collate(Cleaning.name, 'NOCASE'))).all()\n return render_template('cleaning.html', elements=elements, task_running=cleaningMan.running())\n\n\n@app.route('/add_cleaning', methods=['GET', 'POST'])\ndef add_cleaning():\n form = CleaningForm(request.form)\n if request.method == 'POST' and form.validate():\n viewManager.create_cleaning_obj(form.jobname.data, form.path.data, form.active.data)\n return redirect(url_for('cleaning'))\n return render_template('add_cleaning.html', form=form)\n",
"step-ids": [
5,
8,
9,
11,
13
]
}
|
[
5,
8,
9,
11,
13
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys,os,traceback
from PIL import Image
class ResizeImageBuilder:
def __init__(self):
# print(self.__class__)
pass
def setOriginImagePath(self, filePath):
try:
img = Image.open(filePath)
# img = img.convert('RGB')
# size = 32, 32
# img.thumbnail(size)
print('origin image mode:', img.mode)
img = img.convert('RGB')
print('target image mode:', img.mode)
# img.show()
self.baseImage = img
return None
except (BaseException,e):
return str(filePath + " open error: " + traceback.format_exc(e))
def createImageWithOriginImage(self, img, imageSize):
return img.resize((imageSize, imageSize),Image.ANTIALIAS)
def saveImageWithPath(self, img, savePath):
img.save(savePath)
def createImage(self, savePath, imageSize):
if self.baseImage == None:
print('error: self.baseImage == None, please call setOriginImagePath() before createImage()')
return
try:
newimg = self.createImageWithOriginImage(self.baseImage, imageSize)
self.saveImageWithPath(newimg, savePath)
# print('done')
except (BaseException,e):
return 'createImage error: ' + traceback.format_exc(e)
def main():
# builder = ResizeImageBuilder()
# builder.setOriginImagePath(originImagePath)
# builder.createImage(path1, size1)
# builder.createImage(path2, size2)
pass
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "47119f46cdbbb7306aef8237d4f56f0f10690ae4",
"index": 9245,
"step-1": "<mask token>\n\n\nclass ResizeImageBuilder:\n\n def __init__(self):\n pass\n\n def setOriginImagePath(self, filePath):\n try:\n img = Image.open(filePath)\n print('origin image mode:', img.mode)\n img = img.convert('RGB')\n print('target image mode:', img.mode)\n self.baseImage = img\n return None\n except (BaseException, e):\n return str(filePath + ' open error: ' + traceback.format_exc(e))\n\n def createImageWithOriginImage(self, img, imageSize):\n return img.resize((imageSize, imageSize), Image.ANTIALIAS)\n <mask token>\n\n def createImage(self, savePath, imageSize):\n if self.baseImage == None:\n print(\n 'error: self.baseImage == None, please call setOriginImagePath() before createImage()'\n )\n return\n try:\n newimg = self.createImageWithOriginImage(self.baseImage, imageSize)\n self.saveImageWithPath(newimg, savePath)\n except (BaseException, e):\n return 'createImage error: ' + traceback.format_exc(e)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ResizeImageBuilder:\n\n def __init__(self):\n pass\n\n def setOriginImagePath(self, filePath):\n try:\n img = Image.open(filePath)\n print('origin image mode:', img.mode)\n img = img.convert('RGB')\n print('target image mode:', img.mode)\n self.baseImage = img\n return None\n except (BaseException, e):\n return str(filePath + ' open error: ' + traceback.format_exc(e))\n\n def createImageWithOriginImage(self, img, imageSize):\n return img.resize((imageSize, imageSize), Image.ANTIALIAS)\n\n def saveImageWithPath(self, img, savePath):\n img.save(savePath)\n\n def createImage(self, savePath, imageSize):\n if self.baseImage == None:\n print(\n 'error: self.baseImage == None, please call setOriginImagePath() before createImage()'\n )\n return\n try:\n newimg = self.createImageWithOriginImage(self.baseImage, imageSize)\n self.saveImageWithPath(newimg, savePath)\n except (BaseException, e):\n return 'createImage error: ' + traceback.format_exc(e)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ResizeImageBuilder:\n\n def __init__(self):\n pass\n\n def setOriginImagePath(self, filePath):\n try:\n img = Image.open(filePath)\n print('origin image mode:', img.mode)\n img = img.convert('RGB')\n print('target image mode:', img.mode)\n self.baseImage = img\n return None\n except (BaseException, e):\n return str(filePath + ' open error: ' + traceback.format_exc(e))\n\n def createImageWithOriginImage(self, img, imageSize):\n return img.resize((imageSize, imageSize), Image.ANTIALIAS)\n\n def saveImageWithPath(self, img, savePath):\n img.save(savePath)\n\n def createImage(self, savePath, imageSize):\n if self.baseImage == None:\n print(\n 'error: self.baseImage == None, please call setOriginImagePath() before createImage()'\n )\n return\n try:\n newimg = self.createImageWithOriginImage(self.baseImage, imageSize)\n self.saveImageWithPath(newimg, savePath)\n except (BaseException, e):\n return 'createImage error: ' + traceback.format_exc(e)\n\n\ndef main():\n pass\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import sys, os, traceback\nfrom PIL import Image\n\n\nclass ResizeImageBuilder:\n\n def __init__(self):\n pass\n\n def setOriginImagePath(self, filePath):\n try:\n img = Image.open(filePath)\n print('origin image mode:', img.mode)\n img = img.convert('RGB')\n print('target image mode:', img.mode)\n self.baseImage = img\n return None\n except (BaseException, e):\n return str(filePath + ' open error: ' + traceback.format_exc(e))\n\n def createImageWithOriginImage(self, img, imageSize):\n return img.resize((imageSize, imageSize), Image.ANTIALIAS)\n\n def saveImageWithPath(self, img, savePath):\n img.save(savePath)\n\n def createImage(self, savePath, imageSize):\n if self.baseImage == None:\n print(\n 'error: self.baseImage == None, please call setOriginImagePath() before createImage()'\n )\n return\n try:\n newimg = self.createImageWithOriginImage(self.baseImage, imageSize)\n self.saveImageWithPath(newimg, savePath)\n except (BaseException, e):\n return 'createImage error: ' + traceback.format_exc(e)\n\n\ndef main():\n pass\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport sys,os,traceback\nfrom PIL import Image\n\nclass ResizeImageBuilder:\n def __init__(self):\n # print(self.__class__)\n pass\n\n def setOriginImagePath(self, filePath):\n try:\n img = Image.open(filePath)\n # img = img.convert('RGB')\n # size = 32, 32\n # img.thumbnail(size)\n print('origin image mode:', img.mode)\n img = img.convert('RGB')\n print('target image mode:', img.mode)\n # img.show()\n self.baseImage = img\n return None\n except (BaseException,e):\n return str(filePath + \" open error: \" + traceback.format_exc(e))\n\n def createImageWithOriginImage(self, img, imageSize):\n return img.resize((imageSize, imageSize),Image.ANTIALIAS)\n\n def saveImageWithPath(self, img, savePath):\n img.save(savePath)\n\n def createImage(self, savePath, imageSize):\n if self.baseImage == None:\n print('error: self.baseImage == None, please call setOriginImagePath() before createImage()')\n return\n\n try:\n newimg = self.createImageWithOriginImage(self.baseImage, imageSize)\n self.saveImageWithPath(newimg, savePath)\n # print('done')\n except (BaseException,e):\n return 'createImage error: ' + traceback.format_exc(e)\n\ndef main():\n # builder = ResizeImageBuilder()\n # builder.setOriginImagePath(originImagePath)\n # builder.createImage(path1, size1)\n # builder.createImage(path2, size2)\n pass\n\nif __name__ == '__main__':\n main()",
"step-ids": [
5,
6,
8,
9,
10
]
}
|
[
5,
6,
8,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def calc_points(expr):
points = 0
hasil = eval(expr)
points -= abs(24 - hasil)
for c in expr:
points += points_dict.get(c, 0)
return points
def solve(bil):
bil.sort(reverse=True)
expr = str(bil[0])
bil = bil[1:]
for _ in range(3):
b_max_fitness = float('-Inf')
for b in bil:
for op in op_list:
curr_fitness = fitness(expr, op, b)
if curr_fitness > b_max_fitness:
b_max_fitness = curr_fitness
curr_op_max = op
curr_b_max = b
expr += f' {curr_op_max} {curr_b_max}'
bil.remove(curr_b_max)
points = calc_points(expr)
return expr, points
def main():
points = 0
solves = []
for a in range(1, 14):
for b in range(1, 14):
for c in range(1, 14):
for d in range(1, 14):
bil = [a, b, c, d]
expre, point = solve(bil)
if expre not in solves:
solves.append((expre, point))
points += point
print(
f'{(a - 1) * 13 * 13 * 13 + (b - 1) * 13 * 13 + (c - 1) * 13 + d} : {expre}'
)
avg_points = points / 13 ** 4
print(f'Average points : {avg_points}')
count24 = 0
for expr in solves:
res = eval(expr[0])
if res == 24:
count24 += 1
print(f'24 Count : {count24}')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def fitness(x1, op, x2):
try:
hasil = eval(f'{x1} {op} {x2}')
diff = points_dict[op] - abs(24 - hasil)
if abs(24 - hasil) == 0:
return diff + 10
else:
return diff
except ZeroDivisionError:
return float('-inf')
def calc_points(expr):
points = 0
hasil = eval(expr)
points -= abs(24 - hasil)
for c in expr:
points += points_dict.get(c, 0)
return points
def solve(bil):
bil.sort(reverse=True)
expr = str(bil[0])
bil = bil[1:]
for _ in range(3):
b_max_fitness = float('-Inf')
for b in bil:
for op in op_list:
curr_fitness = fitness(expr, op, b)
if curr_fitness > b_max_fitness:
b_max_fitness = curr_fitness
curr_op_max = op
curr_b_max = b
expr += f' {curr_op_max} {curr_b_max}'
bil.remove(curr_b_max)
points = calc_points(expr)
return expr, points
def main():
points = 0
solves = []
for a in range(1, 14):
for b in range(1, 14):
for c in range(1, 14):
for d in range(1, 14):
bil = [a, b, c, d]
expre, point = solve(bil)
if expre not in solves:
solves.append((expre, point))
points += point
print(
f'{(a - 1) * 13 * 13 * 13 + (b - 1) * 13 * 13 + (c - 1) * 13 + d} : {expre}'
)
avg_points = points / 13 ** 4
print(f'Average points : {avg_points}')
count24 = 0
for expr in solves:
res = eval(expr[0])
if res == 24:
count24 += 1
print(f'24 Count : {count24}')
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
points_dict = {'+': 5, '-': 4, '*': 3, '/': 2, '(': -1}
op_list = ['+', '-', '*', '/']
def fitness(x1, op, x2):
try:
hasil = eval(f'{x1} {op} {x2}')
diff = points_dict[op] - abs(24 - hasil)
if abs(24 - hasil) == 0:
return diff + 10
else:
return diff
except ZeroDivisionError:
return float('-inf')
def calc_points(expr):
points = 0
hasil = eval(expr)
points -= abs(24 - hasil)
for c in expr:
points += points_dict.get(c, 0)
return points
def solve(bil):
bil.sort(reverse=True)
expr = str(bil[0])
bil = bil[1:]
for _ in range(3):
b_max_fitness = float('-Inf')
for b in bil:
for op in op_list:
curr_fitness = fitness(expr, op, b)
if curr_fitness > b_max_fitness:
b_max_fitness = curr_fitness
curr_op_max = op
curr_b_max = b
expr += f' {curr_op_max} {curr_b_max}'
bil.remove(curr_b_max)
points = calc_points(expr)
return expr, points
def main():
points = 0
solves = []
for a in range(1, 14):
for b in range(1, 14):
for c in range(1, 14):
for d in range(1, 14):
bil = [a, b, c, d]
expre, point = solve(bil)
if expre not in solves:
solves.append((expre, point))
points += point
print(
f'{(a - 1) * 13 * 13 * 13 + (b - 1) * 13 * 13 + (c - 1) * 13 + d} : {expre}'
)
avg_points = points / 13 ** 4
print(f'Average points : {avg_points}')
count24 = 0
for expr in solves:
res = eval(expr[0])
if res == 24:
count24 += 1
print(f'24 Count : {count24}')
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
points_dict = {
'+': 5,
'-': 4,
'*': 3,
'/': 2,
'(': -1,
}
op_list = ['+','-','*','/']
def fitness(x1,op,x2):
#Mengembalikan point dari penyambungan expresi dengan operasi dan bilangan berikutnya
try:
hasil = eval(f"{x1} {op} {x2}")
diff = points_dict[op] - abs(24-hasil)
if (abs(24-hasil) == 0):
return diff+10
else:
return diff
except ZeroDivisionError:
return float("-inf")
def calc_points(expr):
points = 0
hasil = eval(expr)
points -= abs(24-hasil)
for c in expr:
points += points_dict.get(c,0)
return points
def solve(bil):
bil.sort(reverse=True)
expr = str(bil[0])
bil = bil[1:]
for _ in range(3):
b_max_fitness = float("-Inf")
for b in bil:
for op in op_list:
curr_fitness = fitness(expr,op,b)
if curr_fitness > b_max_fitness:
b_max_fitness = curr_fitness
curr_op_max = op
curr_b_max = b
expr += f" {curr_op_max} {curr_b_max}"
bil.remove(curr_b_max)
points = calc_points(expr)
# print(f"{expr} ~ Points: {points}")
return (expr,points)
def main():
# bil = [int(c) for c in input("Masukkan 4 angka dipisahkan spasi:").strip().split()]
points = 0
solves = []
for a in range(1,14):
for b in range(1,14):
for c in range(1,14):
for d in range(1,14):
bil = [a,b,c,d]
expre,point = solve(bil)
if expre not in solves:
solves.append((expre,point))
points += point
print(f"{(a-1)*13*13*13+(b-1)*13*13+(c-1)*13+d} : {expre}")
avg_points = points/(13**4)
print(f"Average points : {avg_points}")
count24 = 0
for expr in solves:
res = eval(expr[0])
if res==24:
count24 += 1
print(f"24 Count : {count24}")
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "c420fb855fbf5691798eadca476b6eccec4aee57",
"index": 7409,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef calc_points(expr):\n points = 0\n hasil = eval(expr)\n points -= abs(24 - hasil)\n for c in expr:\n points += points_dict.get(c, 0)\n return points\n\n\ndef solve(bil):\n bil.sort(reverse=True)\n expr = str(bil[0])\n bil = bil[1:]\n for _ in range(3):\n b_max_fitness = float('-Inf')\n for b in bil:\n for op in op_list:\n curr_fitness = fitness(expr, op, b)\n if curr_fitness > b_max_fitness:\n b_max_fitness = curr_fitness\n curr_op_max = op\n curr_b_max = b\n expr += f' {curr_op_max} {curr_b_max}'\n bil.remove(curr_b_max)\n points = calc_points(expr)\n return expr, points\n\n\ndef main():\n points = 0\n solves = []\n for a in range(1, 14):\n for b in range(1, 14):\n for c in range(1, 14):\n for d in range(1, 14):\n bil = [a, b, c, d]\n expre, point = solve(bil)\n if expre not in solves:\n solves.append((expre, point))\n points += point\n print(\n f'{(a - 1) * 13 * 13 * 13 + (b - 1) * 13 * 13 + (c - 1) * 13 + d} : {expre}'\n )\n avg_points = points / 13 ** 4\n print(f'Average points : {avg_points}')\n count24 = 0\n for expr in solves:\n res = eval(expr[0])\n if res == 24:\n count24 += 1\n print(f'24 Count : {count24}')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef fitness(x1, op, x2):\n try:\n hasil = eval(f'{x1} {op} {x2}')\n diff = points_dict[op] - abs(24 - hasil)\n if abs(24 - hasil) == 0:\n return diff + 10\n else:\n return diff\n except ZeroDivisionError:\n return float('-inf')\n\n\ndef calc_points(expr):\n points = 0\n hasil = eval(expr)\n points -= abs(24 - hasil)\n for c in expr:\n points += points_dict.get(c, 0)\n return points\n\n\ndef solve(bil):\n bil.sort(reverse=True)\n expr = str(bil[0])\n bil = bil[1:]\n for _ in range(3):\n b_max_fitness = float('-Inf')\n for b in bil:\n for op in op_list:\n curr_fitness = fitness(expr, op, b)\n if curr_fitness > b_max_fitness:\n b_max_fitness = curr_fitness\n curr_op_max = op\n curr_b_max = b\n expr += f' {curr_op_max} {curr_b_max}'\n bil.remove(curr_b_max)\n points = calc_points(expr)\n return expr, points\n\n\ndef main():\n points = 0\n solves = []\n for a in range(1, 14):\n for b in range(1, 14):\n for c in range(1, 14):\n for d in range(1, 14):\n bil = [a, b, c, d]\n expre, point = solve(bil)\n if expre not in solves:\n solves.append((expre, point))\n points += point\n print(\n f'{(a - 1) * 13 * 13 * 13 + (b - 1) * 13 * 13 + (c - 1) * 13 + d} : {expre}'\n )\n avg_points = points / 13 ** 4\n print(f'Average points : {avg_points}')\n count24 = 0\n for expr in solves:\n res = eval(expr[0])\n if res == 24:\n count24 += 1\n print(f'24 Count : {count24}')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "points_dict = {'+': 5, '-': 4, '*': 3, '/': 2, '(': -1}\nop_list = ['+', '-', '*', '/']\n\n\ndef fitness(x1, op, x2):\n try:\n hasil = eval(f'{x1} {op} {x2}')\n diff = points_dict[op] - abs(24 - hasil)\n if abs(24 - hasil) == 0:\n return diff + 10\n else:\n return diff\n except ZeroDivisionError:\n return float('-inf')\n\n\ndef calc_points(expr):\n points = 0\n hasil = eval(expr)\n points -= abs(24 - hasil)\n for c in expr:\n points += points_dict.get(c, 0)\n return points\n\n\ndef solve(bil):\n bil.sort(reverse=True)\n expr = str(bil[0])\n bil = bil[1:]\n for _ in range(3):\n b_max_fitness = float('-Inf')\n for b in bil:\n for op in op_list:\n curr_fitness = fitness(expr, op, b)\n if curr_fitness > b_max_fitness:\n b_max_fitness = curr_fitness\n curr_op_max = op\n curr_b_max = b\n expr += f' {curr_op_max} {curr_b_max}'\n bil.remove(curr_b_max)\n points = calc_points(expr)\n return expr, points\n\n\ndef main():\n points = 0\n solves = []\n for a in range(1, 14):\n for b in range(1, 14):\n for c in range(1, 14):\n for d in range(1, 14):\n bil = [a, b, c, d]\n expre, point = solve(bil)\n if expre not in solves:\n solves.append((expre, point))\n points += point\n print(\n f'{(a - 1) * 13 * 13 * 13 + (b - 1) * 13 * 13 + (c - 1) * 13 + d} : {expre}'\n )\n avg_points = points / 13 ** 4\n print(f'Average points : {avg_points}')\n count24 = 0\n for expr in solves:\n res = eval(expr[0])\n if res == 24:\n count24 += 1\n print(f'24 Count : {count24}')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "points_dict = {\n '+': 5,\n '-': 4,\n '*': 3,\n '/': 2,\n '(': -1,\n}\n\nop_list = ['+','-','*','/']\n\ndef fitness(x1,op,x2):\n #Mengembalikan point dari penyambungan expresi dengan operasi dan bilangan berikutnya\n try:\n hasil = eval(f\"{x1} {op} {x2}\")\n diff = points_dict[op] - abs(24-hasil)\n if (abs(24-hasil) == 0):\n return diff+10\n else:\n return diff\n except ZeroDivisionError:\n return float(\"-inf\")\n\ndef calc_points(expr):\n points = 0\n hasil = eval(expr)\n points -= abs(24-hasil)\n for c in expr:\n points += points_dict.get(c,0)\n return points\n\ndef solve(bil):\n bil.sort(reverse=True)\n\n expr = str(bil[0])\n bil = bil[1:]\n for _ in range(3):\n b_max_fitness = float(\"-Inf\")\n for b in bil:\n for op in op_list:\n curr_fitness = fitness(expr,op,b)\n if curr_fitness > b_max_fitness:\n b_max_fitness = curr_fitness\n curr_op_max = op\n curr_b_max = b\n expr += f\" {curr_op_max} {curr_b_max}\"\n bil.remove(curr_b_max)\n\n points = calc_points(expr)\n # print(f\"{expr} ~ Points: {points}\")\n return (expr,points)\n\ndef main():\n # bil = [int(c) for c in input(\"Masukkan 4 angka dipisahkan spasi:\").strip().split()]\n points = 0\n solves = []\n for a in range(1,14):\n for b in range(1,14):\n for c in range(1,14):\n for d in range(1,14):\n bil = [a,b,c,d]\n expre,point = solve(bil)\n if expre not in solves:\n solves.append((expre,point))\n points += point\n print(f\"{(a-1)*13*13*13+(b-1)*13*13+(c-1)*13+d} : {expre}\")\n\n avg_points = points/(13**4)\n print(f\"Average points : {avg_points}\")\n\n count24 = 0\n for expr in solves:\n res = eval(expr[0])\n if res==24:\n count24 += 1\n print(f\"24 Count : {count24}\")\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
0,
3,
5,
6,
7
]
}
|
[
0,
3,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
admin.site.register(CarouselImage)
admin.site.register(Budget)
<|reserved_special_token_1|>
from django.contrib import admin
from .models import CarouselImage, Budget
admin.site.register(CarouselImage)
admin.site.register(Budget)
|
flexible
|
{
"blob_id": "98fb70e1911522365292c86603481656e7b86d73",
"index": 8337,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nadmin.site.register(CarouselImage)\nadmin.site.register(Budget)\n",
"step-3": "from django.contrib import admin\nfrom .models import CarouselImage, Budget\nadmin.site.register(CarouselImage)\nadmin.site.register(Budget)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class BaselineModule(pl.LightningModule):
<|reserved_special_token_0|>
def _get_hidden_size(self, input_size):
self.backbone(torch.randn(1, 3, input_size, input_size))
def forward(self, input_tensor):
hidden = self.backbone(input_tensor)
return self.classifier(hidden.squeeze())
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BaselineModule(pl.LightningModule):
<|reserved_special_token_0|>
def _get_hidden_size(self, input_size):
self.backbone(torch.randn(1, 3, input_size, input_size))
def forward(self, input_tensor):
hidden = self.backbone(input_tensor)
return self.classifier(hidden.squeeze())
def training_step(self, batch, batch_idx):
input_tensor, target = batch
logits = self(input_tensor)
loss = F.cross_entropy(logits, target)
self.train_acc(F.softmax(logits, 1), target)
self.log('train_acc', self.train_acc, on_epoch=True)
self.log('train_loss', loss, on_epoch=True)
return loss
def validation_step(self, batch, batch_idx):
input_tensor, target = batch
logits = self(input_tensor)
loss = F.cross_entropy(logits, target)
self.val_acc(F.softmax(logits, 1), target)
self.log('val_acc', self.val_acc, on_epoch=True)
self.log('val_loss', loss, on_epoch=True)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BaselineModule(pl.LightningModule):
<|reserved_special_token_0|>
def _get_hidden_size(self, input_size):
self.backbone(torch.randn(1, 3, input_size, input_size))
def forward(self, input_tensor):
hidden = self.backbone(input_tensor)
return self.classifier(hidden.squeeze())
def training_step(self, batch, batch_idx):
input_tensor, target = batch
logits = self(input_tensor)
loss = F.cross_entropy(logits, target)
self.train_acc(F.softmax(logits, 1), target)
self.log('train_acc', self.train_acc, on_epoch=True)
self.log('train_loss', loss, on_epoch=True)
return loss
def validation_step(self, batch, batch_idx):
input_tensor, target = batch
logits = self(input_tensor)
loss = F.cross_entropy(logits, target)
self.val_acc(F.softmax(logits, 1), target)
self.log('val_acc', self.val_acc, on_epoch=True)
self.log('val_loss', loss, on_epoch=True)
def configure_optimizers(self):
optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)
return optimizer
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BaselineModule(pl.LightningModule):
def __init__(self, input_size, num_classes=4, lr=0.0003):
super().__init__()
self.backbone = nn.Sequential(nn.Conv2d(3, 64, 5), nn.BatchNorm2d(
64), nn.ReLU(), nn.MaxPool2d(3, 2), nn.Conv2d(64, 256, 5), nn.
BatchNorm2d(256), nn.ReLU(), nn.MaxPool2d(3, 2), nn.Conv2d(256,
512, 5), nn.BatchNorm2d(512), nn.ReLU(), nn.MaxPool2d(3, 2), nn
.AdaptiveAvgPool2d((1, 1)))
hidden_size = self._get_hidden_size(input_size)
self.classifier = nn.Linear(hidden_size, num_classes)
self.lr = lr
self.train_acc = torchmetrics.Accuracy()
self.val_acc = torchmetrics.Accuracy()
def _get_hidden_size(self, input_size):
self.backbone(torch.randn(1, 3, input_size, input_size))
def forward(self, input_tensor):
hidden = self.backbone(input_tensor)
return self.classifier(hidden.squeeze())
def training_step(self, batch, batch_idx):
input_tensor, target = batch
logits = self(input_tensor)
loss = F.cross_entropy(logits, target)
self.train_acc(F.softmax(logits, 1), target)
self.log('train_acc', self.train_acc, on_epoch=True)
self.log('train_loss', loss, on_epoch=True)
return loss
def validation_step(self, batch, batch_idx):
input_tensor, target = batch
logits = self(input_tensor)
loss = F.cross_entropy(logits, target)
self.val_acc(F.softmax(logits, 1), target)
self.log('val_acc', self.val_acc, on_epoch=True)
self.log('val_loss', loss, on_epoch=True)
def configure_optimizers(self):
optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)
return optimizer
<|reserved_special_token_1|>
#!/usr/bin/env python3
import torch
import torch.nn as nn
import torch.nn.functional as F
import pytorch_lightning as pl
import torchmetrics
class BaselineModule(pl.LightningModule):
    """CBR-Tiny image classifier (arXiv:1902.07208) as a Lightning module."""

    def __init__(self, input_size, num_classes=4, lr=3e-4):
        """
        Args:
            input_size: height/width of the square, 3-channel input images.
            num_classes: number of target classes.
            lr: Adam learning rate.
        """
        super().__init__()

        self.backbone = nn.Sequential(  # CBR-Tiny arXiv:1902.07208
            nn.Conv2d(3, 64, 5),
            nn.BatchNorm2d(64),
            nn.ReLU(),
            nn.MaxPool2d(3, 2),
            nn.Conv2d(64, 256, 5),
            nn.BatchNorm2d(256),
            nn.ReLU(),
            nn.MaxPool2d(3, 2),
            nn.Conv2d(256, 512, 5),
            nn.BatchNorm2d(512),
            nn.ReLU(),
            nn.MaxPool2d(3, 2),
            nn.AdaptiveAvgPool2d((1, 1)),
        )

        hidden_size = self._get_hidden_size(input_size)

        self.classifier = nn.Linear(hidden_size, num_classes)
        self.lr = lr

        self.train_acc = torchmetrics.Accuracy()
        self.val_acc = torchmetrics.Accuracy()

    def _get_hidden_size(self, input_size):
        """Infer the backbone's output feature count via a dummy forward pass.

        Bug fix: the original forgot the ``return`` statement, so this
        yielded ``None`` and ``nn.Linear(None, num_classes)`` failed at
        construction time.
        """
        with torch.no_grad():
            out = self.backbone(torch.randn(1, 3, input_size, input_size))
        # backbone ends in AdaptiveAvgPool2d((1, 1)), so the feature size is
        # the channel dimension.
        return out.shape[1]

    def forward(self, input_tensor):
        hidden = self.backbone(input_tensor)
        # NOTE(review): squeeze() also drops the batch dim when batch size
        # is 1 -- confirm callers never feed single-sample batches, or use
        # hidden.flatten(1) instead.
        return self.classifier(hidden.squeeze())

    def training_step(self, batch, batch_idx):
        """One optimization step: cross-entropy loss, accuracy logging."""
        input_tensor, target = batch

        logits = self(input_tensor)
        loss = F.cross_entropy(logits, target)

        self.train_acc(F.softmax(logits, 1), target)
        self.log('train_acc', self.train_acc, on_epoch=True)
        self.log('train_loss', loss, on_epoch=True)

        return loss

    def validation_step(self, batch, batch_idx):
        """Validation loss/accuracy logging; returns nothing by design."""
        input_tensor, target = batch

        logits = self(input_tensor)
        loss = F.cross_entropy(logits, target)

        self.val_acc(F.softmax(logits, 1), target)
        self.log('val_acc', self.val_acc, on_epoch=True)
        self.log('val_loss', loss, on_epoch=True)

    def configure_optimizers(self):
        """Plain Adam over all parameters with the configured learning rate."""
        optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)
        return optimizer
|
flexible
|
{
"blob_id": "7d43b20ebee2f4cd509bbd896c9e6ae8b2c4b354",
"index": 7128,
"step-1": "<mask token>\n\n\nclass BaselineModule(pl.LightningModule):\n <mask token>\n\n def _get_hidden_size(self, input_size):\n self.backbone(torch.randn(1, 3, input_size, input_size))\n\n def forward(self, input_tensor):\n hidden = self.backbone(input_tensor)\n return self.classifier(hidden.squeeze())\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass BaselineModule(pl.LightningModule):\n <mask token>\n\n def _get_hidden_size(self, input_size):\n self.backbone(torch.randn(1, 3, input_size, input_size))\n\n def forward(self, input_tensor):\n hidden = self.backbone(input_tensor)\n return self.classifier(hidden.squeeze())\n\n def training_step(self, batch, batch_idx):\n input_tensor, target = batch\n logits = self(input_tensor)\n loss = F.cross_entropy(logits, target)\n self.train_acc(F.softmax(logits, 1), target)\n self.log('train_acc', self.train_acc, on_epoch=True)\n self.log('train_loss', loss, on_epoch=True)\n return loss\n\n def validation_step(self, batch, batch_idx):\n input_tensor, target = batch\n logits = self(input_tensor)\n loss = F.cross_entropy(logits, target)\n self.val_acc(F.softmax(logits, 1), target)\n self.log('val_acc', self.val_acc, on_epoch=True)\n self.log('val_loss', loss, on_epoch=True)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass BaselineModule(pl.LightningModule):\n <mask token>\n\n def _get_hidden_size(self, input_size):\n self.backbone(torch.randn(1, 3, input_size, input_size))\n\n def forward(self, input_tensor):\n hidden = self.backbone(input_tensor)\n return self.classifier(hidden.squeeze())\n\n def training_step(self, batch, batch_idx):\n input_tensor, target = batch\n logits = self(input_tensor)\n loss = F.cross_entropy(logits, target)\n self.train_acc(F.softmax(logits, 1), target)\n self.log('train_acc', self.train_acc, on_epoch=True)\n self.log('train_loss', loss, on_epoch=True)\n return loss\n\n def validation_step(self, batch, batch_idx):\n input_tensor, target = batch\n logits = self(input_tensor)\n loss = F.cross_entropy(logits, target)\n self.val_acc(F.softmax(logits, 1), target)\n self.log('val_acc', self.val_acc, on_epoch=True)\n self.log('val_loss', loss, on_epoch=True)\n\n def configure_optimizers(self):\n optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)\n return optimizer\n",
"step-4": "<mask token>\n\n\nclass BaselineModule(pl.LightningModule):\n\n def __init__(self, input_size, num_classes=4, lr=0.0003):\n super().__init__()\n self.backbone = nn.Sequential(nn.Conv2d(3, 64, 5), nn.BatchNorm2d(\n 64), nn.ReLU(), nn.MaxPool2d(3, 2), nn.Conv2d(64, 256, 5), nn.\n BatchNorm2d(256), nn.ReLU(), nn.MaxPool2d(3, 2), nn.Conv2d(256,\n 512, 5), nn.BatchNorm2d(512), nn.ReLU(), nn.MaxPool2d(3, 2), nn\n .AdaptiveAvgPool2d((1, 1)))\n hidden_size = self._get_hidden_size(input_size)\n self.classifier = nn.Linear(hidden_size, num_classes)\n self.lr = lr\n self.train_acc = torchmetrics.Accuracy()\n self.val_acc = torchmetrics.Accuracy()\n\n def _get_hidden_size(self, input_size):\n self.backbone(torch.randn(1, 3, input_size, input_size))\n\n def forward(self, input_tensor):\n hidden = self.backbone(input_tensor)\n return self.classifier(hidden.squeeze())\n\n def training_step(self, batch, batch_idx):\n input_tensor, target = batch\n logits = self(input_tensor)\n loss = F.cross_entropy(logits, target)\n self.train_acc(F.softmax(logits, 1), target)\n self.log('train_acc', self.train_acc, on_epoch=True)\n self.log('train_loss', loss, on_epoch=True)\n return loss\n\n def validation_step(self, batch, batch_idx):\n input_tensor, target = batch\n logits = self(input_tensor)\n loss = F.cross_entropy(logits, target)\n self.val_acc(F.softmax(logits, 1), target)\n self.log('val_acc', self.val_acc, on_epoch=True)\n self.log('val_loss', loss, on_epoch=True)\n\n def configure_optimizers(self):\n optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)\n return optimizer\n",
"step-5": "#!/usr/bin/env python3\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport pytorch_lightning as pl\nimport torchmetrics\n\nclass BaselineModule(pl.LightningModule):\n def __init__(self, input_size, num_classes=4, lr=3e-4):\n super().__init__()\n\n self.backbone = nn.Sequential( # CBR-Tiny arXiv:1902.07208\n nn.Conv2d(3, 64, 5),\n nn.BatchNorm2d(64),\n nn.ReLU(),\n nn.MaxPool2d(3, 2),\n nn.Conv2d(64, 256, 5),\n nn.BatchNorm2d(256),\n nn.ReLU(),\n nn.MaxPool2d(3, 2),\n nn.Conv2d(256, 512, 5),\n nn.BatchNorm2d(512),\n nn.ReLU(),\n nn.MaxPool2d(3, 2),\n nn.AdaptiveAvgPool2d((1, 1)),\n )\n\n hidden_size = self._get_hidden_size(input_size)\n\n self.classifier = nn.Linear(hidden_size, num_classes)\n self.lr = lr\n\n self.train_acc = torchmetrics.Accuracy()\n self.val_acc = torchmetrics.Accuracy()\n\n def _get_hidden_size(self, input_size):\n self.backbone(torch.randn(1, 3, input_size, input_size))\n\n def forward(self, input_tensor):\n hidden = self.backbone(input_tensor)\n return self.classifier(hidden.squeeze())\n\n def training_step(self, batch, batch_idx):\n input_tensor, target = batch\n\n logits = self(input_tensor)\n loss = F.cross_entropy(logits, target)\n\n self.train_acc(F.softmax(logits, 1), target)\n self.log('train_acc', self.train_acc, on_epoch=True)\n self.log('train_loss', loss, on_epoch=True)\n\n return loss\n\n def validation_step(self, batch, batch_idx):\n input_tensor, target = batch\n\n logits = self(input_tensor)\n loss = F.cross_entropy(logits, target)\n\n self.val_acc(F.softmax(logits, 1), target)\n self.log('val_acc', self.val_acc, on_epoch=True)\n self.log('val_loss', loss, on_epoch=True)\n\n\n def configure_optimizers(self):\n optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)\n return optimizer\n",
"step-ids": [
3,
5,
6,
7,
9
]
}
|
[
3,
5,
6,
7,
9
] |
from abc import ABC
class Parent(ABC):
    """Mixin supplying a printer for the ``_words`` attribute."""

    def printing(self):
        print(self._words)


class Parent2(ABC):
    """Mixin contributing the message template ``form``."""

    form = "Parent2 Setup: %s"


class Child(Parent, Parent2):
    """Combines both mixins: formats *words* with Parent2's template and
    prints the result through Parent's printer during construction."""

    def __init__(self, words):
        self._words = self.form % words
        super().printing()


if __name__ == "__main__":
    Child("hello world")
|
normal
|
{
"blob_id": "9ba60270a4afcf242de53692afd8ebff7d9b37a7",
"index": 4361,
"step-1": "<mask token>\n\n\nclass Parent2(ABC):\n form = 'Parent2 Setup: %s'\n\n\nclass Child(Parent, Parent2):\n\n def __init__(self, words):\n self._words = self.form % words\n super(Child, self).printing()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Parent(ABC):\n\n def printing(self):\n print(self._words)\n\n\nclass Parent2(ABC):\n form = 'Parent2 Setup: %s'\n\n\nclass Child(Parent, Parent2):\n\n def __init__(self, words):\n self._words = self.form % words\n super(Child, self).printing()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Parent(ABC):\n\n def printing(self):\n print(self._words)\n\n\nclass Parent2(ABC):\n form = 'Parent2 Setup: %s'\n\n\nclass Child(Parent, Parent2):\n\n def __init__(self, words):\n self._words = self.form % words\n super(Child, self).printing()\n\n\nif __name__ == '__main__':\n Child('hello world')\n",
"step-4": "from abc import ABC\n\n\nclass Parent(ABC):\n\n def printing(self):\n print(self._words)\n\n\nclass Parent2(ABC):\n form = 'Parent2 Setup: %s'\n\n\nclass Child(Parent, Parent2):\n\n def __init__(self, words):\n self._words = self.form % words\n super(Child, self).printing()\n\n\nif __name__ == '__main__':\n Child('hello world')\n",
"step-5": "from abc import ABC\n\nclass Parent(ABC):\n\t\n\tdef printing(self):\n\t\tprint(self._words)\n\nclass Parent2(ABC):\n\tform = \"Parent2 Setup: %s\"\n\nclass Child(Parent, Parent2):\n\t\n\tdef __init__(self, words):\n\t\tself._words = self.form % words\n\t\tsuper(Child, self).printing()\n\t\t\nif __name__ == \"__main__\":\n\tChild(\"hello world\")\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('main_app', '0001_initial')]
operations = [migrations.AlterField(model_name='tea', name=
'caffeineLvl', field=models.PositiveIntegerField(default=1,
validators=[django.core.validators.MaxValueValidator(5), django.
core.validators.MinValueValidator(1)])), migrations.AlterField(
model_name='tea', name='quantPerBox', field=models.
PositiveIntegerField(default=1, validators=[django.core.validators.
MaxValueValidator(100), django.core.validators.MinValueValidator(1)
])), migrations.AlterField(model_name='tea', name='quantity', field
=models.PositiveIntegerField(default=1, validators=[django.core.
validators.MaxValueValidator(100), django.core.validators.
MinValueValidator(1)]))]
<|reserved_special_token_1|>
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('main_app', '0001_initial')]
operations = [migrations.AlterField(model_name='tea', name=
'caffeineLvl', field=models.PositiveIntegerField(default=1,
validators=[django.core.validators.MaxValueValidator(5), django.
core.validators.MinValueValidator(1)])), migrations.AlterField(
model_name='tea', name='quantPerBox', field=models.
PositiveIntegerField(default=1, validators=[django.core.validators.
MaxValueValidator(100), django.core.validators.MinValueValidator(1)
])), migrations.AlterField(model_name='tea', name='quantity', field
=models.PositiveIntegerField(default=1, validators=[django.core.
validators.MaxValueValidator(100), django.core.validators.
MinValueValidator(1)]))]
<|reserved_special_token_1|>
# Generated by Django 3.1.6 on 2021-02-05 00:27
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for main_app's Tea model.

    Constrains the quantity fields to positive, bounded integers:
    caffeineLvl in [1, 5]; quantPerBox and quantity in [1, 100].
    """

    dependencies = [
        ('main_app', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='tea',
            name='caffeineLvl',
            field=models.PositiveIntegerField(default=1, validators=[django.core.validators.MaxValueValidator(5), django.core.validators.MinValueValidator(1)]),
        ),
        migrations.AlterField(
            model_name='tea',
            name='quantPerBox',
            field=models.PositiveIntegerField(default=1, validators=[django.core.validators.MaxValueValidator(100), django.core.validators.MinValueValidator(1)]),
        ),
        migrations.AlterField(
            model_name='tea',
            name='quantity',
            field=models.PositiveIntegerField(default=1, validators=[django.core.validators.MaxValueValidator(100), django.core.validators.MinValueValidator(1)]),
        ),
    ]
|
flexible
|
{
"blob_id": "db920f4aadfb53bb26c5ba1fb182f12b95e14a2f",
"index": 7899,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('main_app', '0001_initial')]\n operations = [migrations.AlterField(model_name='tea', name=\n 'caffeineLvl', field=models.PositiveIntegerField(default=1,\n validators=[django.core.validators.MaxValueValidator(5), django.\n core.validators.MinValueValidator(1)])), migrations.AlterField(\n model_name='tea', name='quantPerBox', field=models.\n PositiveIntegerField(default=1, validators=[django.core.validators.\n MaxValueValidator(100), django.core.validators.MinValueValidator(1)\n ])), migrations.AlterField(model_name='tea', name='quantity', field\n =models.PositiveIntegerField(default=1, validators=[django.core.\n validators.MaxValueValidator(100), django.core.validators.\n MinValueValidator(1)]))]\n",
"step-4": "import django.core.validators\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('main_app', '0001_initial')]\n operations = [migrations.AlterField(model_name='tea', name=\n 'caffeineLvl', field=models.PositiveIntegerField(default=1,\n validators=[django.core.validators.MaxValueValidator(5), django.\n core.validators.MinValueValidator(1)])), migrations.AlterField(\n model_name='tea', name='quantPerBox', field=models.\n PositiveIntegerField(default=1, validators=[django.core.validators.\n MaxValueValidator(100), django.core.validators.MinValueValidator(1)\n ])), migrations.AlterField(model_name='tea', name='quantity', field\n =models.PositiveIntegerField(default=1, validators=[django.core.\n validators.MaxValueValidator(100), django.core.validators.\n MinValueValidator(1)]))]\n",
"step-5": "# Generated by Django 3.1.6 on 2021-02-05 00:27\n\nimport django.core.validators\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('main_app', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='tea',\n name='caffeineLvl',\n field=models.PositiveIntegerField(default=1, validators=[django.core.validators.MaxValueValidator(5), django.core.validators.MinValueValidator(1)]),\n ),\n migrations.AlterField(\n model_name='tea',\n name='quantPerBox',\n field=models.PositiveIntegerField(default=1, validators=[django.core.validators.MaxValueValidator(100), django.core.validators.MinValueValidator(1)]),\n ),\n migrations.AlterField(\n model_name='tea',\n name='quantity',\n field=models.PositiveIntegerField(default=1, validators=[django.core.validators.MaxValueValidator(100), django.core.validators.MinValueValidator(1)]),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from Graph import *
from PrioQueue import *
from GShortestPath import *
from GSpanTree import *
from User import *
# Sentinel weight meaning "no edge / unreachable".
infinity = float("inf")
# find_by_word below looks places up by keyword: once a table keyed on some
# attribute exists, user input is matched against it by keyword.
# NOTE(review): the original comment promised an iterator produced by
# ``yield``, but find_by_word actually returns a plain list -- verify which
# contract downstream code expects.
def find_by_word(lst, word):
    """Find place names in ``lst`` matching the user input ``word``.

    Exact matches win outright.  Otherwise we fall back to fuzzy matching:
    every proper substring of ``word`` is generated, and the longest
    (most similar) slices are tested against each candidate name, so
    closer matches are appended first.  A candidate may appear several
    times when several long slices of ``word`` occur in it.

    :param lst: iterable of known place-name strings
    :param word: the (possibly misspelled) name typed by the user
    :return: list of matching names (may contain duplicates)
    :raises ValueError: if ``word`` is longer than 20 characters -- we
        consider that impossible for a real city/town name
    """
    ans = []
    for x in lst:
        if word == x:
            ans.append(x)
    if len(word) > 20:
        # Bug fix: the original raised the misspelled name "ValuError",
        # which is a NameError at runtime instead of a ValueError.
        raise ValueError("in find_by_word, we don't think it's possible "
                         "for a city or a town to own a name longer than 20")
    # An exact hit means the user almost certainly typed the name correctly.
    if ans != []:
        return ans
    # Build every slice of word with length 0 .. len(word)-1; the loop order
    # guarantees the longest (most similar) slices end up at the back.
    slices = []
    for i in range(len(word)):
        for j in range(0, len(word) - i + 1):
            slices.append(word[j:j + i])
    # Test only the len(word)-1 longest slices against each candidate; place
    # names are short, so this brute force stays cheap.
    for x in lst:
        for i in range(1, len(word)):
            if slices[-i] in x:
                ans.append(x)
    return ans
# Valid landscape categories (historic culture, modern city, mountains,
# seaside, mixed); checked by landscape.set_category.
categorys = {"历史文化", "现代都市", "山区", "海景", "综合"}
# Duplicate of ``infinity`` above; kept because existing code references it.
infnum = float("inf")
class web:
    """A travel network.

    ``land_list`` holds ``landscape`` objects; vertex *i* of each of the
    three adjacency-list graphs corresponds to ``land_list[i]``:

    * ``graph_money`` -- edge weight is the money cost between two spots
    * ``graph_time``  -- edge weight is the travel time between two spots
    * ``graph_line``  -- edge weight is the line/hop length between spots
    """

    def __init__(self, lnum=0, land_list=None, graph_money=None,
                 graph_time=None, graph_line=None):
        # Bug fix: the old defaults ([] and three GraphAL() instances) were
        # evaluated once at def time and silently shared by every instance
        # constructed without arguments.  None sentinels give each instance
        # fresh containers while keeping the call signature compatible.
        self.graph_money = GraphAL() if graph_money is None else graph_money
        self.graph_time = GraphAL() if graph_time is None else graph_time
        self.graph_line = GraphAL() if graph_line is None else graph_line
        self.lnum = lnum
        self.land_list = [] if land_list is None else land_list

    def is_empty(self):
        """True when the network contains no landscape at all."""
        return self.lnum == 0

    def _get_name(self):
        """Return the names of all landscapes as a list."""
        if self.is_empty():
            # NOTE(review): WebLandsError is not defined in this module --
            # presumably provided by one of the star imports; verify.
            raise WebLandsError("in 'get_all_position'")
        namee = []
        for x in self.land_list:  # bug fix: was self.land_list() (TypeError)
            namee.append(x.name)
        return namee

    def lst_pos(self, land):
        """Vertex index of ``land`` inside the three graphs."""
        return self.land_list.index(land)

    def _get_position(self):
        """Return the positions of all landscapes as a list."""
        if self.is_empty():
            raise WebLandsError("in 'get_all_position'")
        positionn = []
        for x in self.land_list:  # bug fix: was self.land_list() (TypeError)
            positionn.append(x.position)
        return positionn

    def add_land(self, landscape):
        """Append a landscape and grow every graph by one vertex."""
        self.land_list.append(landscape)
        self.graph_money.add_vertex()
        self.graph_time.add_vertex()
        self.graph_line.add_vertex()
        self.lnum += 1

    # When money, time or line is never set, the two landscapes simply stay
    # unconnected in the corresponding graph.
    def set_all(self, land1, land2, money=infnum, time=infnum, line=1):
        """Set money, time and line weights between two landscapes at once.

        Bug fix: the original referenced the bare names graph_money /
        graph_time / graph_line (NameError) and called self.land_list()
        (TypeError -- it is a list attribute).
        """
        vi = self.land_list.index(land1)
        vj = self.land_list.index(land2)
        self.graph_money.add_edge(vi, vj, money)
        self.graph_time.add_edge(vi, vj, time)
        self.graph_line.add_edge(vi, vj, line)

    # Per-graph setters/getters; the shortest-path helpers below run
    # Dijkstra against whichever of the three graphs is relevant.
    def set_money(self, land1, land2, money):
        self.graph_money.add_edge(self.land_list.index(land1),
                                  self.land_list.index(land2), money)

    def get_money(self, land1, land2):
        a = self.graph_money.get_edge(self.land_list.index(land1),
                                      self.land_list.index(land2))
        return a

    def set_time(self, land1, land2, time):
        # Bug fix: the original wrote into self.graph_money, silently
        # overwriting money costs instead of recording the travel time.
        self.graph_time.add_edge(self.land_list.index(land1),
                                 self.land_list.index(land2), time)

    def get_time(self, land1, land2):
        a = self.graph_time.get_edge(self.land_list.index(land1),
                                     self.land_list.index(land2))
        return a

    def set_line(self, land1, land2, line):
        self.graph_line.add_edge(self.land_list.index(land1),
                                 self.land_list.index(land2), line)

    def get_line(self, land1, land2):
        a = self.graph_line.get_edge(self.land_list.index(land1),
                                     self.land_list.index(land2))
        return a
# --- shortest-path helpers (money / time / line): begin ---
def shortest_money(web, land1, land2):
    """Cheapest (money) route from ``land1`` to ``land2`` in ``web``.

    :return: ``(path_list, cost)`` where ``path_list`` holds vertex indices
        and ``cost`` is the total money along the shortest route.
    :raises ValueError: if start and destination are the same spot.
    """
    vi = web.lst_pos(land1)
    vj = web.lst_pos(land2)
    if vi == vj:
        # Bug fix: "ValuError" was a NameError at runtime.
        raise ValueError("in shortest_money, if the begining is the same "
                         "as the ending, you don't have to pay anything")
    path = dijkstra_shortest_paths(web.graph_money, vi)
    # NOTE(review): this walk assumes path[v] == (previous_vertex, length)
    # and only ever follows the predecessor of vj, so for routes longer
    # than two edges the returned list looks incomplete -- verify against
    # the dijkstra_shortest_paths implementation.
    path_list = [vi]
    while vi != path[vj][0]:
        path_list.append(path[vj][0])
        vi = path[vj][0]
    return path_list, path[vj][1]
def shortest_money_str(web, land1, land2):
    """Human-readable form of ``shortest_money``: the landscape names along
    the route joined with '->', plus the total money cost."""
    path, pay = shortest_money(web, land1, land2)
    pieces = [str(web.land_list[idx].name) for idx in path]
    pieces.append(land2.name)
    route = "->".join(pieces)
    return "所求的最短路money路径为", route, "总money代价为", pay
def shortest_time(web, land1, land2):
    """Fastest (time) route from ``land1`` to ``land2``; mirrors
    ``shortest_money``.

    :return: ``(path_list, cost)`` with vertex indices and total time.
    :raises ValueError: if start and destination are the same spot.
    """
    vi = web.lst_pos(land1)
    vj = web.lst_pos(land2)
    if vi == vj:
        # Bug fix: "ValuError" was a NameError at runtime.
        raise ValueError("in shortest_time, if the begining is the same "
                         "as the ending, you don't have to pay anything")
    # Bug fix: web.graph_time is an attribute, not a callable -- the
    # original web.graph_time() raised TypeError.
    path = dijkstra_shortest_paths(web.graph_time, vi)
    path_list = [vi]
    # Bug fix: the original condition "while vi != vj" never terminated,
    # because vi is only ever assigned path[vj][0]; aligned with the loop
    # used by shortest_money.
    while vi != path[vj][0]:
        path_list.append(path[vj][0])
        vi = path[vj][0]
    return path_list, path[vj][1]
def shortest_time_str(web, land1, land2):
    """Human-readable form of ``shortest_time``.

    Consistency fix: the original concatenated raw vertex indices; like
    ``shortest_money_str`` we now show the landscape names joined by '->'.
    """
    path, pay = shortest_time(web, land1, land2)
    str_ = ""
    for i in range(len(path)):
        str_ += str(web.land_list[path[i]].name)
        str_ += "->"
    str_ += land2.name
    return "所求的最短路time路径为", str_, "总time代价为", pay
def shortest_line(web, land1, land2):
    """Shortest route by line/hop length from ``land1`` to ``land2``;
    mirrors ``shortest_money``.

    :return: ``(path_list, cost)`` with vertex indices and total length.
    :raises ValueError: if start and destination are the same spot.
    """
    vi = web.lst_pos(land1)
    vj = web.lst_pos(land2)
    if vi == vj:
        # Bug fix: "ValuError" was a NameError at runtime.
        raise ValueError("in shortest_line, if the begining is the same "
                         "as the ending, you don't have to pay anything")
    # Bug fix: web.graph_line is an attribute, not a callable -- the
    # original web.graph_line() raised TypeError.
    path = dijkstra_shortest_paths(web.graph_line, vi)
    path_list = [vi]
    # Bug fix: "while vi != vj" never terminated; aligned with the loop
    # used by shortest_money.
    while vi != path[vj][0]:
        path_list.append(path[vj][0])
        vi = path[vj][0]
    return path_list, path[vj][1]
def shortest_line_str(web, land1, land2):
    """Human-readable form of ``shortest_line``.

    Bug fix: this was a copy-pasted second definition of
    ``shortest_time_str`` that shadowed the real time version and left the
    line variant unreachable -- its body calls shortest_line and its
    messages say "line", so it is renamed accordingly.  It also now shows
    landscape names instead of raw indices, like shortest_money_str.
    """
    path, pay = shortest_line(web, land1, land2)
    str_ = ""
    for i in range(len(path)):
        str_ += str(web.land_list[path[i]].name)
        str_ += "->"
    str_ += land2.name
    return "所求的最短路line路径为", str_, "总line代价为", pay
# --- shortest-path helpers (money / time / line): end ---
class landscape:
    """One scenic spot -- a vertex of the travel ``web``.

    Attributes:
        name:     display name of the spot
        position: numeric identifier of the spot in the graphs
        category: one of the module-level ``categorys`` values, or None
        hot:      popularity score (default 0)
    """

    # The former name()/position()/category()/hot() accessor methods were
    # removed: the same-named instance attributes set in __init__ shadowed
    # them on every instance, and their bodies read values that never
    # existed (self._name, self._position, the undefined global ``hot``),
    # so they could only ever raise.
    def __init__(self, name, position, category=None, hot=0):
        self.name = name
        self.position = position
        self.category = category
        self.hot = hot

    def set_category(self, sorts):
        """Assign a category after validating it against ``categorys``.

        :raises ValueError: if ``sorts`` is not a known category.
        """
        if sorts not in categorys:
            # Bug fix: "ValuError" was a NameError at runtime.
            raise ValueError("in set_category, we do not have {}".format(sorts))
        self.category = sorts
# For multi-destination trips: first build a sub-web holding all target
# landscapes with the existing methods, then (per the original plan) solve
# it as a minimum-spanning-tree problem with a Prim-style algorithm.
# NOTE(review): this function is unfinished -- the inner loop computes
# vi/vj and then does nothing with them, ``web.land_list()`` calls a list
# class attribute (TypeError), and ``lst_pos`` is not defined at module
# level (it is a method of ``web``).  Confirm the intended algorithm
# before relying on it.
def muti_aim_solve(land_list):
    sub_web = web()
    for x in land_list:
        sub_web.add_land(x)
    lanst = web.land_list().copy()
    for x in lanst:
        for y in lanst:
            if x == y:
                continue
            vi = lst_pos(web, x)
            vj = lst_pos(web, y)
# Module-level demo: build a tiny three-spot network for Shanghai/Hangzhou.
# NOTE(review): ``Edges`` is not defined in this file -- presumably it comes
# from one of the star imports at the top; verify it returns three
# GraphAL-compatible adjacency structures (money, time, line).
a, b, c = Edges([0, 2, 4])
lst = ["东方明珠", "西湖", "迪士尼"]
china = web(3, lst, a, b, c)
|
normal
|
{
"blob_id": "b5ec6e0fc4239a53a882b455a113eaac4db6cef5",
"index": 2331,
"step-1": "<mask token>\n\n\nclass web:\n\n def __init__(self, lnum=0, land_list=[], graph_money=GraphAL(),\n graph_time=GraphAL(), graph_line=GraphAL()):\n self.graph_money = graph_money\n self.graph_time = graph_time\n self.graph_line = graph_line\n self.lnum = lnum\n self.land_list = land_list\n\n def is_empty(self):\n return self.lnum == 0\n <mask token>\n\n def lst_pos(self, land):\n return self.land_list.index(land)\n\n def _get_position(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n positionn = []\n for x in self.land_list():\n positionn.append(x.position)\n return positionn\n\n def add_land(self, landscape):\n self.land_list.append(landscape)\n self.graph_money.add_vertex()\n self.graph_time.add_vertex()\n self.graph_line.add_vertex()\n self.lnum += 1\n <mask token>\n <mask token>\n\n def get_money(self, land1, land2):\n a = self.graph_money.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_time(self, land1, land2, time):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), time)\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n\n\nclass landscape:\n\n def __init__(self, name, position, category=None, hot=0):\n self.name = name\n self.position = position\n self.category = category\n self.hot = hot\n\n def position(self):\n return self._position\n\n def category(self):\n return self._category\n\n def name(self):\n return self._name\n\n def hot(self):\n return hot\n\n def set_category(self, sorts):\n if sorts not in categorys:\n raise ValuError('in set_category, we do not have {}'.format(sorts))\n self.category = sorts\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass web:\n\n def __init__(self, lnum=0, land_list=[], graph_money=GraphAL(),\n graph_time=GraphAL(), graph_line=GraphAL()):\n self.graph_money = graph_money\n self.graph_time = graph_time\n self.graph_line = graph_line\n self.lnum = lnum\n self.land_list = land_list\n\n def is_empty(self):\n return self.lnum == 0\n\n def _get_name(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n namee = []\n for x in self.land_list():\n namee.append(x.name)\n return namee\n\n def lst_pos(self, land):\n return self.land_list.index(land)\n\n def _get_position(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n positionn = []\n for x in self.land_list():\n positionn.append(x.position)\n return positionn\n\n def add_land(self, landscape):\n self.land_list.append(landscape)\n self.graph_money.add_vertex()\n self.graph_time.add_vertex()\n self.graph_line.add_vertex()\n self.lnum += 1\n\n def set_all(self, land1, land2, money=infnum, time=infnum, line=1):\n graph_money.add_edge(self.land_list().index(land1), self.land_list(\n ).index(land2), money)\n graph_time.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), time)\n graph_line.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), line)\n\n def set_money(self, land1, land2, money):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), money)\n\n def get_money(self, land1, land2):\n a = self.graph_money.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_time(self, land1, land2, time):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), time)\n\n def get_time(self, land1, land2):\n a = self.graph_time.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_line(self, land1, land2, line):\n self.graph_line.add_edge(self.land_list.index(land1), self.\n 
land_list.index(land2), line)\n\n def get_line(self, land1, land2):\n a = self.graph_line.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n\n<mask token>\n\n\nclass landscape:\n\n def __init__(self, name, position, category=None, hot=0):\n self.name = name\n self.position = position\n self.category = category\n self.hot = hot\n\n def position(self):\n return self._position\n\n def category(self):\n return self._category\n\n def name(self):\n return self._name\n\n def hot(self):\n return hot\n\n def set_category(self, sorts):\n if sorts not in categorys:\n raise ValuError('in set_category, we do not have {}'.format(sorts))\n self.category = sorts\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef find_by_word(lst, word):\n ans = []\n for x in lst:\n if word == x:\n ans.append(x)\n if len(word) > 20:\n raise ValuError(\n \"in find_by_word, we don't think it's possible for a city or a town to own a name longer than 20\"\n )\n if ans != []:\n return ans\n slices = []\n for i in range(len(word)):\n for j in range(0, len(word) - i + 1):\n slices.append(word[j:j + i])\n for x in lst:\n for i in range(1, len(word)):\n if slices[-i] in x:\n ans.append(x)\n return ans\n\n\n<mask token>\n\n\nclass web:\n\n def __init__(self, lnum=0, land_list=[], graph_money=GraphAL(),\n graph_time=GraphAL(), graph_line=GraphAL()):\n self.graph_money = graph_money\n self.graph_time = graph_time\n self.graph_line = graph_line\n self.lnum = lnum\n self.land_list = land_list\n\n def is_empty(self):\n return self.lnum == 0\n\n def _get_name(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n namee = []\n for x in self.land_list():\n namee.append(x.name)\n return namee\n\n def lst_pos(self, land):\n return self.land_list.index(land)\n\n def _get_position(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n positionn = []\n for x in self.land_list():\n positionn.append(x.position)\n return positionn\n\n def add_land(self, landscape):\n self.land_list.append(landscape)\n self.graph_money.add_vertex()\n self.graph_time.add_vertex()\n self.graph_line.add_vertex()\n self.lnum += 1\n\n def set_all(self, land1, land2, money=infnum, time=infnum, line=1):\n graph_money.add_edge(self.land_list().index(land1), self.land_list(\n ).index(land2), money)\n graph_time.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), time)\n graph_line.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), line)\n\n def set_money(self, land1, land2, money):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), money)\n\n def get_money(self, land1, land2):\n 
a = self.graph_money.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_time(self, land1, land2, time):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), time)\n\n def get_time(self, land1, land2):\n a = self.graph_time.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_line(self, land1, land2, line):\n self.graph_line.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), line)\n\n def get_line(self, land1, land2):\n a = self.graph_line.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n\ndef shortest_money(web, land1, land2):\n vi = web.lst_pos(land1)\n vj = web.lst_pos(land2)\n if vi == vj:\n raise ValuError(\n \"in shortest_money, if the begining is the same as the ending, you don't have to pay anything\"\n )\n path = dijkstra_shortest_paths(web.graph_money, vi)\n path_list = [vi]\n while vi != path[vj][0]:\n path_list.append(path[vj][0])\n vi = path[vj][0]\n return path_list, path[vj][1]\n\n\n<mask token>\n\n\nclass landscape:\n\n def __init__(self, name, position, category=None, hot=0):\n self.name = name\n self.position = position\n self.category = category\n self.hot = hot\n\n def position(self):\n return self._position\n\n def category(self):\n return self._category\n\n def name(self):\n return self._name\n\n def hot(self):\n return hot\n\n def set_category(self, sorts):\n if sorts not in categorys:\n raise ValuError('in set_category, we do not have {}'.format(sorts))\n self.category = sorts\n\n\ndef muti_aim_solve(land_list):\n sub_web = web()\n for x in land_list:\n sub_web.add_land(x)\n lanst = web.land_list().copy()\n for x in lanst:\n for y in lanst:\n if x == y:\n continue\n vi = lst_pos(web, x)\n vj = lst_pos(web, y)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef find_by_word(lst, word):\n ans = []\n for x in lst:\n if word == x:\n ans.append(x)\n if len(word) > 20:\n raise ValuError(\n \"in find_by_word, we don't think it's possible for a city or a town to own a name longer than 20\"\n )\n if ans != []:\n return ans\n slices = []\n for i in range(len(word)):\n for j in range(0, len(word) - i + 1):\n slices.append(word[j:j + i])\n for x in lst:\n for i in range(1, len(word)):\n if slices[-i] in x:\n ans.append(x)\n return ans\n\n\n<mask token>\n\n\nclass web:\n\n def __init__(self, lnum=0, land_list=[], graph_money=GraphAL(),\n graph_time=GraphAL(), graph_line=GraphAL()):\n self.graph_money = graph_money\n self.graph_time = graph_time\n self.graph_line = graph_line\n self.lnum = lnum\n self.land_list = land_list\n\n def is_empty(self):\n return self.lnum == 0\n\n def _get_name(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n namee = []\n for x in self.land_list():\n namee.append(x.name)\n return namee\n\n def lst_pos(self, land):\n return self.land_list.index(land)\n\n def _get_position(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n positionn = []\n for x in self.land_list():\n positionn.append(x.position)\n return positionn\n\n def add_land(self, landscape):\n self.land_list.append(landscape)\n self.graph_money.add_vertex()\n self.graph_time.add_vertex()\n self.graph_line.add_vertex()\n self.lnum += 1\n\n def set_all(self, land1, land2, money=infnum, time=infnum, line=1):\n graph_money.add_edge(self.land_list().index(land1), self.land_list(\n ).index(land2), money)\n graph_time.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), time)\n graph_line.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), line)\n\n def set_money(self, land1, land2, money):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), money)\n\n def get_money(self, land1, land2):\n 
a = self.graph_money.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_time(self, land1, land2, time):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), time)\n\n def get_time(self, land1, land2):\n a = self.graph_time.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_line(self, land1, land2, line):\n self.graph_line.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), line)\n\n def get_line(self, land1, land2):\n a = self.graph_line.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n\ndef shortest_money(web, land1, land2):\n vi = web.lst_pos(land1)\n vj = web.lst_pos(land2)\n if vi == vj:\n raise ValuError(\n \"in shortest_money, if the begining is the same as the ending, you don't have to pay anything\"\n )\n path = dijkstra_shortest_paths(web.graph_money, vi)\n path_list = [vi]\n while vi != path[vj][0]:\n path_list.append(path[vj][0])\n vi = path[vj][0]\n return path_list, path[vj][1]\n\n\ndef shortest_money_str(web, land1, land2):\n str_ = ''\n path, pay = shortest_money(web, land1, land2)\n for i in range(len(path)):\n str_ += str(web.land_list[path[i]].name)\n str_ += '->'\n str_ += land2.name\n return '所求的最短路money路径为', str_, '总money代价为', pay\n\n\ndef shortest_time(web, land1, land2):\n vi = web.lst_pos(land1)\n vj = web.lst_pos(land2)\n if vi == vj:\n raise ValuError(\n \"in shortest_time, if the begining is the same as the ending, you don't have to pay anything\"\n )\n path = dijkstra_shortest_paths(web.graph_time(), vi)\n path_list = [vi]\n while vi != vj:\n path_list.append(path[vj][0])\n vi = path[vj][0]\n return path_list, path[vj][1]\n\n\ndef shortest_time_str(web, land1, land2):\n str_ = ''\n path, pay = shortest_time(web, land1, land2)\n for i in range(len(path)):\n str_ += str(path[i])\n return '所求的最短路time路径为', str_, '总time代价为', pay\n\n\ndef shortest_line(web, land1, land2):\n 
vi = web.lst_pos(land1)\n vj = web.lst_pos(land2)\n if vi == vj:\n raise ValuError(\n \"in shortest_line, if the begining is the same as the ending, you don't have to pay anything\"\n )\n path = dijkstra_shortest_paths(web.graph_line(), vi)\n path_list = [vi]\n while vi != vj:\n path_list.append(path[vj][0])\n vi = path[vj][0]\n return path_list, path[vj][1]\n\n\ndef shortest_time_str(web, land1, land2):\n str_ = ''\n path, pay = shortest_line(web, land1, land2)\n for i in range(len(path)):\n str_ += str(path[i])\n return '所求的最短路line路径为', str_, '总line代价为', pay\n\n\nclass landscape:\n\n def __init__(self, name, position, category=None, hot=0):\n self.name = name\n self.position = position\n self.category = category\n self.hot = hot\n\n def position(self):\n return self._position\n\n def category(self):\n return self._category\n\n def name(self):\n return self._name\n\n def hot(self):\n return hot\n\n def set_category(self, sorts):\n if sorts not in categorys:\n raise ValuError('in set_category, we do not have {}'.format(sorts))\n self.category = sorts\n\n\ndef muti_aim_solve(land_list):\n sub_web = web()\n for x in land_list:\n sub_web.add_land(x)\n lanst = web.land_list().copy()\n for x in lanst:\n for y in lanst:\n if x == y:\n continue\n vi = lst_pos(web, x)\n vj = lst_pos(web, y)\n\n\n<mask token>\n",
"step-5": "from Graph import *\r\nfrom PrioQueue import *\r\nfrom GShortestPath import *\r\nfrom GSpanTree import *\r\nfrom User import *\r\ninfinity = float(\"inf\")\r\n\r\n\r\n# 这是根据关键字找地点的方法,已经形成了某个依据属性的表后,通过关键词匹配来解决问题\r\n# 最终输出一个yield出的迭代器,将其list化后就可以向末端输出了\r\ndef find_by_word(lst, word):\r\n # 这个是字符串匹配函数,word是客户输入,lst是循环的东西\r\n # 最好排成优先队列\r\n # 若没找到,我们可以造一个关于word的任意位置的切片,长度比word短,由此来寻找想要的名称\r\n # 由于景点,地名的长度一般不长,所以即使这里的时间代价极高,我们也可以保证这样做不会引发混乱\r\n ans = []\r\n for x in lst:\r\n if word == x:\r\n ans.append(x)\r\n if len(word) > 20:\r\n raise ValuError(\"in find_by_word, we don't think it's possible for a city or a town\\\r\n to own a name longer than 20\")\r\n # 如果客户输入的地名在地名总集中,我们有理由相信他没有输错\r\n if ans != []:\r\n return ans\r\n slices = []\r\n for i in range(len(word)):\r\n # 这里为了保证效率,我们可以通过控制内部循环来使得表中名字串长度从小到大排列\r\n # 并且这样排出来的结果是相似度高的在前面\r\n for j in range(0, len(word) - i + 1):\r\n slices.append(word[j:j + i])\r\n for x in lst:\r\n for i in range(1, len(word)):\r\n if slices[-i] in x:\r\n ans.append(x)\r\n return ans\r\n\r\n\r\ncategorys = {\"历史文化\", \"现代都市\", \"山区\", \"海景\", \"综合\"}\r\ninfnum = float(\"inf\")\r\n\r\n\r\nclass web:\r\n # land_list是一个list对象,适用相应方法\r\n\r\n def __init__(self, lnum=0, land_list=[], graph_money=GraphAL(), graph_time=GraphAL(), graph_line=GraphAL()):\r\n self.graph_money = graph_money\r\n self.graph_time = graph_time\r\n self.graph_line = graph_line\r\n self.lnum = lnum\r\n self.land_list = land_list\r\n\r\n def is_empty(self):\r\n return self.lnum == 0\r\n # 获得所有景点名称,用list储存\r\n # self._land_list是以landscape为元素的表\r\n\r\n def _get_name(self):\r\n if self.is_empty():\r\n raise WebLandsError(\"in 'get_all_position'\")\r\n namee = []\r\n for x in self.land_list():\r\n namee.append(x.name)\r\n return namee\r\n # 获得所有景点位置\r\n\r\n def lst_pos(self, land):\r\n return self.land_list.index(land)\r\n\r\n def _get_position(self):\r\n if self.is_empty():\r\n raise WebLandsError(\"in 'get_all_position'\")\r\n positionn = []\r\n for x in 
self.land_list():\r\n positionn.append(x.position)\r\n return positionn\r\n\r\n def add_land(self, landscape):\r\n self.land_list.append(landscape)\r\n self.graph_money.add_vertex()\r\n self.graph_time.add_vertex()\r\n self.graph_line.add_vertex()\r\n self.lnum += 1\r\n\r\n # 如果不设置money,time或line,自然landscape之间没有边相连\r\n\r\n def set_all(self, land1, land2, money=infnum, time=infnum, line=1):\r\n graph_money.add_edge(self.land_list().index(land1),\r\n self.land_list().index(land2), money)\r\n graph_time.add_edge(self.land_list().index(land1),\r\n self.land_list().index(land2), time)\r\n graph_line.add_edge(self.land_list().index(land1),\r\n self.land_list().index(land2), line)\r\n\r\n# 以下基于Dijkstra算法来搞定最短路径问题,可同时作用于时间,金钱和路径长度做邻接图\r\n def set_money(self, land1, land2, money):\r\n self.graph_money.add_edge(self.land_list.index(land1),\r\n self.land_list.index(land2), money)\r\n\r\n def get_money(self, land1, land2):\r\n a = self.graph_money.get_edge(self.land_list.index(land1),\r\n self.land_list.index(land2))\r\n return a\r\n\r\n def set_time(self, land1, land2, time):\r\n self.graph_money.add_edge(self.land_list.index(land1),\r\n self.land_list.index(land2), time)\r\n\r\n def get_time(self, land1, land2):\r\n a = self.graph_time.get_edge(self.land_list.index(land1),\r\n self.land_list.index(land2))\r\n return a\r\n\r\n def set_line(self, land1, land2, line):\r\n self.graph_line.add_edge(self.land_list.index(land1),\r\n self.land_list.index(land2), line)\r\n\r\n def get_line(self, land1, land2):\r\n a = self.graph_line.get_edge(self.land_list.index(land1),\r\n self.land_list.index(land2))\r\n return a\r\n\r\n# shortestmoney等开始\r\n\r\n\r\ndef shortest_money(web, land1, land2):\r\n vi = web.lst_pos(land1)\r\n vj = web.lst_pos(land2)\r\n if vi == vj:\r\n raise ValuError(\"in shortest_money,\\\r\n if the begining is the same as the ending, you don't have to pay anything\")\r\n path = dijkstra_shortest_paths(web.graph_money, vi)\r\n path_list = [vi]\r\n while vi != 
path[vj][0]:\r\n path_list.append(path[vj][0])\r\n vi = path[vj][0]\r\n return path_list, path[vj][1]\r\n\r\n\r\ndef shortest_money_str(web, land1, land2):\r\n str_ = \"\"\r\n path, pay = shortest_money(web, land1, land2)\r\n for i in range(len(path)):\r\n str_ += str(web.land_list[path[i]].name)\r\n str_ += \"->\"\r\n str_ += land2.name\r\n return \"所求的最短路money路径为\", str_, \"总money代价为\", pay\r\n\r\n\r\ndef shortest_time(web, land1, land2):\r\n vi = web.lst_pos(land1)\r\n vj = web.lst_pos(land2)\r\n if vi == vj:\r\n raise ValuError(\"in shortest_time,\\\r\n if the begining is the same as the ending, you don't have to pay anything\")\r\n path = dijkstra_shortest_paths(web.graph_time(), vi)\r\n path_list = [vi]\r\n while vi != vj:\r\n path_list.append(path[vj][0])\r\n vi = path[vj][0]\r\n return path_list, path[vj][1]\r\n\r\n\r\ndef shortest_time_str(web, land1, land2):\r\n str_ = \"\"\r\n path, pay = shortest_time(web, land1, land2)\r\n for i in range(len(path)):\r\n str_ += str(path[i])\r\n return \"所求的最短路time路径为\", str_, \"总time代价为\", pay\r\n\r\n\r\ndef shortest_line(web, land1, land2):\r\n vi = web.lst_pos(land1)\r\n vj = web.lst_pos(land2)\r\n if vi == vj:\r\n raise ValuError(\"in shortest_line,\\\r\n if the begining is the same as the ending, you don't have to pay anything\")\r\n path = dijkstra_shortest_paths(web.graph_line(), vi)\r\n path_list = [vi]\r\n while vi != vj:\r\n path_list.append(path[vj][0])\r\n vi = path[vj][0]\r\n return path_list, path[vj][1]\r\n\r\n\r\ndef shortest_time_str(web, land1, land2):\r\n str_ = \"\"\r\n path, pay = shortest_line(web, land1, land2)\r\n for i in range(len(path)):\r\n str_ += str(path[i])\r\n return \"所求的最短路line路径为\", str_, \"总line代价为\", pay\r\n# shortest等结束\r\n\r\n\r\nclass landscape: # landscape代表一个景点,rank表示在图中list的位置\r\n\r\n def __init__(self, name, position, category=None, hot=0): # 其中position是一个数,代表一个景点\r\n self.name = name\r\n self.position = position\r\n self.category = category\r\n self.hot = hot\r\n\r\n def 
position(self):\r\n return self._position\r\n\r\n def category(self):\r\n return self._category\r\n\r\n def name(self):\r\n return self._name\r\n\r\n def hot(self):\r\n return hot\r\n\r\n def set_category(self, sorts):\r\n if sorts not in categorys:\r\n raise ValuError(\"in set_category, we do not have {}\".format(sorts))\r\n self.category = sorts\r\n\r\n# 对于多目标问题,先用既有方法构造一个web,web保存了所有目标landscape\r\n# 现在基于Prim算法给出一个关于多目标问题的算法,其实就是最小生成树问题\r\n\r\n\r\ndef muti_aim_solve(land_list):\r\n sub_web = web()\r\n for x in land_list:\r\n sub_web.add_land(x)\r\n lanst = web.land_list().copy()\r\n for x in lanst:\r\n for y in lanst:\r\n if x == y:\r\n continue\r\n vi = lst_pos(web, x)\r\n vj = lst_pos(web, y)\r\n\r\na, b, c = Edges([0, 2, 4])\r\nlst = [\"东方明珠\", \"西湖\", \"迪士尼\"]\r\nchina = web(3, lst, a, b, c)\r\n",
"step-ids": [
15,
21,
24,
29,
32
]
}
|
[
15,
21,
24,
29,
32
] |
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.views import LoginView
from django.shortcuts import render
from django.views import View
from django.views.generic import CreateView
from resume.forms import NewResumeForm
from vacancy.forms import NewVacancyForm
class MenuView(View):
def get(self, request, *args, **kwargs):
context = {
'is_authenticated': request.user.is_authenticated,
'username': request.user.username,
}
return render(request, 'main.html', context=context)
class MySignupView(CreateView):
form_class = UserCreationForm
success_url = 'login'
template_name = 'signup.html'
class MyLoginView(LoginView):
redirect_authenticated_user = True
template_name = 'login.html'
class HomeView(View):
def get(self, request, *args, **kwargs):
form = NewVacancyForm() if request.user.is_staff else NewResumeForm()
context = {
'form': form,
'is_authenticated': request.user.is_authenticated,
'is_staff': request.user.is_staff,
'username': request.user.username,
}
return render(request, 'home.html', context=context)
|
normal
|
{
"blob_id": "a75691af17f6d1effd469d5c2ded340c71521ee1",
"index": 9310,
"step-1": "<mask token>\n\n\nclass MyLoginView(LoginView):\n redirect_authenticated_user = True\n template_name = 'login.html'\n\n\nclass HomeView(View):\n\n def get(self, request, *args, **kwargs):\n form = NewVacancyForm() if request.user.is_staff else NewResumeForm()\n context = {'form': form, 'is_authenticated': request.user.\n is_authenticated, 'is_staff': request.user.is_staff, 'username':\n request.user.username}\n return render(request, 'home.html', context=context)\n",
"step-2": "<mask token>\n\n\nclass MySignupView(CreateView):\n form_class = UserCreationForm\n success_url = 'login'\n template_name = 'signup.html'\n\n\nclass MyLoginView(LoginView):\n redirect_authenticated_user = True\n template_name = 'login.html'\n\n\nclass HomeView(View):\n\n def get(self, request, *args, **kwargs):\n form = NewVacancyForm() if request.user.is_staff else NewResumeForm()\n context = {'form': form, 'is_authenticated': request.user.\n is_authenticated, 'is_staff': request.user.is_staff, 'username':\n request.user.username}\n return render(request, 'home.html', context=context)\n",
"step-3": "<mask token>\n\n\nclass MenuView(View):\n <mask token>\n\n\nclass MySignupView(CreateView):\n form_class = UserCreationForm\n success_url = 'login'\n template_name = 'signup.html'\n\n\nclass MyLoginView(LoginView):\n redirect_authenticated_user = True\n template_name = 'login.html'\n\n\nclass HomeView(View):\n\n def get(self, request, *args, **kwargs):\n form = NewVacancyForm() if request.user.is_staff else NewResumeForm()\n context = {'form': form, 'is_authenticated': request.user.\n is_authenticated, 'is_staff': request.user.is_staff, 'username':\n request.user.username}\n return render(request, 'home.html', context=context)\n",
"step-4": "<mask token>\n\n\nclass MenuView(View):\n\n def get(self, request, *args, **kwargs):\n context = {'is_authenticated': request.user.is_authenticated,\n 'username': request.user.username}\n return render(request, 'main.html', context=context)\n\n\nclass MySignupView(CreateView):\n form_class = UserCreationForm\n success_url = 'login'\n template_name = 'signup.html'\n\n\nclass MyLoginView(LoginView):\n redirect_authenticated_user = True\n template_name = 'login.html'\n\n\nclass HomeView(View):\n\n def get(self, request, *args, **kwargs):\n form = NewVacancyForm() if request.user.is_staff else NewResumeForm()\n context = {'form': form, 'is_authenticated': request.user.\n is_authenticated, 'is_staff': request.user.is_staff, 'username':\n request.user.username}\n return render(request, 'home.html', context=context)\n",
"step-5": "from django.contrib.auth.forms import UserCreationForm\nfrom django.contrib.auth.views import LoginView\nfrom django.shortcuts import render\nfrom django.views import View\nfrom django.views.generic import CreateView\n\nfrom resume.forms import NewResumeForm\nfrom vacancy.forms import NewVacancyForm\n\n\nclass MenuView(View):\n def get(self, request, *args, **kwargs):\n context = {\n 'is_authenticated': request.user.is_authenticated,\n 'username': request.user.username,\n }\n return render(request, 'main.html', context=context)\n\n\nclass MySignupView(CreateView):\n form_class = UserCreationForm\n success_url = 'login'\n template_name = 'signup.html'\n\n\nclass MyLoginView(LoginView):\n redirect_authenticated_user = True\n template_name = 'login.html'\n\n\nclass HomeView(View):\n def get(self, request, *args, **kwargs):\n form = NewVacancyForm() if request.user.is_staff else NewResumeForm()\n context = {\n 'form': form,\n 'is_authenticated': request.user.is_authenticated,\n 'is_staff': request.user.is_staff,\n 'username': request.user.username,\n }\n return render(request, 'home.html', context=context)\n",
"step-ids": [
4,
6,
7,
8,
10
]
}
|
[
4,
6,
7,
8,
10
] |
# -*- coding: utf-8 -*-
import pytest
from bravado.client import ResourceDecorator
from bravado.client import SwaggerClient
def test_resource_exists(petstore_client):
assert type(petstore_client.pet) == ResourceDecorator
def test_resource_not_found(petstore_client):
with pytest.raises(AttributeError) as excinfo:
petstore_client.foo
assert 'foo not found' in str(excinfo.value)
@pytest.fixture
def client_tags_with_spaces():
return SwaggerClient.from_spec({
'swagger': '2.0',
'info': {
'version': '',
'title': 'API'
},
'paths': {
'/ping': {
'get': {
'operationId': 'ping',
'responses': {
'200': {
'description': 'ping'
}
},
'tags': [
'my tag'
]
}
}
}
})
def test_get_resource(client_tags_with_spaces):
assert type(client_tags_with_spaces._get_resource('my tag')) == ResourceDecorator
|
normal
|
{
"blob_id": "5ee1d8ef7ec4b191e0789ceb9c6dd2d58af526a0",
"index": 7875,
"step-1": "<mask token>\n\n\ndef test_resource_not_found(petstore_client):\n with pytest.raises(AttributeError) as excinfo:\n petstore_client.foo\n assert 'foo not found' in str(excinfo.value)\n\n\n@pytest.fixture\ndef client_tags_with_spaces():\n return SwaggerClient.from_spec({'swagger': '2.0', 'info': {'version':\n '', 'title': 'API'}, 'paths': {'/ping': {'get': {'operationId':\n 'ping', 'responses': {'200': {'description': 'ping'}}, 'tags': [\n 'my tag']}}}})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_resource_exists(petstore_client):\n assert type(petstore_client.pet) == ResourceDecorator\n\n\ndef test_resource_not_found(petstore_client):\n with pytest.raises(AttributeError) as excinfo:\n petstore_client.foo\n assert 'foo not found' in str(excinfo.value)\n\n\n@pytest.fixture\ndef client_tags_with_spaces():\n return SwaggerClient.from_spec({'swagger': '2.0', 'info': {'version':\n '', 'title': 'API'}, 'paths': {'/ping': {'get': {'operationId':\n 'ping', 'responses': {'200': {'description': 'ping'}}, 'tags': [\n 'my tag']}}}})\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_resource_exists(petstore_client):\n assert type(petstore_client.pet) == ResourceDecorator\n\n\ndef test_resource_not_found(petstore_client):\n with pytest.raises(AttributeError) as excinfo:\n petstore_client.foo\n assert 'foo not found' in str(excinfo.value)\n\n\n@pytest.fixture\ndef client_tags_with_spaces():\n return SwaggerClient.from_spec({'swagger': '2.0', 'info': {'version':\n '', 'title': 'API'}, 'paths': {'/ping': {'get': {'operationId':\n 'ping', 'responses': {'200': {'description': 'ping'}}, 'tags': [\n 'my tag']}}}})\n\n\ndef test_get_resource(client_tags_with_spaces):\n assert type(client_tags_with_spaces._get_resource('my tag')\n ) == ResourceDecorator\n",
"step-4": "import pytest\nfrom bravado.client import ResourceDecorator\nfrom bravado.client import SwaggerClient\n\n\ndef test_resource_exists(petstore_client):\n assert type(petstore_client.pet) == ResourceDecorator\n\n\ndef test_resource_not_found(petstore_client):\n with pytest.raises(AttributeError) as excinfo:\n petstore_client.foo\n assert 'foo not found' in str(excinfo.value)\n\n\n@pytest.fixture\ndef client_tags_with_spaces():\n return SwaggerClient.from_spec({'swagger': '2.0', 'info': {'version':\n '', 'title': 'API'}, 'paths': {'/ping': {'get': {'operationId':\n 'ping', 'responses': {'200': {'description': 'ping'}}, 'tags': [\n 'my tag']}}}})\n\n\ndef test_get_resource(client_tags_with_spaces):\n assert type(client_tags_with_spaces._get_resource('my tag')\n ) == ResourceDecorator\n",
"step-5": "# -*- coding: utf-8 -*-\nimport pytest\n\nfrom bravado.client import ResourceDecorator\nfrom bravado.client import SwaggerClient\n\n\ndef test_resource_exists(petstore_client):\n assert type(petstore_client.pet) == ResourceDecorator\n\n\ndef test_resource_not_found(petstore_client):\n with pytest.raises(AttributeError) as excinfo:\n petstore_client.foo\n assert 'foo not found' in str(excinfo.value)\n\n\n@pytest.fixture\ndef client_tags_with_spaces():\n return SwaggerClient.from_spec({\n 'swagger': '2.0',\n 'info': {\n 'version': '',\n 'title': 'API'\n },\n 'paths': {\n '/ping': {\n 'get': {\n 'operationId': 'ping',\n 'responses': {\n '200': {\n 'description': 'ping'\n }\n },\n 'tags': [\n 'my tag'\n ]\n }\n }\n }\n })\n\n\ndef test_get_resource(client_tags_with_spaces):\n assert type(client_tags_with_spaces._get_resource('my tag')) == ResourceDecorator\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def edit_team(request):
def get():
team_id = request.GET['team']
team = Team.objects.get(id=team_id)
model = Context({'team': team})
t = loader.get_template('edit_team.html')
return HttpResponse(t.render(model))
def post():
new_notes = request.POST['notes']
team_id = request.POST['team']
team = Team.objects.get(id=team_id)
team.notes = new_notes
team.save()
return HttpResponseRedirect('/app/teams/edit?team=%s' % team_id)
if request.method == 'POST':
return post()
else:
return get()
def create_tournament(request):
def get():
inprogress = Tournament.objects.filter(completed=False)
finished = Tournament.objects.filter(completed=True)
model = Context({'teams': get_team_groups(), 'in_progress':
inprogress, 'finished': finished})
t = loader.get_template('tournament/create_tournament.html')
return HttpResponse(t.render(model))
@transaction.commit_on_success
def post():
tournament = Tournament()
tournament.completed = False
tournament.save()
for team_id in request.POST.getlist('participant'):
if team_id != '':
team = Team.objects.get(id=team_id)
tourney_team = TournamentTeam()
tourney_team.tournament = tournament
tourney_team.team = team
tourney_team.save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % str(tournament.id))
if request.method == 'POST':
return post()
else:
return get()
def view_tournament(request):
def get():
tourney = Tournament.objects.get(id=request.GET['tournament'])
pending_teams = []
teams = []
for team in tourney.tourney_team_set.all():
if team.matchup_index == None:
pending_teams.append(team.team)
else:
teams.append(team.team)
matches = [[i for i in range(0, 4)], [i for i in range(0, 2)], [0]]
for match in tourney.tourney_match_set.all():
matches[match.round][match.index] = match
model = Context({'pending_teams': pending_teams, 'teams': teams,
'matches': matches, 'tourney': tourney})
t = loader.get_template('tournament/view_tournament.html')
return HttpResponse(t.render(model))
@transaction.commit_on_success
def post():
tourney_id = request.GET['tournament']
tourney = Tournament.objects.get(id=tourney_id)
versus = request.POST.getlist('versus')
teams = []
for team_id in versus:
if team_id != '':
teams.append(Team.objects.get(id=team_id))
existing_matches = TournamentMatchup.objects.filter(tournament=tourney)
match = Matchup()
match.team1 = teams[0]
match.team2 = teams[1]
match.save()
tourney_match = TournamentMatchup()
tourney_match.tournament = tourney
tourney_match.matchup = match
tourney_match.round = 0
tourney_match.index = existing_matches.count()
tourney_match.save()
tourney_teams = []
tourney_teams.append(TournamentTeam.objects.filter(tournament=
tourney).filter(team=teams[0]).get())
tourney_teams.append(TournamentTeam.objects.filter(tournament=
tourney).filter(team=teams[1]).get())
tourney_teams[0].matchup_index = tourney_match.index * 2
tourney_teams[1].matchup_index = tourney_match.index * 2 + 1
tourney_teams[0].save()
tourney_teams[1].save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % tourney_id)
if request.method == 'POST':
return post()
else:
return get()
def result_tournament(request):
@transaction.commit_on_success
def post():
tournament_match_id = request.GET['tournament_match_key']
match = TournamentMatchup.objects.get(id=tournament_match_id)
winner_id = int(request.POST['winner'])
matchup = match.matchup
result = MatchResult()
if winner_id == matchup.team1.id:
result.winner = matchup.team1
result.loser = matchup.team2
elif winner_id == matchup.team2.id:
result.winner = matchup.team2
result.loser = matchup.team1
else:
raise Exception('could not determine winner key: %s (%s, %s)' %
(winner_id, matchup.team1.id, matchup.team2.id))
update_stats(result.winner, result.loser)
result.save()
next_round_indices = {(0): 0, (1): 0, (2): 1, (3): 1}
next_round_index = next_round_indices[match.index]
next_round = match.round + 1
if match.round < 2:
existing = TournamentMatchup.objects.filter(tournament=match.
tournament).filter(round=next_round).filter(index=
next_round_index)
if existing.count() == 1:
next_match = existing[0]
next_matchup = next_match.matchup
next_matchup.team2 = result.winner
next_matchup.save()
elif existing.count() == 0:
next_match = TournamentMatchup()
next_matchup = Matchup()
next_matchup.team1 = result.winner
next_matchup.save()
next_match.tournament = match.tournament
next_match.round = next_round
next_match.index = next_round_index
next_match.matchup = next_matchup
next_match.save()
else:
tourney = match.tournament
tourney.completed = True
tourney.winner = result.winner
tourney.save()
match.matchup.delete()
match.matchup = None
match.result = result
match.save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % match.tournament.id)
if request.method == 'POST':
return post()
else:
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % request.GET[
'tournament'])
<|reserved_special_token_0|>
def get_team_groups():
teams = Team.objects.all()
team_groups = {}
for team in teams:
if not team.leader in team_groups:
team_groups[team.leader] = []
team_groups[team.leader].append(team)
team_groups = [sorted(team_groups[k], lambda x, y: cmp(x.id, y.id)) for
k in sorted(team_groups.keys(), lambda x, y: cmp(x.name, y.name))]
return team_groups
def update_stats(winner, loser):
existing = MatchupStatistics.objects.filter(Q(team1__in=[winner.id,
loser.id]) & Q(team2__in=[winner.id, loser.id]))
stats = None
if existing.count() == 0:
newStats = MatchupStatistics()
newStats.team1 = winner
newStats.team2 = loser
newStats.team1_wins = 1
newStats.team2_wins = 0
winner.wins = winner.wins + 1
loser.losses = loser.losses + 1
newStats.save()
winner.save()
loser.save()
return 1, 0
elif existing.count() == 1:
oldStats = existing.fetch(1)[0]
if oldStats.team1.id == winner.id:
oldStats.team1_wins = oldStats.team1_wins + 1
else:
oldStats.team2_wins = oldStats.team2_wins + 1
winner.wins = winner.wins + 1
loser.losses = loser.losses + 1
oldStats.save()
winner.save()
loser.save()
return 0, 1
else:
logging.error(
'unexpected state: %s matchup statistics for the same team pair (expected 1)'
% existing.count())
return 0, 0
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def index(request):
model = Context({})
t = loader.get_template('index.html')
return HttpResponse(t.render(model))
<|reserved_special_token_0|>
def edit_team(request):
def get():
team_id = request.GET['team']
team = Team.objects.get(id=team_id)
model = Context({'team': team})
t = loader.get_template('edit_team.html')
return HttpResponse(t.render(model))
def post():
new_notes = request.POST['notes']
team_id = request.POST['team']
team = Team.objects.get(id=team_id)
team.notes = new_notes
team.save()
return HttpResponseRedirect('/app/teams/edit?team=%s' % team_id)
if request.method == 'POST':
return post()
else:
return get()
def create_tournament(request):
def get():
inprogress = Tournament.objects.filter(completed=False)
finished = Tournament.objects.filter(completed=True)
model = Context({'teams': get_team_groups(), 'in_progress':
inprogress, 'finished': finished})
t = loader.get_template('tournament/create_tournament.html')
return HttpResponse(t.render(model))
@transaction.commit_on_success
def post():
tournament = Tournament()
tournament.completed = False
tournament.save()
for team_id in request.POST.getlist('participant'):
if team_id != '':
team = Team.objects.get(id=team_id)
tourney_team = TournamentTeam()
tourney_team.tournament = tournament
tourney_team.team = team
tourney_team.save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % str(tournament.id))
if request.method == 'POST':
return post()
else:
return get()
def view_tournament(request):
def get():
tourney = Tournament.objects.get(id=request.GET['tournament'])
pending_teams = []
teams = []
for team in tourney.tourney_team_set.all():
if team.matchup_index == None:
pending_teams.append(team.team)
else:
teams.append(team.team)
matches = [[i for i in range(0, 4)], [i for i in range(0, 2)], [0]]
for match in tourney.tourney_match_set.all():
matches[match.round][match.index] = match
model = Context({'pending_teams': pending_teams, 'teams': teams,
'matches': matches, 'tourney': tourney})
t = loader.get_template('tournament/view_tournament.html')
return HttpResponse(t.render(model))
@transaction.commit_on_success
def post():
tourney_id = request.GET['tournament']
tourney = Tournament.objects.get(id=tourney_id)
versus = request.POST.getlist('versus')
teams = []
for team_id in versus:
if team_id != '':
teams.append(Team.objects.get(id=team_id))
existing_matches = TournamentMatchup.objects.filter(tournament=tourney)
match = Matchup()
match.team1 = teams[0]
match.team2 = teams[1]
match.save()
tourney_match = TournamentMatchup()
tourney_match.tournament = tourney
tourney_match.matchup = match
tourney_match.round = 0
tourney_match.index = existing_matches.count()
tourney_match.save()
tourney_teams = []
tourney_teams.append(TournamentTeam.objects.filter(tournament=
tourney).filter(team=teams[0]).get())
tourney_teams.append(TournamentTeam.objects.filter(tournament=
tourney).filter(team=teams[1]).get())
tourney_teams[0].matchup_index = tourney_match.index * 2
tourney_teams[1].matchup_index = tourney_match.index * 2 + 1
tourney_teams[0].save()
tourney_teams[1].save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % tourney_id)
if request.method == 'POST':
return post()
else:
return get()
def result_tournament(request):
@transaction.commit_on_success
def post():
tournament_match_id = request.GET['tournament_match_key']
match = TournamentMatchup.objects.get(id=tournament_match_id)
winner_id = int(request.POST['winner'])
matchup = match.matchup
result = MatchResult()
if winner_id == matchup.team1.id:
result.winner = matchup.team1
result.loser = matchup.team2
elif winner_id == matchup.team2.id:
result.winner = matchup.team2
result.loser = matchup.team1
else:
raise Exception('could not determine winner key: %s (%s, %s)' %
(winner_id, matchup.team1.id, matchup.team2.id))
update_stats(result.winner, result.loser)
result.save()
next_round_indices = {(0): 0, (1): 0, (2): 1, (3): 1}
next_round_index = next_round_indices[match.index]
next_round = match.round + 1
if match.round < 2:
existing = TournamentMatchup.objects.filter(tournament=match.
tournament).filter(round=next_round).filter(index=
next_round_index)
if existing.count() == 1:
next_match = existing[0]
next_matchup = next_match.matchup
next_matchup.team2 = result.winner
next_matchup.save()
elif existing.count() == 0:
next_match = TournamentMatchup()
next_matchup = Matchup()
next_matchup.team1 = result.winner
next_matchup.save()
next_match.tournament = match.tournament
next_match.round = next_round
next_match.index = next_round_index
next_match.matchup = next_matchup
next_match.save()
else:
tourney = match.tournament
tourney.completed = True
tourney.winner = result.winner
tourney.save()
match.matchup.delete()
match.matchup = None
match.result = result
match.save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % match.tournament.id)
if request.method == 'POST':
return post()
else:
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % request.GET[
'tournament'])
<|reserved_special_token_0|>
def get_team_groups():
teams = Team.objects.all()
team_groups = {}
for team in teams:
if not team.leader in team_groups:
team_groups[team.leader] = []
team_groups[team.leader].append(team)
team_groups = [sorted(team_groups[k], lambda x, y: cmp(x.id, y.id)) for
k in sorted(team_groups.keys(), lambda x, y: cmp(x.name, y.name))]
return team_groups
def update_stats(winner, loser):
existing = MatchupStatistics.objects.filter(Q(team1__in=[winner.id,
loser.id]) & Q(team2__in=[winner.id, loser.id]))
stats = None
if existing.count() == 0:
newStats = MatchupStatistics()
newStats.team1 = winner
newStats.team2 = loser
newStats.team1_wins = 1
newStats.team2_wins = 0
winner.wins = winner.wins + 1
loser.losses = loser.losses + 1
newStats.save()
winner.save()
loser.save()
return 1, 0
elif existing.count() == 1:
oldStats = existing.fetch(1)[0]
if oldStats.team1.id == winner.id:
oldStats.team1_wins = oldStats.team1_wins + 1
else:
oldStats.team2_wins = oldStats.team2_wins + 1
winner.wins = winner.wins + 1
loser.losses = loser.losses + 1
oldStats.save()
winner.save()
loser.save()
return 0, 1
else:
logging.error(
'unexpected state: %s matchup statistics for the same team pair (expected 1)'
% existing.count())
return 0, 0
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def index(request):
model = Context({})
t = loader.get_template('index.html')
return HttpResponse(t.render(model))
def create_team(request):
def get():
heroes = Mercenary.objects.filter(type='HERO')
pawns = Mercenary.objects.filter(type='PAWN')
model = Context({'heroes': heroes, 'pawns': pawns, 'mercrange':
range(1, 7), 'teams': get_team_groups()})
t = loader.get_template('teams.html')
return HttpResponse(t.render(model))
def post():
team = Team()
class_c = request.POST['hero']
leader = Mercenary.objects.filter(type='HERO').filter(name=class_c)
team.leader = leader[0]
team.wins = 0
team.losses = 0
team.notes = ''
team.save()
for i in range(1, 10):
who = request.POST['pawn%s' % i]
if who != '':
merc = Mercenary.objects.filter(type='PAWN').filter(name=who)
current = TeamMember()
current.team = team
current.merc = merc[0]
current.location = i
current.save()
return HttpResponseRedirect('/app/teams')
if request.method == 'POST':
return post()
else:
return get()
def edit_team(request):
def get():
team_id = request.GET['team']
team = Team.objects.get(id=team_id)
model = Context({'team': team})
t = loader.get_template('edit_team.html')
return HttpResponse(t.render(model))
def post():
new_notes = request.POST['notes']
team_id = request.POST['team']
team = Team.objects.get(id=team_id)
team.notes = new_notes
team.save()
return HttpResponseRedirect('/app/teams/edit?team=%s' % team_id)
if request.method == 'POST':
return post()
else:
return get()
def create_tournament(request):
def get():
inprogress = Tournament.objects.filter(completed=False)
finished = Tournament.objects.filter(completed=True)
model = Context({'teams': get_team_groups(), 'in_progress':
inprogress, 'finished': finished})
t = loader.get_template('tournament/create_tournament.html')
return HttpResponse(t.render(model))
@transaction.commit_on_success
def post():
tournament = Tournament()
tournament.completed = False
tournament.save()
for team_id in request.POST.getlist('participant'):
if team_id != '':
team = Team.objects.get(id=team_id)
tourney_team = TournamentTeam()
tourney_team.tournament = tournament
tourney_team.team = team
tourney_team.save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % str(tournament.id))
if request.method == 'POST':
return post()
else:
return get()
def view_tournament(request):
def get():
tourney = Tournament.objects.get(id=request.GET['tournament'])
pending_teams = []
teams = []
for team in tourney.tourney_team_set.all():
if team.matchup_index == None:
pending_teams.append(team.team)
else:
teams.append(team.team)
matches = [[i for i in range(0, 4)], [i for i in range(0, 2)], [0]]
for match in tourney.tourney_match_set.all():
matches[match.round][match.index] = match
model = Context({'pending_teams': pending_teams, 'teams': teams,
'matches': matches, 'tourney': tourney})
t = loader.get_template('tournament/view_tournament.html')
return HttpResponse(t.render(model))
@transaction.commit_on_success
def post():
tourney_id = request.GET['tournament']
tourney = Tournament.objects.get(id=tourney_id)
versus = request.POST.getlist('versus')
teams = []
for team_id in versus:
if team_id != '':
teams.append(Team.objects.get(id=team_id))
existing_matches = TournamentMatchup.objects.filter(tournament=tourney)
match = Matchup()
match.team1 = teams[0]
match.team2 = teams[1]
match.save()
tourney_match = TournamentMatchup()
tourney_match.tournament = tourney
tourney_match.matchup = match
tourney_match.round = 0
tourney_match.index = existing_matches.count()
tourney_match.save()
tourney_teams = []
tourney_teams.append(TournamentTeam.objects.filter(tournament=
tourney).filter(team=teams[0]).get())
tourney_teams.append(TournamentTeam.objects.filter(tournament=
tourney).filter(team=teams[1]).get())
tourney_teams[0].matchup_index = tourney_match.index * 2
tourney_teams[1].matchup_index = tourney_match.index * 2 + 1
tourney_teams[0].save()
tourney_teams[1].save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % tourney_id)
if request.method == 'POST':
return post()
else:
return get()
def result_tournament(request):
@transaction.commit_on_success
def post():
tournament_match_id = request.GET['tournament_match_key']
match = TournamentMatchup.objects.get(id=tournament_match_id)
winner_id = int(request.POST['winner'])
matchup = match.matchup
result = MatchResult()
if winner_id == matchup.team1.id:
result.winner = matchup.team1
result.loser = matchup.team2
elif winner_id == matchup.team2.id:
result.winner = matchup.team2
result.loser = matchup.team1
else:
raise Exception('could not determine winner key: %s (%s, %s)' %
(winner_id, matchup.team1.id, matchup.team2.id))
update_stats(result.winner, result.loser)
result.save()
next_round_indices = {(0): 0, (1): 0, (2): 1, (3): 1}
next_round_index = next_round_indices[match.index]
next_round = match.round + 1
if match.round < 2:
existing = TournamentMatchup.objects.filter(tournament=match.
tournament).filter(round=next_round).filter(index=
next_round_index)
if existing.count() == 1:
next_match = existing[0]
next_matchup = next_match.matchup
next_matchup.team2 = result.winner
next_matchup.save()
elif existing.count() == 0:
next_match = TournamentMatchup()
next_matchup = Matchup()
next_matchup.team1 = result.winner
next_matchup.save()
next_match.tournament = match.tournament
next_match.round = next_round
next_match.index = next_round_index
next_match.matchup = next_matchup
next_match.save()
else:
tourney = match.tournament
tourney.completed = True
tourney.winner = result.winner
tourney.save()
match.matchup.delete()
match.matchup = None
match.result = result
match.save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % match.tournament.id)
if request.method == 'POST':
return post()
else:
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % request.GET[
'tournament'])
def result_detail(request):
result_id = request.GET['match']
match = MatchResult.objects.get(id=result_id)
model = Context({'match': match})
t = loader.get_template('result_detail.html')
return HttpResponse(t.render(model))
def get_team_groups():
teams = Team.objects.all()
team_groups = {}
for team in teams:
if not team.leader in team_groups:
team_groups[team.leader] = []
team_groups[team.leader].append(team)
team_groups = [sorted(team_groups[k], lambda x, y: cmp(x.id, y.id)) for
k in sorted(team_groups.keys(), lambda x, y: cmp(x.name, y.name))]
return team_groups
def update_stats(winner, loser):
existing = MatchupStatistics.objects.filter(Q(team1__in=[winner.id,
loser.id]) & Q(team2__in=[winner.id, loser.id]))
stats = None
if existing.count() == 0:
newStats = MatchupStatistics()
newStats.team1 = winner
newStats.team2 = loser
newStats.team1_wins = 1
newStats.team2_wins = 0
winner.wins = winner.wins + 1
loser.losses = loser.losses + 1
newStats.save()
winner.save()
loser.save()
return 1, 0
elif existing.count() == 1:
oldStats = existing.fetch(1)[0]
if oldStats.team1.id == winner.id:
oldStats.team1_wins = oldStats.team1_wins + 1
else:
oldStats.team2_wins = oldStats.team2_wins + 1
winner.wins = winner.wins + 1
loser.losses = loser.losses + 1
oldStats.save()
winner.save()
loser.save()
return 0, 1
else:
logging.error(
'unexpected state: %s matchup statistics for the same team pair (expected 1)'
% existing.count())
return 0, 0
<|reserved_special_token_1|>
from django.http import HttpResponse, HttpResponseRedirect
from django.template import Context, loader
from django.db import transaction
from django.db.models import Q
from maximus.models import Mercenary, Team, TeamMember, Tournament, TournamentTeam, TournamentMatchup, Matchup, MatchupStatistics, MatchResult
def index(request):
model = Context({})
t = loader.get_template('index.html')
return HttpResponse(t.render(model))
def create_team(request):
def get():
heroes = Mercenary.objects.filter(type='HERO')
pawns = Mercenary.objects.filter(type='PAWN')
model = Context({'heroes': heroes, 'pawns': pawns, 'mercrange':
range(1, 7), 'teams': get_team_groups()})
t = loader.get_template('teams.html')
return HttpResponse(t.render(model))
def post():
team = Team()
class_c = request.POST['hero']
leader = Mercenary.objects.filter(type='HERO').filter(name=class_c)
team.leader = leader[0]
team.wins = 0
team.losses = 0
team.notes = ''
team.save()
for i in range(1, 10):
who = request.POST['pawn%s' % i]
if who != '':
merc = Mercenary.objects.filter(type='PAWN').filter(name=who)
current = TeamMember()
current.team = team
current.merc = merc[0]
current.location = i
current.save()
return HttpResponseRedirect('/app/teams')
if request.method == 'POST':
return post()
else:
return get()
def edit_team(request):
def get():
team_id = request.GET['team']
team = Team.objects.get(id=team_id)
model = Context({'team': team})
t = loader.get_template('edit_team.html')
return HttpResponse(t.render(model))
def post():
new_notes = request.POST['notes']
team_id = request.POST['team']
team = Team.objects.get(id=team_id)
team.notes = new_notes
team.save()
return HttpResponseRedirect('/app/teams/edit?team=%s' % team_id)
if request.method == 'POST':
return post()
else:
return get()
def create_tournament(request):
def get():
inprogress = Tournament.objects.filter(completed=False)
finished = Tournament.objects.filter(completed=True)
model = Context({'teams': get_team_groups(), 'in_progress':
inprogress, 'finished': finished})
t = loader.get_template('tournament/create_tournament.html')
return HttpResponse(t.render(model))
@transaction.commit_on_success
def post():
tournament = Tournament()
tournament.completed = False
tournament.save()
for team_id in request.POST.getlist('participant'):
if team_id != '':
team = Team.objects.get(id=team_id)
tourney_team = TournamentTeam()
tourney_team.tournament = tournament
tourney_team.team = team
tourney_team.save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % str(tournament.id))
if request.method == 'POST':
return post()
else:
return get()
def view_tournament(request):
def get():
tourney = Tournament.objects.get(id=request.GET['tournament'])
pending_teams = []
teams = []
for team in tourney.tourney_team_set.all():
if team.matchup_index == None:
pending_teams.append(team.team)
else:
teams.append(team.team)
matches = [[i for i in range(0, 4)], [i for i in range(0, 2)], [0]]
for match in tourney.tourney_match_set.all():
matches[match.round][match.index] = match
model = Context({'pending_teams': pending_teams, 'teams': teams,
'matches': matches, 'tourney': tourney})
t = loader.get_template('tournament/view_tournament.html')
return HttpResponse(t.render(model))
@transaction.commit_on_success
def post():
tourney_id = request.GET['tournament']
tourney = Tournament.objects.get(id=tourney_id)
versus = request.POST.getlist('versus')
teams = []
for team_id in versus:
if team_id != '':
teams.append(Team.objects.get(id=team_id))
existing_matches = TournamentMatchup.objects.filter(tournament=tourney)
match = Matchup()
match.team1 = teams[0]
match.team2 = teams[1]
match.save()
tourney_match = TournamentMatchup()
tourney_match.tournament = tourney
tourney_match.matchup = match
tourney_match.round = 0
tourney_match.index = existing_matches.count()
tourney_match.save()
tourney_teams = []
tourney_teams.append(TournamentTeam.objects.filter(tournament=
tourney).filter(team=teams[0]).get())
tourney_teams.append(TournamentTeam.objects.filter(tournament=
tourney).filter(team=teams[1]).get())
tourney_teams[0].matchup_index = tourney_match.index * 2
tourney_teams[1].matchup_index = tourney_match.index * 2 + 1
tourney_teams[0].save()
tourney_teams[1].save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % tourney_id)
if request.method == 'POST':
return post()
else:
return get()
def result_tournament(request):
@transaction.commit_on_success
def post():
tournament_match_id = request.GET['tournament_match_key']
match = TournamentMatchup.objects.get(id=tournament_match_id)
winner_id = int(request.POST['winner'])
matchup = match.matchup
result = MatchResult()
if winner_id == matchup.team1.id:
result.winner = matchup.team1
result.loser = matchup.team2
elif winner_id == matchup.team2.id:
result.winner = matchup.team2
result.loser = matchup.team1
else:
raise Exception('could not determine winner key: %s (%s, %s)' %
(winner_id, matchup.team1.id, matchup.team2.id))
update_stats(result.winner, result.loser)
result.save()
next_round_indices = {(0): 0, (1): 0, (2): 1, (3): 1}
next_round_index = next_round_indices[match.index]
next_round = match.round + 1
if match.round < 2:
existing = TournamentMatchup.objects.filter(tournament=match.
tournament).filter(round=next_round).filter(index=
next_round_index)
if existing.count() == 1:
next_match = existing[0]
next_matchup = next_match.matchup
next_matchup.team2 = result.winner
next_matchup.save()
elif existing.count() == 0:
next_match = TournamentMatchup()
next_matchup = Matchup()
next_matchup.team1 = result.winner
next_matchup.save()
next_match.tournament = match.tournament
next_match.round = next_round
next_match.index = next_round_index
next_match.matchup = next_matchup
next_match.save()
else:
tourney = match.tournament
tourney.completed = True
tourney.winner = result.winner
tourney.save()
match.matchup.delete()
match.matchup = None
match.result = result
match.save()
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % match.tournament.id)
if request.method == 'POST':
return post()
else:
return HttpResponseRedirect(
'/app/tournament/matchups?tournament=%s' % request.GET[
'tournament'])
def result_detail(request):
result_id = request.GET['match']
match = MatchResult.objects.get(id=result_id)
model = Context({'match': match})
t = loader.get_template('result_detail.html')
return HttpResponse(t.render(model))
def get_team_groups():
teams = Team.objects.all()
team_groups = {}
for team in teams:
if not team.leader in team_groups:
team_groups[team.leader] = []
team_groups[team.leader].append(team)
team_groups = [sorted(team_groups[k], lambda x, y: cmp(x.id, y.id)) for
k in sorted(team_groups.keys(), lambda x, y: cmp(x.name, y.name))]
return team_groups
def update_stats(winner, loser):
existing = MatchupStatistics.objects.filter(Q(team1__in=[winner.id,
loser.id]) & Q(team2__in=[winner.id, loser.id]))
stats = None
if existing.count() == 0:
newStats = MatchupStatistics()
newStats.team1 = winner
newStats.team2 = loser
newStats.team1_wins = 1
newStats.team2_wins = 0
winner.wins = winner.wins + 1
loser.losses = loser.losses + 1
newStats.save()
winner.save()
loser.save()
return 1, 0
elif existing.count() == 1:
oldStats = existing.fetch(1)[0]
if oldStats.team1.id == winner.id:
oldStats.team1_wins = oldStats.team1_wins + 1
else:
oldStats.team2_wins = oldStats.team2_wins + 1
winner.wins = winner.wins + 1
loser.losses = loser.losses + 1
oldStats.save()
winner.save()
loser.save()
return 0, 1
else:
logging.error(
'unexpected state: %s matchup statistics for the same team pair (expected 1)'
% existing.count())
return 0, 0
<|reserved_special_token_1|>
# Create your views here.
import logging

from django.db import transaction
from django.db.models import Q
from django.http import HttpResponse, HttpResponseRedirect
from django.template import Context, loader

from maximus.models import Mercenary, Team, TeamMember, Tournament, TournamentTeam, TournamentMatchup, Matchup, MatchupStatistics, MatchResult
def index(request):
    """Render the static landing page."""
    context = Context({})
    template = loader.get_template('index.html')
    return HttpResponse(template.render(context))
def create_team(request):
    """GET: show the team-builder form.  POST: create a team with its members."""

    def render_form():
        # The form needs every hero and pawn plus the current roster of teams.
        context = Context({
            'heroes': Mercenary.objects.filter(type='HERO'),
            'pawns': Mercenary.objects.filter(type='PAWN'),
            'mercrange': range(1, 7),
            'teams': get_team_groups(),
        })
        return HttpResponse(loader.get_template('teams.html').render(context))

    def save_team():
        hero_name = request.POST['hero']
        team = Team()
        # [0]: the hero name is taken to be unique among HERO mercenaries.
        team.leader = Mercenary.objects.filter(type='HERO').filter(name=hero_name)[0]
        team.wins = 0
        team.losses = 0
        team.notes = ""
        team.save()
        # Pawn slots are numbered 1..9 in the form; blank slots are skipped.
        for slot in range(1, 10):
            pawn_name = request.POST['pawn%s' % slot]
            if pawn_name == '':
                continue
            member = TeamMember()
            member.team = team
            member.merc = Mercenary.objects.filter(type='PAWN').filter(name=pawn_name)[0]
            member.location = slot
            member.save()
        return HttpResponseRedirect('/app/teams')

    if request.method == "POST":
        return save_team()
    return render_form()
def edit_team(request):
    """GET: show the notes editor for one team.  POST: save the edited notes."""

    def show_editor():
        team = Team.objects.get(id=request.GET["team"])
        context = Context({'team': team})
        return HttpResponse(loader.get_template('edit_team.html').render(context))

    def save_notes():
        team_id = request.POST["team"]
        team = Team.objects.get(id=team_id)
        team.notes = request.POST["notes"]
        team.save()
        return HttpResponseRedirect('/app/teams/edit?team=%s' % team_id)

    if request.method == "POST":
        return save_notes()
    return show_editor()
def create_tournament(request):
    """GET: show the tournament-creation form plus past/current tournaments.
    POST: create a tournament and register the selected teams, atomically."""

    def show_form():
        context = Context({
            'teams': get_team_groups(),
            "in_progress": Tournament.objects.filter(completed=False),
            "finished": Tournament.objects.filter(completed=True),
        })
        template = loader.get_template('tournament/create_tournament.html')
        return HttpResponse(template.render(context))

    @transaction.commit_on_success
    def create():
        tournament = Tournament()
        tournament.completed = False
        tournament.save()
        # Blank entries come from unused participant slots in the form.
        for team_id in request.POST.getlist('participant'):
            if team_id == "":
                continue
            entry = TournamentTeam()
            entry.tournament = tournament
            entry.team = Team.objects.get(id=team_id)
            entry.save()
        return HttpResponseRedirect('/app/tournament/matchups?tournament=%s' % str(tournament.id))

    if request.method == "POST":
        return create()
    return show_form()
def view_tournament(request):
    """GET: render the bracket view for a tournament.
    POST: pair two not-yet-seeded teams into a new first-round match."""
    def get():
        tourney = Tournament.objects.get(id=request.GET["tournament"])
        # Split registered teams into those already seeded into a bracket
        # slot (matchup_index set) and those still waiting to be paired.
        pending_teams = []
        teams = []
        for team in tourney.tourney_team_set.all():
            if team.matchup_index == None:
                pending_teams.append(team.team)
            else:
                teams.append(team.team)
        # Bracket skeleton for an 8-team single-elimination tournament:
        # 4 quarter-final slots, 2 semi-final slots, 1 final.  Slots are
        # pre-filled with their integer index and overwritten below by the
        # matches that exist so far; the template distinguishes the two.
        matches = [[i for i in range(0,4)],[i for i in range(0,2)],[0]]
        for match in tourney.tourney_match_set.all():
            matches[match.round][match.index] = match
        model = Context({ "pending_teams": pending_teams, "teams": teams, "matches": matches, "tourney": tourney})
        t = loader.get_template('tournament/view_tournament.html')
        return HttpResponse(t.render(model))
    @transaction.commit_on_success
    def post():
        tourney_id = request.GET["tournament"]
        tourney = Tournament.objects.get(id=tourney_id)
        # "versus" carries the two team ids chosen in the pairing form;
        # blank entries from unused form slots are dropped.
        versus = request.POST.getlist("versus")
        teams = []
        for team_id in versus:
            if team_id != "":
                teams.append(Team.objects.get(id=team_id))
        existing_matches = TournamentMatchup.objects.filter(tournament=tourney)
        # Create the pairing itself ...
        match = Matchup()
        match.team1 = teams[0]
        match.team2 = teams[1]
        match.save()
        # ... and its first-round bracket slot; the new match takes the next
        # free index in round 0.
        tourney_match = TournamentMatchup()
        tourney_match.tournament = tourney
        tourney_match.matchup = match
        tourney_match.round = 0
        tourney_match.index = existing_matches.count()
        tourney_match.save()
        # Record each team's bracket position: match index i occupies the
        # two leaf slots 2*i (team1) and 2*i+1 (team2).
        tourney_teams = []
        tourney_teams.append(TournamentTeam.objects.filter(tournament=tourney).filter(team=teams[0]).get())
        tourney_teams.append(TournamentTeam.objects.filter(tournament=tourney).filter(team=teams[1]).get())
        tourney_teams[0].matchup_index = tourney_match.index * 2
        tourney_teams[1].matchup_index = tourney_match.index * 2 + 1
        tourney_teams[0].save();
        tourney_teams[1].save();
        return HttpResponseRedirect("/app/tournament/matchups?tournament=%s" % tourney_id)
    if request.method == "POST":
        return post()
    else:
        return get()
def result_tournament(request):
    """POST: record the winner of a tournament match and advance the bracket.

    Non-POST requests simply bounce back to the tournament view.
    """
    @transaction.commit_on_success
    def post():
        tournament_match_id = request.GET['tournament_match_key']
        match = TournamentMatchup.objects.get(id=tournament_match_id)
        winner_id = int(request.POST['winner'])
        matchup = match.matchup
        result = MatchResult()
        # The posted winner id must be one of the two teams in the matchup;
        # anything else indicates a forged/stale form and aborts the
        # transaction.
        if winner_id == matchup.team1.id:
            result.winner = matchup.team1
            result.loser = matchup.team2
        elif winner_id == matchup.team2.id:
            result.winner = matchup.team2
            result.loser = matchup.team1
        else:
            raise Exception("could not determine winner key: %s (%s, %s)" % (winner_id, matchup.team1.id, matchup.team2.id))
        update_stats(result.winner, result.loser)
        result.save()
        # 8-team bracket: quarter-final slots 0/1 feed semi-final slot 0,
        # slots 2/3 feed semi-final slot 1 (and semis 0/1 feed final slot 0).
        next_round_indices = {0:0, 1:0, 2:1, 3:1}
        next_round_index = next_round_indices[match.index]
        next_round = match.round + 1
        if match.round < 2:
            # look in existing matches for this winner's opponent: if the
            # other feeder match already finished, its winner created the
            # next-round slot and we fill the second seat; otherwise we
            # create the slot with this winner as team1.
            existing = TournamentMatchup.objects.filter(tournament=match.tournament).filter(round=next_round).filter(index=next_round_index)
            if existing.count() == 1:
                next_match = existing[0]
                next_matchup = next_match.matchup
                next_matchup.team2 = result.winner
                next_matchup.save()
            elif existing.count() == 0:
                next_match = TournamentMatchup()
                next_matchup = Matchup()
                next_matchup.team1 = result.winner
                next_matchup.save()
                next_match.tournament = match.tournament
                next_match.round = next_round
                next_match.index = next_round_index
                next_match.matchup = next_matchup
                next_match.save()
        else:
            # Round 2 is the final: the tournament is over.
            tourney = match.tournament
            tourney.completed = True
            tourney.winner = result.winner
            tourney.save()
        # Swap the pending matchup for the recorded result on this slot.
        match.matchup.delete()
        match.matchup = None
        match.result = result
        match.save()
        return HttpResponseRedirect("/app/tournament/matchups?tournament=%s" % match.tournament.id)
    if request.method == "POST":
        return post()
    else:
        return HttpResponseRedirect("/app/tournament/matchups?tournament=%s" % request.GET["tournament"])
def result_detail(request):
    """Render the detail page for a single recorded match result."""
    match = MatchResult.objects.get(id=request.GET['match'])
    context = Context({'match': match})
    return HttpResponse(loader.get_template('result_detail.html').render(context))
def get_team_groups():
    """Return all teams grouped by leader.

    Returns a list of lists: one inner list per leader, ordered by leader
    name, with each leader's teams ordered by team id.
    """
    by_leader = {}
    for team in Team.objects.all():
        by_leader.setdefault(team.leader, []).append(team)
    # key= functions instead of the Python-2-only positional cmp comparators:
    # same ordering, clearer, and forward-compatible (cmp and the cmp
    # argument to sorted() were removed in Python 3).
    return [sorted(by_leader[leader], key=lambda t: t.id)
            for leader in sorted(by_leader.keys(), key=lambda l: l.name)]
def update_stats(winner, loser):
    """Record a match outcome in the pairwise-statistics table.

    Increments the winner's win count and the loser's loss count, and
    creates or updates the MatchupStatistics row for this team pair
    (the pair may be stored in either team1/team2 orientation).

    Returns a (created, updated) pair: (1, 0) if a new statistics row was
    created, (0, 1) if an existing row was updated, (0, 0) if the database
    held more than one row for the pair (inconsistent state; logged, no
    counts are changed).
    """
    existing = MatchupStatistics.objects.filter(Q(team1__in=[winner.id, loser.id]) & Q(team2__in=[winner.id, loser.id]))
    count = existing.count()
    if count == 0:
        # First recorded meeting of this pair: new row with the winner as team1.
        stats = MatchupStatistics()
        stats.team1 = winner
        stats.team2 = loser
        stats.team1_wins = 1
        stats.team2_wins = 0
        created, updated = 1, 0
    elif count == 1:
        # existing[0] instead of existing.fetch(1)[0]: Django QuerySets have
        # no fetch() method (that is a Google App Engine API), so the old
        # code raised AttributeError here.
        stats = existing[0]
        # Bump whichever side of the stored pair matches the winner.
        if stats.team1.id == winner.id:
            stats.team1_wins = stats.team1_wins + 1
        else:
            stats.team2_wins = stats.team2_wins + 1
        created, updated = 0, 1
    else:
        logging.error("unexpected state: %s matchup statistics for the same team pair (expected 1)" % count)
        return (0, 0)
    winner.wins = winner.wins + 1
    loser.losses = loser.losses + 1
    stats.save()
    winner.save()
    loser.save()
    return (created, updated)
|
flexible
|
{
"blob_id": "f66f82c5c2842fc4fcae2251d4a16a9850230041",
"index": 3547,
"step-1": "<mask token>\n\n\ndef edit_team(request):\n\n def get():\n team_id = request.GET['team']\n team = Team.objects.get(id=team_id)\n model = Context({'team': team})\n t = loader.get_template('edit_team.html')\n return HttpResponse(t.render(model))\n\n def post():\n new_notes = request.POST['notes']\n team_id = request.POST['team']\n team = Team.objects.get(id=team_id)\n team.notes = new_notes\n team.save()\n return HttpResponseRedirect('/app/teams/edit?team=%s' % team_id)\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef create_tournament(request):\n\n def get():\n inprogress = Tournament.objects.filter(completed=False)\n finished = Tournament.objects.filter(completed=True)\n model = Context({'teams': get_team_groups(), 'in_progress':\n inprogress, 'finished': finished})\n t = loader.get_template('tournament/create_tournament.html')\n return HttpResponse(t.render(model))\n\n @transaction.commit_on_success\n def post():\n tournament = Tournament()\n tournament.completed = False\n tournament.save()\n for team_id in request.POST.getlist('participant'):\n if team_id != '':\n team = Team.objects.get(id=team_id)\n tourney_team = TournamentTeam()\n tourney_team.tournament = tournament\n tourney_team.team = team\n tourney_team.save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % str(tournament.id))\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef view_tournament(request):\n\n def get():\n tourney = Tournament.objects.get(id=request.GET['tournament'])\n pending_teams = []\n teams = []\n for team in tourney.tourney_team_set.all():\n if team.matchup_index == None:\n pending_teams.append(team.team)\n else:\n teams.append(team.team)\n matches = [[i for i in range(0, 4)], [i for i in range(0, 2)], [0]]\n for match in tourney.tourney_match_set.all():\n matches[match.round][match.index] = match\n model = Context({'pending_teams': pending_teams, 'teams': teams,\n 'matches': matches, 
'tourney': tourney})\n t = loader.get_template('tournament/view_tournament.html')\n return HttpResponse(t.render(model))\n\n @transaction.commit_on_success\n def post():\n tourney_id = request.GET['tournament']\n tourney = Tournament.objects.get(id=tourney_id)\n versus = request.POST.getlist('versus')\n teams = []\n for team_id in versus:\n if team_id != '':\n teams.append(Team.objects.get(id=team_id))\n existing_matches = TournamentMatchup.objects.filter(tournament=tourney)\n match = Matchup()\n match.team1 = teams[0]\n match.team2 = teams[1]\n match.save()\n tourney_match = TournamentMatchup()\n tourney_match.tournament = tourney\n tourney_match.matchup = match\n tourney_match.round = 0\n tourney_match.index = existing_matches.count()\n tourney_match.save()\n tourney_teams = []\n tourney_teams.append(TournamentTeam.objects.filter(tournament=\n tourney).filter(team=teams[0]).get())\n tourney_teams.append(TournamentTeam.objects.filter(tournament=\n tourney).filter(team=teams[1]).get())\n tourney_teams[0].matchup_index = tourney_match.index * 2\n tourney_teams[1].matchup_index = tourney_match.index * 2 + 1\n tourney_teams[0].save()\n tourney_teams[1].save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % tourney_id)\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef result_tournament(request):\n\n @transaction.commit_on_success\n def post():\n tournament_match_id = request.GET['tournament_match_key']\n match = TournamentMatchup.objects.get(id=tournament_match_id)\n winner_id = int(request.POST['winner'])\n matchup = match.matchup\n result = MatchResult()\n if winner_id == matchup.team1.id:\n result.winner = matchup.team1\n result.loser = matchup.team2\n elif winner_id == matchup.team2.id:\n result.winner = matchup.team2\n result.loser = matchup.team1\n else:\n raise Exception('could not determine winner key: %s (%s, %s)' %\n (winner_id, matchup.team1.id, matchup.team2.id))\n update_stats(result.winner, 
result.loser)\n result.save()\n next_round_indices = {(0): 0, (1): 0, (2): 1, (3): 1}\n next_round_index = next_round_indices[match.index]\n next_round = match.round + 1\n if match.round < 2:\n existing = TournamentMatchup.objects.filter(tournament=match.\n tournament).filter(round=next_round).filter(index=\n next_round_index)\n if existing.count() == 1:\n next_match = existing[0]\n next_matchup = next_match.matchup\n next_matchup.team2 = result.winner\n next_matchup.save()\n elif existing.count() == 0:\n next_match = TournamentMatchup()\n next_matchup = Matchup()\n next_matchup.team1 = result.winner\n next_matchup.save()\n next_match.tournament = match.tournament\n next_match.round = next_round\n next_match.index = next_round_index\n next_match.matchup = next_matchup\n next_match.save()\n else:\n tourney = match.tournament\n tourney.completed = True\n tourney.winner = result.winner\n tourney.save()\n match.matchup.delete()\n match.matchup = None\n match.result = result\n match.save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % match.tournament.id)\n if request.method == 'POST':\n return post()\n else:\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % request.GET[\n 'tournament'])\n\n\n<mask token>\n\n\ndef get_team_groups():\n teams = Team.objects.all()\n team_groups = {}\n for team in teams:\n if not team.leader in team_groups:\n team_groups[team.leader] = []\n team_groups[team.leader].append(team)\n team_groups = [sorted(team_groups[k], lambda x, y: cmp(x.id, y.id)) for\n k in sorted(team_groups.keys(), lambda x, y: cmp(x.name, y.name))]\n return team_groups\n\n\ndef update_stats(winner, loser):\n existing = MatchupStatistics.objects.filter(Q(team1__in=[winner.id,\n loser.id]) & Q(team2__in=[winner.id, loser.id]))\n stats = None\n if existing.count() == 0:\n newStats = MatchupStatistics()\n newStats.team1 = winner\n newStats.team2 = loser\n newStats.team1_wins = 1\n newStats.team2_wins = 0\n winner.wins = 
winner.wins + 1\n loser.losses = loser.losses + 1\n newStats.save()\n winner.save()\n loser.save()\n return 1, 0\n elif existing.count() == 1:\n oldStats = existing.fetch(1)[0]\n if oldStats.team1.id == winner.id:\n oldStats.team1_wins = oldStats.team1_wins + 1\n else:\n oldStats.team2_wins = oldStats.team2_wins + 1\n winner.wins = winner.wins + 1\n loser.losses = loser.losses + 1\n oldStats.save()\n winner.save()\n loser.save()\n return 0, 1\n else:\n logging.error(\n 'unexpected state: %s matchup statistics for the same team pair (expected 1)'\n % existing.count())\n return 0, 0\n",
"step-2": "<mask token>\n\n\ndef index(request):\n model = Context({})\n t = loader.get_template('index.html')\n return HttpResponse(t.render(model))\n\n\n<mask token>\n\n\ndef edit_team(request):\n\n def get():\n team_id = request.GET['team']\n team = Team.objects.get(id=team_id)\n model = Context({'team': team})\n t = loader.get_template('edit_team.html')\n return HttpResponse(t.render(model))\n\n def post():\n new_notes = request.POST['notes']\n team_id = request.POST['team']\n team = Team.objects.get(id=team_id)\n team.notes = new_notes\n team.save()\n return HttpResponseRedirect('/app/teams/edit?team=%s' % team_id)\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef create_tournament(request):\n\n def get():\n inprogress = Tournament.objects.filter(completed=False)\n finished = Tournament.objects.filter(completed=True)\n model = Context({'teams': get_team_groups(), 'in_progress':\n inprogress, 'finished': finished})\n t = loader.get_template('tournament/create_tournament.html')\n return HttpResponse(t.render(model))\n\n @transaction.commit_on_success\n def post():\n tournament = Tournament()\n tournament.completed = False\n tournament.save()\n for team_id in request.POST.getlist('participant'):\n if team_id != '':\n team = Team.objects.get(id=team_id)\n tourney_team = TournamentTeam()\n tourney_team.tournament = tournament\n tourney_team.team = team\n tourney_team.save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % str(tournament.id))\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef view_tournament(request):\n\n def get():\n tourney = Tournament.objects.get(id=request.GET['tournament'])\n pending_teams = []\n teams = []\n for team in tourney.tourney_team_set.all():\n if team.matchup_index == None:\n pending_teams.append(team.team)\n else:\n teams.append(team.team)\n matches = [[i for i in range(0, 4)], [i for i in range(0, 2)], [0]]\n for match in 
tourney.tourney_match_set.all():\n matches[match.round][match.index] = match\n model = Context({'pending_teams': pending_teams, 'teams': teams,\n 'matches': matches, 'tourney': tourney})\n t = loader.get_template('tournament/view_tournament.html')\n return HttpResponse(t.render(model))\n\n @transaction.commit_on_success\n def post():\n tourney_id = request.GET['tournament']\n tourney = Tournament.objects.get(id=tourney_id)\n versus = request.POST.getlist('versus')\n teams = []\n for team_id in versus:\n if team_id != '':\n teams.append(Team.objects.get(id=team_id))\n existing_matches = TournamentMatchup.objects.filter(tournament=tourney)\n match = Matchup()\n match.team1 = teams[0]\n match.team2 = teams[1]\n match.save()\n tourney_match = TournamentMatchup()\n tourney_match.tournament = tourney\n tourney_match.matchup = match\n tourney_match.round = 0\n tourney_match.index = existing_matches.count()\n tourney_match.save()\n tourney_teams = []\n tourney_teams.append(TournamentTeam.objects.filter(tournament=\n tourney).filter(team=teams[0]).get())\n tourney_teams.append(TournamentTeam.objects.filter(tournament=\n tourney).filter(team=teams[1]).get())\n tourney_teams[0].matchup_index = tourney_match.index * 2\n tourney_teams[1].matchup_index = tourney_match.index * 2 + 1\n tourney_teams[0].save()\n tourney_teams[1].save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % tourney_id)\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef result_tournament(request):\n\n @transaction.commit_on_success\n def post():\n tournament_match_id = request.GET['tournament_match_key']\n match = TournamentMatchup.objects.get(id=tournament_match_id)\n winner_id = int(request.POST['winner'])\n matchup = match.matchup\n result = MatchResult()\n if winner_id == matchup.team1.id:\n result.winner = matchup.team1\n result.loser = matchup.team2\n elif winner_id == matchup.team2.id:\n result.winner = matchup.team2\n result.loser = 
matchup.team1\n else:\n raise Exception('could not determine winner key: %s (%s, %s)' %\n (winner_id, matchup.team1.id, matchup.team2.id))\n update_stats(result.winner, result.loser)\n result.save()\n next_round_indices = {(0): 0, (1): 0, (2): 1, (3): 1}\n next_round_index = next_round_indices[match.index]\n next_round = match.round + 1\n if match.round < 2:\n existing = TournamentMatchup.objects.filter(tournament=match.\n tournament).filter(round=next_round).filter(index=\n next_round_index)\n if existing.count() == 1:\n next_match = existing[0]\n next_matchup = next_match.matchup\n next_matchup.team2 = result.winner\n next_matchup.save()\n elif existing.count() == 0:\n next_match = TournamentMatchup()\n next_matchup = Matchup()\n next_matchup.team1 = result.winner\n next_matchup.save()\n next_match.tournament = match.tournament\n next_match.round = next_round\n next_match.index = next_round_index\n next_match.matchup = next_matchup\n next_match.save()\n else:\n tourney = match.tournament\n tourney.completed = True\n tourney.winner = result.winner\n tourney.save()\n match.matchup.delete()\n match.matchup = None\n match.result = result\n match.save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % match.tournament.id)\n if request.method == 'POST':\n return post()\n else:\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % request.GET[\n 'tournament'])\n\n\n<mask token>\n\n\ndef get_team_groups():\n teams = Team.objects.all()\n team_groups = {}\n for team in teams:\n if not team.leader in team_groups:\n team_groups[team.leader] = []\n team_groups[team.leader].append(team)\n team_groups = [sorted(team_groups[k], lambda x, y: cmp(x.id, y.id)) for\n k in sorted(team_groups.keys(), lambda x, y: cmp(x.name, y.name))]\n return team_groups\n\n\ndef update_stats(winner, loser):\n existing = MatchupStatistics.objects.filter(Q(team1__in=[winner.id,\n loser.id]) & Q(team2__in=[winner.id, loser.id]))\n stats = None\n if 
existing.count() == 0:\n newStats = MatchupStatistics()\n newStats.team1 = winner\n newStats.team2 = loser\n newStats.team1_wins = 1\n newStats.team2_wins = 0\n winner.wins = winner.wins + 1\n loser.losses = loser.losses + 1\n newStats.save()\n winner.save()\n loser.save()\n return 1, 0\n elif existing.count() == 1:\n oldStats = existing.fetch(1)[0]\n if oldStats.team1.id == winner.id:\n oldStats.team1_wins = oldStats.team1_wins + 1\n else:\n oldStats.team2_wins = oldStats.team2_wins + 1\n winner.wins = winner.wins + 1\n loser.losses = loser.losses + 1\n oldStats.save()\n winner.save()\n loser.save()\n return 0, 1\n else:\n logging.error(\n 'unexpected state: %s matchup statistics for the same team pair (expected 1)'\n % existing.count())\n return 0, 0\n",
"step-3": "<mask token>\n\n\ndef index(request):\n model = Context({})\n t = loader.get_template('index.html')\n return HttpResponse(t.render(model))\n\n\ndef create_team(request):\n\n def get():\n heroes = Mercenary.objects.filter(type='HERO')\n pawns = Mercenary.objects.filter(type='PAWN')\n model = Context({'heroes': heroes, 'pawns': pawns, 'mercrange':\n range(1, 7), 'teams': get_team_groups()})\n t = loader.get_template('teams.html')\n return HttpResponse(t.render(model))\n\n def post():\n team = Team()\n class_c = request.POST['hero']\n leader = Mercenary.objects.filter(type='HERO').filter(name=class_c)\n team.leader = leader[0]\n team.wins = 0\n team.losses = 0\n team.notes = ''\n team.save()\n for i in range(1, 10):\n who = request.POST['pawn%s' % i]\n if who != '':\n merc = Mercenary.objects.filter(type='PAWN').filter(name=who)\n current = TeamMember()\n current.team = team\n current.merc = merc[0]\n current.location = i\n current.save()\n return HttpResponseRedirect('/app/teams')\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef edit_team(request):\n\n def get():\n team_id = request.GET['team']\n team = Team.objects.get(id=team_id)\n model = Context({'team': team})\n t = loader.get_template('edit_team.html')\n return HttpResponse(t.render(model))\n\n def post():\n new_notes = request.POST['notes']\n team_id = request.POST['team']\n team = Team.objects.get(id=team_id)\n team.notes = new_notes\n team.save()\n return HttpResponseRedirect('/app/teams/edit?team=%s' % team_id)\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef create_tournament(request):\n\n def get():\n inprogress = Tournament.objects.filter(completed=False)\n finished = Tournament.objects.filter(completed=True)\n model = Context({'teams': get_team_groups(), 'in_progress':\n inprogress, 'finished': finished})\n t = loader.get_template('tournament/create_tournament.html')\n return HttpResponse(t.render(model))\n\n 
@transaction.commit_on_success\n def post():\n tournament = Tournament()\n tournament.completed = False\n tournament.save()\n for team_id in request.POST.getlist('participant'):\n if team_id != '':\n team = Team.objects.get(id=team_id)\n tourney_team = TournamentTeam()\n tourney_team.tournament = tournament\n tourney_team.team = team\n tourney_team.save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % str(tournament.id))\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef view_tournament(request):\n\n def get():\n tourney = Tournament.objects.get(id=request.GET['tournament'])\n pending_teams = []\n teams = []\n for team in tourney.tourney_team_set.all():\n if team.matchup_index == None:\n pending_teams.append(team.team)\n else:\n teams.append(team.team)\n matches = [[i for i in range(0, 4)], [i for i in range(0, 2)], [0]]\n for match in tourney.tourney_match_set.all():\n matches[match.round][match.index] = match\n model = Context({'pending_teams': pending_teams, 'teams': teams,\n 'matches': matches, 'tourney': tourney})\n t = loader.get_template('tournament/view_tournament.html')\n return HttpResponse(t.render(model))\n\n @transaction.commit_on_success\n def post():\n tourney_id = request.GET['tournament']\n tourney = Tournament.objects.get(id=tourney_id)\n versus = request.POST.getlist('versus')\n teams = []\n for team_id in versus:\n if team_id != '':\n teams.append(Team.objects.get(id=team_id))\n existing_matches = TournamentMatchup.objects.filter(tournament=tourney)\n match = Matchup()\n match.team1 = teams[0]\n match.team2 = teams[1]\n match.save()\n tourney_match = TournamentMatchup()\n tourney_match.tournament = tourney\n tourney_match.matchup = match\n tourney_match.round = 0\n tourney_match.index = existing_matches.count()\n tourney_match.save()\n tourney_teams = []\n tourney_teams.append(TournamentTeam.objects.filter(tournament=\n tourney).filter(team=teams[0]).get())\n 
tourney_teams.append(TournamentTeam.objects.filter(tournament=\n tourney).filter(team=teams[1]).get())\n tourney_teams[0].matchup_index = tourney_match.index * 2\n tourney_teams[1].matchup_index = tourney_match.index * 2 + 1\n tourney_teams[0].save()\n tourney_teams[1].save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % tourney_id)\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef result_tournament(request):\n\n @transaction.commit_on_success\n def post():\n tournament_match_id = request.GET['tournament_match_key']\n match = TournamentMatchup.objects.get(id=tournament_match_id)\n winner_id = int(request.POST['winner'])\n matchup = match.matchup\n result = MatchResult()\n if winner_id == matchup.team1.id:\n result.winner = matchup.team1\n result.loser = matchup.team2\n elif winner_id == matchup.team2.id:\n result.winner = matchup.team2\n result.loser = matchup.team1\n else:\n raise Exception('could not determine winner key: %s (%s, %s)' %\n (winner_id, matchup.team1.id, matchup.team2.id))\n update_stats(result.winner, result.loser)\n result.save()\n next_round_indices = {(0): 0, (1): 0, (2): 1, (3): 1}\n next_round_index = next_round_indices[match.index]\n next_round = match.round + 1\n if match.round < 2:\n existing = TournamentMatchup.objects.filter(tournament=match.\n tournament).filter(round=next_round).filter(index=\n next_round_index)\n if existing.count() == 1:\n next_match = existing[0]\n next_matchup = next_match.matchup\n next_matchup.team2 = result.winner\n next_matchup.save()\n elif existing.count() == 0:\n next_match = TournamentMatchup()\n next_matchup = Matchup()\n next_matchup.team1 = result.winner\n next_matchup.save()\n next_match.tournament = match.tournament\n next_match.round = next_round\n next_match.index = next_round_index\n next_match.matchup = next_matchup\n next_match.save()\n else:\n tourney = match.tournament\n tourney.completed = True\n tourney.winner = result.winner\n 
tourney.save()\n match.matchup.delete()\n match.matchup = None\n match.result = result\n match.save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % match.tournament.id)\n if request.method == 'POST':\n return post()\n else:\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % request.GET[\n 'tournament'])\n\n\ndef result_detail(request):\n result_id = request.GET['match']\n match = MatchResult.objects.get(id=result_id)\n model = Context({'match': match})\n t = loader.get_template('result_detail.html')\n return HttpResponse(t.render(model))\n\n\ndef get_team_groups():\n teams = Team.objects.all()\n team_groups = {}\n for team in teams:\n if not team.leader in team_groups:\n team_groups[team.leader] = []\n team_groups[team.leader].append(team)\n team_groups = [sorted(team_groups[k], lambda x, y: cmp(x.id, y.id)) for\n k in sorted(team_groups.keys(), lambda x, y: cmp(x.name, y.name))]\n return team_groups\n\n\ndef update_stats(winner, loser):\n existing = MatchupStatistics.objects.filter(Q(team1__in=[winner.id,\n loser.id]) & Q(team2__in=[winner.id, loser.id]))\n stats = None\n if existing.count() == 0:\n newStats = MatchupStatistics()\n newStats.team1 = winner\n newStats.team2 = loser\n newStats.team1_wins = 1\n newStats.team2_wins = 0\n winner.wins = winner.wins + 1\n loser.losses = loser.losses + 1\n newStats.save()\n winner.save()\n loser.save()\n return 1, 0\n elif existing.count() == 1:\n oldStats = existing.fetch(1)[0]\n if oldStats.team1.id == winner.id:\n oldStats.team1_wins = oldStats.team1_wins + 1\n else:\n oldStats.team2_wins = oldStats.team2_wins + 1\n winner.wins = winner.wins + 1\n loser.losses = loser.losses + 1\n oldStats.save()\n winner.save()\n loser.save()\n return 0, 1\n else:\n logging.error(\n 'unexpected state: %s matchup statistics for the same team pair (expected 1)'\n % existing.count())\n return 0, 0\n",
"step-4": "from django.http import HttpResponse, HttpResponseRedirect\nfrom django.template import Context, loader\nfrom django.db import transaction\nfrom django.db.models import Q\nfrom maximus.models import Mercenary, Team, TeamMember, Tournament, TournamentTeam, TournamentMatchup, Matchup, MatchupStatistics, MatchResult\n\n\ndef index(request):\n model = Context({})\n t = loader.get_template('index.html')\n return HttpResponse(t.render(model))\n\n\ndef create_team(request):\n\n def get():\n heroes = Mercenary.objects.filter(type='HERO')\n pawns = Mercenary.objects.filter(type='PAWN')\n model = Context({'heroes': heroes, 'pawns': pawns, 'mercrange':\n range(1, 7), 'teams': get_team_groups()})\n t = loader.get_template('teams.html')\n return HttpResponse(t.render(model))\n\n def post():\n team = Team()\n class_c = request.POST['hero']\n leader = Mercenary.objects.filter(type='HERO').filter(name=class_c)\n team.leader = leader[0]\n team.wins = 0\n team.losses = 0\n team.notes = ''\n team.save()\n for i in range(1, 10):\n who = request.POST['pawn%s' % i]\n if who != '':\n merc = Mercenary.objects.filter(type='PAWN').filter(name=who)\n current = TeamMember()\n current.team = team\n current.merc = merc[0]\n current.location = i\n current.save()\n return HttpResponseRedirect('/app/teams')\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef edit_team(request):\n\n def get():\n team_id = request.GET['team']\n team = Team.objects.get(id=team_id)\n model = Context({'team': team})\n t = loader.get_template('edit_team.html')\n return HttpResponse(t.render(model))\n\n def post():\n new_notes = request.POST['notes']\n team_id = request.POST['team']\n team = Team.objects.get(id=team_id)\n team.notes = new_notes\n team.save()\n return HttpResponseRedirect('/app/teams/edit?team=%s' % team_id)\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef create_tournament(request):\n\n def get():\n inprogress = 
Tournament.objects.filter(completed=False)\n finished = Tournament.objects.filter(completed=True)\n model = Context({'teams': get_team_groups(), 'in_progress':\n inprogress, 'finished': finished})\n t = loader.get_template('tournament/create_tournament.html')\n return HttpResponse(t.render(model))\n\n @transaction.commit_on_success\n def post():\n tournament = Tournament()\n tournament.completed = False\n tournament.save()\n for team_id in request.POST.getlist('participant'):\n if team_id != '':\n team = Team.objects.get(id=team_id)\n tourney_team = TournamentTeam()\n tourney_team.tournament = tournament\n tourney_team.team = team\n tourney_team.save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % str(tournament.id))\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef view_tournament(request):\n\n def get():\n tourney = Tournament.objects.get(id=request.GET['tournament'])\n pending_teams = []\n teams = []\n for team in tourney.tourney_team_set.all():\n if team.matchup_index == None:\n pending_teams.append(team.team)\n else:\n teams.append(team.team)\n matches = [[i for i in range(0, 4)], [i for i in range(0, 2)], [0]]\n for match in tourney.tourney_match_set.all():\n matches[match.round][match.index] = match\n model = Context({'pending_teams': pending_teams, 'teams': teams,\n 'matches': matches, 'tourney': tourney})\n t = loader.get_template('tournament/view_tournament.html')\n return HttpResponse(t.render(model))\n\n @transaction.commit_on_success\n def post():\n tourney_id = request.GET['tournament']\n tourney = Tournament.objects.get(id=tourney_id)\n versus = request.POST.getlist('versus')\n teams = []\n for team_id in versus:\n if team_id != '':\n teams.append(Team.objects.get(id=team_id))\n existing_matches = TournamentMatchup.objects.filter(tournament=tourney)\n match = Matchup()\n match.team1 = teams[0]\n match.team2 = teams[1]\n match.save()\n tourney_match = TournamentMatchup()\n 
tourney_match.tournament = tourney\n tourney_match.matchup = match\n tourney_match.round = 0\n tourney_match.index = existing_matches.count()\n tourney_match.save()\n tourney_teams = []\n tourney_teams.append(TournamentTeam.objects.filter(tournament=\n tourney).filter(team=teams[0]).get())\n tourney_teams.append(TournamentTeam.objects.filter(tournament=\n tourney).filter(team=teams[1]).get())\n tourney_teams[0].matchup_index = tourney_match.index * 2\n tourney_teams[1].matchup_index = tourney_match.index * 2 + 1\n tourney_teams[0].save()\n tourney_teams[1].save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % tourney_id)\n if request.method == 'POST':\n return post()\n else:\n return get()\n\n\ndef result_tournament(request):\n\n @transaction.commit_on_success\n def post():\n tournament_match_id = request.GET['tournament_match_key']\n match = TournamentMatchup.objects.get(id=tournament_match_id)\n winner_id = int(request.POST['winner'])\n matchup = match.matchup\n result = MatchResult()\n if winner_id == matchup.team1.id:\n result.winner = matchup.team1\n result.loser = matchup.team2\n elif winner_id == matchup.team2.id:\n result.winner = matchup.team2\n result.loser = matchup.team1\n else:\n raise Exception('could not determine winner key: %s (%s, %s)' %\n (winner_id, matchup.team1.id, matchup.team2.id))\n update_stats(result.winner, result.loser)\n result.save()\n next_round_indices = {(0): 0, (1): 0, (2): 1, (3): 1}\n next_round_index = next_round_indices[match.index]\n next_round = match.round + 1\n if match.round < 2:\n existing = TournamentMatchup.objects.filter(tournament=match.\n tournament).filter(round=next_round).filter(index=\n next_round_index)\n if existing.count() == 1:\n next_match = existing[0]\n next_matchup = next_match.matchup\n next_matchup.team2 = result.winner\n next_matchup.save()\n elif existing.count() == 0:\n next_match = TournamentMatchup()\n next_matchup = Matchup()\n next_matchup.team1 = result.winner\n 
next_matchup.save()\n next_match.tournament = match.tournament\n next_match.round = next_round\n next_match.index = next_round_index\n next_match.matchup = next_matchup\n next_match.save()\n else:\n tourney = match.tournament\n tourney.completed = True\n tourney.winner = result.winner\n tourney.save()\n match.matchup.delete()\n match.matchup = None\n match.result = result\n match.save()\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % match.tournament.id)\n if request.method == 'POST':\n return post()\n else:\n return HttpResponseRedirect(\n '/app/tournament/matchups?tournament=%s' % request.GET[\n 'tournament'])\n\n\ndef result_detail(request):\n result_id = request.GET['match']\n match = MatchResult.objects.get(id=result_id)\n model = Context({'match': match})\n t = loader.get_template('result_detail.html')\n return HttpResponse(t.render(model))\n\n\ndef get_team_groups():\n teams = Team.objects.all()\n team_groups = {}\n for team in teams:\n if not team.leader in team_groups:\n team_groups[team.leader] = []\n team_groups[team.leader].append(team)\n team_groups = [sorted(team_groups[k], lambda x, y: cmp(x.id, y.id)) for\n k in sorted(team_groups.keys(), lambda x, y: cmp(x.name, y.name))]\n return team_groups\n\n\ndef update_stats(winner, loser):\n existing = MatchupStatistics.objects.filter(Q(team1__in=[winner.id,\n loser.id]) & Q(team2__in=[winner.id, loser.id]))\n stats = None\n if existing.count() == 0:\n newStats = MatchupStatistics()\n newStats.team1 = winner\n newStats.team2 = loser\n newStats.team1_wins = 1\n newStats.team2_wins = 0\n winner.wins = winner.wins + 1\n loser.losses = loser.losses + 1\n newStats.save()\n winner.save()\n loser.save()\n return 1, 0\n elif existing.count() == 1:\n oldStats = existing.fetch(1)[0]\n if oldStats.team1.id == winner.id:\n oldStats.team1_wins = oldStats.team1_wins + 1\n else:\n oldStats.team2_wins = oldStats.team2_wins + 1\n winner.wins = winner.wins + 1\n loser.losses = loser.losses + 1\n 
oldStats.save()\n winner.save()\n loser.save()\n return 0, 1\n else:\n logging.error(\n 'unexpected state: %s matchup statistics for the same team pair (expected 1)'\n % existing.count())\n return 0, 0\n",
"step-5": "# Create your views here.\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom django.template import Context, loader\nfrom django.db import transaction\nfrom django.db.models import Q\n\nfrom maximus.models import Mercenary, Team, TeamMember, Tournament, TournamentTeam, TournamentMatchup, Matchup, MatchupStatistics, MatchResult\n\ndef index(request):\n model = Context({})\n t = loader.get_template('index.html')\n return HttpResponse(t.render(model))\n\ndef create_team(request): \n def get():\n heroes = Mercenary.objects.filter(type='HERO')\n pawns = Mercenary.objects.filter(type='PAWN')\n\n model = Context({ 'heroes': heroes, 'pawns': pawns, 'mercrange': range(1,7), 'teams': get_team_groups() })\n t = loader.get_template('teams.html')\n return HttpResponse(t.render(model))\n \n def post():\n team = Team()\n class_c = request.POST['hero']\n leader = Mercenary.objects.filter(type='HERO').filter(name=class_c)\n team.leader = leader[0]\n team.wins = 0\n team.losses = 0\n team.notes = \"\"\n team.save()\n for i in range(1,10):\n who = request.POST['pawn%s' % i]\n if who != '':\n merc = Mercenary.objects.filter(type='PAWN').filter(name=who)\n current = TeamMember()\n current.team = team\n current.merc = merc[0]\n current.location = i\n current.save()\n \n return HttpResponseRedirect('/app/teams')\n \n if request.method == \"POST\":\n return post()\n else:\n return get()\n \ndef edit_team(request):\n def get():\n team_id = request.GET[\"team\"]\n team = Team.objects.get(id=team_id)\n \n model = Context({ 'team': team })\n t = loader.get_template('edit_team.html')\n return HttpResponse(t.render(model))\n \n def post():\n new_notes = request.POST[\"notes\"]\n team_id = request.POST[\"team\"]\n \n team = Team.objects.get(id=team_id)\n team.notes = new_notes\n team.save()\n return HttpResponseRedirect('/app/teams/edit?team=%s' % team_id)\n \n if request.method == \"POST\":\n return post()\n else:\n return get() \n\ndef create_tournament(request):\n 
def get():\n inprogress = Tournament.objects.filter(completed=False);\n finished = Tournament.objects.filter(completed=True);\n model = Context({ 'teams': get_team_groups(), \"in_progress\": inprogress, \"finished\": finished })\n t = loader.get_template('tournament/create_tournament.html')\n return HttpResponse(t.render(model))\n \n @transaction.commit_on_success\n def post():\n tournament = Tournament()\n tournament.completed = False\n \n tournament.save()\n for team_id in request.POST.getlist('participant'):\n if team_id != \"\":\n team = Team.objects.get(id=team_id)\n tourney_team = TournamentTeam()\n tourney_team.tournament = tournament\n tourney_team.team = team\n tourney_team.save()\n \n return HttpResponseRedirect('/app/tournament/matchups?tournament=%s' % str(tournament.id))\n \n if request.method == \"POST\":\n return post()\n else:\n return get() \n\ndef view_tournament(request):\n def get():\n tourney = Tournament.objects.get(id=request.GET[\"tournament\"])\n pending_teams = []\n teams = []\n for team in tourney.tourney_team_set.all():\n if team.matchup_index == None:\n pending_teams.append(team.team)\n else:\n teams.append(team.team) \n matches = [[i for i in range(0,4)],[i for i in range(0,2)],[0]]\n for match in tourney.tourney_match_set.all():\n matches[match.round][match.index] = match\n \n model = Context({ \"pending_teams\": pending_teams, \"teams\": teams, \"matches\": matches, \"tourney\": tourney})\n \n t = loader.get_template('tournament/view_tournament.html')\n return HttpResponse(t.render(model))\n \n @transaction.commit_on_success\n def post():\n tourney_id = request.GET[\"tournament\"]\n tourney = Tournament.objects.get(id=tourney_id)\n versus = request.POST.getlist(\"versus\")\n teams = []\n for team_id in versus:\n if team_id != \"\":\n teams.append(Team.objects.get(id=team_id))\n \n existing_matches = TournamentMatchup.objects.filter(tournament=tourney)\n \n match = Matchup()\n match.team1 = teams[0]\n match.team2 = teams[1]\n 
match.save()\n \n tourney_match = TournamentMatchup()\n tourney_match.tournament = tourney\n tourney_match.matchup = match\n tourney_match.round = 0\n tourney_match.index = existing_matches.count()\n tourney_match.save()\n \n tourney_teams = []\n tourney_teams.append(TournamentTeam.objects.filter(tournament=tourney).filter(team=teams[0]).get())\n tourney_teams.append(TournamentTeam.objects.filter(tournament=tourney).filter(team=teams[1]).get())\n \n tourney_teams[0].matchup_index = tourney_match.index * 2\n tourney_teams[1].matchup_index = tourney_match.index * 2 + 1\n \n tourney_teams[0].save();\n tourney_teams[1].save();\n \n return HttpResponseRedirect(\"/app/tournament/matchups?tournament=%s\" % tourney_id)\n \n if request.method == \"POST\":\n return post()\n else:\n return get()\n\ndef result_tournament(request):\n @transaction.commit_on_success\n def post():\n tournament_match_id = request.GET['tournament_match_key']\n match = TournamentMatchup.objects.get(id=tournament_match_id)\n\n winner_id = int(request.POST['winner'])\n matchup = match.matchup\n result = MatchResult()\n if winner_id == matchup.team1.id:\n result.winner = matchup.team1\n result.loser = matchup.team2\n elif winner_id == matchup.team2.id:\n result.winner = matchup.team2\n result.loser = matchup.team1\n else:\n raise Exception(\"could not determine winner key: %s (%s, %s)\" % (winner_id, matchup.team1.id, matchup.team2.id))\n \n update_stats(result.winner, result.loser)\n result.save()\n \n next_round_indices = {0:0, 1:0, 2:1, 3:1}\n next_round_index = next_round_indices[match.index]\n next_round = match.round + 1\n if match.round < 2:\n # look in existing matches for this winner's opponent\n existing = TournamentMatchup.objects.filter(tournament=match.tournament).filter(round=next_round).filter(index=next_round_index)\n if existing.count() == 1:\n next_match = existing[0]\n next_matchup = next_match.matchup\n next_matchup.team2 = result.winner\n next_matchup.save()\n elif existing.count() 
== 0:\n next_match = TournamentMatchup()\n next_matchup = Matchup()\n next_matchup.team1 = result.winner\n next_matchup.save()\n \n next_match.tournament = match.tournament\n next_match.round = next_round\n next_match.index = next_round_index\n next_match.matchup = next_matchup\n next_match.save()\n else:\n tourney = match.tournament\n tourney.completed = True\n tourney.winner = result.winner\n tourney.save()\n \n match.matchup.delete()\n match.matchup = None\n match.result = result\n match.save()\n \n return HttpResponseRedirect(\"/app/tournament/matchups?tournament=%s\" % match.tournament.id)\n \n if request.method == \"POST\":\n return post()\n else:\n return HttpResponseRedirect(\"/app/tournament/matchups?tournament=%s\" % request.GET[\"tournament\"]) \n\ndef result_detail(request):\n result_id = request.GET['match']\n match = MatchResult.objects.get(id=result_id)\n\n model = Context({ 'match': match })\n \n t = loader.get_template('result_detail.html')\n return HttpResponse(t.render(model))\n \ndef get_team_groups():\n teams = Team.objects.all()\n team_groups = { }\n for team in teams:\n if not team.leader in team_groups:\n team_groups[team.leader] = []\n team_groups[team.leader].append(team)\n \n team_groups = [sorted(team_groups[k], lambda x,y: cmp(x.id, y.id)) for k in sorted(team_groups.keys(), lambda x,y: cmp(x.name, y.name))]\n return team_groups\n\ndef update_stats(winner, loser):\n existing = MatchupStatistics.objects.filter(Q(team1__in=[winner.id, loser.id]) & Q(team2__in=[winner.id, loser.id]))\n stats = None\n if existing.count() == 0:\n newStats = MatchupStatistics()\n newStats.team1 = winner\n newStats.team2 = loser\n newStats.team1_wins = 1\n newStats.team2_wins = 0\n \n winner.wins = winner.wins + 1\n loser.losses = loser.losses + 1\n \n newStats.save()\n winner.save()\n loser.save()\n return (1, 0)\n elif existing.count() == 1:\n oldStats = existing.fetch(1)[0]\n if oldStats.team1.id == winner.id:\n oldStats.team1_wins = oldStats.team1_wins + 
1\n else:\n oldStats.team2_wins = oldStats.team2_wins + 1\n \n winner.wins = winner.wins + 1\n loser.losses = loser.losses + 1\n oldStats.save()\n winner.save()\n loser.save()\n \n return (0, 1)\n else:\n logging.error(\"unexpected state: %s matchup statistics for the same team pair (expected 1)\" % existing.count())\n return (0, 0)\n",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
<|reserved_special_token_0|>
class ModelBuilder:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def build_user(self, user_data):
user = User(name=user_data.nome, federal_tax_number=
FederalTaxNumber(user_data.cnpj), state_tax_number=
StateTaxNumber(user_data.inscricaoEstadual), status_number=
user_data.statusCodigo)
for contract_data in user_data.contratos:
self.build_contract(user, contract_data)
return user
def build_zip_address(self, zip_address_data):
zip_address = ZipAddress(id=zip_address_data.id, zip_code=
zip_address_data.cep, state=zip_address_data.uf, city=
zip_address_data.cidade, district=zip_address_data.bairro,
address=zip_address_data.end, complements=[zip_address_data.
complemento, zip_address_data.complemento2])
return zip_address
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def _load_invalid_event(self, tracking_code: TrackingCode, tracked_object):
event = NotFoundTrackingEvent(timestamp=datetime.now(), comment=
tracked_object.erro)
tracking_code.add_event(event)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def build_freights_list(self, response):
result = []
for service_data in response.cServico:
freight = self.build_freight(service_data=service_data)
result.append(freight)
return result
def build_freight(self, service_data):
data = {'service': Service.get(service_data.Codigo), 'error_code':
to_integer(service_data.Erro), 'delivery_time': int(
service_data.PrazoEntrega), 'value': to_decimal(service_data.
ValorSemAdicionais), 'declared_value': to_decimal(service_data.
ValorValorDeclarado), 'ar_value': to_decimal(service_data.
ValorAvisoRecebimento), 'mp_value': to_decimal(service_data.
ValorMaoPropria), 'saturday': service_data.EntregaSabado or '',
'home': service_data.EntregaDomiciliar or '', 'error_message':
service_data.MsgErro or None}
if data['error_code'] and not data['error_code'
] in ValidRestrictResponse.restricted_codes():
return FreightError(**data)
return Freight(**data)
class PostingListSerializer:
def _get_posting_list_element(self, posting_list):
element = xml_utils.Element('plp')
xml_utils.SubElement(element, 'id_plp')
xml_utils.SubElement(element, 'valor_global')
xml_utils.SubElement(element, 'mcu_unidade_postagem')
xml_utils.SubElement(element, 'nome_unidade_postagem')
xml_utils.SubElement(element, 'cartao_postagem', text=str(
posting_list.posting_card))
return element
def _get_sender_info_element(self, posting_list):
sender = posting_list.sender
posting_card = posting_list.posting_card
contract = posting_list.contract
sender_info = xml_utils.Element('remetente')
xml_utils.SubElement(sender_info, 'numero_contrato', text=str(
contract.number))
xml_utils.SubElement(sender_info, 'numero_diretoria', text=str(
contract.regional_direction_number))
xml_utils.SubElement(sender_info, 'codigo_administrativo', text=str
(posting_card.administrative_code))
xml_utils.SubElement(sender_info, 'nome_remetente', cdata=sender.name)
xml_utils.SubElement(sender_info, 'logradouro_remetente', cdata=
sender.street)
xml_utils.SubElement(sender_info, 'numero_remetente', cdata=sender.
number)
xml_utils.SubElement(sender_info, 'complemento_remetente', cdata=
sender.complement)
xml_utils.SubElement(sender_info, 'bairro_remetente', cdata=sender.
neighborhood)
xml_utils.SubElement(sender_info, 'cep_remetente', cdata=str(sender
.zip_code))
xml_utils.SubElement(sender_info, 'cidade_remetente', cdata=str(
sender.city)[:30])
xml_utils.SubElement(sender_info, 'uf_remetente', cdata=str(sender.
state))
xml_utils.SubElement(sender_info, 'telefone_remetente', cdata=
sender.phone.short)
xml_utils.SubElement(sender_info, 'fax_remetente', cdata='')
xml_utils.SubElement(sender_info, 'email_remetente', cdata=sender.email
)
return sender_info
def _get_shipping_label_element(self, shipping_label: ShippingLabel):
item = xml_utils.Element('objeto_postal')
xml_utils.SubElement(item, 'numero_etiqueta', text=str(
shipping_label.tracking_code))
xml_utils.SubElement(item, 'codigo_objeto_cliente')
xml_utils.SubElement(item, 'codigo_servico_postagem', text=str(
shipping_label.service))
xml_utils.SubElement(item, 'cubagem', text=str(shipping_label.
posting_weight).replace('.', ','))
xml_utils.SubElement(item, 'peso', text=str(shipping_label.package.
weight))
xml_utils.SubElement(item, 'rt1')
xml_utils.SubElement(item, 'rt2')
receiver = shipping_label.receiver
address = xml_utils.SubElement(item, 'destinatario')
xml_utils.SubElement(address, 'nome_destinatario', cdata=str(
receiver.name))
xml_utils.SubElement(address, 'telefone_destinatario', cdata=
receiver.phone.short)
xml_utils.SubElement(address, 'celular_destinatario', cdata=
receiver.cellphone.short)
xml_utils.SubElement(address, 'email_destinatario', cdata=str(
receiver.email))
xml_utils.SubElement(address, 'logradouro_destinatario', cdata=str(
receiver.street))
xml_utils.SubElement(address, 'complemento_destinatario', cdata=str
(receiver.complement))
xml_utils.SubElement(address, 'numero_end_destinatario', text=str(
receiver.number))
national = xml_utils.SubElement(item, 'nacional')
xml_utils.SubElement(national, 'bairro_destinatario', cdata=str(
receiver.neighborhood))
xml_utils.SubElement(national, 'cidade_destinatario', cdata=str(
receiver.city)[:30])
xml_utils.SubElement(national, 'uf_destinatario', text=str(receiver
.state))
xml_utils.SubElement(national, 'cep_destinatario', cdata=str(
receiver.zip_code))
xml_utils.SubElement(national, 'codigo_usuario_postal')
xml_utils.SubElement(national, 'centro_custo_cliente')
xml_utils.SubElement(national, 'numero_nota_fiscal', text=str(
shipping_label.invoice_number))
xml_utils.SubElement(national, 'serie_nota_fiscal', text=str(
shipping_label.invoice_series))
xml_utils.SubElement(national, 'valor_nota_fiscal', text=str(
shipping_label.value).replace('.', ','))
xml_utils.SubElement(national, 'natureza_nota_fiscal', text=str(
shipping_label.invoice_type))
xml_utils.SubElement(national, 'descricao_objeto', cdata=str(
shipping_label.text)[:20])
xml_utils.SubElement(national, 'valor_a_cobrar', text=str(
shipping_label.billing).replace('.', ','))
extra_services = xml_utils.SubElement(item, 'servico_adicional')
for extra_service in shipping_label.extra_services:
xml_utils.SubElement(extra_services, 'codigo_servico_adicional',
text='{!s:>03}'.format(extra_service.number))
xml_utils.SubElement(extra_services, 'valor_declarado', text=str(
shipping_label.value).replace('.', ','))
dimensions = xml_utils.SubElement(item, 'dimensao_objeto')
xml_utils.SubElement(dimensions, 'tipo_objeto', text='{!s:>03}'.
format(shipping_label.package.package_type))
xml_utils.SubElement(dimensions, 'dimensao_altura', text=str(
shipping_label.package.height))
xml_utils.SubElement(dimensions, 'dimensao_largura', text=str(
shipping_label.package.width))
xml_utils.SubElement(dimensions, 'dimensao_comprimento', text=str(
shipping_label.package.length))
xml_utils.SubElement(dimensions, 'dimensao_diametro', text=str(
shipping_label.package.diameter))
xml_utils.SubElement(item, 'data_postagem_sara')
xml_utils.SubElement(item, 'status_processamento', text='0')
xml_utils.SubElement(item, 'numero_comprovante_postagem')
xml_utils.SubElement(item, 'valor_cobrado')
return item
def get_document(self, posting_list: PostingList):
if not posting_list.shipping_labels:
raise PostingListSerializerError(
'Cannot serialize an empty posting list')
if posting_list.closed:
raise PostingListSerializerError(
'Cannot serialize a closed posting list')
root = xml_utils.Element('correioslog')
root.append(xml_utils.Element('tipo_arquivo', text='Postagem'))
root.append(xml_utils.Element('versao_arquivo', text='2.3'))
root.append(self._get_posting_list_element(posting_list))
root.append(self._get_sender_info_element(posting_list))
root.append(xml_utils.Element('forma_pagamento'))
for shipping_label in posting_list.shipping_labels.values():
root.append(self._get_shipping_label_element(shipping_label))
return root
def validate(self, document):
with open(os.path.join(DATADIR, 'posting_list_schema.xsd')) as xsd:
xsd_document = xml_utils.parse(xsd)
schema = xml_utils.XMLSchema(xsd_document)
return schema.assert_(document)
def get_xml(self, document) ->bytes:
xmlstring = str(xml_utils.tostring(document, encoding='unicode'))
encoded_xmlstring = xmlstring.encode('iso-8859-1', errors='ignore')
return (b'<?xml version="1.0" encoding="ISO-8859-1"?>' +
encoded_xmlstring)
class Correios:
PRODUCTION = 'production'
TEST = 'test'
MAX_TRACKING_CODES_PER_REQUEST = 50
sigep_urls = {'production': (get_wsdl_path(
'AtendeCliente-production.wsdl'), True), 'test': (get_wsdl_path(
'AtendeCliente-test.wsdl'), False)}
websro_url = get_wsdl_path('Rastro.wsdl')
freight_url = get_wsdl_path('CalcPrecoPrazo.asmx')
def __init__(self, username, password, timeout=8, environment='production'
):
self.username = username
self.password = password
self.timeout = timeout
url, verify = self.sigep_urls[environment]
self.sigep_url = url
self.sigep_verify = verify
self.sigep_client = SoapClient(self.sigep_url, verify=self.
sigep_verify, timeout=self.timeout)
self.sigep = self.sigep_client.service
self.websro_client = SoapClient(self.websro_url, timeout=self.timeout)
self.websro = self.websro_client.service
self.freight_client = SoapClient(self.freight_url, timeout=self.timeout
)
self.freight = self.freight_client.service
self.model_builder = ModelBuilder()
def _auth_call(self, method_name, *args, **kwargs):
kwargs.update({'usuario': self.username, 'senha': self.password})
return self._call(method_name, *args, **kwargs)
def _call(self, method_name, *args, **kwargs):
method = getattr(self.sigep, method_name)
return method(*args, **kwargs)
def get_user(self, contract_number: Union[int, str],
posting_card_number: Union[int, str]) ->User:
contract_number = str(contract_number)
posting_card_number = str(posting_card_number)
user_data = self._auth_call('buscaCliente', contract_number,
posting_card_number)
return self.model_builder.build_user(user_data)
def find_zipcode(self, zip_code: Union[ZipCode, str]) ->ZipAddress:
zip_address_data = self._call('consultaCEP', str(zip_code))
return self.model_builder.build_zip_address(zip_address_data)
def verify_service_availability(self, posting_card: PostingCard,
service: Service, from_zip_code: Union[ZipCode, str], to_zip_code:
Union[ZipCode, str]) ->bool:
from_zip_code = ZipCode.create(from_zip_code)
to_zip_code = ZipCode.create(to_zip_code)
result = self._auth_call('verificaDisponibilidadeServico',
posting_card.administrative_code, str(service), str(
from_zip_code), str(to_zip_code))
return result
def get_posting_card_status(self, posting_card: PostingCard) ->bool:
result = self._auth_call('getStatusCartaoPostagem', posting_card.number
)
return self.model_builder.build_posting_card_status(result)
def request_tracking_codes(self, user: User, service: Service, quantity
=1, receiver_type='C') ->list:
result = self._auth_call('solicitaEtiquetas', receiver_type, str(
user.federal_tax_number), service.id, quantity)
return self.model_builder.build_tracking_codes_list(result)
def generate_verification_digit(self, tracking_codes: Sequence[str]
) ->List[int]:
tracking_codes = [TrackingCode(tc).nodigit for tc in tracking_codes]
result = self._auth_call('geraDigitoVerificadorEtiquetas',
tracking_codes)
return result
def _generate_xml_string(self, posting_list: PostingList) ->str:
posting_list_serializer = PostingListSerializer()
document = posting_list_serializer.get_document(posting_list)
posting_list_serializer.validate(document)
xml = posting_list_serializer.get_xml(document)
return xml.decode('ISO-8859-1')
def close_posting_list(self, posting_list: PostingList, posting_card:
PostingCard) ->PostingList:
xml = self._generate_xml_string(posting_list)
tracking_codes = posting_list.get_tracking_codes()
id_ = self._auth_call('fechaPlpVariosServicos', xml, posting_list.
custom_id, posting_card.number, tracking_codes)
posting_list.close_with_id(id_)
return posting_list
def get_tracking_code_events(self, tracking_list):
if isinstance(tracking_list, (str, TrackingCode)):
tracking_list = [tracking_list]
if len(tracking_list) > Correios.MAX_TRACKING_CODES_PER_REQUEST:
msg = (
'{} tracking codes requested exceeds the limit of {} stabilished by the Correios'
)
msg = msg.format(len(tracking_list), Correios.
MAX_TRACKING_CODES_PER_REQUEST)
raise TrackingCodesLimitExceededError(msg)
tracking_codes = {}
for tracking_code in tracking_list:
tracking_code = TrackingCode.create(tracking_code)
tracking_codes[tracking_code.code] = tracking_code
response = self.websro.buscaEventosLista(self.username, self.
password, 'L', 'T', '101', tuple(tracking_codes.keys()))
return self.model_builder.load_tracking_events(tracking_codes, response
)
def calculate_freights(self, posting_card: PostingCard, services: List[
Union[Service, int]], from_zip: Union[ZipCode, int, str], to_zip:
Union[ZipCode, int, str], package: Package, value: Union[Decimal,
float]=0.0, extra_services: Optional[Sequence[Union[ExtraService,
int]]]=None):
administrative_code = posting_card.administrative_code
services = [Service.get(s) for s in services]
from_zip = ZipCode.create(from_zip)
to_zip = ZipCode.create(to_zip)
if extra_services is None:
extra_services = []
else:
extra_services = [ExtraService.get(es) for es in extra_services]
response = self.freight.CalcPrecoPrazo(administrative_code, self.
password, ','.join(str(s) for s in services), str(from_zip),
str(to_zip), package.weight / KG, package.package_type, package
.length, package.height, package.width, package.diameter, 'S' if
EXTRA_SERVICE_MP in extra_services else 'N', value, 'S' if
EXTRA_SERVICE_AR in extra_services else 'N')
return self.model_builder.build_freights_list(response)
def calculate_delivery_time(self, service: Union[Service, int],
from_zip: Union[ZipCode, int, str], to_zip: Union[ZipCode, int, str]):
service = Service.get(service)
from_zip = ZipCode.create(from_zip)
to_zip = ZipCode.create(to_zip)
response = self.freight.CalcPrazo(str(service), str(from_zip), str(
to_zip))
return response.cServico[0].PrazoEntrega
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ModelBuilder:
def build_service(self, service_data):
service = Service(code=service_data.codigo, id=service_data.id,
description=service_data.descricao, category=service_data.
servicoSigep.categoriaServico)
return service
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def build_user(self, user_data):
user = User(name=user_data.nome, federal_tax_number=
FederalTaxNumber(user_data.cnpj), state_tax_number=
StateTaxNumber(user_data.inscricaoEstadual), status_number=
user_data.statusCodigo)
for contract_data in user_data.contratos:
self.build_contract(user, contract_data)
return user
def build_zip_address(self, zip_address_data):
zip_address = ZipAddress(id=zip_address_data.id, zip_code=
zip_address_data.cep, state=zip_address_data.uf, city=
zip_address_data.cidade, district=zip_address_data.bairro,
address=zip_address_data.end, complements=[zip_address_data.
complemento, zip_address_data.complemento2])
return zip_address
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def _load_invalid_event(self, tracking_code: TrackingCode, tracked_object):
event = NotFoundTrackingEvent(timestamp=datetime.now(), comment=
tracked_object.erro)
tracking_code.add_event(event)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def build_freights_list(self, response):
result = []
for service_data in response.cServico:
freight = self.build_freight(service_data=service_data)
result.append(freight)
return result
def build_freight(self, service_data):
data = {'service': Service.get(service_data.Codigo), 'error_code':
to_integer(service_data.Erro), 'delivery_time': int(
service_data.PrazoEntrega), 'value': to_decimal(service_data.
ValorSemAdicionais), 'declared_value': to_decimal(service_data.
ValorValorDeclarado), 'ar_value': to_decimal(service_data.
ValorAvisoRecebimento), 'mp_value': to_decimal(service_data.
ValorMaoPropria), 'saturday': service_data.EntregaSabado or '',
'home': service_data.EntregaDomiciliar or '', 'error_message':
service_data.MsgErro or None}
if data['error_code'] and not data['error_code'
] in ValidRestrictResponse.restricted_codes():
return FreightError(**data)
return Freight(**data)
class PostingListSerializer:
def _get_posting_list_element(self, posting_list):
element = xml_utils.Element('plp')
xml_utils.SubElement(element, 'id_plp')
xml_utils.SubElement(element, 'valor_global')
xml_utils.SubElement(element, 'mcu_unidade_postagem')
xml_utils.SubElement(element, 'nome_unidade_postagem')
xml_utils.SubElement(element, 'cartao_postagem', text=str(
posting_list.posting_card))
return element
def _get_sender_info_element(self, posting_list):
sender = posting_list.sender
posting_card = posting_list.posting_card
contract = posting_list.contract
sender_info = xml_utils.Element('remetente')
xml_utils.SubElement(sender_info, 'numero_contrato', text=str(
contract.number))
xml_utils.SubElement(sender_info, 'numero_diretoria', text=str(
contract.regional_direction_number))
xml_utils.SubElement(sender_info, 'codigo_administrativo', text=str
(posting_card.administrative_code))
xml_utils.SubElement(sender_info, 'nome_remetente', cdata=sender.name)
xml_utils.SubElement(sender_info, 'logradouro_remetente', cdata=
sender.street)
xml_utils.SubElement(sender_info, 'numero_remetente', cdata=sender.
number)
xml_utils.SubElement(sender_info, 'complemento_remetente', cdata=
sender.complement)
xml_utils.SubElement(sender_info, 'bairro_remetente', cdata=sender.
neighborhood)
xml_utils.SubElement(sender_info, 'cep_remetente', cdata=str(sender
.zip_code))
xml_utils.SubElement(sender_info, 'cidade_remetente', cdata=str(
sender.city)[:30])
xml_utils.SubElement(sender_info, 'uf_remetente', cdata=str(sender.
state))
xml_utils.SubElement(sender_info, 'telefone_remetente', cdata=
sender.phone.short)
xml_utils.SubElement(sender_info, 'fax_remetente', cdata='')
xml_utils.SubElement(sender_info, 'email_remetente', cdata=sender.email
)
return sender_info
def _get_shipping_label_element(self, shipping_label: ShippingLabel):
item = xml_utils.Element('objeto_postal')
xml_utils.SubElement(item, 'numero_etiqueta', text=str(
shipping_label.tracking_code))
xml_utils.SubElement(item, 'codigo_objeto_cliente')
xml_utils.SubElement(item, 'codigo_servico_postagem', text=str(
shipping_label.service))
xml_utils.SubElement(item, 'cubagem', text=str(shipping_label.
posting_weight).replace('.', ','))
xml_utils.SubElement(item, 'peso', text=str(shipping_label.package.
weight))
xml_utils.SubElement(item, 'rt1')
xml_utils.SubElement(item, 'rt2')
receiver = shipping_label.receiver
address = xml_utils.SubElement(item, 'destinatario')
xml_utils.SubElement(address, 'nome_destinatario', cdata=str(
receiver.name))
xml_utils.SubElement(address, 'telefone_destinatario', cdata=
receiver.phone.short)
xml_utils.SubElement(address, 'celular_destinatario', cdata=
receiver.cellphone.short)
xml_utils.SubElement(address, 'email_destinatario', cdata=str(
receiver.email))
xml_utils.SubElement(address, 'logradouro_destinatario', cdata=str(
receiver.street))
xml_utils.SubElement(address, 'complemento_destinatario', cdata=str
(receiver.complement))
xml_utils.SubElement(address, 'numero_end_destinatario', text=str(
receiver.number))
national = xml_utils.SubElement(item, 'nacional')
xml_utils.SubElement(national, 'bairro_destinatario', cdata=str(
receiver.neighborhood))
xml_utils.SubElement(national, 'cidade_destinatario', cdata=str(
receiver.city)[:30])
xml_utils.SubElement(national, 'uf_destinatario', text=str(receiver
.state))
xml_utils.SubElement(national, 'cep_destinatario', cdata=str(
receiver.zip_code))
xml_utils.SubElement(national, 'codigo_usuario_postal')
xml_utils.SubElement(national, 'centro_custo_cliente')
xml_utils.SubElement(national, 'numero_nota_fiscal', text=str(
shipping_label.invoice_number))
xml_utils.SubElement(national, 'serie_nota_fiscal', text=str(
shipping_label.invoice_series))
xml_utils.SubElement(national, 'valor_nota_fiscal', text=str(
shipping_label.value).replace('.', ','))
xml_utils.SubElement(national, 'natureza_nota_fiscal', text=str(
shipping_label.invoice_type))
xml_utils.SubElement(national, 'descricao_objeto', cdata=str(
shipping_label.text)[:20])
xml_utils.SubElement(national, 'valor_a_cobrar', text=str(
shipping_label.billing).replace('.', ','))
extra_services = xml_utils.SubElement(item, 'servico_adicional')
for extra_service in shipping_label.extra_services:
xml_utils.SubElement(extra_services, 'codigo_servico_adicional',
text='{!s:>03}'.format(extra_service.number))
xml_utils.SubElement(extra_services, 'valor_declarado', text=str(
shipping_label.value).replace('.', ','))
dimensions = xml_utils.SubElement(item, 'dimensao_objeto')
xml_utils.SubElement(dimensions, 'tipo_objeto', text='{!s:>03}'.
format(shipping_label.package.package_type))
xml_utils.SubElement(dimensions, 'dimensao_altura', text=str(
shipping_label.package.height))
xml_utils.SubElement(dimensions, 'dimensao_largura', text=str(
shipping_label.package.width))
xml_utils.SubElement(dimensions, 'dimensao_comprimento', text=str(
shipping_label.package.length))
xml_utils.SubElement(dimensions, 'dimensao_diametro', text=str(
shipping_label.package.diameter))
xml_utils.SubElement(item, 'data_postagem_sara')
xml_utils.SubElement(item, 'status_processamento', text='0')
xml_utils.SubElement(item, 'numero_comprovante_postagem')
xml_utils.SubElement(item, 'valor_cobrado')
return item
def get_document(self, posting_list: PostingList):
if not posting_list.shipping_labels:
raise PostingListSerializerError(
'Cannot serialize an empty posting list')
if posting_list.closed:
raise PostingListSerializerError(
'Cannot serialize a closed posting list')
root = xml_utils.Element('correioslog')
root.append(xml_utils.Element('tipo_arquivo', text='Postagem'))
root.append(xml_utils.Element('versao_arquivo', text='2.3'))
root.append(self._get_posting_list_element(posting_list))
root.append(self._get_sender_info_element(posting_list))
root.append(xml_utils.Element('forma_pagamento'))
for shipping_label in posting_list.shipping_labels.values():
root.append(self._get_shipping_label_element(shipping_label))
return root
def validate(self, document):
with open(os.path.join(DATADIR, 'posting_list_schema.xsd')) as xsd:
xsd_document = xml_utils.parse(xsd)
schema = xml_utils.XMLSchema(xsd_document)
return schema.assert_(document)
def get_xml(self, document) ->bytes:
xmlstring = str(xml_utils.tostring(document, encoding='unicode'))
encoded_xmlstring = xmlstring.encode('iso-8859-1', errors='ignore')
return (b'<?xml version="1.0" encoding="ISO-8859-1"?>' +
encoded_xmlstring)
class Correios:
PRODUCTION = 'production'
TEST = 'test'
MAX_TRACKING_CODES_PER_REQUEST = 50
sigep_urls = {'production': (get_wsdl_path(
'AtendeCliente-production.wsdl'), True), 'test': (get_wsdl_path(
'AtendeCliente-test.wsdl'), False)}
websro_url = get_wsdl_path('Rastro.wsdl')
freight_url = get_wsdl_path('CalcPrecoPrazo.asmx')
def __init__(self, username, password, timeout=8, environment='production'
):
self.username = username
self.password = password
self.timeout = timeout
url, verify = self.sigep_urls[environment]
self.sigep_url = url
self.sigep_verify = verify
self.sigep_client = SoapClient(self.sigep_url, verify=self.
sigep_verify, timeout=self.timeout)
self.sigep = self.sigep_client.service
self.websro_client = SoapClient(self.websro_url, timeout=self.timeout)
self.websro = self.websro_client.service
self.freight_client = SoapClient(self.freight_url, timeout=self.timeout
)
self.freight = self.freight_client.service
self.model_builder = ModelBuilder()
def _auth_call(self, method_name, *args, **kwargs):
kwargs.update({'usuario': self.username, 'senha': self.password})
return self._call(method_name, *args, **kwargs)
def _call(self, method_name, *args, **kwargs):
method = getattr(self.sigep, method_name)
return method(*args, **kwargs)
def get_user(self, contract_number: Union[int, str],
posting_card_number: Union[int, str]) ->User:
contract_number = str(contract_number)
posting_card_number = str(posting_card_number)
user_data = self._auth_call('buscaCliente', contract_number,
posting_card_number)
return self.model_builder.build_user(user_data)
def find_zipcode(self, zip_code: Union[ZipCode, str]) ->ZipAddress:
zip_address_data = self._call('consultaCEP', str(zip_code))
return self.model_builder.build_zip_address(zip_address_data)
def verify_service_availability(self, posting_card: PostingCard,
service: Service, from_zip_code: Union[ZipCode, str], to_zip_code:
Union[ZipCode, str]) ->bool:
from_zip_code = ZipCode.create(from_zip_code)
to_zip_code = ZipCode.create(to_zip_code)
result = self._auth_call('verificaDisponibilidadeServico',
posting_card.administrative_code, str(service), str(
from_zip_code), str(to_zip_code))
return result
def get_posting_card_status(self, posting_card: PostingCard) ->bool:
result = self._auth_call('getStatusCartaoPostagem', posting_card.number
)
return self.model_builder.build_posting_card_status(result)
def request_tracking_codes(self, user: User, service: Service, quantity
=1, receiver_type='C') ->list:
result = self._auth_call('solicitaEtiquetas', receiver_type, str(
user.federal_tax_number), service.id, quantity)
return self.model_builder.build_tracking_codes_list(result)
def generate_verification_digit(self, tracking_codes: Sequence[str]
) ->List[int]:
tracking_codes = [TrackingCode(tc).nodigit for tc in tracking_codes]
result = self._auth_call('geraDigitoVerificadorEtiquetas',
tracking_codes)
return result
def _generate_xml_string(self, posting_list: PostingList) ->str:
posting_list_serializer = PostingListSerializer()
document = posting_list_serializer.get_document(posting_list)
posting_list_serializer.validate(document)
xml = posting_list_serializer.get_xml(document)
return xml.decode('ISO-8859-1')
def close_posting_list(self, posting_list: PostingList, posting_card:
PostingCard) ->PostingList:
xml = self._generate_xml_string(posting_list)
tracking_codes = posting_list.get_tracking_codes()
id_ = self._auth_call('fechaPlpVariosServicos', xml, posting_list.
custom_id, posting_card.number, tracking_codes)
posting_list.close_with_id(id_)
return posting_list
def get_tracking_code_events(self, tracking_list):
if isinstance(tracking_list, (str, TrackingCode)):
tracking_list = [tracking_list]
if len(tracking_list) > Correios.MAX_TRACKING_CODES_PER_REQUEST:
msg = (
'{} tracking codes requested exceeds the limit of {} stabilished by the Correios'
)
msg = msg.format(len(tracking_list), Correios.
MAX_TRACKING_CODES_PER_REQUEST)
raise TrackingCodesLimitExceededError(msg)
tracking_codes = {}
for tracking_code in tracking_list:
tracking_code = TrackingCode.create(tracking_code)
tracking_codes[tracking_code.code] = tracking_code
response = self.websro.buscaEventosLista(self.username, self.
password, 'L', 'T', '101', tuple(tracking_codes.keys()))
return self.model_builder.load_tracking_events(tracking_codes, response
)
def calculate_freights(self, posting_card: PostingCard, services: List[
Union[Service, int]], from_zip: Union[ZipCode, int, str], to_zip:
Union[ZipCode, int, str], package: Package, value: Union[Decimal,
float]=0.0, extra_services: Optional[Sequence[Union[ExtraService,
int]]]=None):
administrative_code = posting_card.administrative_code
services = [Service.get(s) for s in services]
from_zip = ZipCode.create(from_zip)
to_zip = ZipCode.create(to_zip)
if extra_services is None:
extra_services = []
else:
extra_services = [ExtraService.get(es) for es in extra_services]
response = self.freight.CalcPrecoPrazo(administrative_code, self.
password, ','.join(str(s) for s in services), str(from_zip),
str(to_zip), package.weight / KG, package.package_type, package
.length, package.height, package.width, package.diameter, 'S' if
EXTRA_SERVICE_MP in extra_services else 'N', value, 'S' if
EXTRA_SERVICE_AR in extra_services else 'N')
return self.model_builder.build_freights_list(response)
def calculate_delivery_time(self, service: Union[Service, int],
from_zip: Union[ZipCode, int, str], to_zip: Union[ZipCode, int, str]):
service = Service.get(service)
from_zip = ZipCode.create(from_zip)
to_zip = ZipCode.create(to_zip)
response = self.freight.CalcPrazo(str(service), str(from_zip), str(
to_zip))
return response.cServico[0].PrazoEntrega
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ModelBuilder:
def build_service(self, service_data):
service = Service(code=service_data.codigo, id=service_data.id,
description=service_data.descricao, category=service_data.
servicoSigep.categoriaServico)
return service
def build_posting_card(self, contract: Contract, posting_card_data):
posting_card = PostingCard(contract=contract, number=
posting_card_data.numero, administrative_code=posting_card_data
.codigoAdministrativo)
posting_card.start_date = posting_card_data.dataVigenciaInicio
posting_card.end_date = posting_card_data.dataVigenciaFim
posting_card.status = posting_card_data.statusCartaoPostagem
posting_card.status_code = posting_card_data.statusCodigo
posting_card.unit = posting_card_data.unidadeGenerica
for service_data in posting_card_data.servicos:
service = self.build_service(service_data)
posting_card.add_service(service)
return posting_card
def build_contract(self, user: User, contract_data):
contract = Contract(user=user, number=contract_data.contratoPK.
numero, regional_direction=contract_data.codigoDiretoria)
contract.customer_code = contract_data.codigoCliente
contract.status_code = contract_data.statusCodigo
contract.start_date = contract_data.dataVigenciaInicio
contract.end_date = contract_data.dataVigenciaFim
for posting_card_data in contract_data.cartoesPostagem:
self.build_posting_card(contract, posting_card_data)
return contract
def build_user(self, user_data):
user = User(name=user_data.nome, federal_tax_number=
FederalTaxNumber(user_data.cnpj), state_tax_number=
StateTaxNumber(user_data.inscricaoEstadual), status_number=
user_data.statusCodigo)
for contract_data in user_data.contratos:
self.build_contract(user, contract_data)
return user
def build_zip_address(self, zip_address_data):
zip_address = ZipAddress(id=zip_address_data.id, zip_code=
zip_address_data.cep, state=zip_address_data.uf, city=
zip_address_data.cidade, district=zip_address_data.bairro,
address=zip_address_data.end, complements=[zip_address_data.
complemento, zip_address_data.complemento2])
return zip_address
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def _load_invalid_event(self, tracking_code: TrackingCode, tracked_object):
event = NotFoundTrackingEvent(timestamp=datetime.now(), comment=
tracked_object.erro)
tracking_code.add_event(event)
def _load_events(self, tracking_code: TrackingCode, events):
for event in events:
timestamp = datetime.strptime('{} {}'.format(event.data, event.
hora), TrackingEvent.timestamp_format)
event = TrackingEvent(timestamp=timestamp, status=EventStatus(
event.tipo, event.status), location_zip_code=getattr(event,
'codigo', ''), location=getattr(event, 'local', ''), city=
getattr(event, 'cidade', ''), state=getattr(event, 'uf', ''
), receiver=getattr(event, 'recebedor', ''), document=
getattr(event, 'documento', ''), comment=getattr(event,
'comentario', ''), description=getattr(event, 'descricao',
''), details=getattr(event, 'detalhes', ''))
tracking_code.add_event(event)
<|reserved_special_token_0|>
def build_freights_list(self, response):
result = []
for service_data in response.cServico:
freight = self.build_freight(service_data=service_data)
result.append(freight)
return result
def build_freight(self, service_data):
data = {'service': Service.get(service_data.Codigo), 'error_code':
to_integer(service_data.Erro), 'delivery_time': int(
service_data.PrazoEntrega), 'value': to_decimal(service_data.
ValorSemAdicionais), 'declared_value': to_decimal(service_data.
ValorValorDeclarado), 'ar_value': to_decimal(service_data.
ValorAvisoRecebimento), 'mp_value': to_decimal(service_data.
ValorMaoPropria), 'saturday': service_data.EntregaSabado or '',
'home': service_data.EntregaDomiciliar or '', 'error_message':
service_data.MsgErro or None}
if data['error_code'] and not data['error_code'
] in ValidRestrictResponse.restricted_codes():
return FreightError(**data)
return Freight(**data)
class PostingListSerializer:
    """Serialize a PostingList into the "PLP" XML document expected by SIGEP."""

    def _get_posting_list_element(self, posting_list):
        # PLP header.  The empty tags are filled in later by the Correios service.
        plp = xml_utils.Element('plp')
        xml_utils.SubElement(plp, 'id_plp')
        xml_utils.SubElement(plp, 'valor_global')
        xml_utils.SubElement(plp, 'mcu_unidade_postagem')
        xml_utils.SubElement(plp, 'nome_unidade_postagem')
        xml_utils.SubElement(plp, 'cartao_postagem', text=str(posting_list.posting_card))
        return plp

    def _get_sender_info_element(self, posting_list):
        # Sender ("remetente") section: contract, posting card and address data.
        contract = posting_list.contract
        card = posting_list.posting_card
        sender = posting_list.sender

        info = xml_utils.Element('remetente')
        xml_utils.SubElement(info, 'numero_contrato', text=str(contract.number))
        xml_utils.SubElement(info, 'numero_diretoria', text=str(contract.regional_direction_number))
        xml_utils.SubElement(info, 'codigo_administrativo', text=str(card.administrative_code))
        xml_utils.SubElement(info, 'nome_remetente', cdata=sender.name)
        xml_utils.SubElement(info, 'logradouro_remetente', cdata=sender.street)
        xml_utils.SubElement(info, 'numero_remetente', cdata=sender.number)
        xml_utils.SubElement(info, 'complemento_remetente', cdata=sender.complement)
        xml_utils.SubElement(info, 'bairro_remetente', cdata=sender.neighborhood)
        xml_utils.SubElement(info, 'cep_remetente', cdata=str(sender.zip_code))
        # The schema limits the city name to 30 characters.
        xml_utils.SubElement(info, 'cidade_remetente', cdata=str(sender.city)[:30])
        xml_utils.SubElement(info, 'uf_remetente', cdata=str(sender.state))
        xml_utils.SubElement(info, 'telefone_remetente', cdata=sender.phone.short)
        xml_utils.SubElement(info, 'fax_remetente', cdata='')
        xml_utils.SubElement(info, 'email_remetente', cdata=sender.email)
        return info

    def _get_shipping_label_element(self, shipping_label: ShippingLabel):
        # One "objeto_postal" entry per shipping label.
        obj = xml_utils.Element('objeto_postal')
        xml_utils.SubElement(obj, 'numero_etiqueta', text=str(shipping_label.tracking_code))
        xml_utils.SubElement(obj, 'codigo_objeto_cliente')
        xml_utils.SubElement(obj, 'codigo_servico_postagem', text=str(shipping_label.service))
        # Decimal separator must be a comma in the PLP document.
        xml_utils.SubElement(obj, 'cubagem', text=str(shipping_label.posting_weight).replace('.', ','))
        xml_utils.SubElement(obj, 'peso', text=str(shipping_label.package.weight))
        xml_utils.SubElement(obj, 'rt1')
        xml_utils.SubElement(obj, 'rt2')

        receiver = shipping_label.receiver
        address = xml_utils.SubElement(obj, 'destinatario')
        xml_utils.SubElement(address, 'nome_destinatario', cdata=str(receiver.name))
        xml_utils.SubElement(address, 'telefone_destinatario', cdata=receiver.phone.short)
        xml_utils.SubElement(address, 'celular_destinatario', cdata=receiver.cellphone.short)
        xml_utils.SubElement(address, 'email_destinatario', cdata=str(receiver.email))
        xml_utils.SubElement(address, 'logradouro_destinatario', cdata=str(receiver.street))
        xml_utils.SubElement(address, 'complemento_destinatario', cdata=str(receiver.complement))
        xml_utils.SubElement(address, 'numero_end_destinatario', text=str(receiver.number))

        national = xml_utils.SubElement(obj, 'nacional')
        xml_utils.SubElement(national, 'bairro_destinatario', cdata=str(receiver.neighborhood))
        xml_utils.SubElement(national, 'cidade_destinatario', cdata=str(receiver.city)[:30])
        xml_utils.SubElement(national, 'uf_destinatario', text=str(receiver.state))
        xml_utils.SubElement(national, 'cep_destinatario', cdata=str(receiver.zip_code))
        xml_utils.SubElement(national, 'codigo_usuario_postal')
        xml_utils.SubElement(national, 'centro_custo_cliente')
        xml_utils.SubElement(national, 'numero_nota_fiscal', text=str(shipping_label.invoice_number))
        xml_utils.SubElement(national, 'serie_nota_fiscal', text=str(shipping_label.invoice_series))
        xml_utils.SubElement(national, 'valor_nota_fiscal', text=str(shipping_label.value).replace('.', ','))
        xml_utils.SubElement(national, 'natureza_nota_fiscal', text=str(shipping_label.invoice_type))
        xml_utils.SubElement(national, 'descricao_objeto', cdata=str(shipping_label.text)[:20])
        xml_utils.SubElement(national, 'valor_a_cobrar', text=str(shipping_label.billing).replace('.', ','))

        extras = xml_utils.SubElement(obj, 'servico_adicional')
        for extra_service in shipping_label.extra_services:
            # Service numbers are zero-padded to three digits.
            xml_utils.SubElement(extras, 'codigo_servico_adicional', text='{!s:>03}'.format(extra_service.number))
        xml_utils.SubElement(extras, 'valor_declarado', text=str(shipping_label.value).replace('.', ','))

        dimensions = xml_utils.SubElement(obj, 'dimensao_objeto')
        xml_utils.SubElement(dimensions, 'tipo_objeto', text='{!s:>03}'.format(shipping_label.package.package_type))
        xml_utils.SubElement(dimensions, 'dimensao_altura', text=str(shipping_label.package.height))
        xml_utils.SubElement(dimensions, 'dimensao_largura', text=str(shipping_label.package.width))
        xml_utils.SubElement(dimensions, 'dimensao_comprimento', text=str(shipping_label.package.length))
        xml_utils.SubElement(dimensions, 'dimensao_diametro', text=str(shipping_label.package.diameter))

        xml_utils.SubElement(obj, 'data_postagem_sara')
        xml_utils.SubElement(obj, 'status_processamento', text='0')
        xml_utils.SubElement(obj, 'numero_comprovante_postagem')
        xml_utils.SubElement(obj, 'valor_cobrado')
        return obj

    def get_document(self, posting_list: PostingList):
        """Build the full ``<correioslog>`` document for an open, non-empty posting list.

        Raises:
            PostingListSerializerError: if the list has no labels or is closed.
        """
        if not posting_list.shipping_labels:
            raise PostingListSerializerError('Cannot serialize an empty posting list')
        if posting_list.closed:
            raise PostingListSerializerError('Cannot serialize a closed posting list')

        root = xml_utils.Element('correioslog')
        root.append(xml_utils.Element('tipo_arquivo', text='Postagem'))
        root.append(xml_utils.Element('versao_arquivo', text='2.3'))
        root.append(self._get_posting_list_element(posting_list))
        root.append(self._get_sender_info_element(posting_list))
        root.append(xml_utils.Element('forma_pagamento'))
        for shipping_label in posting_list.shipping_labels.values():
            root.append(self._get_shipping_label_element(shipping_label))
        return root

    def validate(self, document):
        """Assert that *document* conforms to the bundled PLP XML schema."""
        with open(os.path.join(DATADIR, 'posting_list_schema.xsd')) as xsd:
            schema = xml_utils.XMLSchema(xml_utils.parse(xsd))
        return schema.assert_(document)

    def get_xml(self, document) -> bytes:
        """Render *document* as an ISO-8859-1 encoded XML byte string."""
        body = str(xml_utils.tostring(document, encoding='unicode'))
        return b'<?xml version="1.0" encoding="ISO-8859-1"?>' + body.encode('iso-8859-1', errors='ignore')
class Correios:
    """High-level client for the Correios SIGEP, tracking (SRO) and freight webservices."""

    PRODUCTION = 'production'
    TEST = 'test'
    MAX_TRACKING_CODES_PER_REQUEST = 50  # limit imposed by the Correios tracking service

    # environment name -> (wsdl path, verify SSL certificate)
    sigep_urls = {
        'production': (get_wsdl_path('AtendeCliente-production.wsdl'), True),
        'test': (get_wsdl_path('AtendeCliente-test.wsdl'), False),
    }
    websro_url = get_wsdl_path('Rastro.wsdl')
    freight_url = get_wsdl_path('CalcPrecoPrazo.asmx')

    def __init__(self, username, password, timeout=8, environment='production'):
        """Create SOAP clients for the three Correios services.

        :param username: SIGEP account user name
        :param password: SIGEP account password
        :param timeout: per-request timeout in seconds
        :param environment: ``"production"`` or ``"test"``
        """
        self.username = username
        self.password = password
        self.timeout = timeout

        # Idiomatic tuple unpacking straight into the attributes.
        self.sigep_url, self.sigep_verify = self.sigep_urls[environment]
        self.sigep_client = SoapClient(self.sigep_url, verify=self.sigep_verify, timeout=self.timeout)
        self.sigep = self.sigep_client.service

        self.websro_client = SoapClient(self.websro_url, timeout=self.timeout)
        self.websro = self.websro_client.service

        self.freight_client = SoapClient(self.freight_url, timeout=self.timeout)
        self.freight = self.freight_client.service

        self.model_builder = ModelBuilder()

    def _auth_call(self, method_name, *args, **kwargs):
        # Inject the credentials required by every authenticated SIGEP method.
        kwargs.update({'usuario': self.username, 'senha': self.password})
        return self._call(method_name, *args, **kwargs)

    def _call(self, method_name, *args, **kwargs):
        # Dispatch a raw SIGEP SOAP call by method name.
        return getattr(self.sigep, method_name)(*args, **kwargs)

    def get_user(self, contract_number: Union[int, str], posting_card_number: Union[int, str]) -> User:
        """Fetch the User (with contracts and posting cards) bound to the credentials."""
        response = self._auth_call('buscaCliente', str(contract_number), str(posting_card_number))
        return self.model_builder.build_user(response)

    def find_zipcode(self, zip_code: Union[ZipCode, str]) -> ZipAddress:
        """Look up the address registered for *zip_code*."""
        return self.model_builder.build_zip_address(self._call('consultaCEP', str(zip_code)))

    def verify_service_availability(
        self,
        posting_card: PostingCard,
        service: Service,
        from_zip_code: Union[ZipCode, str],
        to_zip_code: Union[ZipCode, str],
    ) -> bool:
        """Check whether *service* can deliver between the two zip codes."""
        return self._auth_call(
            'verificaDisponibilidadeServico',
            posting_card.administrative_code,
            str(service),
            str(ZipCode.create(from_zip_code)),
            str(ZipCode.create(to_zip_code)),
        )

    def get_posting_card_status(self, posting_card: PostingCard) -> bool:
        """Return whether the posting card is active or cancelled."""
        response = self._auth_call('getStatusCartaoPostagem', posting_card.number)
        return self.model_builder.build_posting_card_status(response)

    def request_tracking_codes(self, user: User, service: Service, quantity=1, receiver_type='C') -> list:
        """Reserve *quantity* new tracking codes for *service*."""
        response = self._auth_call(
            'solicitaEtiquetas', receiver_type, str(user.federal_tax_number), service.id, quantity,
        )
        return self.model_builder.build_tracking_codes_list(response)

    def generate_verification_digit(self, tracking_codes: Sequence[str]) -> List[int]:
        """Ask the webservice for the check digit of each tracking code."""
        stripped = [TrackingCode(code).nodigit for code in tracking_codes]
        return self._auth_call('geraDigitoVerificadorEtiquetas', stripped)

    def _generate_xml_string(self, posting_list: PostingList) -> str:
        # Serialize, schema-validate and decode the PLP document for submission.
        serializer = PostingListSerializer()
        document = serializer.get_document(posting_list)
        serializer.validate(document)
        return serializer.get_xml(document).decode('ISO-8859-1')

    def close_posting_list(self, posting_list: PostingList, posting_card: PostingCard) -> PostingList:
        """Submit the posting list to the Correios and mark it closed."""
        xml = self._generate_xml_string(posting_list)
        tracking_codes = posting_list.get_tracking_codes()
        posting_list_id = self._auth_call(
            'fechaPlpVariosServicos', xml, posting_list.custom_id, posting_card.number, tracking_codes,
        )
        posting_list.close_with_id(posting_list_id)
        return posting_list

    def get_tracking_code_events(self, tracking_list):
        """Fetch tracking events for up to MAX_TRACKING_CODES_PER_REQUEST codes.

        Accepts a single code (str or TrackingCode) or a sequence of them.

        Raises:
            TrackingCodesLimitExceededError: if more codes than the service limit
                are requested.
        """
        if isinstance(tracking_list, (str, TrackingCode)):
            tracking_list = [tracking_list]

        if len(tracking_list) > Correios.MAX_TRACKING_CODES_PER_REQUEST:
            # Fixed typo in the error message: "stabilished" -> "established".
            msg = '{} tracking codes requested exceeds the limit of {} established by the Correios'.format(
                len(tracking_list), Correios.MAX_TRACKING_CODES_PER_REQUEST,
            )
            raise TrackingCodesLimitExceededError(msg)

        tracking_codes = {}
        for code in tracking_list:
            tracking_code = TrackingCode.create(code)
            tracking_codes[tracking_code.code] = tracking_code

        response = self.websro.buscaEventosLista(
            self.username, self.password, 'L', 'T', '101', tuple(tracking_codes.keys()),
        )
        return self.model_builder.load_tracking_events(tracking_codes, response)

    def calculate_freights(
        self,
        posting_card: PostingCard,
        services: List[Union[Service, int]],
        from_zip: Union[ZipCode, int, str],
        to_zip: Union[ZipCode, int, str],
        package: Package,
        value: Union[Decimal, float] = 0.0,
        extra_services: Optional[Sequence[Union[ExtraService, int]]] = None,
    ):
        """Quote price and delivery time for *services* between two zip codes."""
        services = [Service.get(s) for s in services]
        from_zip = ZipCode.create(from_zip)
        to_zip = ZipCode.create(to_zip)

        if extra_services is None:
            extra_services = []
        else:
            extra_services = [ExtraService.get(es) for es in extra_services]

        response = self.freight.CalcPrecoPrazo(
            posting_card.administrative_code,
            self.password,
            ','.join(str(s) for s in services),
            str(from_zip),
            str(to_zip),
            package.weight / KG,  # the freight service expects kilograms
            package.package_type,
            package.length,
            package.height,
            package.width,
            package.diameter,
            'S' if EXTRA_SERVICE_MP in extra_services else 'N',
            value,
            'S' if EXTRA_SERVICE_AR in extra_services else 'N',
        )
        return self.model_builder.build_freights_list(response)

    def calculate_delivery_time(
        self,
        service: Union[Service, int],
        from_zip: Union[ZipCode, int, str],
        to_zip: Union[ZipCode, int, str],
    ):
        """Return only the estimated delivery time (in days) for *service*."""
        response = self.freight.CalcPrazo(
            str(Service.get(service)), str(ZipCode.create(from_zip)), str(ZipCode.create(to_zip)),
        )
        return response.cServico[0].PrazoEntrega
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ValidRestrictResponse(Enum):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class ModelBuilder:
    """Translate raw SOAP response objects into correios domain models."""

    def build_service(self, service_data):
        """Build a Service from a SIGEP service record."""
        return Service(
            code=service_data.codigo,
            id=service_data.id,
            description=service_data.descricao,
            category=service_data.servicoSigep.categoriaServico,
        )

    def build_posting_card(self, contract: Contract, posting_card_data):
        """Build a PostingCard (with its services) and bind it to *contract*."""
        card = PostingCard(
            contract=contract,
            number=posting_card_data.numero,
            administrative_code=posting_card_data.codigoAdministrativo,
        )
        card.start_date = posting_card_data.dataVigenciaInicio
        card.end_date = posting_card_data.dataVigenciaFim
        card.status = posting_card_data.statusCartaoPostagem
        card.status_code = posting_card_data.statusCodigo
        card.unit = posting_card_data.unidadeGenerica
        for service_data in posting_card_data.servicos:
            card.add_service(self.build_service(service_data))
        return card

    def build_contract(self, user: User, contract_data):
        """Build a Contract (with its posting cards) and bind it to *user*."""
        contract = Contract(
            user=user,
            number=contract_data.contratoPK.numero,
            regional_direction=contract_data.codigoDiretoria,
        )
        contract.customer_code = contract_data.codigoCliente
        contract.status_code = contract_data.statusCodigo
        contract.start_date = contract_data.dataVigenciaInicio
        contract.end_date = contract_data.dataVigenciaFim
        for posting_card_data in contract_data.cartoesPostagem:
            self.build_posting_card(contract, posting_card_data)
        return contract

    def build_user(self, user_data):
        """Build a User and populate its contracts from the SIGEP response."""
        user = User(
            name=user_data.nome,
            federal_tax_number=FederalTaxNumber(user_data.cnpj),
            state_tax_number=StateTaxNumber(user_data.inscricaoEstadual),
            status_number=user_data.statusCodigo,
        )
        for contract_data in user_data.contratos:
            self.build_contract(user, contract_data)
        return user

    def build_zip_address(self, zip_address_data):
        """Build a ZipAddress from a ``consultaCEP`` response."""
        return ZipAddress(
            id=zip_address_data.id,
            zip_code=zip_address_data.cep,
            state=zip_address_data.uf,
            city=zip_address_data.cidade,
            district=zip_address_data.bairro,
            address=zip_address_data.end,
            complements=[zip_address_data.complemento, zip_address_data.complemento2],
        )

    def build_posting_card_status(self, response):
        """Map the webservice status string to a PostingCard status constant."""
        return PostingCard.ACTIVE if response.lower() == 'normal' else PostingCard.CANCELLED

    def build_tracking_codes_list(self, response):
        """Expand a "first,last" code pair into the full range of tracking codes."""
        codes = response.split(',')
        return TrackingCode.create_range(codes[0], codes[1])

    def _load_invalid_event(self, tracking_code: TrackingCode, tracked_object):
        # Unknown objects are reported through an "erro" message instead of events.
        tracking_code.add_event(
            NotFoundTrackingEvent(timestamp=datetime.now(), comment=tracked_object.erro)
        )

    def _load_events(self, tracking_code: TrackingCode, events):
        """Attach one TrackingEvent per raw SOAP event record."""
        for raw in events:
            occurred_at = datetime.strptime(
                '{} {}'.format(raw.data, raw.hora), TrackingEvent.timestamp_format
            )
            tracking_code.add_event(
                TrackingEvent(
                    timestamp=occurred_at,
                    status=EventStatus(raw.tipo, raw.status),
                    location_zip_code=getattr(raw, 'codigo', ''),
                    location=getattr(raw, 'local', ''),
                    city=getattr(raw, 'cidade', ''),
                    state=getattr(raw, 'uf', ''),
                    receiver=getattr(raw, 'recebedor', ''),
                    document=getattr(raw, 'documento', ''),
                    comment=getattr(raw, 'comentario', ''),
                    description=getattr(raw, 'descricao', ''),
                    details=getattr(raw, 'detalhes', ''),
                )
            )

    def load_tracking_events(self, tracking_codes: Dict[str, TrackingCode], response):
        """Fill each TrackingCode with the events returned by the SRO service."""
        result = []
        for tracked_object in response.objeto:
            tracking_code = tracking_codes[tracked_object.numero]
            if 'erro' in tracked_object:
                self._load_invalid_event(tracking_code, tracked_object)
            else:
                tracking_code.name = tracked_object.nome
                tracking_code.initials = tracked_object.sigla
                tracking_code.category = tracked_object.categoria
                self._load_events(tracking_code, tracked_object.evento)
            result.append(tracking_code)
        return result

    def build_freights_list(self, response):
        """Convert every ``cServico`` entry of a freight response into a Freight model."""
        return [self.build_freight(service_data=entry) for entry in response.cServico]

    def build_freight(self, service_data):
        """Build a Freight (or FreightError) from one ``cServico`` response entry."""
        data = {
            'service': Service.get(service_data.Codigo),
            'error_code': to_integer(service_data.Erro),
            'delivery_time': int(service_data.PrazoEntrega),
            'value': to_decimal(service_data.ValorSemAdicionais),
            'declared_value': to_decimal(service_data.ValorValorDeclarado),
            'ar_value': to_decimal(service_data.ValorAvisoRecebimento),
            'mp_value': to_decimal(service_data.ValorMaoPropria),
            'saturday': service_data.EntregaSabado or '',
            'home': service_data.EntregaDomiciliar or '',
            'error_message': service_data.MsgErro or None,
        }
        if data['error_code'] and data['error_code'] not in ValidRestrictResponse.restricted_codes():
            return FreightError(**data)
        return Freight(**data)
class PostingListSerializer:
    """Serialize a PostingList into the "PLP" XML document expected by SIGEP."""

    def _get_posting_list_element(self, posting_list):
        # PLP header.  The empty tags are filled in later by the Correios service.
        plp = xml_utils.Element('plp')
        xml_utils.SubElement(plp, 'id_plp')
        xml_utils.SubElement(plp, 'valor_global')
        xml_utils.SubElement(plp, 'mcu_unidade_postagem')
        xml_utils.SubElement(plp, 'nome_unidade_postagem')
        xml_utils.SubElement(plp, 'cartao_postagem', text=str(posting_list.posting_card))
        return plp

    def _get_sender_info_element(self, posting_list):
        # Sender ("remetente") section: contract, posting card and address data.
        contract = posting_list.contract
        card = posting_list.posting_card
        sender = posting_list.sender

        info = xml_utils.Element('remetente')
        xml_utils.SubElement(info, 'numero_contrato', text=str(contract.number))
        xml_utils.SubElement(info, 'numero_diretoria', text=str(contract.regional_direction_number))
        xml_utils.SubElement(info, 'codigo_administrativo', text=str(card.administrative_code))
        xml_utils.SubElement(info, 'nome_remetente', cdata=sender.name)
        xml_utils.SubElement(info, 'logradouro_remetente', cdata=sender.street)
        xml_utils.SubElement(info, 'numero_remetente', cdata=sender.number)
        xml_utils.SubElement(info, 'complemento_remetente', cdata=sender.complement)
        xml_utils.SubElement(info, 'bairro_remetente', cdata=sender.neighborhood)
        xml_utils.SubElement(info, 'cep_remetente', cdata=str(sender.zip_code))
        # The schema limits the city name to 30 characters.
        xml_utils.SubElement(info, 'cidade_remetente', cdata=str(sender.city)[:30])
        xml_utils.SubElement(info, 'uf_remetente', cdata=str(sender.state))
        xml_utils.SubElement(info, 'telefone_remetente', cdata=sender.phone.short)
        xml_utils.SubElement(info, 'fax_remetente', cdata='')
        xml_utils.SubElement(info, 'email_remetente', cdata=sender.email)
        return info

    def _get_shipping_label_element(self, shipping_label: ShippingLabel):
        # One "objeto_postal" entry per shipping label.
        obj = xml_utils.Element('objeto_postal')
        xml_utils.SubElement(obj, 'numero_etiqueta', text=str(shipping_label.tracking_code))
        xml_utils.SubElement(obj, 'codigo_objeto_cliente')
        xml_utils.SubElement(obj, 'codigo_servico_postagem', text=str(shipping_label.service))
        # Decimal separator must be a comma in the PLP document.
        xml_utils.SubElement(obj, 'cubagem', text=str(shipping_label.posting_weight).replace('.', ','))
        xml_utils.SubElement(obj, 'peso', text=str(shipping_label.package.weight))
        xml_utils.SubElement(obj, 'rt1')
        xml_utils.SubElement(obj, 'rt2')

        receiver = shipping_label.receiver
        address = xml_utils.SubElement(obj, 'destinatario')
        xml_utils.SubElement(address, 'nome_destinatario', cdata=str(receiver.name))
        xml_utils.SubElement(address, 'telefone_destinatario', cdata=receiver.phone.short)
        xml_utils.SubElement(address, 'celular_destinatario', cdata=receiver.cellphone.short)
        xml_utils.SubElement(address, 'email_destinatario', cdata=str(receiver.email))
        xml_utils.SubElement(address, 'logradouro_destinatario', cdata=str(receiver.street))
        xml_utils.SubElement(address, 'complemento_destinatario', cdata=str(receiver.complement))
        xml_utils.SubElement(address, 'numero_end_destinatario', text=str(receiver.number))

        national = xml_utils.SubElement(obj, 'nacional')
        xml_utils.SubElement(national, 'bairro_destinatario', cdata=str(receiver.neighborhood))
        xml_utils.SubElement(national, 'cidade_destinatario', cdata=str(receiver.city)[:30])
        xml_utils.SubElement(national, 'uf_destinatario', text=str(receiver.state))
        xml_utils.SubElement(national, 'cep_destinatario', cdata=str(receiver.zip_code))
        xml_utils.SubElement(national, 'codigo_usuario_postal')
        xml_utils.SubElement(national, 'centro_custo_cliente')
        xml_utils.SubElement(national, 'numero_nota_fiscal', text=str(shipping_label.invoice_number))
        xml_utils.SubElement(national, 'serie_nota_fiscal', text=str(shipping_label.invoice_series))
        xml_utils.SubElement(national, 'valor_nota_fiscal', text=str(shipping_label.value).replace('.', ','))
        xml_utils.SubElement(national, 'natureza_nota_fiscal', text=str(shipping_label.invoice_type))
        xml_utils.SubElement(national, 'descricao_objeto', cdata=str(shipping_label.text)[:20])
        xml_utils.SubElement(national, 'valor_a_cobrar', text=str(shipping_label.billing).replace('.', ','))

        extras = xml_utils.SubElement(obj, 'servico_adicional')
        for extra_service in shipping_label.extra_services:
            # Service numbers are zero-padded to three digits.
            xml_utils.SubElement(extras, 'codigo_servico_adicional', text='{!s:>03}'.format(extra_service.number))
        xml_utils.SubElement(extras, 'valor_declarado', text=str(shipping_label.value).replace('.', ','))

        dimensions = xml_utils.SubElement(obj, 'dimensao_objeto')
        xml_utils.SubElement(dimensions, 'tipo_objeto', text='{!s:>03}'.format(shipping_label.package.package_type))
        xml_utils.SubElement(dimensions, 'dimensao_altura', text=str(shipping_label.package.height))
        xml_utils.SubElement(dimensions, 'dimensao_largura', text=str(shipping_label.package.width))
        xml_utils.SubElement(dimensions, 'dimensao_comprimento', text=str(shipping_label.package.length))
        xml_utils.SubElement(dimensions, 'dimensao_diametro', text=str(shipping_label.package.diameter))

        xml_utils.SubElement(obj, 'data_postagem_sara')
        xml_utils.SubElement(obj, 'status_processamento', text='0')
        xml_utils.SubElement(obj, 'numero_comprovante_postagem')
        xml_utils.SubElement(obj, 'valor_cobrado')
        return obj

    def get_document(self, posting_list: PostingList):
        """Build the full ``<correioslog>`` document for an open, non-empty posting list.

        Raises:
            PostingListSerializerError: if the list has no labels or is closed.
        """
        if not posting_list.shipping_labels:
            raise PostingListSerializerError('Cannot serialize an empty posting list')
        if posting_list.closed:
            raise PostingListSerializerError('Cannot serialize a closed posting list')

        root = xml_utils.Element('correioslog')
        root.append(xml_utils.Element('tipo_arquivo', text='Postagem'))
        root.append(xml_utils.Element('versao_arquivo', text='2.3'))
        root.append(self._get_posting_list_element(posting_list))
        root.append(self._get_sender_info_element(posting_list))
        root.append(xml_utils.Element('forma_pagamento'))
        for shipping_label in posting_list.shipping_labels.values():
            root.append(self._get_shipping_label_element(shipping_label))
        return root

    def validate(self, document):
        """Assert that *document* conforms to the bundled PLP XML schema."""
        with open(os.path.join(DATADIR, 'posting_list_schema.xsd')) as xsd:
            schema = xml_utils.XMLSchema(xml_utils.parse(xsd))
        return schema.assert_(document)

    def get_xml(self, document) -> bytes:
        """Render *document* as an ISO-8859-1 encoded XML byte string."""
        body = str(xml_utils.tostring(document, encoding='unicode'))
        return b'<?xml version="1.0" encoding="ISO-8859-1"?>' + body.encode('iso-8859-1', errors='ignore')
class Correios:
    """High-level client for the Correios SIGEP, tracking (SRO) and freight webservices."""

    PRODUCTION = 'production'
    TEST = 'test'
    MAX_TRACKING_CODES_PER_REQUEST = 50  # limit imposed by the Correios tracking service

    # environment name -> (wsdl path, verify SSL certificate)
    sigep_urls = {
        'production': (get_wsdl_path('AtendeCliente-production.wsdl'), True),
        'test': (get_wsdl_path('AtendeCliente-test.wsdl'), False),
    }
    websro_url = get_wsdl_path('Rastro.wsdl')
    freight_url = get_wsdl_path('CalcPrecoPrazo.asmx')

    def __init__(self, username, password, timeout=8, environment='production'):
        """Create SOAP clients for the three Correios services.

        :param username: SIGEP account user name
        :param password: SIGEP account password
        :param timeout: per-request timeout in seconds
        :param environment: ``"production"`` or ``"test"``
        """
        self.username = username
        self.password = password
        self.timeout = timeout

        # Idiomatic tuple unpacking straight into the attributes.
        self.sigep_url, self.sigep_verify = self.sigep_urls[environment]
        self.sigep_client = SoapClient(self.sigep_url, verify=self.sigep_verify, timeout=self.timeout)
        self.sigep = self.sigep_client.service

        self.websro_client = SoapClient(self.websro_url, timeout=self.timeout)
        self.websro = self.websro_client.service

        self.freight_client = SoapClient(self.freight_url, timeout=self.timeout)
        self.freight = self.freight_client.service

        self.model_builder = ModelBuilder()

    def _auth_call(self, method_name, *args, **kwargs):
        # Inject the credentials required by every authenticated SIGEP method.
        kwargs.update({'usuario': self.username, 'senha': self.password})
        return self._call(method_name, *args, **kwargs)

    def _call(self, method_name, *args, **kwargs):
        # Dispatch a raw SIGEP SOAP call by method name.
        return getattr(self.sigep, method_name)(*args, **kwargs)

    def get_user(self, contract_number: Union[int, str], posting_card_number: Union[int, str]) -> User:
        """Fetch the User (with contracts and posting cards) bound to the credentials."""
        response = self._auth_call('buscaCliente', str(contract_number), str(posting_card_number))
        return self.model_builder.build_user(response)

    def find_zipcode(self, zip_code: Union[ZipCode, str]) -> ZipAddress:
        """Look up the address registered for *zip_code*."""
        return self.model_builder.build_zip_address(self._call('consultaCEP', str(zip_code)))

    def verify_service_availability(
        self,
        posting_card: PostingCard,
        service: Service,
        from_zip_code: Union[ZipCode, str],
        to_zip_code: Union[ZipCode, str],
    ) -> bool:
        """Check whether *service* can deliver between the two zip codes."""
        return self._auth_call(
            'verificaDisponibilidadeServico',
            posting_card.administrative_code,
            str(service),
            str(ZipCode.create(from_zip_code)),
            str(ZipCode.create(to_zip_code)),
        )

    def get_posting_card_status(self, posting_card: PostingCard) -> bool:
        """Return whether the posting card is active or cancelled."""
        response = self._auth_call('getStatusCartaoPostagem', posting_card.number)
        return self.model_builder.build_posting_card_status(response)

    def request_tracking_codes(self, user: User, service: Service, quantity=1, receiver_type='C') -> list:
        """Reserve *quantity* new tracking codes for *service*."""
        response = self._auth_call(
            'solicitaEtiquetas', receiver_type, str(user.federal_tax_number), service.id, quantity,
        )
        return self.model_builder.build_tracking_codes_list(response)

    def generate_verification_digit(self, tracking_codes: Sequence[str]) -> List[int]:
        """Ask the webservice for the check digit of each tracking code."""
        stripped = [TrackingCode(code).nodigit for code in tracking_codes]
        return self._auth_call('geraDigitoVerificadorEtiquetas', stripped)

    def _generate_xml_string(self, posting_list: PostingList) -> str:
        # Serialize, schema-validate and decode the PLP document for submission.
        serializer = PostingListSerializer()
        document = serializer.get_document(posting_list)
        serializer.validate(document)
        return serializer.get_xml(document).decode('ISO-8859-1')

    def close_posting_list(self, posting_list: PostingList, posting_card: PostingCard) -> PostingList:
        """Submit the posting list to the Correios and mark it closed."""
        xml = self._generate_xml_string(posting_list)
        tracking_codes = posting_list.get_tracking_codes()
        posting_list_id = self._auth_call(
            'fechaPlpVariosServicos', xml, posting_list.custom_id, posting_card.number, tracking_codes,
        )
        posting_list.close_with_id(posting_list_id)
        return posting_list

    def get_tracking_code_events(self, tracking_list):
        """Fetch tracking events for up to MAX_TRACKING_CODES_PER_REQUEST codes.

        Accepts a single code (str or TrackingCode) or a sequence of them.

        Raises:
            TrackingCodesLimitExceededError: if more codes than the service limit
                are requested.
        """
        if isinstance(tracking_list, (str, TrackingCode)):
            tracking_list = [tracking_list]

        if len(tracking_list) > Correios.MAX_TRACKING_CODES_PER_REQUEST:
            # Fixed typo in the error message: "stabilished" -> "established".
            msg = '{} tracking codes requested exceeds the limit of {} established by the Correios'.format(
                len(tracking_list), Correios.MAX_TRACKING_CODES_PER_REQUEST,
            )
            raise TrackingCodesLimitExceededError(msg)

        tracking_codes = {}
        for code in tracking_list:
            tracking_code = TrackingCode.create(code)
            tracking_codes[tracking_code.code] = tracking_code

        response = self.websro.buscaEventosLista(
            self.username, self.password, 'L', 'T', '101', tuple(tracking_codes.keys()),
        )
        return self.model_builder.load_tracking_events(tracking_codes, response)

    def calculate_freights(
        self,
        posting_card: PostingCard,
        services: List[Union[Service, int]],
        from_zip: Union[ZipCode, int, str],
        to_zip: Union[ZipCode, int, str],
        package: Package,
        value: Union[Decimal, float] = 0.0,
        extra_services: Optional[Sequence[Union[ExtraService, int]]] = None,
    ):
        """Quote price and delivery time for *services* between two zip codes."""
        services = [Service.get(s) for s in services]
        from_zip = ZipCode.create(from_zip)
        to_zip = ZipCode.create(to_zip)

        if extra_services is None:
            extra_services = []
        else:
            extra_services = [ExtraService.get(es) for es in extra_services]

        response = self.freight.CalcPrecoPrazo(
            posting_card.administrative_code,
            self.password,
            ','.join(str(s) for s in services),
            str(from_zip),
            str(to_zip),
            package.weight / KG,  # the freight service expects kilograms
            package.package_type,
            package.length,
            package.height,
            package.width,
            package.diameter,
            'S' if EXTRA_SERVICE_MP in extra_services else 'N',
            value,
            'S' if EXTRA_SERVICE_AR in extra_services else 'N',
        )
        return self.model_builder.build_freights_list(response)

    def calculate_delivery_time(
        self,
        service: Union[Service, int],
        from_zip: Union[ZipCode, int, str],
        to_zip: Union[ZipCode, int, str],
    ):
        """Return only the estimated delivery time (in days) for *service*."""
        response = self.freight.CalcPrazo(
            str(Service.get(service)), str(ZipCode.create(from_zip)), str(ZipCode.create(to_zip)),
        )
        return response.cServico[0].PrazoEntrega
<|reserved_special_token_1|>
# Copyright 2016 Osvaldo Santana Neto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from datetime import datetime
from decimal import Decimal
from enum import Enum
from typing import Dict, List, Optional, Sequence, Union
from correios import DATADIR, xml_utils
from correios.exceptions import PostingListSerializerError, TrackingCodesLimitExceededError
from correios.models.data import EXTRA_SERVICE_AR, EXTRA_SERVICE_MP
from correios.utils import get_wsdl_path, to_decimal, to_integer
from .models.address import ZipAddress, ZipCode
from .models.posting import (
EventStatus,
Freight,
FreightError,
NotFoundTrackingEvent,
Package,
PostingList,
ShippingLabel,
TrackingCode,
TrackingEvent
)
from .models.user import Contract, ExtraService, FederalTaxNumber, PostingCard, Service, StateTaxNumber, User
from .soap import SoapClient
KG = 1000  # grams per kilogram: Package.weight is stored in grams; the freight API expects kg
class ValidRestrictResponse(Enum):
    """Freight error codes that flag a delivery restriction, not a failure.

    The CalcPrecoPrazo service returns these codes when delivery to and/or
    from a zip code is restricted; the quote itself is still valid, so
    ``ModelBuilder.build_freight`` treats them as a successful ``Freight``.
    """

    INITIAL_ZIPCODE_RESTRICTED = 9
    FINAL_ZIPCODE_RESTRICTED = 10
    INITIAL_AND_FINAL_ZIPCODE_RESTRICTED = 11

    @classmethod
    def restricted_codes(cls):
        """Return the numeric value of every restricted-delivery code.

        Bug fix: the previous implementation listed FINAL_ZIPCODE_RESTRICTED
        twice and omitted INITIAL_ZIPCODE_RESTRICTED (code 9), so quotes with
        an origin-only restriction were wrongly built as FreightError.
        """
        return [member.value for member in cls]
class ModelBuilder:
    def build_service(self, service_data):
        """Build a Service model from a SIGEP service record."""
        service = Service(
            code=service_data.codigo,
            id=service_data.id,
            description=service_data.descricao,
            category=service_data.servicoSigep.categoriaServico
        )
        return service
    def build_posting_card(self, contract: Contract, posting_card_data):
        """Build a PostingCard (with its services) and bind it to *contract*."""
        posting_card = PostingCard(
            contract=contract,
            number=posting_card_data.numero,
            administrative_code=posting_card_data.codigoAdministrativo,
        )

        # Validity window and status come straight from the SIGEP record.
        posting_card.start_date = posting_card_data.dataVigenciaInicio
        posting_card.end_date = posting_card_data.dataVigenciaFim
        posting_card.status = posting_card_data.statusCartaoPostagem
        posting_card.status_code = posting_card_data.statusCodigo
        posting_card.unit = posting_card_data.unidadeGenerica

        # Register every service available to this posting card.
        for service_data in posting_card_data.servicos:
            service = self.build_service(service_data)
            posting_card.add_service(service)

        return posting_card
    def build_contract(self, user: User, contract_data):
        """Build a Contract (with its posting cards) and bind it to *user*."""
        contract = Contract(
            user=user,
            number=contract_data.contratoPK.numero,
            regional_direction=contract_data.codigoDiretoria,
        )
        contract.customer_code = contract_data.codigoCliente
        contract.status_code = contract_data.statusCodigo
        contract.start_date = contract_data.dataVigenciaInicio
        contract.end_date = contract_data.dataVigenciaFim

        # build_posting_card attaches each card to the contract as a side effect.
        for posting_card_data in contract_data.cartoesPostagem:
            self.build_posting_card(contract, posting_card_data)

        return contract
    def build_user(self, user_data):
        """Build a User model and populate its contracts from a ``buscaCliente`` response."""
        user = User(
            name=user_data.nome,
            federal_tax_number=FederalTaxNumber(user_data.cnpj),
            state_tax_number=StateTaxNumber(user_data.inscricaoEstadual),
            status_number=user_data.statusCodigo,
        )

        # build_contract attaches each contract to the user as a side effect.
        for contract_data in user_data.contratos:
            self.build_contract(user, contract_data)

        return user
def build_zip_address(self, zip_address_data):
zip_address = ZipAddress(
id=zip_address_data.id,
zip_code=zip_address_data.cep,
state=zip_address_data.uf,
city=zip_address_data.cidade,
district=zip_address_data.bairro,
address=zip_address_data.end,
complements=[zip_address_data.complemento, zip_address_data.complemento2]
)
return zip_address
def build_posting_card_status(self, response):
if response.lower() != "normal":
return PostingCard.CANCELLED
return PostingCard.ACTIVE
def build_tracking_codes_list(self, response):
codes = response.split(",")
return TrackingCode.create_range(codes[0], codes[1])
def _load_invalid_event(self, tracking_code: TrackingCode, tracked_object):
event = NotFoundTrackingEvent(
timestamp=datetime.now(),
comment=tracked_object.erro,
)
tracking_code.add_event(event)
def _load_events(self, tracking_code: TrackingCode, events):
for event in events:
timestamp = datetime.strptime("{} {}".format(event.data, event.hora), TrackingEvent.timestamp_format)
event = TrackingEvent(
timestamp=timestamp,
status=EventStatus(event.tipo, event.status),
location_zip_code=getattr(event, "codigo", ""),
location=getattr(event, "local", ""),
city=getattr(event, "cidade", ""),
state=getattr(event, "uf", ""),
receiver=getattr(event, "recebedor", ""),
document=getattr(event, "documento", ""),
comment=getattr(event, "comentario", ""),
description=getattr(event, "descricao", ""),
details=getattr(event, "detalhes", ""),
)
tracking_code.add_event(event)
def load_tracking_events(self, tracking_codes: Dict[str, TrackingCode], response):
result = []
for tracked_object in response.objeto:
tracking_code = tracking_codes[tracked_object.numero]
if 'erro' in tracked_object:
self._load_invalid_event(tracking_code, tracked_object)
else:
tracking_code.name = tracked_object.nome
tracking_code.initials = tracked_object.sigla
tracking_code.category = tracked_object.categoria
self._load_events(tracking_code, tracked_object.evento)
result.append(tracking_code)
return result
def build_freights_list(self, response):
result = []
for service_data in response.cServico:
freight = self.build_freight(service_data=service_data)
result.append(freight)
return result
def build_freight(self, service_data):
data = {
'service': Service.get(service_data.Codigo),
'error_code': to_integer(service_data.Erro),
'delivery_time': int(service_data.PrazoEntrega),
'value': to_decimal(service_data.ValorSemAdicionais),
'declared_value': to_decimal(service_data.ValorValorDeclarado),
'ar_value': to_decimal(service_data.ValorAvisoRecebimento),
'mp_value': to_decimal(service_data.ValorMaoPropria),
'saturday': service_data.EntregaSabado or "",
'home': service_data.EntregaDomiciliar or "",
'error_message': service_data.MsgErro or None
}
if (
data['error_code'] and
not data['error_code'] in ValidRestrictResponse.restricted_codes()
):
return FreightError(**data)
return Freight(**data)
class PostingListSerializer:
    """Serializes a ``PostingList`` into the ISO-8859-1 XML document ("PLP")
    expected by the SIGEP ``fechaPlpVariosServicos`` operation.

    Element order and the text-vs-cdata choices below follow the Correios PLP
    schema (validated against ``posting_list_schema.xsd``); do not reorder.
    """
    def _get_posting_list_element(self, posting_list):
        """Build the ``<plp>`` header; most children are left empty for the
        webservice to fill in."""
        element = xml_utils.Element("plp")
        xml_utils.SubElement(element, "id_plp")
        xml_utils.SubElement(element, "valor_global")
        xml_utils.SubElement(element, "mcu_unidade_postagem")
        xml_utils.SubElement(element, "nome_unidade_postagem")
        xml_utils.SubElement(element, "cartao_postagem", text=str(posting_list.posting_card))
        return element
    def _get_sender_info_element(self, posting_list):
        """Build the ``<remetente>`` element from the list's sender, contract
        and posting card."""
        sender = posting_list.sender
        posting_card = posting_list.posting_card
        contract = posting_list.contract
        sender_info = xml_utils.Element("remetente")
        xml_utils.SubElement(sender_info, "numero_contrato", text=str(contract.number))
        xml_utils.SubElement(sender_info, "numero_diretoria", text=str(contract.regional_direction_number))
        xml_utils.SubElement(sender_info, "codigo_administrativo", text=str(posting_card.administrative_code))
        xml_utils.SubElement(sender_info, "nome_remetente", cdata=sender.name)
        xml_utils.SubElement(sender_info, "logradouro_remetente", cdata=sender.street)
        xml_utils.SubElement(sender_info, "numero_remetente", cdata=sender.number)
        xml_utils.SubElement(sender_info, "complemento_remetente", cdata=sender.complement)
        xml_utils.SubElement(sender_info, "bairro_remetente", cdata=sender.neighborhood)
        xml_utils.SubElement(sender_info, "cep_remetente", cdata=str(sender.zip_code))
        # City name is truncated to 30 chars — presumably a schema limit.
        xml_utils.SubElement(sender_info, "cidade_remetente", cdata=str(sender.city)[:30])
        xml_utils.SubElement(sender_info, "uf_remetente", cdata=str(sender.state))
        xml_utils.SubElement(sender_info, "telefone_remetente", cdata=sender.phone.short)
        xml_utils.SubElement(sender_info, "fax_remetente", cdata="")
        xml_utils.SubElement(sender_info, "email_remetente", cdata=sender.email)
        return sender_info
    def _get_shipping_label_element(self, shipping_label: ShippingLabel):
        """Build one ``<objeto_postal>`` element for a shipping label,
        including receiver, national address, extra services and dimensions."""
        item = xml_utils.Element("objeto_postal")
        xml_utils.SubElement(item, "numero_etiqueta", text=str(shipping_label.tracking_code))
        xml_utils.SubElement(item, "codigo_objeto_cliente")
        xml_utils.SubElement(item, "codigo_servico_postagem", text=str(shipping_label.service))
        # Decimal values use Brazilian comma as the decimal separator.
        xml_utils.SubElement(item, "cubagem", text=str(shipping_label.posting_weight).replace(".", ","))
        xml_utils.SubElement(item, "peso", text=str(shipping_label.package.weight))
        xml_utils.SubElement(item, "rt1")
        xml_utils.SubElement(item, "rt2")
        receiver = shipping_label.receiver
        address = xml_utils.SubElement(item, "destinatario")
        xml_utils.SubElement(address, "nome_destinatario", cdata=str(receiver.name))
        xml_utils.SubElement(address, "telefone_destinatario", cdata=receiver.phone.short)
        xml_utils.SubElement(address, "celular_destinatario", cdata=receiver.cellphone.short)
        xml_utils.SubElement(address, "email_destinatario", cdata=str(receiver.email))
        xml_utils.SubElement(address, "logradouro_destinatario", cdata=str(receiver.street))
        xml_utils.SubElement(address, "complemento_destinatario", cdata=str(receiver.complement))
        xml_utils.SubElement(address, "numero_end_destinatario", text=str(receiver.number))
        national = xml_utils.SubElement(item, "nacional")
        xml_utils.SubElement(national, "bairro_destinatario", cdata=str(receiver.neighborhood))
        xml_utils.SubElement(national, "cidade_destinatario", cdata=str(receiver.city)[:30])
        xml_utils.SubElement(national, "uf_destinatario", text=str(receiver.state))
        xml_utils.SubElement(national, "cep_destinatario", cdata=str(receiver.zip_code))
        xml_utils.SubElement(national, "codigo_usuario_postal")
        xml_utils.SubElement(national, "centro_custo_cliente")
        xml_utils.SubElement(national, "numero_nota_fiscal", text=str(shipping_label.invoice_number))
        xml_utils.SubElement(national, "serie_nota_fiscal", text=str(shipping_label.invoice_series))
        xml_utils.SubElement(national, "valor_nota_fiscal", text=str(shipping_label.value).replace(".", ","))
        xml_utils.SubElement(national, "natureza_nota_fiscal", text=str(shipping_label.invoice_type))
        xml_utils.SubElement(national, "descricao_objeto", cdata=str(shipping_label.text)[:20])
        xml_utils.SubElement(national, "valor_a_cobrar", text=str(shipping_label.billing).replace(".", ","))
        extra_services = xml_utils.SubElement(item, "servico_adicional")
        for extra_service in shipping_label.extra_services:
            # Extra-service codes are zero-padded to 3 digits.
            xml_utils.SubElement(extra_services, "codigo_servico_adicional",
                                 text="{!s:>03}".format(extra_service.number))
        xml_utils.SubElement(extra_services, "valor_declarado", text=str(shipping_label.value).replace(".", ","))
        dimensions = xml_utils.SubElement(item, "dimensao_objeto")
        xml_utils.SubElement(dimensions, "tipo_objeto", text="{!s:>03}".format(shipping_label.package.package_type))
        xml_utils.SubElement(dimensions, "dimensao_altura", text=str(shipping_label.package.height))
        xml_utils.SubElement(dimensions, "dimensao_largura", text=str(shipping_label.package.width))
        xml_utils.SubElement(dimensions, "dimensao_comprimento", text=str(shipping_label.package.length))
        xml_utils.SubElement(dimensions, "dimensao_diametro", text=str(shipping_label.package.diameter))
        xml_utils.SubElement(item, "data_postagem_sara")
        xml_utils.SubElement(item, "status_processamento", text="0")
        xml_utils.SubElement(item, "numero_comprovante_postagem")
        xml_utils.SubElement(item, "valor_cobrado")
        return item
    def get_document(self, posting_list: PostingList):
        """Assemble the full ``<correioslog>`` XML tree for ``posting_list``.

        Raises:
            PostingListSerializerError: if the list is empty or already closed.
        """
        if not posting_list.shipping_labels:
            raise PostingListSerializerError("Cannot serialize an empty posting list")
        if posting_list.closed:
            raise PostingListSerializerError("Cannot serialize a closed posting list")
        root = xml_utils.Element("correioslog")
        root.append(xml_utils.Element("tipo_arquivo", text="Postagem"))
        root.append(xml_utils.Element("versao_arquivo", text="2.3"))
        root.append(self._get_posting_list_element(posting_list))
        root.append(self._get_sender_info_element(posting_list))
        root.append(xml_utils.Element("forma_pagamento"))
        for shipping_label in posting_list.shipping_labels.values():
            root.append(self._get_shipping_label_element(shipping_label))
        return root
    def validate(self, document):
        """Assert ``document`` against the bundled PLP XSD schema."""
        with open(os.path.join(DATADIR, "posting_list_schema.xsd")) as xsd:
            xsd_document = xml_utils.parse(xsd)
        schema = xml_utils.XMLSchema(xsd_document)
        return schema.assert_(document)
    def get_xml(self, document) -> bytes:
        """Render ``document`` as ISO-8859-1 bytes with an XML declaration.

        Characters not representable in ISO-8859-1 are silently dropped
        (``errors='ignore'``).
        """
        xmlstring = str(xml_utils.tostring(document, encoding="unicode"))
        encoded_xmlstring = xmlstring.encode("iso-8859-1", errors='ignore')
        return b'<?xml version="1.0" encoding="ISO-8859-1"?>' + encoded_xmlstring
class Correios:
    """High-level client for the Correios webservices.

    Wraps three SOAP endpoints — SIGEP (AtendeCliente), Rastro (tracking) and
    CalcPrecoPrazo (freight) — and converts their responses into domain
    models via ``ModelBuilder``.
    """
    PRODUCTION = "production"
    TEST = "test"
    # Hard limit imposed by the Rastro webservice per request.
    MAX_TRACKING_CODES_PER_REQUEST = 50
    # 'environment': ('url', 'ssl_verification')
    sigep_urls = {
        'production': (get_wsdl_path('AtendeCliente-production.wsdl'), True),
        'test': (get_wsdl_path('AtendeCliente-test.wsdl'), False),
    }
    websro_url = get_wsdl_path('Rastro.wsdl')
    freight_url = get_wsdl_path('CalcPrecoPrazo.asmx')
    def __init__(self, username, password, timeout=8, environment="production"):
        """Create SOAP clients for all three services.

        Args:
            username: SIGEP webservice user.
            password: SIGEP webservice password (also used for freight calls).
            timeout: per-request timeout in seconds for every client.
            environment: "production" or "test"; selects the SIGEP WSDL and
                whether SSL certificates are verified.
        """
        self.username = username
        self.password = password
        self.timeout = timeout
        url, verify = self.sigep_urls[environment]
        self.sigep_url = url
        self.sigep_verify = verify
        self.sigep_client = SoapClient(self.sigep_url, verify=self.sigep_verify, timeout=self.timeout)
        self.sigep = self.sigep_client.service
        self.websro_client = SoapClient(self.websro_url, timeout=self.timeout)
        self.websro = self.websro_client.service
        self.freight_client = SoapClient(self.freight_url, timeout=self.timeout)
        self.freight = self.freight_client.service
        self.model_builder = ModelBuilder()
    def _auth_call(self, method_name, *args, **kwargs):
        """Invoke a SIGEP method with the stored credentials appended as the
        ``usuario``/``senha`` keyword arguments."""
        kwargs.update({
            "usuario": self.username,
            "senha": self.password,
        })
        return self._call(method_name, *args, **kwargs)
    def _call(self, method_name, *args, **kwargs):
        """Invoke a SIGEP SOAP method by name."""
        method = getattr(self.sigep, method_name)
        return method(*args, **kwargs)  # TODO: handle errors
    def get_user(self, contract_number: Union[int, str], posting_card_number: Union[int, str]) -> User:
        """Fetch the client (``buscaCliente``) for a contract/posting card pair
        and build the full ``User`` model tree."""
        contract_number = str(contract_number)
        posting_card_number = str(posting_card_number)
        user_data = self._auth_call("buscaCliente", contract_number, posting_card_number)
        return self.model_builder.build_user(user_data)
    def find_zipcode(self, zip_code: Union[ZipCode, str]) -> ZipAddress:
        """Look up the address for a zip code via ``consultaCEP``."""
        zip_address_data = self._call("consultaCEP", str(zip_code))
        return self.model_builder.build_zip_address(zip_address_data)
    def verify_service_availability(self,
                                    posting_card: PostingCard,
                                    service: Service,
                                    from_zip_code: Union[ZipCode, str],
                                    to_zip_code: Union[ZipCode, str]) -> bool:
        """Check whether ``service`` is available between the two zip codes."""
        from_zip_code = ZipCode.create(from_zip_code)
        to_zip_code = ZipCode.create(to_zip_code)
        result = self._auth_call("verificaDisponibilidadeServico",
                                 posting_card.administrative_code, str(service),
                                 str(from_zip_code), str(to_zip_code))
        return result
    def get_posting_card_status(self, posting_card: PostingCard) -> bool:
        """Return the posting card's status (active/cancelled constant)."""
        result = self._auth_call("getStatusCartaoPostagem", posting_card.number)
        return self.model_builder.build_posting_card_status(result)
    def request_tracking_codes(self, user: User, service: Service, quantity=1, receiver_type="C") -> list:
        """Request ``quantity`` new tracking codes (``solicitaEtiquetas``) for
        ``user``/``service`` and return them as ``TrackingCode`` objects."""
        result = self._auth_call("solicitaEtiquetas",
                                 receiver_type, str(user.federal_tax_number),
                                 service.id, quantity)
        return self.model_builder.build_tracking_codes_list(result)
    def generate_verification_digit(self, tracking_codes: Sequence[str]) -> List[int]:
        """Ask the webservice for the check digit of each tracking code
        (codes are sent without their digit)."""
        tracking_codes = [TrackingCode(tc).nodigit for tc in tracking_codes]
        result = self._auth_call("geraDigitoVerificadorEtiquetas",
                                 tracking_codes)
        return result
    def _generate_xml_string(self, posting_list: PostingList) -> str:
        """Serialize, schema-validate and encode the posting list, returning
        the PLP XML as an ISO-8859-1-decoded string."""
        posting_list_serializer = PostingListSerializer()
        document = posting_list_serializer.get_document(posting_list)
        posting_list_serializer.validate(document)
        xml = posting_list_serializer.get_xml(document)
        return xml.decode("ISO-8859-1")
    def close_posting_list(self, posting_list: PostingList, posting_card: PostingCard) -> PostingList:
        """Close the posting list remotely (``fechaPlpVariosServicos``) and
        mark it closed locally with the returned PLP id."""
        xml = self._generate_xml_string(posting_list)
        tracking_codes = posting_list.get_tracking_codes()
        id_ = self._auth_call("fechaPlpVariosServicos", xml,
                              posting_list.custom_id, posting_card.number, tracking_codes)
        posting_list.close_with_id(id_)
        return posting_list
    def get_tracking_code_events(self, tracking_list):
        """Fetch tracking events for up to MAX_TRACKING_CODES_PER_REQUEST codes.

        Accepts a single code (str or TrackingCode) or a sequence of them.

        Raises:
            TrackingCodesLimitExceededError: if more codes are requested than
                the webservice accepts per call.
        """
        if isinstance(tracking_list, (str, TrackingCode)):
            tracking_list = [tracking_list]
        if len(tracking_list) > Correios.MAX_TRACKING_CODES_PER_REQUEST:
            msg = '{} tracking codes requested exceeds the limit of {} stabilished by the Correios'
            msg = msg.format(len(tracking_list), Correios.MAX_TRACKING_CODES_PER_REQUEST)
            raise TrackingCodesLimitExceededError(msg)
        tracking_codes = {}
        for tracking_code in tracking_list:
            tracking_code = TrackingCode.create(tracking_code)
            tracking_codes[tracking_code.code] = tracking_code
        # "L"/"T"/"101": query type, result scope and language flags of the
        # Rastro API — presumably list-mode, all events, Portuguese; confirm
        # against the Rastro documentation before changing.
        response = self.websro.buscaEventosLista(self.username, self.password, "L", "T", "101",
                                                 tuple(tracking_codes.keys()))
        return self.model_builder.load_tracking_events(tracking_codes, response)
    def calculate_freights(self,
                           posting_card: PostingCard,
                           services: List[Union[Service, int]],
                           from_zip: Union[ZipCode, int, str], to_zip: Union[ZipCode, int, str],
                           package: Package,
                           value: Union[Decimal, float] = 0.00,
                           extra_services: Optional[Sequence[Union[ExtraService, int]]] = None):
        """Quote price and delivery time (``CalcPrecoPrazo``) for each service.

        ``value`` is the declared value; ``extra_services`` may include
        "own hands" (MP) and return receipt (AR), which are sent as "S"/"N"
        flags. Returns a list of ``Freight``/``FreightError`` objects.
        """
        administrative_code = posting_card.administrative_code
        services = [Service.get(s) for s in services]
        from_zip = ZipCode.create(from_zip)
        to_zip = ZipCode.create(to_zip)
        if extra_services is None:
            extra_services = []
        else:
            extra_services = [ExtraService.get(es) for es in extra_services]
        response = self.freight.CalcPrecoPrazo(
            administrative_code,
            self.password,
            ",".join(str(s) for s in services),
            str(from_zip),
            str(to_zip),
            package.weight / KG,  # webservice expects weight in kilograms
            package.package_type,
            package.length,
            package.height,
            package.width,
            package.diameter,
            "S" if EXTRA_SERVICE_MP in extra_services else "N",
            value,
            "S" if EXTRA_SERVICE_AR in extra_services else "N",
        )
        return self.model_builder.build_freights_list(response)
    def calculate_delivery_time(self,
                                service: Union[Service, int],
                                from_zip: Union[ZipCode, int, str],
                                to_zip: Union[ZipCode, int, str]):
        """Return the estimated delivery time (in days, as reported by
        ``CalcPrazo``) for ``service`` between the two zip codes."""
        service = Service.get(service)
        from_zip = ZipCode.create(from_zip)
        to_zip = ZipCode.create(to_zip)
        response = self.freight.CalcPrazo(str(service), str(from_zip), str(to_zip))
        return response.cServico[0].PrazoEntrega
|
flexible
|
{
"blob_id": "09284a96467b09c2ad7b65530c015fdb64b198a4",
"index": 2638,
"step-1": "<mask token>\n\n\nclass ModelBuilder:\n <mask token>\n <mask token>\n <mask token>\n\n def build_user(self, user_data):\n user = User(name=user_data.nome, federal_tax_number=\n FederalTaxNumber(user_data.cnpj), state_tax_number=\n StateTaxNumber(user_data.inscricaoEstadual), status_number=\n user_data.statusCodigo)\n for contract_data in user_data.contratos:\n self.build_contract(user, contract_data)\n return user\n\n def build_zip_address(self, zip_address_data):\n zip_address = ZipAddress(id=zip_address_data.id, zip_code=\n zip_address_data.cep, state=zip_address_data.uf, city=\n zip_address_data.cidade, district=zip_address_data.bairro,\n address=zip_address_data.end, complements=[zip_address_data.\n complemento, zip_address_data.complemento2])\n return zip_address\n <mask token>\n <mask token>\n\n def _load_invalid_event(self, tracking_code: TrackingCode, tracked_object):\n event = NotFoundTrackingEvent(timestamp=datetime.now(), comment=\n tracked_object.erro)\n tracking_code.add_event(event)\n <mask token>\n <mask token>\n\n def build_freights_list(self, response):\n result = []\n for service_data in response.cServico:\n freight = self.build_freight(service_data=service_data)\n result.append(freight)\n return result\n\n def build_freight(self, service_data):\n data = {'service': Service.get(service_data.Codigo), 'error_code':\n to_integer(service_data.Erro), 'delivery_time': int(\n service_data.PrazoEntrega), 'value': to_decimal(service_data.\n ValorSemAdicionais), 'declared_value': to_decimal(service_data.\n ValorValorDeclarado), 'ar_value': to_decimal(service_data.\n ValorAvisoRecebimento), 'mp_value': to_decimal(service_data.\n ValorMaoPropria), 'saturday': service_data.EntregaSabado or '',\n 'home': service_data.EntregaDomiciliar or '', 'error_message': \n service_data.MsgErro or None}\n if data['error_code'] and not data['error_code'\n ] in ValidRestrictResponse.restricted_codes():\n return FreightError(**data)\n return 
Freight(**data)\n\n\nclass PostingListSerializer:\n\n def _get_posting_list_element(self, posting_list):\n element = xml_utils.Element('plp')\n xml_utils.SubElement(element, 'id_plp')\n xml_utils.SubElement(element, 'valor_global')\n xml_utils.SubElement(element, 'mcu_unidade_postagem')\n xml_utils.SubElement(element, 'nome_unidade_postagem')\n xml_utils.SubElement(element, 'cartao_postagem', text=str(\n posting_list.posting_card))\n return element\n\n def _get_sender_info_element(self, posting_list):\n sender = posting_list.sender\n posting_card = posting_list.posting_card\n contract = posting_list.contract\n sender_info = xml_utils.Element('remetente')\n xml_utils.SubElement(sender_info, 'numero_contrato', text=str(\n contract.number))\n xml_utils.SubElement(sender_info, 'numero_diretoria', text=str(\n contract.regional_direction_number))\n xml_utils.SubElement(sender_info, 'codigo_administrativo', text=str\n (posting_card.administrative_code))\n xml_utils.SubElement(sender_info, 'nome_remetente', cdata=sender.name)\n xml_utils.SubElement(sender_info, 'logradouro_remetente', cdata=\n sender.street)\n xml_utils.SubElement(sender_info, 'numero_remetente', cdata=sender.\n number)\n xml_utils.SubElement(sender_info, 'complemento_remetente', cdata=\n sender.complement)\n xml_utils.SubElement(sender_info, 'bairro_remetente', cdata=sender.\n neighborhood)\n xml_utils.SubElement(sender_info, 'cep_remetente', cdata=str(sender\n .zip_code))\n xml_utils.SubElement(sender_info, 'cidade_remetente', cdata=str(\n sender.city)[:30])\n xml_utils.SubElement(sender_info, 'uf_remetente', cdata=str(sender.\n state))\n xml_utils.SubElement(sender_info, 'telefone_remetente', cdata=\n sender.phone.short)\n xml_utils.SubElement(sender_info, 'fax_remetente', cdata='')\n xml_utils.SubElement(sender_info, 'email_remetente', cdata=sender.email\n )\n return sender_info\n\n def _get_shipping_label_element(self, shipping_label: ShippingLabel):\n item = xml_utils.Element('objeto_postal')\n 
xml_utils.SubElement(item, 'numero_etiqueta', text=str(\n shipping_label.tracking_code))\n xml_utils.SubElement(item, 'codigo_objeto_cliente')\n xml_utils.SubElement(item, 'codigo_servico_postagem', text=str(\n shipping_label.service))\n xml_utils.SubElement(item, 'cubagem', text=str(shipping_label.\n posting_weight).replace('.', ','))\n xml_utils.SubElement(item, 'peso', text=str(shipping_label.package.\n weight))\n xml_utils.SubElement(item, 'rt1')\n xml_utils.SubElement(item, 'rt2')\n receiver = shipping_label.receiver\n address = xml_utils.SubElement(item, 'destinatario')\n xml_utils.SubElement(address, 'nome_destinatario', cdata=str(\n receiver.name))\n xml_utils.SubElement(address, 'telefone_destinatario', cdata=\n receiver.phone.short)\n xml_utils.SubElement(address, 'celular_destinatario', cdata=\n receiver.cellphone.short)\n xml_utils.SubElement(address, 'email_destinatario', cdata=str(\n receiver.email))\n xml_utils.SubElement(address, 'logradouro_destinatario', cdata=str(\n receiver.street))\n xml_utils.SubElement(address, 'complemento_destinatario', cdata=str\n (receiver.complement))\n xml_utils.SubElement(address, 'numero_end_destinatario', text=str(\n receiver.number))\n national = xml_utils.SubElement(item, 'nacional')\n xml_utils.SubElement(national, 'bairro_destinatario', cdata=str(\n receiver.neighborhood))\n xml_utils.SubElement(national, 'cidade_destinatario', cdata=str(\n receiver.city)[:30])\n xml_utils.SubElement(national, 'uf_destinatario', text=str(receiver\n .state))\n xml_utils.SubElement(national, 'cep_destinatario', cdata=str(\n receiver.zip_code))\n xml_utils.SubElement(national, 'codigo_usuario_postal')\n xml_utils.SubElement(national, 'centro_custo_cliente')\n xml_utils.SubElement(national, 'numero_nota_fiscal', text=str(\n shipping_label.invoice_number))\n xml_utils.SubElement(national, 'serie_nota_fiscal', text=str(\n shipping_label.invoice_series))\n xml_utils.SubElement(national, 'valor_nota_fiscal', text=str(\n 
shipping_label.value).replace('.', ','))\n xml_utils.SubElement(national, 'natureza_nota_fiscal', text=str(\n shipping_label.invoice_type))\n xml_utils.SubElement(national, 'descricao_objeto', cdata=str(\n shipping_label.text)[:20])\n xml_utils.SubElement(national, 'valor_a_cobrar', text=str(\n shipping_label.billing).replace('.', ','))\n extra_services = xml_utils.SubElement(item, 'servico_adicional')\n for extra_service in shipping_label.extra_services:\n xml_utils.SubElement(extra_services, 'codigo_servico_adicional',\n text='{!s:>03}'.format(extra_service.number))\n xml_utils.SubElement(extra_services, 'valor_declarado', text=str(\n shipping_label.value).replace('.', ','))\n dimensions = xml_utils.SubElement(item, 'dimensao_objeto')\n xml_utils.SubElement(dimensions, 'tipo_objeto', text='{!s:>03}'.\n format(shipping_label.package.package_type))\n xml_utils.SubElement(dimensions, 'dimensao_altura', text=str(\n shipping_label.package.height))\n xml_utils.SubElement(dimensions, 'dimensao_largura', text=str(\n shipping_label.package.width))\n xml_utils.SubElement(dimensions, 'dimensao_comprimento', text=str(\n shipping_label.package.length))\n xml_utils.SubElement(dimensions, 'dimensao_diametro', text=str(\n shipping_label.package.diameter))\n xml_utils.SubElement(item, 'data_postagem_sara')\n xml_utils.SubElement(item, 'status_processamento', text='0')\n xml_utils.SubElement(item, 'numero_comprovante_postagem')\n xml_utils.SubElement(item, 'valor_cobrado')\n return item\n\n def get_document(self, posting_list: PostingList):\n if not posting_list.shipping_labels:\n raise PostingListSerializerError(\n 'Cannot serialize an empty posting list')\n if posting_list.closed:\n raise PostingListSerializerError(\n 'Cannot serialize a closed posting list')\n root = xml_utils.Element('correioslog')\n root.append(xml_utils.Element('tipo_arquivo', text='Postagem'))\n root.append(xml_utils.Element('versao_arquivo', text='2.3'))\n 
root.append(self._get_posting_list_element(posting_list))\n root.append(self._get_sender_info_element(posting_list))\n root.append(xml_utils.Element('forma_pagamento'))\n for shipping_label in posting_list.shipping_labels.values():\n root.append(self._get_shipping_label_element(shipping_label))\n return root\n\n def validate(self, document):\n with open(os.path.join(DATADIR, 'posting_list_schema.xsd')) as xsd:\n xsd_document = xml_utils.parse(xsd)\n schema = xml_utils.XMLSchema(xsd_document)\n return schema.assert_(document)\n\n def get_xml(self, document) ->bytes:\n xmlstring = str(xml_utils.tostring(document, encoding='unicode'))\n encoded_xmlstring = xmlstring.encode('iso-8859-1', errors='ignore')\n return (b'<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>' +\n encoded_xmlstring)\n\n\nclass Correios:\n PRODUCTION = 'production'\n TEST = 'test'\n MAX_TRACKING_CODES_PER_REQUEST = 50\n sigep_urls = {'production': (get_wsdl_path(\n 'AtendeCliente-production.wsdl'), True), 'test': (get_wsdl_path(\n 'AtendeCliente-test.wsdl'), False)}\n websro_url = get_wsdl_path('Rastro.wsdl')\n freight_url = get_wsdl_path('CalcPrecoPrazo.asmx')\n\n def __init__(self, username, password, timeout=8, environment='production'\n ):\n self.username = username\n self.password = password\n self.timeout = timeout\n url, verify = self.sigep_urls[environment]\n self.sigep_url = url\n self.sigep_verify = verify\n self.sigep_client = SoapClient(self.sigep_url, verify=self.\n sigep_verify, timeout=self.timeout)\n self.sigep = self.sigep_client.service\n self.websro_client = SoapClient(self.websro_url, timeout=self.timeout)\n self.websro = self.websro_client.service\n self.freight_client = SoapClient(self.freight_url, timeout=self.timeout\n )\n self.freight = self.freight_client.service\n self.model_builder = ModelBuilder()\n\n def _auth_call(self, method_name, *args, **kwargs):\n kwargs.update({'usuario': self.username, 'senha': self.password})\n return self._call(method_name, *args, 
**kwargs)\n\n def _call(self, method_name, *args, **kwargs):\n method = getattr(self.sigep, method_name)\n return method(*args, **kwargs)\n\n def get_user(self, contract_number: Union[int, str],\n posting_card_number: Union[int, str]) ->User:\n contract_number = str(contract_number)\n posting_card_number = str(posting_card_number)\n user_data = self._auth_call('buscaCliente', contract_number,\n posting_card_number)\n return self.model_builder.build_user(user_data)\n\n def find_zipcode(self, zip_code: Union[ZipCode, str]) ->ZipAddress:\n zip_address_data = self._call('consultaCEP', str(zip_code))\n return self.model_builder.build_zip_address(zip_address_data)\n\n def verify_service_availability(self, posting_card: PostingCard,\n service: Service, from_zip_code: Union[ZipCode, str], to_zip_code:\n Union[ZipCode, str]) ->bool:\n from_zip_code = ZipCode.create(from_zip_code)\n to_zip_code = ZipCode.create(to_zip_code)\n result = self._auth_call('verificaDisponibilidadeServico',\n posting_card.administrative_code, str(service), str(\n from_zip_code), str(to_zip_code))\n return result\n\n def get_posting_card_status(self, posting_card: PostingCard) ->bool:\n result = self._auth_call('getStatusCartaoPostagem', posting_card.number\n )\n return self.model_builder.build_posting_card_status(result)\n\n def request_tracking_codes(self, user: User, service: Service, quantity\n =1, receiver_type='C') ->list:\n result = self._auth_call('solicitaEtiquetas', receiver_type, str(\n user.federal_tax_number), service.id, quantity)\n return self.model_builder.build_tracking_codes_list(result)\n\n def generate_verification_digit(self, tracking_codes: Sequence[str]\n ) ->List[int]:\n tracking_codes = [TrackingCode(tc).nodigit for tc in tracking_codes]\n result = self._auth_call('geraDigitoVerificadorEtiquetas',\n tracking_codes)\n return result\n\n def _generate_xml_string(self, posting_list: PostingList) ->str:\n posting_list_serializer = PostingListSerializer()\n document = 
posting_list_serializer.get_document(posting_list)\n posting_list_serializer.validate(document)\n xml = posting_list_serializer.get_xml(document)\n return xml.decode('ISO-8859-1')\n\n def close_posting_list(self, posting_list: PostingList, posting_card:\n PostingCard) ->PostingList:\n xml = self._generate_xml_string(posting_list)\n tracking_codes = posting_list.get_tracking_codes()\n id_ = self._auth_call('fechaPlpVariosServicos', xml, posting_list.\n custom_id, posting_card.number, tracking_codes)\n posting_list.close_with_id(id_)\n return posting_list\n\n def get_tracking_code_events(self, tracking_list):\n if isinstance(tracking_list, (str, TrackingCode)):\n tracking_list = [tracking_list]\n if len(tracking_list) > Correios.MAX_TRACKING_CODES_PER_REQUEST:\n msg = (\n '{} tracking codes requested exceeds the limit of {} stabilished by the Correios'\n )\n msg = msg.format(len(tracking_list), Correios.\n MAX_TRACKING_CODES_PER_REQUEST)\n raise TrackingCodesLimitExceededError(msg)\n tracking_codes = {}\n for tracking_code in tracking_list:\n tracking_code = TrackingCode.create(tracking_code)\n tracking_codes[tracking_code.code] = tracking_code\n response = self.websro.buscaEventosLista(self.username, self.\n password, 'L', 'T', '101', tuple(tracking_codes.keys()))\n return self.model_builder.load_tracking_events(tracking_codes, response\n )\n\n def calculate_freights(self, posting_card: PostingCard, services: List[\n Union[Service, int]], from_zip: Union[ZipCode, int, str], to_zip:\n Union[ZipCode, int, str], package: Package, value: Union[Decimal,\n float]=0.0, extra_services: Optional[Sequence[Union[ExtraService,\n int]]]=None):\n administrative_code = posting_card.administrative_code\n services = [Service.get(s) for s in services]\n from_zip = ZipCode.create(from_zip)\n to_zip = ZipCode.create(to_zip)\n if extra_services is None:\n extra_services = []\n else:\n extra_services = [ExtraService.get(es) for es in extra_services]\n response = 
self.freight.CalcPrecoPrazo(administrative_code, self.\n password, ','.join(str(s) for s in services), str(from_zip),\n str(to_zip), package.weight / KG, package.package_type, package\n .length, package.height, package.width, package.diameter, 'S' if\n EXTRA_SERVICE_MP in extra_services else 'N', value, 'S' if \n EXTRA_SERVICE_AR in extra_services else 'N')\n return self.model_builder.build_freights_list(response)\n\n def calculate_delivery_time(self, service: Union[Service, int],\n from_zip: Union[ZipCode, int, str], to_zip: Union[ZipCode, int, str]):\n service = Service.get(service)\n from_zip = ZipCode.create(from_zip)\n to_zip = ZipCode.create(to_zip)\n response = self.freight.CalcPrazo(str(service), str(from_zip), str(\n to_zip))\n return response.cServico[0].PrazoEntrega\n",
"step-2": "<mask token>\n\n\nclass ModelBuilder:\n\n def build_service(self, service_data):\n service = Service(code=service_data.codigo, id=service_data.id,\n description=service_data.descricao, category=service_data.\n servicoSigep.categoriaServico)\n return service\n <mask token>\n <mask token>\n\n def build_user(self, user_data):\n user = User(name=user_data.nome, federal_tax_number=\n FederalTaxNumber(user_data.cnpj), state_tax_number=\n StateTaxNumber(user_data.inscricaoEstadual), status_number=\n user_data.statusCodigo)\n for contract_data in user_data.contratos:\n self.build_contract(user, contract_data)\n return user\n\n def build_zip_address(self, zip_address_data):\n zip_address = ZipAddress(id=zip_address_data.id, zip_code=\n zip_address_data.cep, state=zip_address_data.uf, city=\n zip_address_data.cidade, district=zip_address_data.bairro,\n address=zip_address_data.end, complements=[zip_address_data.\n complemento, zip_address_data.complemento2])\n return zip_address\n <mask token>\n <mask token>\n\n def _load_invalid_event(self, tracking_code: TrackingCode, tracked_object):\n event = NotFoundTrackingEvent(timestamp=datetime.now(), comment=\n tracked_object.erro)\n tracking_code.add_event(event)\n <mask token>\n <mask token>\n\n def build_freights_list(self, response):\n result = []\n for service_data in response.cServico:\n freight = self.build_freight(service_data=service_data)\n result.append(freight)\n return result\n\n def build_freight(self, service_data):\n data = {'service': Service.get(service_data.Codigo), 'error_code':\n to_integer(service_data.Erro), 'delivery_time': int(\n service_data.PrazoEntrega), 'value': to_decimal(service_data.\n ValorSemAdicionais), 'declared_value': to_decimal(service_data.\n ValorValorDeclarado), 'ar_value': to_decimal(service_data.\n ValorAvisoRecebimento), 'mp_value': to_decimal(service_data.\n ValorMaoPropria), 'saturday': service_data.EntregaSabado or '',\n 'home': service_data.EntregaDomiciliar or '', 
'error_message': \n service_data.MsgErro or None}\n if data['error_code'] and not data['error_code'\n ] in ValidRestrictResponse.restricted_codes():\n return FreightError(**data)\n return Freight(**data)\n\n\nclass PostingListSerializer:\n\n def _get_posting_list_element(self, posting_list):\n element = xml_utils.Element('plp')\n xml_utils.SubElement(element, 'id_plp')\n xml_utils.SubElement(element, 'valor_global')\n xml_utils.SubElement(element, 'mcu_unidade_postagem')\n xml_utils.SubElement(element, 'nome_unidade_postagem')\n xml_utils.SubElement(element, 'cartao_postagem', text=str(\n posting_list.posting_card))\n return element\n\n def _get_sender_info_element(self, posting_list):\n sender = posting_list.sender\n posting_card = posting_list.posting_card\n contract = posting_list.contract\n sender_info = xml_utils.Element('remetente')\n xml_utils.SubElement(sender_info, 'numero_contrato', text=str(\n contract.number))\n xml_utils.SubElement(sender_info, 'numero_diretoria', text=str(\n contract.regional_direction_number))\n xml_utils.SubElement(sender_info, 'codigo_administrativo', text=str\n (posting_card.administrative_code))\n xml_utils.SubElement(sender_info, 'nome_remetente', cdata=sender.name)\n xml_utils.SubElement(sender_info, 'logradouro_remetente', cdata=\n sender.street)\n xml_utils.SubElement(sender_info, 'numero_remetente', cdata=sender.\n number)\n xml_utils.SubElement(sender_info, 'complemento_remetente', cdata=\n sender.complement)\n xml_utils.SubElement(sender_info, 'bairro_remetente', cdata=sender.\n neighborhood)\n xml_utils.SubElement(sender_info, 'cep_remetente', cdata=str(sender\n .zip_code))\n xml_utils.SubElement(sender_info, 'cidade_remetente', cdata=str(\n sender.city)[:30])\n xml_utils.SubElement(sender_info, 'uf_remetente', cdata=str(sender.\n state))\n xml_utils.SubElement(sender_info, 'telefone_remetente', cdata=\n sender.phone.short)\n xml_utils.SubElement(sender_info, 'fax_remetente', cdata='')\n xml_utils.SubElement(sender_info, 
'email_remetente', cdata=sender.email\n )\n return sender_info\n\n def _get_shipping_label_element(self, shipping_label: ShippingLabel):\n item = xml_utils.Element('objeto_postal')\n xml_utils.SubElement(item, 'numero_etiqueta', text=str(\n shipping_label.tracking_code))\n xml_utils.SubElement(item, 'codigo_objeto_cliente')\n xml_utils.SubElement(item, 'codigo_servico_postagem', text=str(\n shipping_label.service))\n xml_utils.SubElement(item, 'cubagem', text=str(shipping_label.\n posting_weight).replace('.', ','))\n xml_utils.SubElement(item, 'peso', text=str(shipping_label.package.\n weight))\n xml_utils.SubElement(item, 'rt1')\n xml_utils.SubElement(item, 'rt2')\n receiver = shipping_label.receiver\n address = xml_utils.SubElement(item, 'destinatario')\n xml_utils.SubElement(address, 'nome_destinatario', cdata=str(\n receiver.name))\n xml_utils.SubElement(address, 'telefone_destinatario', cdata=\n receiver.phone.short)\n xml_utils.SubElement(address, 'celular_destinatario', cdata=\n receiver.cellphone.short)\n xml_utils.SubElement(address, 'email_destinatario', cdata=str(\n receiver.email))\n xml_utils.SubElement(address, 'logradouro_destinatario', cdata=str(\n receiver.street))\n xml_utils.SubElement(address, 'complemento_destinatario', cdata=str\n (receiver.complement))\n xml_utils.SubElement(address, 'numero_end_destinatario', text=str(\n receiver.number))\n national = xml_utils.SubElement(item, 'nacional')\n xml_utils.SubElement(national, 'bairro_destinatario', cdata=str(\n receiver.neighborhood))\n xml_utils.SubElement(national, 'cidade_destinatario', cdata=str(\n receiver.city)[:30])\n xml_utils.SubElement(national, 'uf_destinatario', text=str(receiver\n .state))\n xml_utils.SubElement(national, 'cep_destinatario', cdata=str(\n receiver.zip_code))\n xml_utils.SubElement(national, 'codigo_usuario_postal')\n xml_utils.SubElement(national, 'centro_custo_cliente')\n xml_utils.SubElement(national, 'numero_nota_fiscal', text=str(\n 
shipping_label.invoice_number))\n xml_utils.SubElement(national, 'serie_nota_fiscal', text=str(\n shipping_label.invoice_series))\n xml_utils.SubElement(national, 'valor_nota_fiscal', text=str(\n shipping_label.value).replace('.', ','))\n xml_utils.SubElement(national, 'natureza_nota_fiscal', text=str(\n shipping_label.invoice_type))\n xml_utils.SubElement(national, 'descricao_objeto', cdata=str(\n shipping_label.text)[:20])\n xml_utils.SubElement(national, 'valor_a_cobrar', text=str(\n shipping_label.billing).replace('.', ','))\n extra_services = xml_utils.SubElement(item, 'servico_adicional')\n for extra_service in shipping_label.extra_services:\n xml_utils.SubElement(extra_services, 'codigo_servico_adicional',\n text='{!s:>03}'.format(extra_service.number))\n xml_utils.SubElement(extra_services, 'valor_declarado', text=str(\n shipping_label.value).replace('.', ','))\n dimensions = xml_utils.SubElement(item, 'dimensao_objeto')\n xml_utils.SubElement(dimensions, 'tipo_objeto', text='{!s:>03}'.\n format(shipping_label.package.package_type))\n xml_utils.SubElement(dimensions, 'dimensao_altura', text=str(\n shipping_label.package.height))\n xml_utils.SubElement(dimensions, 'dimensao_largura', text=str(\n shipping_label.package.width))\n xml_utils.SubElement(dimensions, 'dimensao_comprimento', text=str(\n shipping_label.package.length))\n xml_utils.SubElement(dimensions, 'dimensao_diametro', text=str(\n shipping_label.package.diameter))\n xml_utils.SubElement(item, 'data_postagem_sara')\n xml_utils.SubElement(item, 'status_processamento', text='0')\n xml_utils.SubElement(item, 'numero_comprovante_postagem')\n xml_utils.SubElement(item, 'valor_cobrado')\n return item\n\n def get_document(self, posting_list: PostingList):\n if not posting_list.shipping_labels:\n raise PostingListSerializerError(\n 'Cannot serialize an empty posting list')\n if posting_list.closed:\n raise PostingListSerializerError(\n 'Cannot serialize a closed posting list')\n root = 
xml_utils.Element('correioslog')\n root.append(xml_utils.Element('tipo_arquivo', text='Postagem'))\n root.append(xml_utils.Element('versao_arquivo', text='2.3'))\n root.append(self._get_posting_list_element(posting_list))\n root.append(self._get_sender_info_element(posting_list))\n root.append(xml_utils.Element('forma_pagamento'))\n for shipping_label in posting_list.shipping_labels.values():\n root.append(self._get_shipping_label_element(shipping_label))\n return root\n\n def validate(self, document):\n with open(os.path.join(DATADIR, 'posting_list_schema.xsd')) as xsd:\n xsd_document = xml_utils.parse(xsd)\n schema = xml_utils.XMLSchema(xsd_document)\n return schema.assert_(document)\n\n def get_xml(self, document) ->bytes:\n xmlstring = str(xml_utils.tostring(document, encoding='unicode'))\n encoded_xmlstring = xmlstring.encode('iso-8859-1', errors='ignore')\n return (b'<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>' +\n encoded_xmlstring)\n\n\nclass Correios:\n PRODUCTION = 'production'\n TEST = 'test'\n MAX_TRACKING_CODES_PER_REQUEST = 50\n sigep_urls = {'production': (get_wsdl_path(\n 'AtendeCliente-production.wsdl'), True), 'test': (get_wsdl_path(\n 'AtendeCliente-test.wsdl'), False)}\n websro_url = get_wsdl_path('Rastro.wsdl')\n freight_url = get_wsdl_path('CalcPrecoPrazo.asmx')\n\n def __init__(self, username, password, timeout=8, environment='production'\n ):\n self.username = username\n self.password = password\n self.timeout = timeout\n url, verify = self.sigep_urls[environment]\n self.sigep_url = url\n self.sigep_verify = verify\n self.sigep_client = SoapClient(self.sigep_url, verify=self.\n sigep_verify, timeout=self.timeout)\n self.sigep = self.sigep_client.service\n self.websro_client = SoapClient(self.websro_url, timeout=self.timeout)\n self.websro = self.websro_client.service\n self.freight_client = SoapClient(self.freight_url, timeout=self.timeout\n )\n self.freight = self.freight_client.service\n self.model_builder = ModelBuilder()\n\n def 
_auth_call(self, method_name, *args, **kwargs):\n kwargs.update({'usuario': self.username, 'senha': self.password})\n return self._call(method_name, *args, **kwargs)\n\n def _call(self, method_name, *args, **kwargs):\n method = getattr(self.sigep, method_name)\n return method(*args, **kwargs)\n\n def get_user(self, contract_number: Union[int, str],\n posting_card_number: Union[int, str]) ->User:\n contract_number = str(contract_number)\n posting_card_number = str(posting_card_number)\n user_data = self._auth_call('buscaCliente', contract_number,\n posting_card_number)\n return self.model_builder.build_user(user_data)\n\n def find_zipcode(self, zip_code: Union[ZipCode, str]) ->ZipAddress:\n zip_address_data = self._call('consultaCEP', str(zip_code))\n return self.model_builder.build_zip_address(zip_address_data)\n\n def verify_service_availability(self, posting_card: PostingCard,\n service: Service, from_zip_code: Union[ZipCode, str], to_zip_code:\n Union[ZipCode, str]) ->bool:\n from_zip_code = ZipCode.create(from_zip_code)\n to_zip_code = ZipCode.create(to_zip_code)\n result = self._auth_call('verificaDisponibilidadeServico',\n posting_card.administrative_code, str(service), str(\n from_zip_code), str(to_zip_code))\n return result\n\n def get_posting_card_status(self, posting_card: PostingCard) ->bool:\n result = self._auth_call('getStatusCartaoPostagem', posting_card.number\n )\n return self.model_builder.build_posting_card_status(result)\n\n def request_tracking_codes(self, user: User, service: Service, quantity\n =1, receiver_type='C') ->list:\n result = self._auth_call('solicitaEtiquetas', receiver_type, str(\n user.federal_tax_number), service.id, quantity)\n return self.model_builder.build_tracking_codes_list(result)\n\n def generate_verification_digit(self, tracking_codes: Sequence[str]\n ) ->List[int]:\n tracking_codes = [TrackingCode(tc).nodigit for tc in tracking_codes]\n result = self._auth_call('geraDigitoVerificadorEtiquetas',\n tracking_codes)\n 
return result\n\n def _generate_xml_string(self, posting_list: PostingList) ->str:\n posting_list_serializer = PostingListSerializer()\n document = posting_list_serializer.get_document(posting_list)\n posting_list_serializer.validate(document)\n xml = posting_list_serializer.get_xml(document)\n return xml.decode('ISO-8859-1')\n\n def close_posting_list(self, posting_list: PostingList, posting_card:\n PostingCard) ->PostingList:\n xml = self._generate_xml_string(posting_list)\n tracking_codes = posting_list.get_tracking_codes()\n id_ = self._auth_call('fechaPlpVariosServicos', xml, posting_list.\n custom_id, posting_card.number, tracking_codes)\n posting_list.close_with_id(id_)\n return posting_list\n\n def get_tracking_code_events(self, tracking_list):\n if isinstance(tracking_list, (str, TrackingCode)):\n tracking_list = [tracking_list]\n if len(tracking_list) > Correios.MAX_TRACKING_CODES_PER_REQUEST:\n msg = (\n '{} tracking codes requested exceeds the limit of {} stabilished by the Correios'\n )\n msg = msg.format(len(tracking_list), Correios.\n MAX_TRACKING_CODES_PER_REQUEST)\n raise TrackingCodesLimitExceededError(msg)\n tracking_codes = {}\n for tracking_code in tracking_list:\n tracking_code = TrackingCode.create(tracking_code)\n tracking_codes[tracking_code.code] = tracking_code\n response = self.websro.buscaEventosLista(self.username, self.\n password, 'L', 'T', '101', tuple(tracking_codes.keys()))\n return self.model_builder.load_tracking_events(tracking_codes, response\n )\n\n def calculate_freights(self, posting_card: PostingCard, services: List[\n Union[Service, int]], from_zip: Union[ZipCode, int, str], to_zip:\n Union[ZipCode, int, str], package: Package, value: Union[Decimal,\n float]=0.0, extra_services: Optional[Sequence[Union[ExtraService,\n int]]]=None):\n administrative_code = posting_card.administrative_code\n services = [Service.get(s) for s in services]\n from_zip = ZipCode.create(from_zip)\n to_zip = ZipCode.create(to_zip)\n if 
extra_services is None:\n extra_services = []\n else:\n extra_services = [ExtraService.get(es) for es in extra_services]\n response = self.freight.CalcPrecoPrazo(administrative_code, self.\n password, ','.join(str(s) for s in services), str(from_zip),\n str(to_zip), package.weight / KG, package.package_type, package\n .length, package.height, package.width, package.diameter, 'S' if\n EXTRA_SERVICE_MP in extra_services else 'N', value, 'S' if \n EXTRA_SERVICE_AR in extra_services else 'N')\n return self.model_builder.build_freights_list(response)\n\n def calculate_delivery_time(self, service: Union[Service, int],\n from_zip: Union[ZipCode, int, str], to_zip: Union[ZipCode, int, str]):\n service = Service.get(service)\n from_zip = ZipCode.create(from_zip)\n to_zip = ZipCode.create(to_zip)\n response = self.freight.CalcPrazo(str(service), str(from_zip), str(\n to_zip))\n return response.cServico[0].PrazoEntrega\n",
"step-3": "<mask token>\n\n\nclass ModelBuilder:\n\n def build_service(self, service_data):\n service = Service(code=service_data.codigo, id=service_data.id,\n description=service_data.descricao, category=service_data.\n servicoSigep.categoriaServico)\n return service\n\n def build_posting_card(self, contract: Contract, posting_card_data):\n posting_card = PostingCard(contract=contract, number=\n posting_card_data.numero, administrative_code=posting_card_data\n .codigoAdministrativo)\n posting_card.start_date = posting_card_data.dataVigenciaInicio\n posting_card.end_date = posting_card_data.dataVigenciaFim\n posting_card.status = posting_card_data.statusCartaoPostagem\n posting_card.status_code = posting_card_data.statusCodigo\n posting_card.unit = posting_card_data.unidadeGenerica\n for service_data in posting_card_data.servicos:\n service = self.build_service(service_data)\n posting_card.add_service(service)\n return posting_card\n\n def build_contract(self, user: User, contract_data):\n contract = Contract(user=user, number=contract_data.contratoPK.\n numero, regional_direction=contract_data.codigoDiretoria)\n contract.customer_code = contract_data.codigoCliente\n contract.status_code = contract_data.statusCodigo\n contract.start_date = contract_data.dataVigenciaInicio\n contract.end_date = contract_data.dataVigenciaFim\n for posting_card_data in contract_data.cartoesPostagem:\n self.build_posting_card(contract, posting_card_data)\n return contract\n\n def build_user(self, user_data):\n user = User(name=user_data.nome, federal_tax_number=\n FederalTaxNumber(user_data.cnpj), state_tax_number=\n StateTaxNumber(user_data.inscricaoEstadual), status_number=\n user_data.statusCodigo)\n for contract_data in user_data.contratos:\n self.build_contract(user, contract_data)\n return user\n\n def build_zip_address(self, zip_address_data):\n zip_address = ZipAddress(id=zip_address_data.id, zip_code=\n zip_address_data.cep, state=zip_address_data.uf, city=\n 
zip_address_data.cidade, district=zip_address_data.bairro,\n address=zip_address_data.end, complements=[zip_address_data.\n complemento, zip_address_data.complemento2])\n return zip_address\n <mask token>\n <mask token>\n\n def _load_invalid_event(self, tracking_code: TrackingCode, tracked_object):\n event = NotFoundTrackingEvent(timestamp=datetime.now(), comment=\n tracked_object.erro)\n tracking_code.add_event(event)\n\n def _load_events(self, tracking_code: TrackingCode, events):\n for event in events:\n timestamp = datetime.strptime('{} {}'.format(event.data, event.\n hora), TrackingEvent.timestamp_format)\n event = TrackingEvent(timestamp=timestamp, status=EventStatus(\n event.tipo, event.status), location_zip_code=getattr(event,\n 'codigo', ''), location=getattr(event, 'local', ''), city=\n getattr(event, 'cidade', ''), state=getattr(event, 'uf', ''\n ), receiver=getattr(event, 'recebedor', ''), document=\n getattr(event, 'documento', ''), comment=getattr(event,\n 'comentario', ''), description=getattr(event, 'descricao',\n ''), details=getattr(event, 'detalhes', ''))\n tracking_code.add_event(event)\n <mask token>\n\n def build_freights_list(self, response):\n result = []\n for service_data in response.cServico:\n freight = self.build_freight(service_data=service_data)\n result.append(freight)\n return result\n\n def build_freight(self, service_data):\n data = {'service': Service.get(service_data.Codigo), 'error_code':\n to_integer(service_data.Erro), 'delivery_time': int(\n service_data.PrazoEntrega), 'value': to_decimal(service_data.\n ValorSemAdicionais), 'declared_value': to_decimal(service_data.\n ValorValorDeclarado), 'ar_value': to_decimal(service_data.\n ValorAvisoRecebimento), 'mp_value': to_decimal(service_data.\n ValorMaoPropria), 'saturday': service_data.EntregaSabado or '',\n 'home': service_data.EntregaDomiciliar or '', 'error_message': \n service_data.MsgErro or None}\n if data['error_code'] and not data['error_code'\n ] in 
ValidRestrictResponse.restricted_codes():\n return FreightError(**data)\n return Freight(**data)\n\n\nclass PostingListSerializer:\n\n def _get_posting_list_element(self, posting_list):\n element = xml_utils.Element('plp')\n xml_utils.SubElement(element, 'id_plp')\n xml_utils.SubElement(element, 'valor_global')\n xml_utils.SubElement(element, 'mcu_unidade_postagem')\n xml_utils.SubElement(element, 'nome_unidade_postagem')\n xml_utils.SubElement(element, 'cartao_postagem', text=str(\n posting_list.posting_card))\n return element\n\n def _get_sender_info_element(self, posting_list):\n sender = posting_list.sender\n posting_card = posting_list.posting_card\n contract = posting_list.contract\n sender_info = xml_utils.Element('remetente')\n xml_utils.SubElement(sender_info, 'numero_contrato', text=str(\n contract.number))\n xml_utils.SubElement(sender_info, 'numero_diretoria', text=str(\n contract.regional_direction_number))\n xml_utils.SubElement(sender_info, 'codigo_administrativo', text=str\n (posting_card.administrative_code))\n xml_utils.SubElement(sender_info, 'nome_remetente', cdata=sender.name)\n xml_utils.SubElement(sender_info, 'logradouro_remetente', cdata=\n sender.street)\n xml_utils.SubElement(sender_info, 'numero_remetente', cdata=sender.\n number)\n xml_utils.SubElement(sender_info, 'complemento_remetente', cdata=\n sender.complement)\n xml_utils.SubElement(sender_info, 'bairro_remetente', cdata=sender.\n neighborhood)\n xml_utils.SubElement(sender_info, 'cep_remetente', cdata=str(sender\n .zip_code))\n xml_utils.SubElement(sender_info, 'cidade_remetente', cdata=str(\n sender.city)[:30])\n xml_utils.SubElement(sender_info, 'uf_remetente', cdata=str(sender.\n state))\n xml_utils.SubElement(sender_info, 'telefone_remetente', cdata=\n sender.phone.short)\n xml_utils.SubElement(sender_info, 'fax_remetente', cdata='')\n xml_utils.SubElement(sender_info, 'email_remetente', cdata=sender.email\n )\n return sender_info\n\n def _get_shipping_label_element(self, 
shipping_label: ShippingLabel):\n item = xml_utils.Element('objeto_postal')\n xml_utils.SubElement(item, 'numero_etiqueta', text=str(\n shipping_label.tracking_code))\n xml_utils.SubElement(item, 'codigo_objeto_cliente')\n xml_utils.SubElement(item, 'codigo_servico_postagem', text=str(\n shipping_label.service))\n xml_utils.SubElement(item, 'cubagem', text=str(shipping_label.\n posting_weight).replace('.', ','))\n xml_utils.SubElement(item, 'peso', text=str(shipping_label.package.\n weight))\n xml_utils.SubElement(item, 'rt1')\n xml_utils.SubElement(item, 'rt2')\n receiver = shipping_label.receiver\n address = xml_utils.SubElement(item, 'destinatario')\n xml_utils.SubElement(address, 'nome_destinatario', cdata=str(\n receiver.name))\n xml_utils.SubElement(address, 'telefone_destinatario', cdata=\n receiver.phone.short)\n xml_utils.SubElement(address, 'celular_destinatario', cdata=\n receiver.cellphone.short)\n xml_utils.SubElement(address, 'email_destinatario', cdata=str(\n receiver.email))\n xml_utils.SubElement(address, 'logradouro_destinatario', cdata=str(\n receiver.street))\n xml_utils.SubElement(address, 'complemento_destinatario', cdata=str\n (receiver.complement))\n xml_utils.SubElement(address, 'numero_end_destinatario', text=str(\n receiver.number))\n national = xml_utils.SubElement(item, 'nacional')\n xml_utils.SubElement(national, 'bairro_destinatario', cdata=str(\n receiver.neighborhood))\n xml_utils.SubElement(national, 'cidade_destinatario', cdata=str(\n receiver.city)[:30])\n xml_utils.SubElement(national, 'uf_destinatario', text=str(receiver\n .state))\n xml_utils.SubElement(national, 'cep_destinatario', cdata=str(\n receiver.zip_code))\n xml_utils.SubElement(national, 'codigo_usuario_postal')\n xml_utils.SubElement(national, 'centro_custo_cliente')\n xml_utils.SubElement(national, 'numero_nota_fiscal', text=str(\n shipping_label.invoice_number))\n xml_utils.SubElement(national, 'serie_nota_fiscal', text=str(\n shipping_label.invoice_series))\n 
xml_utils.SubElement(national, 'valor_nota_fiscal', text=str(\n shipping_label.value).replace('.', ','))\n xml_utils.SubElement(national, 'natureza_nota_fiscal', text=str(\n shipping_label.invoice_type))\n xml_utils.SubElement(national, 'descricao_objeto', cdata=str(\n shipping_label.text)[:20])\n xml_utils.SubElement(national, 'valor_a_cobrar', text=str(\n shipping_label.billing).replace('.', ','))\n extra_services = xml_utils.SubElement(item, 'servico_adicional')\n for extra_service in shipping_label.extra_services:\n xml_utils.SubElement(extra_services, 'codigo_servico_adicional',\n text='{!s:>03}'.format(extra_service.number))\n xml_utils.SubElement(extra_services, 'valor_declarado', text=str(\n shipping_label.value).replace('.', ','))\n dimensions = xml_utils.SubElement(item, 'dimensao_objeto')\n xml_utils.SubElement(dimensions, 'tipo_objeto', text='{!s:>03}'.\n format(shipping_label.package.package_type))\n xml_utils.SubElement(dimensions, 'dimensao_altura', text=str(\n shipping_label.package.height))\n xml_utils.SubElement(dimensions, 'dimensao_largura', text=str(\n shipping_label.package.width))\n xml_utils.SubElement(dimensions, 'dimensao_comprimento', text=str(\n shipping_label.package.length))\n xml_utils.SubElement(dimensions, 'dimensao_diametro', text=str(\n shipping_label.package.diameter))\n xml_utils.SubElement(item, 'data_postagem_sara')\n xml_utils.SubElement(item, 'status_processamento', text='0')\n xml_utils.SubElement(item, 'numero_comprovante_postagem')\n xml_utils.SubElement(item, 'valor_cobrado')\n return item\n\n def get_document(self, posting_list: PostingList):\n if not posting_list.shipping_labels:\n raise PostingListSerializerError(\n 'Cannot serialize an empty posting list')\n if posting_list.closed:\n raise PostingListSerializerError(\n 'Cannot serialize a closed posting list')\n root = xml_utils.Element('correioslog')\n root.append(xml_utils.Element('tipo_arquivo', text='Postagem'))\n root.append(xml_utils.Element('versao_arquivo', 
text='2.3'))\n root.append(self._get_posting_list_element(posting_list))\n root.append(self._get_sender_info_element(posting_list))\n root.append(xml_utils.Element('forma_pagamento'))\n for shipping_label in posting_list.shipping_labels.values():\n root.append(self._get_shipping_label_element(shipping_label))\n return root\n\n def validate(self, document):\n with open(os.path.join(DATADIR, 'posting_list_schema.xsd')) as xsd:\n xsd_document = xml_utils.parse(xsd)\n schema = xml_utils.XMLSchema(xsd_document)\n return schema.assert_(document)\n\n def get_xml(self, document) ->bytes:\n xmlstring = str(xml_utils.tostring(document, encoding='unicode'))\n encoded_xmlstring = xmlstring.encode('iso-8859-1', errors='ignore')\n return (b'<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>' +\n encoded_xmlstring)\n\n\nclass Correios:\n PRODUCTION = 'production'\n TEST = 'test'\n MAX_TRACKING_CODES_PER_REQUEST = 50\n sigep_urls = {'production': (get_wsdl_path(\n 'AtendeCliente-production.wsdl'), True), 'test': (get_wsdl_path(\n 'AtendeCliente-test.wsdl'), False)}\n websro_url = get_wsdl_path('Rastro.wsdl')\n freight_url = get_wsdl_path('CalcPrecoPrazo.asmx')\n\n def __init__(self, username, password, timeout=8, environment='production'\n ):\n self.username = username\n self.password = password\n self.timeout = timeout\n url, verify = self.sigep_urls[environment]\n self.sigep_url = url\n self.sigep_verify = verify\n self.sigep_client = SoapClient(self.sigep_url, verify=self.\n sigep_verify, timeout=self.timeout)\n self.sigep = self.sigep_client.service\n self.websro_client = SoapClient(self.websro_url, timeout=self.timeout)\n self.websro = self.websro_client.service\n self.freight_client = SoapClient(self.freight_url, timeout=self.timeout\n )\n self.freight = self.freight_client.service\n self.model_builder = ModelBuilder()\n\n def _auth_call(self, method_name, *args, **kwargs):\n kwargs.update({'usuario': self.username, 'senha': self.password})\n return self._call(method_name, 
*args, **kwargs)\n\n def _call(self, method_name, *args, **kwargs):\n method = getattr(self.sigep, method_name)\n return method(*args, **kwargs)\n\n def get_user(self, contract_number: Union[int, str],\n posting_card_number: Union[int, str]) ->User:\n contract_number = str(contract_number)\n posting_card_number = str(posting_card_number)\n user_data = self._auth_call('buscaCliente', contract_number,\n posting_card_number)\n return self.model_builder.build_user(user_data)\n\n def find_zipcode(self, zip_code: Union[ZipCode, str]) ->ZipAddress:\n zip_address_data = self._call('consultaCEP', str(zip_code))\n return self.model_builder.build_zip_address(zip_address_data)\n\n def verify_service_availability(self, posting_card: PostingCard,\n service: Service, from_zip_code: Union[ZipCode, str], to_zip_code:\n Union[ZipCode, str]) ->bool:\n from_zip_code = ZipCode.create(from_zip_code)\n to_zip_code = ZipCode.create(to_zip_code)\n result = self._auth_call('verificaDisponibilidadeServico',\n posting_card.administrative_code, str(service), str(\n from_zip_code), str(to_zip_code))\n return result\n\n def get_posting_card_status(self, posting_card: PostingCard) ->bool:\n result = self._auth_call('getStatusCartaoPostagem', posting_card.number\n )\n return self.model_builder.build_posting_card_status(result)\n\n def request_tracking_codes(self, user: User, service: Service, quantity\n =1, receiver_type='C') ->list:\n result = self._auth_call('solicitaEtiquetas', receiver_type, str(\n user.federal_tax_number), service.id, quantity)\n return self.model_builder.build_tracking_codes_list(result)\n\n def generate_verification_digit(self, tracking_codes: Sequence[str]\n ) ->List[int]:\n tracking_codes = [TrackingCode(tc).nodigit for tc in tracking_codes]\n result = self._auth_call('geraDigitoVerificadorEtiquetas',\n tracking_codes)\n return result\n\n def _generate_xml_string(self, posting_list: PostingList) ->str:\n posting_list_serializer = PostingListSerializer()\n document = 
posting_list_serializer.get_document(posting_list)\n posting_list_serializer.validate(document)\n xml = posting_list_serializer.get_xml(document)\n return xml.decode('ISO-8859-1')\n\n def close_posting_list(self, posting_list: PostingList, posting_card:\n PostingCard) ->PostingList:\n xml = self._generate_xml_string(posting_list)\n tracking_codes = posting_list.get_tracking_codes()\n id_ = self._auth_call('fechaPlpVariosServicos', xml, posting_list.\n custom_id, posting_card.number, tracking_codes)\n posting_list.close_with_id(id_)\n return posting_list\n\n def get_tracking_code_events(self, tracking_list):\n if isinstance(tracking_list, (str, TrackingCode)):\n tracking_list = [tracking_list]\n if len(tracking_list) > Correios.MAX_TRACKING_CODES_PER_REQUEST:\n msg = (\n '{} tracking codes requested exceeds the limit of {} stabilished by the Correios'\n )\n msg = msg.format(len(tracking_list), Correios.\n MAX_TRACKING_CODES_PER_REQUEST)\n raise TrackingCodesLimitExceededError(msg)\n tracking_codes = {}\n for tracking_code in tracking_list:\n tracking_code = TrackingCode.create(tracking_code)\n tracking_codes[tracking_code.code] = tracking_code\n response = self.websro.buscaEventosLista(self.username, self.\n password, 'L', 'T', '101', tuple(tracking_codes.keys()))\n return self.model_builder.load_tracking_events(tracking_codes, response\n )\n\n def calculate_freights(self, posting_card: PostingCard, services: List[\n Union[Service, int]], from_zip: Union[ZipCode, int, str], to_zip:\n Union[ZipCode, int, str], package: Package, value: Union[Decimal,\n float]=0.0, extra_services: Optional[Sequence[Union[ExtraService,\n int]]]=None):\n administrative_code = posting_card.administrative_code\n services = [Service.get(s) for s in services]\n from_zip = ZipCode.create(from_zip)\n to_zip = ZipCode.create(to_zip)\n if extra_services is None:\n extra_services = []\n else:\n extra_services = [ExtraService.get(es) for es in extra_services]\n response = 
self.freight.CalcPrecoPrazo(administrative_code, self.\n password, ','.join(str(s) for s in services), str(from_zip),\n str(to_zip), package.weight / KG, package.package_type, package\n .length, package.height, package.width, package.diameter, 'S' if\n EXTRA_SERVICE_MP in extra_services else 'N', value, 'S' if \n EXTRA_SERVICE_AR in extra_services else 'N')\n return self.model_builder.build_freights_list(response)\n\n def calculate_delivery_time(self, service: Union[Service, int],\n from_zip: Union[ZipCode, int, str], to_zip: Union[ZipCode, int, str]):\n service = Service.get(service)\n from_zip = ZipCode.create(from_zip)\n to_zip = ZipCode.create(to_zip)\n response = self.freight.CalcPrazo(str(service), str(from_zip), str(\n to_zip))\n return response.cServico[0].PrazoEntrega\n",
"step-4": "<mask token>\n\n\nclass ValidRestrictResponse(Enum):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ModelBuilder:\n\n def build_service(self, service_data):\n service = Service(code=service_data.codigo, id=service_data.id,\n description=service_data.descricao, category=service_data.\n servicoSigep.categoriaServico)\n return service\n\n def build_posting_card(self, contract: Contract, posting_card_data):\n posting_card = PostingCard(contract=contract, number=\n posting_card_data.numero, administrative_code=posting_card_data\n .codigoAdministrativo)\n posting_card.start_date = posting_card_data.dataVigenciaInicio\n posting_card.end_date = posting_card_data.dataVigenciaFim\n posting_card.status = posting_card_data.statusCartaoPostagem\n posting_card.status_code = posting_card_data.statusCodigo\n posting_card.unit = posting_card_data.unidadeGenerica\n for service_data in posting_card_data.servicos:\n service = self.build_service(service_data)\n posting_card.add_service(service)\n return posting_card\n\n def build_contract(self, user: User, contract_data):\n contract = Contract(user=user, number=contract_data.contratoPK.\n numero, regional_direction=contract_data.codigoDiretoria)\n contract.customer_code = contract_data.codigoCliente\n contract.status_code = contract_data.statusCodigo\n contract.start_date = contract_data.dataVigenciaInicio\n contract.end_date = contract_data.dataVigenciaFim\n for posting_card_data in contract_data.cartoesPostagem:\n self.build_posting_card(contract, posting_card_data)\n return contract\n\n def build_user(self, user_data):\n user = User(name=user_data.nome, federal_tax_number=\n FederalTaxNumber(user_data.cnpj), state_tax_number=\n StateTaxNumber(user_data.inscricaoEstadual), status_number=\n user_data.statusCodigo)\n for contract_data in user_data.contratos:\n self.build_contract(user, contract_data)\n return user\n\n def build_zip_address(self, zip_address_data):\n zip_address = 
ZipAddress(id=zip_address_data.id, zip_code=\n zip_address_data.cep, state=zip_address_data.uf, city=\n zip_address_data.cidade, district=zip_address_data.bairro,\n address=zip_address_data.end, complements=[zip_address_data.\n complemento, zip_address_data.complemento2])\n return zip_address\n\n def build_posting_card_status(self, response):\n if response.lower() != 'normal':\n return PostingCard.CANCELLED\n return PostingCard.ACTIVE\n\n def build_tracking_codes_list(self, response):\n codes = response.split(',')\n return TrackingCode.create_range(codes[0], codes[1])\n\n def _load_invalid_event(self, tracking_code: TrackingCode, tracked_object):\n event = NotFoundTrackingEvent(timestamp=datetime.now(), comment=\n tracked_object.erro)\n tracking_code.add_event(event)\n\n def _load_events(self, tracking_code: TrackingCode, events):\n for event in events:\n timestamp = datetime.strptime('{} {}'.format(event.data, event.\n hora), TrackingEvent.timestamp_format)\n event = TrackingEvent(timestamp=timestamp, status=EventStatus(\n event.tipo, event.status), location_zip_code=getattr(event,\n 'codigo', ''), location=getattr(event, 'local', ''), city=\n getattr(event, 'cidade', ''), state=getattr(event, 'uf', ''\n ), receiver=getattr(event, 'recebedor', ''), document=\n getattr(event, 'documento', ''), comment=getattr(event,\n 'comentario', ''), description=getattr(event, 'descricao',\n ''), details=getattr(event, 'detalhes', ''))\n tracking_code.add_event(event)\n\n def load_tracking_events(self, tracking_codes: Dict[str, TrackingCode],\n response):\n result = []\n for tracked_object in response.objeto:\n tracking_code = tracking_codes[tracked_object.numero]\n if 'erro' in tracked_object:\n self._load_invalid_event(tracking_code, tracked_object)\n else:\n tracking_code.name = tracked_object.nome\n tracking_code.initials = tracked_object.sigla\n tracking_code.category = tracked_object.categoria\n self._load_events(tracking_code, tracked_object.evento)\n 
result.append(tracking_code)\n return result\n\n def build_freights_list(self, response):\n result = []\n for service_data in response.cServico:\n freight = self.build_freight(service_data=service_data)\n result.append(freight)\n return result\n\n def build_freight(self, service_data):\n data = {'service': Service.get(service_data.Codigo), 'error_code':\n to_integer(service_data.Erro), 'delivery_time': int(\n service_data.PrazoEntrega), 'value': to_decimal(service_data.\n ValorSemAdicionais), 'declared_value': to_decimal(service_data.\n ValorValorDeclarado), 'ar_value': to_decimal(service_data.\n ValorAvisoRecebimento), 'mp_value': to_decimal(service_data.\n ValorMaoPropria), 'saturday': service_data.EntregaSabado or '',\n 'home': service_data.EntregaDomiciliar or '', 'error_message': \n service_data.MsgErro or None}\n if data['error_code'] and not data['error_code'\n ] in ValidRestrictResponse.restricted_codes():\n return FreightError(**data)\n return Freight(**data)\n\n\nclass PostingListSerializer:\n\n def _get_posting_list_element(self, posting_list):\n element = xml_utils.Element('plp')\n xml_utils.SubElement(element, 'id_plp')\n xml_utils.SubElement(element, 'valor_global')\n xml_utils.SubElement(element, 'mcu_unidade_postagem')\n xml_utils.SubElement(element, 'nome_unidade_postagem')\n xml_utils.SubElement(element, 'cartao_postagem', text=str(\n posting_list.posting_card))\n return element\n\n def _get_sender_info_element(self, posting_list):\n sender = posting_list.sender\n posting_card = posting_list.posting_card\n contract = posting_list.contract\n sender_info = xml_utils.Element('remetente')\n xml_utils.SubElement(sender_info, 'numero_contrato', text=str(\n contract.number))\n xml_utils.SubElement(sender_info, 'numero_diretoria', text=str(\n contract.regional_direction_number))\n xml_utils.SubElement(sender_info, 'codigo_administrativo', text=str\n (posting_card.administrative_code))\n xml_utils.SubElement(sender_info, 'nome_remetente', 
cdata=sender.name)\n xml_utils.SubElement(sender_info, 'logradouro_remetente', cdata=\n sender.street)\n xml_utils.SubElement(sender_info, 'numero_remetente', cdata=sender.\n number)\n xml_utils.SubElement(sender_info, 'complemento_remetente', cdata=\n sender.complement)\n xml_utils.SubElement(sender_info, 'bairro_remetente', cdata=sender.\n neighborhood)\n xml_utils.SubElement(sender_info, 'cep_remetente', cdata=str(sender\n .zip_code))\n xml_utils.SubElement(sender_info, 'cidade_remetente', cdata=str(\n sender.city)[:30])\n xml_utils.SubElement(sender_info, 'uf_remetente', cdata=str(sender.\n state))\n xml_utils.SubElement(sender_info, 'telefone_remetente', cdata=\n sender.phone.short)\n xml_utils.SubElement(sender_info, 'fax_remetente', cdata='')\n xml_utils.SubElement(sender_info, 'email_remetente', cdata=sender.email\n )\n return sender_info\n\n def _get_shipping_label_element(self, shipping_label: ShippingLabel):\n item = xml_utils.Element('objeto_postal')\n xml_utils.SubElement(item, 'numero_etiqueta', text=str(\n shipping_label.tracking_code))\n xml_utils.SubElement(item, 'codigo_objeto_cliente')\n xml_utils.SubElement(item, 'codigo_servico_postagem', text=str(\n shipping_label.service))\n xml_utils.SubElement(item, 'cubagem', text=str(shipping_label.\n posting_weight).replace('.', ','))\n xml_utils.SubElement(item, 'peso', text=str(shipping_label.package.\n weight))\n xml_utils.SubElement(item, 'rt1')\n xml_utils.SubElement(item, 'rt2')\n receiver = shipping_label.receiver\n address = xml_utils.SubElement(item, 'destinatario')\n xml_utils.SubElement(address, 'nome_destinatario', cdata=str(\n receiver.name))\n xml_utils.SubElement(address, 'telefone_destinatario', cdata=\n receiver.phone.short)\n xml_utils.SubElement(address, 'celular_destinatario', cdata=\n receiver.cellphone.short)\n xml_utils.SubElement(address, 'email_destinatario', cdata=str(\n receiver.email))\n xml_utils.SubElement(address, 'logradouro_destinatario', cdata=str(\n receiver.street))\n 
xml_utils.SubElement(address, 'complemento_destinatario', cdata=str\n (receiver.complement))\n xml_utils.SubElement(address, 'numero_end_destinatario', text=str(\n receiver.number))\n national = xml_utils.SubElement(item, 'nacional')\n xml_utils.SubElement(national, 'bairro_destinatario', cdata=str(\n receiver.neighborhood))\n xml_utils.SubElement(national, 'cidade_destinatario', cdata=str(\n receiver.city)[:30])\n xml_utils.SubElement(national, 'uf_destinatario', text=str(receiver\n .state))\n xml_utils.SubElement(national, 'cep_destinatario', cdata=str(\n receiver.zip_code))\n xml_utils.SubElement(national, 'codigo_usuario_postal')\n xml_utils.SubElement(national, 'centro_custo_cliente')\n xml_utils.SubElement(national, 'numero_nota_fiscal', text=str(\n shipping_label.invoice_number))\n xml_utils.SubElement(national, 'serie_nota_fiscal', text=str(\n shipping_label.invoice_series))\n xml_utils.SubElement(national, 'valor_nota_fiscal', text=str(\n shipping_label.value).replace('.', ','))\n xml_utils.SubElement(national, 'natureza_nota_fiscal', text=str(\n shipping_label.invoice_type))\n xml_utils.SubElement(national, 'descricao_objeto', cdata=str(\n shipping_label.text)[:20])\n xml_utils.SubElement(national, 'valor_a_cobrar', text=str(\n shipping_label.billing).replace('.', ','))\n extra_services = xml_utils.SubElement(item, 'servico_adicional')\n for extra_service in shipping_label.extra_services:\n xml_utils.SubElement(extra_services, 'codigo_servico_adicional',\n text='{!s:>03}'.format(extra_service.number))\n xml_utils.SubElement(extra_services, 'valor_declarado', text=str(\n shipping_label.value).replace('.', ','))\n dimensions = xml_utils.SubElement(item, 'dimensao_objeto')\n xml_utils.SubElement(dimensions, 'tipo_objeto', text='{!s:>03}'.\n format(shipping_label.package.package_type))\n xml_utils.SubElement(dimensions, 'dimensao_altura', text=str(\n shipping_label.package.height))\n xml_utils.SubElement(dimensions, 'dimensao_largura', text=str(\n 
shipping_label.package.width))\n xml_utils.SubElement(dimensions, 'dimensao_comprimento', text=str(\n shipping_label.package.length))\n xml_utils.SubElement(dimensions, 'dimensao_diametro', text=str(\n shipping_label.package.diameter))\n xml_utils.SubElement(item, 'data_postagem_sara')\n xml_utils.SubElement(item, 'status_processamento', text='0')\n xml_utils.SubElement(item, 'numero_comprovante_postagem')\n xml_utils.SubElement(item, 'valor_cobrado')\n return item\n\n def get_document(self, posting_list: PostingList):\n if not posting_list.shipping_labels:\n raise PostingListSerializerError(\n 'Cannot serialize an empty posting list')\n if posting_list.closed:\n raise PostingListSerializerError(\n 'Cannot serialize a closed posting list')\n root = xml_utils.Element('correioslog')\n root.append(xml_utils.Element('tipo_arquivo', text='Postagem'))\n root.append(xml_utils.Element('versao_arquivo', text='2.3'))\n root.append(self._get_posting_list_element(posting_list))\n root.append(self._get_sender_info_element(posting_list))\n root.append(xml_utils.Element('forma_pagamento'))\n for shipping_label in posting_list.shipping_labels.values():\n root.append(self._get_shipping_label_element(shipping_label))\n return root\n\n def validate(self, document):\n with open(os.path.join(DATADIR, 'posting_list_schema.xsd')) as xsd:\n xsd_document = xml_utils.parse(xsd)\n schema = xml_utils.XMLSchema(xsd_document)\n return schema.assert_(document)\n\n def get_xml(self, document) ->bytes:\n xmlstring = str(xml_utils.tostring(document, encoding='unicode'))\n encoded_xmlstring = xmlstring.encode('iso-8859-1', errors='ignore')\n return (b'<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>' +\n encoded_xmlstring)\n\n\nclass Correios:\n PRODUCTION = 'production'\n TEST = 'test'\n MAX_TRACKING_CODES_PER_REQUEST = 50\n sigep_urls = {'production': (get_wsdl_path(\n 'AtendeCliente-production.wsdl'), True), 'test': (get_wsdl_path(\n 'AtendeCliente-test.wsdl'), False)}\n websro_url = 
get_wsdl_path('Rastro.wsdl')\n freight_url = get_wsdl_path('CalcPrecoPrazo.asmx')\n\n def __init__(self, username, password, timeout=8, environment='production'\n ):\n self.username = username\n self.password = password\n self.timeout = timeout\n url, verify = self.sigep_urls[environment]\n self.sigep_url = url\n self.sigep_verify = verify\n self.sigep_client = SoapClient(self.sigep_url, verify=self.\n sigep_verify, timeout=self.timeout)\n self.sigep = self.sigep_client.service\n self.websro_client = SoapClient(self.websro_url, timeout=self.timeout)\n self.websro = self.websro_client.service\n self.freight_client = SoapClient(self.freight_url, timeout=self.timeout\n )\n self.freight = self.freight_client.service\n self.model_builder = ModelBuilder()\n\n def _auth_call(self, method_name, *args, **kwargs):\n kwargs.update({'usuario': self.username, 'senha': self.password})\n return self._call(method_name, *args, **kwargs)\n\n def _call(self, method_name, *args, **kwargs):\n method = getattr(self.sigep, method_name)\n return method(*args, **kwargs)\n\n def get_user(self, contract_number: Union[int, str],\n posting_card_number: Union[int, str]) ->User:\n contract_number = str(contract_number)\n posting_card_number = str(posting_card_number)\n user_data = self._auth_call('buscaCliente', contract_number,\n posting_card_number)\n return self.model_builder.build_user(user_data)\n\n def find_zipcode(self, zip_code: Union[ZipCode, str]) ->ZipAddress:\n zip_address_data = self._call('consultaCEP', str(zip_code))\n return self.model_builder.build_zip_address(zip_address_data)\n\n def verify_service_availability(self, posting_card: PostingCard,\n service: Service, from_zip_code: Union[ZipCode, str], to_zip_code:\n Union[ZipCode, str]) ->bool:\n from_zip_code = ZipCode.create(from_zip_code)\n to_zip_code = ZipCode.create(to_zip_code)\n result = self._auth_call('verificaDisponibilidadeServico',\n posting_card.administrative_code, str(service), str(\n from_zip_code), 
str(to_zip_code))\n return result\n\n def get_posting_card_status(self, posting_card: PostingCard) ->bool:\n result = self._auth_call('getStatusCartaoPostagem', posting_card.number\n )\n return self.model_builder.build_posting_card_status(result)\n\n def request_tracking_codes(self, user: User, service: Service, quantity\n =1, receiver_type='C') ->list:\n result = self._auth_call('solicitaEtiquetas', receiver_type, str(\n user.federal_tax_number), service.id, quantity)\n return self.model_builder.build_tracking_codes_list(result)\n\n def generate_verification_digit(self, tracking_codes: Sequence[str]\n ) ->List[int]:\n tracking_codes = [TrackingCode(tc).nodigit for tc in tracking_codes]\n result = self._auth_call('geraDigitoVerificadorEtiquetas',\n tracking_codes)\n return result\n\n def _generate_xml_string(self, posting_list: PostingList) ->str:\n posting_list_serializer = PostingListSerializer()\n document = posting_list_serializer.get_document(posting_list)\n posting_list_serializer.validate(document)\n xml = posting_list_serializer.get_xml(document)\n return xml.decode('ISO-8859-1')\n\n def close_posting_list(self, posting_list: PostingList, posting_card:\n PostingCard) ->PostingList:\n xml = self._generate_xml_string(posting_list)\n tracking_codes = posting_list.get_tracking_codes()\n id_ = self._auth_call('fechaPlpVariosServicos', xml, posting_list.\n custom_id, posting_card.number, tracking_codes)\n posting_list.close_with_id(id_)\n return posting_list\n\n def get_tracking_code_events(self, tracking_list):\n if isinstance(tracking_list, (str, TrackingCode)):\n tracking_list = [tracking_list]\n if len(tracking_list) > Correios.MAX_TRACKING_CODES_PER_REQUEST:\n msg = (\n '{} tracking codes requested exceeds the limit of {} stabilished by the Correios'\n )\n msg = msg.format(len(tracking_list), Correios.\n MAX_TRACKING_CODES_PER_REQUEST)\n raise TrackingCodesLimitExceededError(msg)\n tracking_codes = {}\n for tracking_code in tracking_list:\n tracking_code = 
TrackingCode.create(tracking_code)\n tracking_codes[tracking_code.code] = tracking_code\n response = self.websro.buscaEventosLista(self.username, self.\n password, 'L', 'T', '101', tuple(tracking_codes.keys()))\n return self.model_builder.load_tracking_events(tracking_codes, response\n )\n\n def calculate_freights(self, posting_card: PostingCard, services: List[\n Union[Service, int]], from_zip: Union[ZipCode, int, str], to_zip:\n Union[ZipCode, int, str], package: Package, value: Union[Decimal,\n float]=0.0, extra_services: Optional[Sequence[Union[ExtraService,\n int]]]=None):\n administrative_code = posting_card.administrative_code\n services = [Service.get(s) for s in services]\n from_zip = ZipCode.create(from_zip)\n to_zip = ZipCode.create(to_zip)\n if extra_services is None:\n extra_services = []\n else:\n extra_services = [ExtraService.get(es) for es in extra_services]\n response = self.freight.CalcPrecoPrazo(administrative_code, self.\n password, ','.join(str(s) for s in services), str(from_zip),\n str(to_zip), package.weight / KG, package.package_type, package\n .length, package.height, package.width, package.diameter, 'S' if\n EXTRA_SERVICE_MP in extra_services else 'N', value, 'S' if \n EXTRA_SERVICE_AR in extra_services else 'N')\n return self.model_builder.build_freights_list(response)\n\n def calculate_delivery_time(self, service: Union[Service, int],\n from_zip: Union[ZipCode, int, str], to_zip: Union[ZipCode, int, str]):\n service = Service.get(service)\n from_zip = ZipCode.create(from_zip)\n to_zip = ZipCode.create(to_zip)\n response = self.freight.CalcPrazo(str(service), str(from_zip), str(\n to_zip))\n return response.cServico[0].PrazoEntrega\n",
"step-5": "# Copyright 2016 Osvaldo Santana Neto\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport os\nfrom datetime import datetime\nfrom decimal import Decimal\nfrom enum import Enum\nfrom typing import Dict, List, Optional, Sequence, Union\n\nfrom correios import DATADIR, xml_utils\nfrom correios.exceptions import PostingListSerializerError, TrackingCodesLimitExceededError\nfrom correios.models.data import EXTRA_SERVICE_AR, EXTRA_SERVICE_MP\nfrom correios.utils import get_wsdl_path, to_decimal, to_integer\n\nfrom .models.address import ZipAddress, ZipCode\nfrom .models.posting import (\n EventStatus,\n Freight,\n FreightError,\n NotFoundTrackingEvent,\n Package,\n PostingList,\n ShippingLabel,\n TrackingCode,\n TrackingEvent\n)\nfrom .models.user import Contract, ExtraService, FederalTaxNumber, PostingCard, Service, StateTaxNumber, User\nfrom .soap import SoapClient\n\nKG = 1000 # g\n\n\nclass ValidRestrictResponse(Enum):\n INITIAL_ZIPCODE_RESTRICTED = 9\n FINAL_ZIPCODE_RESTRICTED = 10\n INITIAL_AND_FINAL_ZIPCODE_RESTRICTED = 11\n\n @classmethod\n def restricted_codes(cls):\n return [\n cls.FINAL_ZIPCODE_RESTRICTED.value,\n cls.INITIAL_AND_FINAL_ZIPCODE_RESTRICTED.value,\n cls.FINAL_ZIPCODE_RESTRICTED.value\n ]\n\n\nclass ModelBuilder:\n def build_service(self, service_data):\n service = Service(\n code=service_data.codigo,\n id=service_data.id,\n description=service_data.descricao,\n category=service_data.servicoSigep.categoriaServico\n )\n return 
service\n\n def build_posting_card(self, contract: Contract, posting_card_data):\n posting_card = PostingCard(\n contract=contract,\n number=posting_card_data.numero,\n administrative_code=posting_card_data.codigoAdministrativo,\n )\n\n posting_card.start_date = posting_card_data.dataVigenciaInicio\n posting_card.end_date = posting_card_data.dataVigenciaFim\n posting_card.status = posting_card_data.statusCartaoPostagem\n posting_card.status_code = posting_card_data.statusCodigo\n posting_card.unit = posting_card_data.unidadeGenerica\n\n for service_data in posting_card_data.servicos:\n service = self.build_service(service_data)\n posting_card.add_service(service)\n\n return posting_card\n\n def build_contract(self, user: User, contract_data):\n contract = Contract(\n user=user,\n number=contract_data.contratoPK.numero,\n regional_direction=contract_data.codigoDiretoria,\n )\n\n contract.customer_code = contract_data.codigoCliente\n contract.status_code = contract_data.statusCodigo\n contract.start_date = contract_data.dataVigenciaInicio\n contract.end_date = contract_data.dataVigenciaFim\n\n for posting_card_data in contract_data.cartoesPostagem:\n self.build_posting_card(contract, posting_card_data)\n\n return contract\n\n def build_user(self, user_data):\n user = User(\n name=user_data.nome,\n federal_tax_number=FederalTaxNumber(user_data.cnpj),\n state_tax_number=StateTaxNumber(user_data.inscricaoEstadual),\n status_number=user_data.statusCodigo,\n )\n\n for contract_data in user_data.contratos:\n self.build_contract(user, contract_data)\n\n return user\n\n def build_zip_address(self, zip_address_data):\n zip_address = ZipAddress(\n id=zip_address_data.id,\n zip_code=zip_address_data.cep,\n state=zip_address_data.uf,\n city=zip_address_data.cidade,\n district=zip_address_data.bairro,\n address=zip_address_data.end,\n complements=[zip_address_data.complemento, zip_address_data.complemento2]\n )\n return zip_address\n\n def build_posting_card_status(self, 
response):\n if response.lower() != \"normal\":\n return PostingCard.CANCELLED\n return PostingCard.ACTIVE\n\n def build_tracking_codes_list(self, response):\n codes = response.split(\",\")\n return TrackingCode.create_range(codes[0], codes[1])\n\n def _load_invalid_event(self, tracking_code: TrackingCode, tracked_object):\n event = NotFoundTrackingEvent(\n timestamp=datetime.now(),\n comment=tracked_object.erro,\n )\n tracking_code.add_event(event)\n\n def _load_events(self, tracking_code: TrackingCode, events):\n for event in events:\n timestamp = datetime.strptime(\"{} {}\".format(event.data, event.hora), TrackingEvent.timestamp_format)\n event = TrackingEvent(\n timestamp=timestamp,\n status=EventStatus(event.tipo, event.status),\n location_zip_code=getattr(event, \"codigo\", \"\"),\n location=getattr(event, \"local\", \"\"),\n city=getattr(event, \"cidade\", \"\"),\n state=getattr(event, \"uf\", \"\"),\n receiver=getattr(event, \"recebedor\", \"\"),\n document=getattr(event, \"documento\", \"\"),\n comment=getattr(event, \"comentario\", \"\"),\n description=getattr(event, \"descricao\", \"\"),\n details=getattr(event, \"detalhes\", \"\"),\n )\n\n tracking_code.add_event(event)\n\n def load_tracking_events(self, tracking_codes: Dict[str, TrackingCode], response):\n result = []\n for tracked_object in response.objeto:\n tracking_code = tracking_codes[tracked_object.numero]\n\n if 'erro' in tracked_object:\n self._load_invalid_event(tracking_code, tracked_object)\n else:\n tracking_code.name = tracked_object.nome\n tracking_code.initials = tracked_object.sigla\n tracking_code.category = tracked_object.categoria\n self._load_events(tracking_code, tracked_object.evento)\n\n result.append(tracking_code)\n\n return result\n\n def build_freights_list(self, response):\n result = []\n for service_data in response.cServico:\n freight = self.build_freight(service_data=service_data)\n result.append(freight)\n return result\n\n def build_freight(self, service_data):\n data 
= {\n 'service': Service.get(service_data.Codigo),\n 'error_code': to_integer(service_data.Erro),\n 'delivery_time': int(service_data.PrazoEntrega),\n 'value': to_decimal(service_data.ValorSemAdicionais),\n 'declared_value': to_decimal(service_data.ValorValorDeclarado),\n 'ar_value': to_decimal(service_data.ValorAvisoRecebimento),\n 'mp_value': to_decimal(service_data.ValorMaoPropria),\n 'saturday': service_data.EntregaSabado or \"\",\n 'home': service_data.EntregaDomiciliar or \"\",\n 'error_message': service_data.MsgErro or None\n }\n\n if (\n data['error_code'] and\n not data['error_code'] in ValidRestrictResponse.restricted_codes()\n ):\n return FreightError(**data)\n return Freight(**data)\n\n\nclass PostingListSerializer:\n def _get_posting_list_element(self, posting_list):\n element = xml_utils.Element(\"plp\")\n xml_utils.SubElement(element, \"id_plp\")\n xml_utils.SubElement(element, \"valor_global\")\n xml_utils.SubElement(element, \"mcu_unidade_postagem\")\n xml_utils.SubElement(element, \"nome_unidade_postagem\")\n xml_utils.SubElement(element, \"cartao_postagem\", text=str(posting_list.posting_card))\n return element\n\n def _get_sender_info_element(self, posting_list):\n sender = posting_list.sender\n posting_card = posting_list.posting_card\n contract = posting_list.contract\n\n sender_info = xml_utils.Element(\"remetente\")\n xml_utils.SubElement(sender_info, \"numero_contrato\", text=str(contract.number))\n xml_utils.SubElement(sender_info, \"numero_diretoria\", text=str(contract.regional_direction_number))\n xml_utils.SubElement(sender_info, \"codigo_administrativo\", text=str(posting_card.administrative_code))\n xml_utils.SubElement(sender_info, \"nome_remetente\", cdata=sender.name)\n xml_utils.SubElement(sender_info, \"logradouro_remetente\", cdata=sender.street)\n xml_utils.SubElement(sender_info, \"numero_remetente\", cdata=sender.number)\n xml_utils.SubElement(sender_info, \"complemento_remetente\", cdata=sender.complement)\n 
xml_utils.SubElement(sender_info, \"bairro_remetente\", cdata=sender.neighborhood)\n xml_utils.SubElement(sender_info, \"cep_remetente\", cdata=str(sender.zip_code))\n xml_utils.SubElement(sender_info, \"cidade_remetente\", cdata=str(sender.city)[:30])\n xml_utils.SubElement(sender_info, \"uf_remetente\", cdata=str(sender.state))\n xml_utils.SubElement(sender_info, \"telefone_remetente\", cdata=sender.phone.short)\n xml_utils.SubElement(sender_info, \"fax_remetente\", cdata=\"\")\n xml_utils.SubElement(sender_info, \"email_remetente\", cdata=sender.email)\n return sender_info\n\n def _get_shipping_label_element(self, shipping_label: ShippingLabel):\n item = xml_utils.Element(\"objeto_postal\")\n xml_utils.SubElement(item, \"numero_etiqueta\", text=str(shipping_label.tracking_code))\n xml_utils.SubElement(item, \"codigo_objeto_cliente\")\n xml_utils.SubElement(item, \"codigo_servico_postagem\", text=str(shipping_label.service))\n xml_utils.SubElement(item, \"cubagem\", text=str(shipping_label.posting_weight).replace(\".\", \",\"))\n xml_utils.SubElement(item, \"peso\", text=str(shipping_label.package.weight))\n xml_utils.SubElement(item, \"rt1\")\n xml_utils.SubElement(item, \"rt2\")\n\n receiver = shipping_label.receiver\n address = xml_utils.SubElement(item, \"destinatario\")\n xml_utils.SubElement(address, \"nome_destinatario\", cdata=str(receiver.name))\n xml_utils.SubElement(address, \"telefone_destinatario\", cdata=receiver.phone.short)\n xml_utils.SubElement(address, \"celular_destinatario\", cdata=receiver.cellphone.short)\n xml_utils.SubElement(address, \"email_destinatario\", cdata=str(receiver.email))\n xml_utils.SubElement(address, \"logradouro_destinatario\", cdata=str(receiver.street))\n xml_utils.SubElement(address, \"complemento_destinatario\", cdata=str(receiver.complement))\n xml_utils.SubElement(address, \"numero_end_destinatario\", text=str(receiver.number))\n\n national = xml_utils.SubElement(item, \"nacional\")\n xml_utils.SubElement(national, 
\"bairro_destinatario\", cdata=str(receiver.neighborhood))\n xml_utils.SubElement(national, \"cidade_destinatario\", cdata=str(receiver.city)[:30])\n xml_utils.SubElement(national, \"uf_destinatario\", text=str(receiver.state))\n xml_utils.SubElement(national, \"cep_destinatario\", cdata=str(receiver.zip_code))\n xml_utils.SubElement(national, \"codigo_usuario_postal\")\n xml_utils.SubElement(national, \"centro_custo_cliente\")\n xml_utils.SubElement(national, \"numero_nota_fiscal\", text=str(shipping_label.invoice_number))\n xml_utils.SubElement(national, \"serie_nota_fiscal\", text=str(shipping_label.invoice_series))\n xml_utils.SubElement(national, \"valor_nota_fiscal\", text=str(shipping_label.value).replace(\".\", \",\"))\n xml_utils.SubElement(national, \"natureza_nota_fiscal\", text=str(shipping_label.invoice_type))\n xml_utils.SubElement(national, \"descricao_objeto\", cdata=str(shipping_label.text)[:20])\n xml_utils.SubElement(national, \"valor_a_cobrar\", text=str(shipping_label.billing).replace(\".\", \",\"))\n\n extra_services = xml_utils.SubElement(item, \"servico_adicional\")\n for extra_service in shipping_label.extra_services:\n xml_utils.SubElement(extra_services, \"codigo_servico_adicional\",\n text=\"{!s:>03}\".format(extra_service.number))\n xml_utils.SubElement(extra_services, \"valor_declarado\", text=str(shipping_label.value).replace(\".\", \",\"))\n\n dimensions = xml_utils.SubElement(item, \"dimensao_objeto\")\n xml_utils.SubElement(dimensions, \"tipo_objeto\", text=\"{!s:>03}\".format(shipping_label.package.package_type))\n xml_utils.SubElement(dimensions, \"dimensao_altura\", text=str(shipping_label.package.height))\n xml_utils.SubElement(dimensions, \"dimensao_largura\", text=str(shipping_label.package.width))\n xml_utils.SubElement(dimensions, \"dimensao_comprimento\", text=str(shipping_label.package.length))\n xml_utils.SubElement(dimensions, \"dimensao_diametro\", text=str(shipping_label.package.diameter))\n\n 
xml_utils.SubElement(item, \"data_postagem_sara\")\n xml_utils.SubElement(item, \"status_processamento\", text=\"0\")\n xml_utils.SubElement(item, \"numero_comprovante_postagem\")\n xml_utils.SubElement(item, \"valor_cobrado\")\n\n return item\n\n def get_document(self, posting_list: PostingList):\n if not posting_list.shipping_labels:\n raise PostingListSerializerError(\"Cannot serialize an empty posting list\")\n\n if posting_list.closed:\n raise PostingListSerializerError(\"Cannot serialize a closed posting list\")\n\n root = xml_utils.Element(\"correioslog\")\n root.append(xml_utils.Element(\"tipo_arquivo\", text=\"Postagem\"))\n root.append(xml_utils.Element(\"versao_arquivo\", text=\"2.3\"))\n root.append(self._get_posting_list_element(posting_list))\n root.append(self._get_sender_info_element(posting_list))\n root.append(xml_utils.Element(\"forma_pagamento\"))\n\n for shipping_label in posting_list.shipping_labels.values():\n root.append(self._get_shipping_label_element(shipping_label))\n\n return root\n\n def validate(self, document):\n with open(os.path.join(DATADIR, \"posting_list_schema.xsd\")) as xsd:\n xsd_document = xml_utils.parse(xsd)\n schema = xml_utils.XMLSchema(xsd_document)\n return schema.assert_(document)\n\n def get_xml(self, document) -> bytes:\n xmlstring = str(xml_utils.tostring(document, encoding=\"unicode\"))\n encoded_xmlstring = xmlstring.encode(\"iso-8859-1\", errors='ignore')\n return b'<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>' + encoded_xmlstring\n\n\nclass Correios:\n PRODUCTION = \"production\"\n TEST = \"test\"\n MAX_TRACKING_CODES_PER_REQUEST = 50\n\n # 'environment': ('url', 'ssl_verification')\n sigep_urls = {\n 'production': (get_wsdl_path('AtendeCliente-production.wsdl'), True),\n 'test': (get_wsdl_path('AtendeCliente-test.wsdl'), False),\n }\n websro_url = get_wsdl_path('Rastro.wsdl')\n freight_url = get_wsdl_path('CalcPrecoPrazo.asmx')\n\n def __init__(self, username, password, timeout=8, 
environment=\"production\"):\n self.username = username\n self.password = password\n self.timeout = timeout\n\n url, verify = self.sigep_urls[environment]\n self.sigep_url = url\n self.sigep_verify = verify\n\n self.sigep_client = SoapClient(self.sigep_url, verify=self.sigep_verify, timeout=self.timeout)\n self.sigep = self.sigep_client.service\n\n self.websro_client = SoapClient(self.websro_url, timeout=self.timeout)\n self.websro = self.websro_client.service\n\n self.freight_client = SoapClient(self.freight_url, timeout=self.timeout)\n self.freight = self.freight_client.service\n\n self.model_builder = ModelBuilder()\n\n def _auth_call(self, method_name, *args, **kwargs):\n kwargs.update({\n \"usuario\": self.username,\n \"senha\": self.password,\n })\n return self._call(method_name, *args, **kwargs)\n\n def _call(self, method_name, *args, **kwargs):\n method = getattr(self.sigep, method_name)\n return method(*args, **kwargs) # TODO: handle errors\n\n def get_user(self, contract_number: Union[int, str], posting_card_number: Union[int, str]) -> User:\n contract_number = str(contract_number)\n posting_card_number = str(posting_card_number)\n user_data = self._auth_call(\"buscaCliente\", contract_number, posting_card_number)\n return self.model_builder.build_user(user_data)\n\n def find_zipcode(self, zip_code: Union[ZipCode, str]) -> ZipAddress:\n zip_address_data = self._call(\"consultaCEP\", str(zip_code))\n return self.model_builder.build_zip_address(zip_address_data)\n\n def verify_service_availability(self,\n posting_card: PostingCard,\n service: Service,\n from_zip_code: Union[ZipCode, str],\n to_zip_code: Union[ZipCode, str]) -> bool:\n from_zip_code = ZipCode.create(from_zip_code)\n to_zip_code = ZipCode.create(to_zip_code)\n result = self._auth_call(\"verificaDisponibilidadeServico\",\n posting_card.administrative_code, str(service),\n str(from_zip_code), str(to_zip_code))\n return result\n\n def get_posting_card_status(self, posting_card: PostingCard) -> 
bool:\n result = self._auth_call(\"getStatusCartaoPostagem\", posting_card.number)\n return self.model_builder.build_posting_card_status(result)\n\n def request_tracking_codes(self, user: User, service: Service, quantity=1, receiver_type=\"C\") -> list:\n result = self._auth_call(\"solicitaEtiquetas\",\n receiver_type, str(user.federal_tax_number),\n service.id, quantity)\n return self.model_builder.build_tracking_codes_list(result)\n\n def generate_verification_digit(self, tracking_codes: Sequence[str]) -> List[int]:\n tracking_codes = [TrackingCode(tc).nodigit for tc in tracking_codes]\n result = self._auth_call(\"geraDigitoVerificadorEtiquetas\",\n tracking_codes)\n\n return result\n\n def _generate_xml_string(self, posting_list: PostingList) -> str:\n posting_list_serializer = PostingListSerializer()\n document = posting_list_serializer.get_document(posting_list)\n posting_list_serializer.validate(document)\n xml = posting_list_serializer.get_xml(document)\n return xml.decode(\"ISO-8859-1\")\n\n def close_posting_list(self, posting_list: PostingList, posting_card: PostingCard) -> PostingList:\n xml = self._generate_xml_string(posting_list)\n tracking_codes = posting_list.get_tracking_codes()\n\n id_ = self._auth_call(\"fechaPlpVariosServicos\", xml,\n posting_list.custom_id, posting_card.number, tracking_codes)\n posting_list.close_with_id(id_)\n\n return posting_list\n\n def get_tracking_code_events(self, tracking_list):\n if isinstance(tracking_list, (str, TrackingCode)):\n tracking_list = [tracking_list]\n\n if len(tracking_list) > Correios.MAX_TRACKING_CODES_PER_REQUEST:\n msg = '{} tracking codes requested exceeds the limit of {} stabilished by the Correios'\n msg = msg.format(len(tracking_list), Correios.MAX_TRACKING_CODES_PER_REQUEST)\n raise TrackingCodesLimitExceededError(msg)\n\n tracking_codes = {}\n for tracking_code in tracking_list:\n tracking_code = TrackingCode.create(tracking_code)\n tracking_codes[tracking_code.code] = tracking_code\n\n 
response = self.websro.buscaEventosLista(self.username, self.password, \"L\", \"T\", \"101\",\n tuple(tracking_codes.keys()))\n return self.model_builder.load_tracking_events(tracking_codes, response)\n\n def calculate_freights(self,\n posting_card: PostingCard,\n services: List[Union[Service, int]],\n from_zip: Union[ZipCode, int, str], to_zip: Union[ZipCode, int, str],\n package: Package,\n value: Union[Decimal, float] = 0.00,\n extra_services: Optional[Sequence[Union[ExtraService, int]]] = None):\n\n administrative_code = posting_card.administrative_code\n services = [Service.get(s) for s in services]\n from_zip = ZipCode.create(from_zip)\n to_zip = ZipCode.create(to_zip)\n\n if extra_services is None:\n extra_services = []\n else:\n extra_services = [ExtraService.get(es) for es in extra_services]\n\n response = self.freight.CalcPrecoPrazo(\n administrative_code,\n self.password,\n \",\".join(str(s) for s in services),\n str(from_zip),\n str(to_zip),\n package.weight / KG,\n package.package_type,\n package.length,\n package.height,\n package.width,\n package.diameter,\n \"S\" if EXTRA_SERVICE_MP in extra_services else \"N\",\n value,\n \"S\" if EXTRA_SERVICE_AR in extra_services else \"N\",\n )\n return self.model_builder.build_freights_list(response)\n\n def calculate_delivery_time(self,\n service: Union[Service, int],\n from_zip: Union[ZipCode, int, str],\n to_zip: Union[ZipCode, int, str]):\n service = Service.get(service)\n from_zip = ZipCode.create(from_zip)\n to_zip = ZipCode.create(to_zip)\n\n response = self.freight.CalcPrazo(str(service), str(from_zip), str(to_zip))\n return response.cServico[0].PrazoEntrega\n",
"step-ids": [
29,
30,
33,
37,
42
]
}
|
[
29,
30,
33,
37,
42
] |
def sqrt(number):
    """Return the integer floor of the square root of *number*.

    Uses binary search over the candidate range, so runs in O(log n)
    multiplications.

    Args:
        number: A non-negative integer.

    Returns:
        The largest integer r such that r * r <= number.

    Raises:
        ValueError: If *number* is negative.
    """
    if number < 0:
        raise ValueError("square root of a negative number is undefined")
    # 0 and 1 are their own floor square roots; the search below would
    # mishandle them because its upper bound is number - 1.
    if number < 2:
        return number
    low = 1
    high = number - 1
    while low <= high:
        # Floor division keeps mid an int on Python 3 (plain / yields float).
        mid = (low + high) // 2
        square = mid * mid
        if square == number:
            return mid
        elif square > number:
            high = mid - 1
        else:
            low = mid + 1
    # The loop exits with low one past the floor square root.
    return low - 1
# Demo: floor square root of 15 is 3.  Parenthesized call form is valid on
# both Python 2 and Python 3 (the bare statement form is Python 2 only).
print(sqrt(15))
|
normal
|
{
"blob_id": "67b060349e986b06a0ee6d8a1afee82d49989c29",
"index": 6818,
"step-1": "\n\n\ndef sqrt(number):\n\n low = 1\n high = number - 1\n\n while low <= high:\n\n mid = (low + high) /2\n\n if mid * mid == number:\n return mid\n\n elif mid * mid > number:\n high = mid - 1\n else:\n low = mid + 1\n\n return low - 1\n\nprint sqrt(15)\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class SoapySDRSizeList(_object):
    """SWIG-generated proxy for a native list of sizes.

    Exposes both the Python sequence protocol (__len__, __getitem__,
    slicing, iteration) and a C++ std::vector-style API (push_back, front,
    back, begin, size, ...); every operation is delegated to the compiled
    _SoapySDR extension module.  Presumably the element type is size_t --
    TODO confirm against the SoapySDR SWIG interface file.
    """
    # NOTE(review): the placeholder lines below are masked/corrupted text,
    # most likely the standard SWIG attribute glue (__swig_setmethods__,
    # __setattr__, __swig_getmethods__, __getattr__, __repr__) seen in the
    # sibling SoapySDRDoubleList class -- restore from the SWIG output.
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    <|reserved_special_token_0|>
    # --- Python sequence protocol, all delegated to the C extension ---
    def iterator(self):
        return _SoapySDR.SoapySDRSizeList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        # Python 2 truth-value hook; __bool__ below is the Python 3 twin.
        return _SoapySDR.SoapySDRSizeList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRSizeList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRSizeList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRSizeList_pop(self)
    def __getslice__(self, *args):
        # __*slice__ hooks only fire on Python 2; Python 3 routes slices
        # through __getitem__/__setitem__/__delitem__ below.
        return _SoapySDR.SoapySDRSizeList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___setitem__(self, *args)
    # NOTE(review): masked line -- likely the append() method (compare
    # SoapySDRDoubleList.append).
    <|reserved_special_token_0|>
    # --- std::vector-style API ---
    def empty(self):
        return _SoapySDR.SoapySDRSizeList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRSizeList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRSizeList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRSizeList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRSizeList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRSizeList_begin(self)
    # NOTE(review): masked line -- likely end().
    <|reserved_special_token_0|>
    def rbegin(self):
        return _SoapySDR.SoapySDRSizeList_rbegin(self)
    # NOTE(review): masked line -- likely rend().
    <|reserved_special_token_0|>
    def pop_back(self):
        return _SoapySDR.SoapySDRSizeList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRSizeList_erase(self, *args)
    def __init__(self, *args):
        # Create the native vector and attach it to this proxy; the
        # try/except mirrors SWIG's handling of old/new 'this' attributes.
        this = _SoapySDR.new_SoapySDRSizeList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRSizeList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRSizeList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRSizeList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRSizeList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRSizeList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRSizeList_insert(self, *args)
    # NOTE(review): masked line -- likely reserve().
    <|reserved_special_token_0|>
    def capacity(self):
        return _SoapySDR.SoapySDRSizeList_capacity(self)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class SoapySDRDoubleList(_object):
    """SWIG-generated proxy for a native vector of doubles.

    Exposes both the Python sequence protocol (__len__, __getitem__,
    slicing, iteration) and a C++ std::vector-style API (push_back, front,
    back, begin, size, ...); every operation is delegated to the compiled
    _SoapySDR extension module.
    """
    # SWIG attribute glue: route attribute get/set through the shadow
    # method tables so C-level members behave like plain attributes.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self,
        SoapySDRDoubleList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRDoubleList,
        name)
    __repr__ = _swig_repr
    # --- Python sequence protocol, all delegated to the C extension ---
    def iterator(self):
        return _SoapySDR.SoapySDRDoubleList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        # Python 2 truth-value hook; __bool__ below is the Python 3 twin.
        return _SoapySDR.SoapySDRDoubleList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRDoubleList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRDoubleList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRDoubleList_pop(self)
    def __getslice__(self, *args):
        # __*slice__ hooks only fire on Python 2; Python 3 routes slices
        # through __getitem__/__setitem__/__delitem__ below.
        return _SoapySDR.SoapySDRDoubleList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRDoubleList_append(self, *args)
    # --- std::vector-style API ---
    def empty(self):
        return _SoapySDR.SoapySDRDoubleList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRDoubleList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRDoubleList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRDoubleList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRDoubleList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRDoubleList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRDoubleList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRDoubleList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRDoubleList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRDoubleList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRDoubleList_erase(self, *args)
    def __init__(self, *args):
        # Create the native vector and attach it to this proxy; the
        # try/except mirrors SWIG's handling of old/new 'this' attributes.
        this = _SoapySDR.new_SoapySDRDoubleList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRDoubleList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRDoubleList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRDoubleList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRDoubleList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRDoubleList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRDoubleList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRDoubleList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRDoubleList_capacity(self)
    # Lifetime glue: free the native object when the proxy is collected.
    __swig_destroy__ = _SoapySDR.delete_SoapySDRDoubleList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class StreamResult(_object):
    """SWIG-generated record describing the outcome of a stream call.

    Fields (backed by C-level getters/setters via the SWIG glue below):
      ret      -- integer result of the stream call (presumably element
                  count or error code -- confirm against SoapySDR docs)
      flags    -- stream flag bits associated with the result
      timeNs   -- timestamp (nanoseconds, per the name)
      chanMask -- channel mask associated with the result
    """
    # SWIG attribute glue: route attribute get/set through the shadow
    # method tables so C-level members behave like plain attributes.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self,
        StreamResult, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StreamResult, name)
    __repr__ = _swig_repr
    def __init__(self):
        # Create the native StreamResult and attach it to this proxy; the
        # try/except mirrors SWIG's handling of old/new 'this' attributes.
        this = _SoapySDR.new_StreamResult()
        try:
            self.this.append(this)
        except:
            self.this = this
    # Register C-level getters/setters for each field; under new-style
    # classes (_newclass) also expose the field as a plain property.
    __swig_setmethods__['ret'] = _SoapySDR.StreamResult_ret_set
    __swig_getmethods__['ret'] = _SoapySDR.StreamResult_ret_get
    if _newclass:
        ret = _swig_property(_SoapySDR.StreamResult_ret_get, _SoapySDR.
            StreamResult_ret_set)
    __swig_setmethods__['flags'] = _SoapySDR.StreamResult_flags_set
    __swig_getmethods__['flags'] = _SoapySDR.StreamResult_flags_get
    if _newclass:
        flags = _swig_property(_SoapySDR.StreamResult_flags_get, _SoapySDR.
            StreamResult_flags_set)
    __swig_setmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_set
    __swig_getmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_get
    if _newclass:
        timeNs = _swig_property(_SoapySDR.StreamResult_timeNs_get,
            _SoapySDR.StreamResult_timeNs_set)
    __swig_setmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_set
    __swig_getmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_get
    if _newclass:
        chanMask = _swig_property(_SoapySDR.StreamResult_chanMask_get,
            _SoapySDR.StreamResult_chanMask_set)
    def __str__(self):
        # Human-readable summary; chanMask is not included in this view.
        return 'ret=%s, flags=%s, timeNs=%s' % (self.ret, self.flags, self.
            timeNs)
    # Lifetime glue: free the native object when the proxy is collected.
    __swig_destroy__ = _SoapySDR.delete_StreamResult
    __del__ = lambda self: None
<|reserved_special_token_0|>
class Device(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Device,
name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Device, name)
def __init__(self, *args, **kwargs):
raise AttributeError('No constructor defined')
__repr__ = _swig_repr
__swig_destroy__ = _SoapySDR.delete_Device
__del__ = lambda self: None
__swig_getmethods__['enumerate'] = lambda x: _SoapySDR.Device_enumerate
if _newclass:
enumerate = staticmethod(_SoapySDR.Device_enumerate)
__swig_getmethods__['make'] = lambda x: _SoapySDR.Device_make
if _newclass:
make = staticmethod(_SoapySDR.Device_make)
__swig_getmethods__['unmake'] = lambda x: _SoapySDR.Device_unmake
if _newclass:
unmake = staticmethod(_SoapySDR.Device_unmake)
def getDriverKey(self):
return _SoapySDR.Device_getDriverKey(self)
def getHardwareKey(self):
return _SoapySDR.Device_getHardwareKey(self)
def getHardwareInfo(self):
return _SoapySDR.Device_getHardwareInfo(self)
def setFrontendMapping(self, *args):
return _SoapySDR.Device_setFrontendMapping(self, *args)
def getFrontendMapping(self, *args):
return _SoapySDR.Device_getFrontendMapping(self, *args)
def getNumChannels(self, *args):
return _SoapySDR.Device_getNumChannels(self, *args)
def getChannelInfo(self, *args):
return _SoapySDR.Device_getChannelInfo(self, *args)
def getFullDuplex(self, *args):
return _SoapySDR.Device_getFullDuplex(self, *args)
def getStreamFormats(self, *args):
return _SoapySDR.Device_getStreamFormats(self, *args)
def getNativeStreamFormat(self, *args):
return _SoapySDR.Device_getNativeStreamFormat(self, *args)
def getStreamArgsInfo(self, *args):
return _SoapySDR.Device_getStreamArgsInfo(self, *args)
def setupStream(self, *args):
return _SoapySDR.Device_setupStream(self, *args)
def closeStream(self, *args):
return _SoapySDR.Device_closeStream(self, *args)
def getStreamMTU(self, *args):
return _SoapySDR.Device_getStreamMTU(self, *args)
def activateStream(self, *args):
return _SoapySDR.Device_activateStream(self, *args)
def deactivateStream(self, *args):
return _SoapySDR.Device_deactivateStream(self, *args)
def readStream(self, *args):
return _SoapySDR.Device_readStream(self, *args)
def writeStream(self, *args):
return _SoapySDR.Device_writeStream(self, *args)
def readStreamStatus(self, *args):
return _SoapySDR.Device_readStreamStatus(self, *args)
def getNumDirectAccessBuffers(self, *args):
return _SoapySDR.Device_getNumDirectAccessBuffers(self, *args)
def getDirectAccessBufferAddrs(self, *args):
return _SoapySDR.Device_getDirectAccessBufferAddrs(self, *args)
def acquireReadBuffer(self, *args):
return _SoapySDR.Device_acquireReadBuffer(self, *args)
def releaseReadBuffer(self, *args):
return _SoapySDR.Device_releaseReadBuffer(self, *args)
def acquireWriteBuffer(self, *args):
return _SoapySDR.Device_acquireWriteBuffer(self, *args)
def releaseWriteBuffer(self, *args):
return _SoapySDR.Device_releaseWriteBuffer(self, *args)
def listAntennas(self, *args):
return _SoapySDR.Device_listAntennas(self, *args)
def setAntenna(self, *args):
return _SoapySDR.Device_setAntenna(self, *args)
def getAntenna(self, *args):
return _SoapySDR.Device_getAntenna(self, *args)
def hasDCOffsetMode(self, *args):
return _SoapySDR.Device_hasDCOffsetMode(self, *args)
def setDCOffsetMode(self, *args):
return _SoapySDR.Device_setDCOffsetMode(self, *args)
def getDCOffsetMode(self, *args):
return _SoapySDR.Device_getDCOffsetMode(self, *args)
def hasDCOffset(self, *args):
return _SoapySDR.Device_hasDCOffset(self, *args)
def setDCOffset(self, *args):
return _SoapySDR.Device_setDCOffset(self, *args)
def getDCOffset(self, *args):
return _SoapySDR.Device_getDCOffset(self, *args)
def hasIQBalance(self, *args):
return _SoapySDR.Device_hasIQBalance(self, *args)
def setIQBalance(self, *args):
return _SoapySDR.Device_setIQBalance(self, *args)
def getIQBalance(self, *args):
return _SoapySDR.Device_getIQBalance(self, *args)
def hasFrequencyCorrection(self, *args):
return _SoapySDR.Device_hasFrequencyCorrection(self, *args)
def setFrequencyCorrection(self, *args):
return _SoapySDR.Device_setFrequencyCorrection(self, *args)
def getFrequencyCorrection(self, *args):
return _SoapySDR.Device_getFrequencyCorrection(self, *args)
def listGains(self, *args):
return _SoapySDR.Device_listGains(self, *args)
def hasGainMode(self, *args):
return _SoapySDR.Device_hasGainMode(self, *args)
def setGainMode(self, *args):
return _SoapySDR.Device_setGainMode(self, *args)
def getGainMode(self, *args):
return _SoapySDR.Device_getGainMode(self, *args)
def setGain(self, *args):
return _SoapySDR.Device_setGain(self, *args)
def getGain(self, *args):
return _SoapySDR.Device_getGain(self, *args)
    # ------------------------------------------------------------------
    # SWIG-generated interior of the Device proxy class: each method is a
    # thin delegation into the _SoapySDR C extension module.
    # ------------------------------------------------------------------
    def getGainRange(self, *args):
        return _SoapySDR.Device_getGainRange(self, *args)
    # -- Frequency / tuning --
    def setFrequency(self, *args):
        return _SoapySDR.Device_setFrequency(self, *args)
    def getFrequency(self, *args):
        return _SoapySDR.Device_getFrequency(self, *args)
    def listFrequencies(self, *args):
        return _SoapySDR.Device_listFrequencies(self, *args)
    def getFrequencyRange(self, *args):
        return _SoapySDR.Device_getFrequencyRange(self, *args)
    def getFrequencyArgsInfo(self, *args):
        return _SoapySDR.Device_getFrequencyArgsInfo(self, *args)
    # -- Sample rate and bandwidth --
    def setSampleRate(self, *args):
        return _SoapySDR.Device_setSampleRate(self, *args)
    def getSampleRate(self, *args):
        return _SoapySDR.Device_getSampleRate(self, *args)
    def listSampleRates(self, *args):
        return _SoapySDR.Device_listSampleRates(self, *args)
    def getSampleRateRange(self, *args):
        return _SoapySDR.Device_getSampleRateRange(self, *args)
    def setBandwidth(self, *args):
        return _SoapySDR.Device_setBandwidth(self, *args)
    def getBandwidth(self, *args):
        return _SoapySDR.Device_getBandwidth(self, *args)
    def listBandwidths(self, *args):
        return _SoapySDR.Device_listBandwidths(self, *args)
    def getBandwidthRange(self, *args):
        return _SoapySDR.Device_getBandwidthRange(self, *args)
    # -- Clocking and time sources --
    def setMasterClockRate(self, *args):
        return _SoapySDR.Device_setMasterClockRate(self, *args)
    def getMasterClockRate(self):
        return _SoapySDR.Device_getMasterClockRate(self)
    def getMasterClockRates(self):
        return _SoapySDR.Device_getMasterClockRates(self)
    def listClockSources(self):
        return _SoapySDR.Device_listClockSources(self)
    def setClockSource(self, *args):
        return _SoapySDR.Device_setClockSource(self, *args)
    def getClockSource(self):
        return _SoapySDR.Device_getClockSource(self)
    def listTimeSources(self):
        return _SoapySDR.Device_listTimeSources(self)
    def setTimeSource(self, *args):
        return _SoapySDR.Device_setTimeSource(self, *args)
    def getTimeSource(self):
        return _SoapySDR.Device_getTimeSource(self)
    def hasHardwareTime(self, what=''):
        return _SoapySDR.Device_hasHardwareTime(self, what)
    def getHardwareTime(self, what=''):
        return _SoapySDR.Device_getHardwareTime(self, what)
    def setHardwareTime(self, *args):
        return _SoapySDR.Device_setHardwareTime(self, *args)
    def setCommandTime(self, *args):
        return _SoapySDR.Device_setCommandTime(self, *args)
    # -- Sensors, registers, settings --
    def listSensors(self, *args):
        return _SoapySDR.Device_listSensors(self, *args)
    def getSensorInfo(self, *args):
        return _SoapySDR.Device_getSensorInfo(self, *args)
    def readSensor(self, *args):
        return _SoapySDR.Device_readSensor(self, *args)
    def listRegisterInterfaces(self):
        return _SoapySDR.Device_listRegisterInterfaces(self)
    def writeRegister(self, *args):
        return _SoapySDR.Device_writeRegister(self, *args)
    def readRegister(self, *args):
        return _SoapySDR.Device_readRegister(self, *args)
    def writeRegisters(self, *args):
        return _SoapySDR.Device_writeRegisters(self, *args)
    def readRegisters(self, *args):
        return _SoapySDR.Device_readRegisters(self, *args)
    def getSettingInfo(self, *args):
        return _SoapySDR.Device_getSettingInfo(self, *args)
    def writeSetting(self, *args):
        return _SoapySDR.Device_writeSetting(self, *args)
    def readSetting(self, *args):
        return _SoapySDR.Device_readSetting(self, *args)
    # -- Low-level peripherals: GPIO, I2C, SPI, UART --
    def listGPIOBanks(self):
        return _SoapySDR.Device_listGPIOBanks(self)
    def writeGPIO(self, *args):
        return _SoapySDR.Device_writeGPIO(self, *args)
    def readGPIO(self, *args):
        return _SoapySDR.Device_readGPIO(self, *args)
    def writeGPIODir(self, *args):
        return _SoapySDR.Device_writeGPIODir(self, *args)
    def readGPIODir(self, *args):
        return _SoapySDR.Device_readGPIODir(self, *args)
    def writeI2C(self, *args):
        return _SoapySDR.Device_writeI2C(self, *args)
    def readI2C(self, *args):
        return _SoapySDR.Device_readI2C(self, *args)
    def transactSPI(self, *args):
        return _SoapySDR.Device_transactSPI(self, *args)
    def listUARTs(self):
        return _SoapySDR.Device_listUARTs(self)
    def writeUART(self, *args):
        return _SoapySDR.Device_writeUART(self, *args)
    def readUART(self, *args):
        return _SoapySDR.Device_readUART(self, *args)
    # -- Raw stream entry points wrapped by the hand-written helpers below --
    def readStream__(self, *args):
        return _SoapySDR.Device_readStream__(self, *args)
    def writeStream__(self, *args):
        return _SoapySDR.Device_writeStream__(self, *args)
    def readStreamStatus__(self, *args):
        return _SoapySDR.Device_readStreamStatus__(self, *args)
    def __del__(self):
        # Release the underlying device handle when the proxy is collected.
        Device.unmake(self)
    def __str__(self):
        """Render the device as 'driverKey:hardwareKey'."""
        return '%s:%s' % (self.getDriverKey(), self.getHardwareKey())
    def readStream(self, stream, buffs, numElems, flags=0, timeoutUs=100000):
        """Read samples into *buffs*: each buffer object is converted to a
        raw pointer via extractBuffPointer before delegating to readStream__."""
        ptrs = [extractBuffPointer(b) for b in buffs]
        return self.readStream__(stream, ptrs, numElems, flags, timeoutUs)
    def writeStream(self, stream, buffs, numElems, flags=0, timeNs=0,
        timeoutUs=100000):
        """Write samples from *buffs*: each buffer object is converted to a
        raw pointer via extractBuffPointer before delegating to writeStream__."""
        ptrs = [extractBuffPointer(b) for b in buffs]
        return self.writeStream__(stream, ptrs, numElems, flags, timeNs,
            timeoutUs)
    def readStreamStatus(self, stream, timeoutUs=100000):
        """Poll stream status; delegates to readStreamStatus__."""
        return self.readStreamStatus__(stream, timeoutUs)
<|reserved_special_token_0|>
class Device(Device):
    """Re-binding of the Device proxy so that Device(...) constructs a
    device via the SWIG factory: __new__ forwards to cls.make(...)."""
    def __new__(cls, *args, **kwargs):
        # Delegate construction to the factory classmethod on the proxy.
        return cls.make(*args, **kwargs)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SoapySDRKwargsList(_object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def iterator(self):
return _SoapySDR.SoapySDRKwargsList_iterator(self)
def __iter__(self):
return self.iterator()
<|reserved_special_token_0|>
def __bool__(self):
return _SoapySDR.SoapySDRKwargsList___bool__(self)
def __len__(self):
return _SoapySDR.SoapySDRKwargsList___len__(self)
def pop(self):
return _SoapySDR.SoapySDRKwargsList_pop(self)
def __getslice__(self, *args):
return _SoapySDR.SoapySDRKwargsList___getslice__(self, *args)
def __setslice__(self, *args):
return _SoapySDR.SoapySDRKwargsList___setslice__(self, *args)
<|reserved_special_token_0|>
def __delitem__(self, *args):
return _SoapySDR.SoapySDRKwargsList___delitem__(self, *args)
def __getitem__(self, *args):
return _SoapySDR.SoapySDRKwargsList___getitem__(self, *args)
def __setitem__(self, *args):
return _SoapySDR.SoapySDRKwargsList___setitem__(self, *args)
def append(self, *args):
return _SoapySDR.SoapySDRKwargsList_append(self, *args)
def empty(self):
return _SoapySDR.SoapySDRKwargsList_empty(self)
def size(self):
return _SoapySDR.SoapySDRKwargsList_size(self)
<|reserved_special_token_0|>
def swap(self, *args):
return _SoapySDR.SoapySDRKwargsList_swap(self, *args)
def get_allocator(self):
return _SoapySDR.SoapySDRKwargsList_get_allocator(self)
def begin(self):
return _SoapySDR.SoapySDRKwargsList_begin(self)
def end(self):
return _SoapySDR.SoapySDRKwargsList_end(self)
def rbegin(self):
return _SoapySDR.SoapySDRKwargsList_rbegin(self)
def rend(self):
return _SoapySDR.SoapySDRKwargsList_rend(self)
def pop_back(self):
return _SoapySDR.SoapySDRKwargsList_pop_back(self)
def erase(self, *args):
return _SoapySDR.SoapySDRKwargsList_erase(self, *args)
def __init__(self, *args):
this = _SoapySDR.new_SoapySDRKwargsList(*args)
try:
self.this.append(this)
except:
self.this = this
<|reserved_special_token_0|>
def front(self):
return _SoapySDR.SoapySDRKwargsList_front(self)
def back(self):
return _SoapySDR.SoapySDRKwargsList_back(self)
def assign(self, *args):
return _SoapySDR.SoapySDRKwargsList_assign(self, *args)
def resize(self, *args):
return _SoapySDR.SoapySDRKwargsList_resize(self, *args)
<|reserved_special_token_0|>
def reserve(self, *args):
return _SoapySDR.SoapySDRKwargsList_reserve(self, *args)
def capacity(self):
return _SoapySDR.SoapySDRKwargsList_capacity(self)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class SoapySDRArgInfoList(_object):
    """List-like SWIG proxy; every method delegates to the matching
    SoapySDRArgInfoList_* function in the _SoapySDR extension module.
    Exposes both the Python sequence protocol and the C++ vector-style
    API (begin/end, push_back, reserve, ...)."""
    # SWIG attribute dispatch tables consulted by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self,
        SoapySDRArgInfoList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self,
        SoapySDRArgInfoList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRArgInfoList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRArgInfoList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRArgInfoList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRArgInfoList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRArgInfoList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRArgInfoList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRArgInfoList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRArgInfoList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRArgInfoList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRArgInfoList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRArgInfoList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRArgInfoList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRArgInfoList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRArgInfoList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_erase(self, *args)
    def __init__(self, *args):
        # Standard SWIG constructor: keep the C-level handle either by
        # appending to an existing `this` list or by direct assignment.
        this = _SoapySDR.new_SoapySDRArgInfoList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRArgInfoList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRArgInfoList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRArgInfoList_capacity(self)
    # C-level destructor hook; the Python __del__ itself is a no-op.
    __swig_destroy__ = _SoapySDR.delete_SoapySDRArgInfoList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRStringList(_object):
    """List-like SWIG proxy; every method delegates to the matching
    SoapySDRStringList_* function in the _SoapySDR extension module."""
    # SWIG attribute dispatch tables consulted by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self,
        SoapySDRStringList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRStringList,
        name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRStringList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRStringList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRStringList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRStringList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRStringList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRStringList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRStringList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRStringList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRStringList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRStringList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRStringList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRStringList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRStringList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRStringList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRStringList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRStringList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRStringList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRStringList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRStringList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRStringList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRStringList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRStringList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRStringList_erase(self, *args)
    def __init__(self, *args):
        # Standard SWIG constructor: keep the C-level handle either by
        # appending to an existing `this` list or by direct assignment.
        this = _SoapySDR.new_SoapySDRStringList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRStringList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRStringList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRStringList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRStringList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRStringList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRStringList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRStringList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRStringList_capacity(self)
    # C-level destructor hook; the Python __del__ itself is a no-op.
    __swig_destroy__ = _SoapySDR.delete_SoapySDRStringList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRRangeList(_object):
    """List-like SWIG proxy; every method delegates to the matching
    SoapySDRRangeList_* function in the _SoapySDR extension module."""
    # SWIG attribute dispatch tables consulted by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self,
        SoapySDRRangeList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRRangeList,
        name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRRangeList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRRangeList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRRangeList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRRangeList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRRangeList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRRangeList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRRangeList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRRangeList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRRangeList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRRangeList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRRangeList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRRangeList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRRangeList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRRangeList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRRangeList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRRangeList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRRangeList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRRangeList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRRangeList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRRangeList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRRangeList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRRangeList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRRangeList_erase(self, *args)
    def __init__(self, *args):
        # Standard SWIG constructor: keep the C-level handle either by
        # appending to an existing `this` list or by direct assignment.
        this = _SoapySDR.new_SoapySDRRangeList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRRangeList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRRangeList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRRangeList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRRangeList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRRangeList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRRangeList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRRangeList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRRangeList_capacity(self)
    # C-level destructor hook; the Python __del__ itself is a no-op.
    __swig_destroy__ = _SoapySDR.delete_SoapySDRRangeList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRSizeList(_object):
    """List-like SWIG proxy; every method delegates to the matching
    SoapySDRSizeList_* function in the _SoapySDR extension module."""
    # SWIG attribute dispatch tables consulted by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self,
        SoapySDRSizeList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRSizeList, name
        )
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRSizeList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRSizeList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRSizeList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRSizeList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRSizeList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRSizeList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRSizeList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRSizeList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRSizeList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRSizeList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRSizeList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRSizeList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRSizeList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRSizeList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRSizeList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRSizeList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRSizeList_erase(self, *args)
    def __init__(self, *args):
        # Standard SWIG constructor: keep the C-level handle either by
        # appending to an existing `this` list or by direct assignment.
        this = _SoapySDR.new_SoapySDRSizeList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRSizeList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRSizeList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRSizeList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRSizeList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRSizeList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRSizeList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRSizeList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRSizeList_capacity(self)
    # C-level destructor hook; the Python __del__ itself is a no-op.
    __swig_destroy__ = _SoapySDR.delete_SoapySDRSizeList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRDoubleList(_object):
    """List-like SWIG proxy; every method delegates to the matching
    SoapySDRDoubleList_* function in the _SoapySDR extension module."""
    # SWIG attribute dispatch tables consulted by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self,
        SoapySDRDoubleList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRDoubleList,
        name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRDoubleList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRDoubleList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRDoubleList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRDoubleList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRDoubleList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRDoubleList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRDoubleList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRDoubleList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRDoubleList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRDoubleList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRDoubleList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRDoubleList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRDoubleList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRDoubleList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRDoubleList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRDoubleList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRDoubleList_erase(self, *args)
    def __init__(self, *args):
        # Standard SWIG constructor: keep the C-level handle either by
        # appending to an existing `this` list or by direct assignment.
        this = _SoapySDR.new_SoapySDRDoubleList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRDoubleList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRDoubleList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRDoubleList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRDoubleList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRDoubleList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRDoubleList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRDoubleList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRDoubleList_capacity(self)
    # C-level destructor hook; the Python __del__ itself is a no-op.
    __swig_destroy__ = _SoapySDR.delete_SoapySDRDoubleList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class StreamResult(_object):
    """SWIG proxy for a stream-operation result record with fields
    ret, flags, timeNs and chanMask, each backed by C-level get/set
    hooks in the _SoapySDR extension module."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self,
        StreamResult, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StreamResult, name)
    __repr__ = _swig_repr
    def __init__(self):
        # Standard SWIG constructor: keep the C-level handle either by
        # appending to an existing `this` list or by direct assignment.
        this = _SoapySDR.new_StreamResult()
        try:
            self.this.append(this)
        except:
            self.this = this
    # Register the C accessors for each field; on new-style classes they
    # are additionally exposed as plain properties.
    __swig_setmethods__['ret'] = _SoapySDR.StreamResult_ret_set
    __swig_getmethods__['ret'] = _SoapySDR.StreamResult_ret_get
    if _newclass:
        ret = _swig_property(_SoapySDR.StreamResult_ret_get, _SoapySDR.
            StreamResult_ret_set)
    __swig_setmethods__['flags'] = _SoapySDR.StreamResult_flags_set
    __swig_getmethods__['flags'] = _SoapySDR.StreamResult_flags_get
    if _newclass:
        flags = _swig_property(_SoapySDR.StreamResult_flags_get, _SoapySDR.
            StreamResult_flags_set)
    __swig_setmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_set
    __swig_getmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_get
    if _newclass:
        timeNs = _swig_property(_SoapySDR.StreamResult_timeNs_get,
            _SoapySDR.StreamResult_timeNs_set)
    __swig_setmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_set
    __swig_getmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_get
    if _newclass:
        chanMask = _swig_property(_SoapySDR.StreamResult_chanMask_get,
            _SoapySDR.StreamResult_chanMask_set)
    def __str__(self):
        """Summarize ret/flags/timeNs (chanMask is not included)."""
        return 'ret=%s, flags=%s, timeNs=%s' % (self.ret, self.flags, self.
            timeNs)
    # C-level destructor hook; the Python __del__ itself is a no-op.
    __swig_destroy__ = _SoapySDR.delete_StreamResult
    __del__ = lambda self: None
<|reserved_special_token_0|>
class Device(_object):
    """SWIG proxy for a SoapySDR device.

    Instances are not constructed directly (__init__ raises); they are
    produced by the `make` factory (see the Device subclass defined
    below this class). Every method delegates to the matching Device_*
    function in the _SoapySDR extension module; the hand-written
    readStream/writeStream helpers at the bottom convert Python buffer
    objects to raw pointers first.
    """
    # SWIG attribute dispatch tables consulted by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Device,
        name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Device, name)
    def __init__(self, *args, **kwargs):
        # Direct construction is forbidden; use the make() factory instead.
        raise AttributeError('No constructor defined')
    __repr__ = _swig_repr
    __swig_destroy__ = _SoapySDR.delete_Device
    __del__ = lambda self: None
    # Factory/teardown entry points registered both in the getter table and,
    # for new-style classes, as staticmethods.
    __swig_getmethods__['enumerate'] = lambda x: _SoapySDR.Device_enumerate
    if _newclass:
        enumerate = staticmethod(_SoapySDR.Device_enumerate)
    __swig_getmethods__['make'] = lambda x: _SoapySDR.Device_make
    if _newclass:
        make = staticmethod(_SoapySDR.Device_make)
    __swig_getmethods__['unmake'] = lambda x: _SoapySDR.Device_unmake
    if _newclass:
        unmake = staticmethod(_SoapySDR.Device_unmake)
    # -- Identification --
    def getDriverKey(self):
        return _SoapySDR.Device_getDriverKey(self)
    def getHardwareKey(self):
        return _SoapySDR.Device_getHardwareKey(self)
    def getHardwareInfo(self):
        return _SoapySDR.Device_getHardwareInfo(self)
    # -- Frontend mapping and channels --
    def setFrontendMapping(self, *args):
        return _SoapySDR.Device_setFrontendMapping(self, *args)
    def getFrontendMapping(self, *args):
        return _SoapySDR.Device_getFrontendMapping(self, *args)
    def getNumChannels(self, *args):
        return _SoapySDR.Device_getNumChannels(self, *args)
    def getChannelInfo(self, *args):
        return _SoapySDR.Device_getChannelInfo(self, *args)
    def getFullDuplex(self, *args):
        return _SoapySDR.Device_getFullDuplex(self, *args)
    # -- Stream setup and control --
    def getStreamFormats(self, *args):
        return _SoapySDR.Device_getStreamFormats(self, *args)
    def getNativeStreamFormat(self, *args):
        return _SoapySDR.Device_getNativeStreamFormat(self, *args)
    def getStreamArgsInfo(self, *args):
        return _SoapySDR.Device_getStreamArgsInfo(self, *args)
    def setupStream(self, *args):
        return _SoapySDR.Device_setupStream(self, *args)
    def closeStream(self, *args):
        return _SoapySDR.Device_closeStream(self, *args)
    def getStreamMTU(self, *args):
        return _SoapySDR.Device_getStreamMTU(self, *args)
    def activateStream(self, *args):
        return _SoapySDR.Device_activateStream(self, *args)
    def deactivateStream(self, *args):
        return _SoapySDR.Device_deactivateStream(self, *args)
    # NOTE: these three are shadowed by the hand-written helpers defined
    # further down in this class body.
    def readStream(self, *args):
        return _SoapySDR.Device_readStream(self, *args)
    def writeStream(self, *args):
        return _SoapySDR.Device_writeStream(self, *args)
    def readStreamStatus(self, *args):
        return _SoapySDR.Device_readStreamStatus(self, *args)
    # -- Direct buffer access --
    def getNumDirectAccessBuffers(self, *args):
        return _SoapySDR.Device_getNumDirectAccessBuffers(self, *args)
    def getDirectAccessBufferAddrs(self, *args):
        return _SoapySDR.Device_getDirectAccessBufferAddrs(self, *args)
    def acquireReadBuffer(self, *args):
        return _SoapySDR.Device_acquireReadBuffer(self, *args)
    def releaseReadBuffer(self, *args):
        return _SoapySDR.Device_releaseReadBuffer(self, *args)
    def acquireWriteBuffer(self, *args):
        return _SoapySDR.Device_acquireWriteBuffer(self, *args)
    def releaseWriteBuffer(self, *args):
        return _SoapySDR.Device_releaseWriteBuffer(self, *args)
    # -- Antennas --
    def listAntennas(self, *args):
        return _SoapySDR.Device_listAntennas(self, *args)
    def setAntenna(self, *args):
        return _SoapySDR.Device_setAntenna(self, *args)
    def getAntenna(self, *args):
        return _SoapySDR.Device_getAntenna(self, *args)
    # -- Frontend corrections (DC offset, IQ balance, frequency) --
    def hasDCOffsetMode(self, *args):
        return _SoapySDR.Device_hasDCOffsetMode(self, *args)
    def setDCOffsetMode(self, *args):
        return _SoapySDR.Device_setDCOffsetMode(self, *args)
    def getDCOffsetMode(self, *args):
        return _SoapySDR.Device_getDCOffsetMode(self, *args)
    def hasDCOffset(self, *args):
        return _SoapySDR.Device_hasDCOffset(self, *args)
    def setDCOffset(self, *args):
        return _SoapySDR.Device_setDCOffset(self, *args)
    def getDCOffset(self, *args):
        return _SoapySDR.Device_getDCOffset(self, *args)
    def hasIQBalance(self, *args):
        return _SoapySDR.Device_hasIQBalance(self, *args)
    def setIQBalance(self, *args):
        return _SoapySDR.Device_setIQBalance(self, *args)
    def getIQBalance(self, *args):
        return _SoapySDR.Device_getIQBalance(self, *args)
    def hasFrequencyCorrection(self, *args):
        return _SoapySDR.Device_hasFrequencyCorrection(self, *args)
    def setFrequencyCorrection(self, *args):
        return _SoapySDR.Device_setFrequencyCorrection(self, *args)
    def getFrequencyCorrection(self, *args):
        return _SoapySDR.Device_getFrequencyCorrection(self, *args)
    # -- Gain --
    def listGains(self, *args):
        return _SoapySDR.Device_listGains(self, *args)
    def hasGainMode(self, *args):
        return _SoapySDR.Device_hasGainMode(self, *args)
    def setGainMode(self, *args):
        return _SoapySDR.Device_setGainMode(self, *args)
    def getGainMode(self, *args):
        return _SoapySDR.Device_getGainMode(self, *args)
    def setGain(self, *args):
        return _SoapySDR.Device_setGain(self, *args)
    def getGain(self, *args):
        return _SoapySDR.Device_getGain(self, *args)
    def getGainRange(self, *args):
        return _SoapySDR.Device_getGainRange(self, *args)
    # -- Frequency / tuning --
    def setFrequency(self, *args):
        return _SoapySDR.Device_setFrequency(self, *args)
    def getFrequency(self, *args):
        return _SoapySDR.Device_getFrequency(self, *args)
    def listFrequencies(self, *args):
        return _SoapySDR.Device_listFrequencies(self, *args)
    def getFrequencyRange(self, *args):
        return _SoapySDR.Device_getFrequencyRange(self, *args)
    def getFrequencyArgsInfo(self, *args):
        return _SoapySDR.Device_getFrequencyArgsInfo(self, *args)
    # -- Sample rate and bandwidth --
    def setSampleRate(self, *args):
        return _SoapySDR.Device_setSampleRate(self, *args)
    def getSampleRate(self, *args):
        return _SoapySDR.Device_getSampleRate(self, *args)
    def listSampleRates(self, *args):
        return _SoapySDR.Device_listSampleRates(self, *args)
    def getSampleRateRange(self, *args):
        return _SoapySDR.Device_getSampleRateRange(self, *args)
    def setBandwidth(self, *args):
        return _SoapySDR.Device_setBandwidth(self, *args)
    def getBandwidth(self, *args):
        return _SoapySDR.Device_getBandwidth(self, *args)
    def listBandwidths(self, *args):
        return _SoapySDR.Device_listBandwidths(self, *args)
    def getBandwidthRange(self, *args):
        return _SoapySDR.Device_getBandwidthRange(self, *args)
    # -- Clocking and time sources --
    def setMasterClockRate(self, *args):
        return _SoapySDR.Device_setMasterClockRate(self, *args)
    def getMasterClockRate(self):
        return _SoapySDR.Device_getMasterClockRate(self)
    def getMasterClockRates(self):
        return _SoapySDR.Device_getMasterClockRates(self)
    def listClockSources(self):
        return _SoapySDR.Device_listClockSources(self)
    def setClockSource(self, *args):
        return _SoapySDR.Device_setClockSource(self, *args)
    def getClockSource(self):
        return _SoapySDR.Device_getClockSource(self)
    def listTimeSources(self):
        return _SoapySDR.Device_listTimeSources(self)
    def setTimeSource(self, *args):
        return _SoapySDR.Device_setTimeSource(self, *args)
    def getTimeSource(self):
        return _SoapySDR.Device_getTimeSource(self)
    def hasHardwareTime(self, what=''):
        return _SoapySDR.Device_hasHardwareTime(self, what)
    def getHardwareTime(self, what=''):
        return _SoapySDR.Device_getHardwareTime(self, what)
    def setHardwareTime(self, *args):
        return _SoapySDR.Device_setHardwareTime(self, *args)
    def setCommandTime(self, *args):
        return _SoapySDR.Device_setCommandTime(self, *args)
    # -- Sensors, registers, settings --
    def listSensors(self, *args):
        return _SoapySDR.Device_listSensors(self, *args)
    def getSensorInfo(self, *args):
        return _SoapySDR.Device_getSensorInfo(self, *args)
    def readSensor(self, *args):
        return _SoapySDR.Device_readSensor(self, *args)
    def listRegisterInterfaces(self):
        return _SoapySDR.Device_listRegisterInterfaces(self)
    def writeRegister(self, *args):
        return _SoapySDR.Device_writeRegister(self, *args)
    def readRegister(self, *args):
        return _SoapySDR.Device_readRegister(self, *args)
    def writeRegisters(self, *args):
        return _SoapySDR.Device_writeRegisters(self, *args)
    def readRegisters(self, *args):
        return _SoapySDR.Device_readRegisters(self, *args)
    def getSettingInfo(self, *args):
        return _SoapySDR.Device_getSettingInfo(self, *args)
    def writeSetting(self, *args):
        return _SoapySDR.Device_writeSetting(self, *args)
    def readSetting(self, *args):
        return _SoapySDR.Device_readSetting(self, *args)
    # -- Low-level peripherals: GPIO, I2C, SPI, UART --
    def listGPIOBanks(self):
        return _SoapySDR.Device_listGPIOBanks(self)
    def writeGPIO(self, *args):
        return _SoapySDR.Device_writeGPIO(self, *args)
    def readGPIO(self, *args):
        return _SoapySDR.Device_readGPIO(self, *args)
    def writeGPIODir(self, *args):
        return _SoapySDR.Device_writeGPIODir(self, *args)
    def readGPIODir(self, *args):
        return _SoapySDR.Device_readGPIODir(self, *args)
    def writeI2C(self, *args):
        return _SoapySDR.Device_writeI2C(self, *args)
    def readI2C(self, *args):
        return _SoapySDR.Device_readI2C(self, *args)
    def transactSPI(self, *args):
        return _SoapySDR.Device_transactSPI(self, *args)
    def listUARTs(self):
        return _SoapySDR.Device_listUARTs(self)
    def writeUART(self, *args):
        return _SoapySDR.Device_writeUART(self, *args)
    def readUART(self, *args):
        return _SoapySDR.Device_readUART(self, *args)
    # -- Raw stream entry points wrapped by the hand-written helpers below --
    def readStream__(self, *args):
        return _SoapySDR.Device_readStream__(self, *args)
    def writeStream__(self, *args):
        return _SoapySDR.Device_writeStream__(self, *args)
    def readStreamStatus__(self, *args):
        return _SoapySDR.Device_readStreamStatus__(self, *args)
    def __del__(self):
        # Overrides the earlier no-op lambda: release the underlying
        # device handle when the proxy is collected.
        Device.unmake(self)
    def __str__(self):
        """Render the device as 'driverKey:hardwareKey'."""
        return '%s:%s' % (self.getDriverKey(), self.getHardwareKey())
    def readStream(self, stream, buffs, numElems, flags=0, timeoutUs=100000):
        """Read samples into *buffs*: each buffer object is converted to a
        raw pointer via extractBuffPointer before delegating to readStream__."""
        ptrs = [extractBuffPointer(b) for b in buffs]
        return self.readStream__(stream, ptrs, numElems, flags, timeoutUs)
    def writeStream(self, stream, buffs, numElems, flags=0, timeNs=0,
        timeoutUs=100000):
        """Write samples from *buffs*: each buffer object is converted to a
        raw pointer via extractBuffPointer before delegating to writeStream__."""
        ptrs = [extractBuffPointer(b) for b in buffs]
        return self.writeStream__(stream, ptrs, numElems, flags, timeNs,
            timeoutUs)
    def readStreamStatus(self, stream, timeoutUs=100000):
        """Poll stream status; delegates to readStreamStatus__."""
        return self.readStreamStatus__(stream, timeoutUs)
<|reserved_special_token_0|>
class Device(Device):
    """Re-binding of the Device proxy so that Device(...) constructs a
    device via the SWIG factory: __new__ forwards to cls.make(...)."""
    def __new__(cls, *args, **kwargs):
        # Delegate construction to the factory classmethod on the proxy.
        return cls.make(*args, **kwargs)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _swig_setattr(self, class_type, name, value):
    """Attribute setter for SWIG proxies: forwards to
    _swig_setattr_nondynamic with the final (static) flag set to 0."""
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if name == 'thisown':
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError(name)
def _swig_repr(self):
try:
strthis = 'proxy of ' + self.this.__repr__()
except:
strthis = ''
return '<%s.%s; %s >' % (self.__class__.__module__, self.__class__.
__name__, strthis)
<|reserved_special_token_0|>
class SwigPyIterator(_object):
    """Abstract SWIG iterator proxy; every operation delegates to the
    matching SwigPyIterator_* function in the _SoapySDR extension
    module. Cannot be instantiated directly."""
    # SWIG attribute dispatch tables consulted by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self,
        SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
    def __init__(self, *args, **kwargs):
        # Abstract base: construction is always an error.
        raise AttributeError('No constructor defined - class is abstract')
    __repr__ = _swig_repr
    __swig_destroy__ = _SoapySDR.delete_SwigPyIterator
    __del__ = lambda self: None
    def value(self):
        return _SoapySDR.SwigPyIterator_value(self)
    def incr(self, n=1):
        return _SoapySDR.SwigPyIterator_incr(self, n)
    def decr(self, n=1):
        return _SoapySDR.SwigPyIterator_decr(self, n)
    def distance(self, *args):
        return _SoapySDR.SwigPyIterator_distance(self, *args)
    def equal(self, *args):
        return _SoapySDR.SwigPyIterator_equal(self, *args)
    def copy(self):
        return _SoapySDR.SwigPyIterator_copy(self)
    # Both Python 2 (next) and Python 3 (__next__) iteration protocols.
    def next(self):
        return _SoapySDR.SwigPyIterator_next(self)
    def __next__(self):
        return _SoapySDR.SwigPyIterator___next__(self)
    def previous(self):
        return _SoapySDR.SwigPyIterator_previous(self)
    def advance(self, *args):
        return _SoapySDR.SwigPyIterator_advance(self, *args)
    def __eq__(self, *args):
        return _SoapySDR.SwigPyIterator___eq__(self, *args)
    def __ne__(self, *args):
        return _SoapySDR.SwigPyIterator___ne__(self, *args)
    def __iadd__(self, *args):
        return _SoapySDR.SwigPyIterator___iadd__(self, *args)
    def __isub__(self, *args):
        return _SoapySDR.SwigPyIterator___isub__(self, *args)
    def __add__(self, *args):
        return _SoapySDR.SwigPyIterator___add__(self, *args)
    def __sub__(self, *args):
        return _SoapySDR.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self
<|reserved_special_token_0|>
def KwargsToString(*args):
    """Convert a kwargs map to its string form (delegates to the native library)."""
    return _SoapySDR.KwargsToString(*args)
<|reserved_special_token_0|>
class Range(_object):
    """Proxy for SoapySDR::Range: a numeric range with minimum, maximum and step."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Range, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Range, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _SoapySDR.new_Range(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def minimum(self):
        """Return the lower bound of the range."""
        return _SoapySDR.Range_minimum(self)
    def maximum(self):
        """Return the upper bound of the range."""
        return _SoapySDR.Range_maximum(self)
    def step(self):
        """Return the step size of the range (0 means unspecified/continuous per __str__)."""
        return _SoapySDR.Range_step(self)
    def __str__(self):
        # Rendered as "min, max", or "min, max, step" when a non-zero step exists.
        fields = [self.minimum(), self.maximum()]
        if self.step() != 0.0:
            fields.append(self.step())
        return ', '.join([('%g' % f) for f in fields])
    __swig_destroy__ = _SoapySDR.delete_Range
    __del__ = lambda self: None
<|reserved_special_token_0|>
class ArgInfo(_object):
    """Proxy for SoapySDR::ArgInfo: metadata describing one device argument.

    Exposes the fields key, value, name, description, units, type (one of
    BOOL/INT/FLOAT/STRING), range, options and optionNames via SWIG
    property accessors.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, ArgInfo, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, ArgInfo, name)
    __repr__ = _swig_repr
    def __init__(self):
        this = _SoapySDR.new_ArgInfo()
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_setmethods__['key'] = _SoapySDR.ArgInfo_key_set
    __swig_getmethods__['key'] = _SoapySDR.ArgInfo_key_get
    if _newclass:
        key = _swig_property(_SoapySDR.ArgInfo_key_get, _SoapySDR.ArgInfo_key_set)
    __swig_setmethods__['value'] = _SoapySDR.ArgInfo_value_set
    __swig_getmethods__['value'] = _SoapySDR.ArgInfo_value_get
    if _newclass:
        value = _swig_property(_SoapySDR.ArgInfo_value_get, _SoapySDR.ArgInfo_value_set)
    __swig_setmethods__['name'] = _SoapySDR.ArgInfo_name_set
    __swig_getmethods__['name'] = _SoapySDR.ArgInfo_name_get
    if _newclass:
        name = _swig_property(_SoapySDR.ArgInfo_name_get, _SoapySDR.ArgInfo_name_set)
    __swig_setmethods__['description'] = _SoapySDR.ArgInfo_description_set
    __swig_getmethods__['description'] = _SoapySDR.ArgInfo_description_get
    if _newclass:
        description = _swig_property(_SoapySDR.ArgInfo_description_get, _SoapySDR.ArgInfo_description_set)
    __swig_setmethods__['units'] = _SoapySDR.ArgInfo_units_set
    __swig_getmethods__['units'] = _SoapySDR.ArgInfo_units_get
    if _newclass:
        units = _swig_property(_SoapySDR.ArgInfo_units_get, _SoapySDR.ArgInfo_units_set)
    # Argument type discriminants (mirror the C++ enum).
    BOOL = _SoapySDR.ArgInfo_BOOL
    INT = _SoapySDR.ArgInfo_INT
    FLOAT = _SoapySDR.ArgInfo_FLOAT
    STRING = _SoapySDR.ArgInfo_STRING
    __swig_setmethods__['type'] = _SoapySDR.ArgInfo_type_set
    __swig_getmethods__['type'] = _SoapySDR.ArgInfo_type_get
    if _newclass:
        type = _swig_property(_SoapySDR.ArgInfo_type_get, _SoapySDR.ArgInfo_type_set)
    __swig_setmethods__['range'] = _SoapySDR.ArgInfo_range_set
    __swig_getmethods__['range'] = _SoapySDR.ArgInfo_range_get
    if _newclass:
        range = _swig_property(_SoapySDR.ArgInfo_range_get, _SoapySDR.ArgInfo_range_set)
    __swig_setmethods__['options'] = _SoapySDR.ArgInfo_options_set
    __swig_getmethods__['options'] = _SoapySDR.ArgInfo_options_get
    if _newclass:
        options = _swig_property(_SoapySDR.ArgInfo_options_get, _SoapySDR.ArgInfo_options_set)
    __swig_setmethods__['optionNames'] = _SoapySDR.ArgInfo_optionNames_set
    __swig_getmethods__['optionNames'] = _SoapySDR.ArgInfo_optionNames_get
    if _newclass:
        optionNames = _swig_property(_SoapySDR.ArgInfo_optionNames_get, _SoapySDR.ArgInfo_optionNames_set)
    __swig_destroy__ = _SoapySDR.delete_ArgInfo
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRKwargs(_object):
    """Dict-like proxy for SoapySDR::Kwargs (a C++ string-to-string map).

    Supports the usual mapping protocol (__getitem__/__setitem__/__delitem__,
    __contains__, keys/values/items, iteration over keys) plus the underlying
    std::map-style operations (find, count, lower_bound, ...).
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRKwargs, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargs, name)
    __repr__ = _swig_repr
    def iterator(self):
        """Return an item (key/value pair) iterator."""
        return _SoapySDR.SoapySDRKwargs_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRKwargs___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRKwargs___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRKwargs___len__(self)
    # NOTE: this second __iter__ deliberately overrides the one above so that
    # plain iteration yields keys, matching Python dict semantics.
    def __iter__(self):
        return self.key_iterator()
    def iterkeys(self):
        return self.key_iterator()
    def itervalues(self):
        return self.value_iterator()
    def iteritems(self):
        return self.iterator()
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRKwargs___getitem__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRKwargs___delitem__(self, *args)
    def has_key(self, *args):
        return _SoapySDR.SoapySDRKwargs_has_key(self, *args)
    def keys(self):
        return _SoapySDR.SoapySDRKwargs_keys(self)
    def values(self):
        return _SoapySDR.SoapySDRKwargs_values(self)
    def items(self):
        return _SoapySDR.SoapySDRKwargs_items(self)
    def __contains__(self, *args):
        return _SoapySDR.SoapySDRKwargs___contains__(self, *args)
    def key_iterator(self):
        return _SoapySDR.SoapySDRKwargs_key_iterator(self)
    def value_iterator(self):
        return _SoapySDR.SoapySDRKwargs_value_iterator(self)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRKwargs___setitem__(self, *args)
    def asdict(self):
        """Return a plain Python dict copy of the map."""
        return _SoapySDR.SoapySDRKwargs_asdict(self)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRKwargs(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def empty(self):
        return _SoapySDR.SoapySDRKwargs_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRKwargs_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRKwargs_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRKwargs_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRKwargs_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRKwargs_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRKwargs_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRKwargs_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRKwargs_rend(self)
    def count(self, *args):
        return _SoapySDR.SoapySDRKwargs_count(self, *args)
    def erase(self, *args):
        return _SoapySDR.SoapySDRKwargs_erase(self, *args)
    def find(self, *args):
        return _SoapySDR.SoapySDRKwargs_find(self, *args)
    def lower_bound(self, *args):
        return _SoapySDR.SoapySDRKwargs_lower_bound(self, *args)
    def upper_bound(self, *args):
        return _SoapySDR.SoapySDRKwargs_upper_bound(self, *args)
    def __str__(self):
        # Render as "{k1=v1, k2=v2, ...}".
        out = list()
        for k, v in self.iteritems():
            out.append('%s=%s' % (k, v))
        return '{' + ', '.join(out) + '}'
    __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargs
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRKwargsList(_object):
    """List-like proxy for a C++ vector of SoapySDR kwargs maps.

    Supports the Python sequence protocol (indexing, slicing, append, len,
    iteration) plus std::vector-style operations (push_back, front, back,
    resize, reserve, ...).
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRKwargsList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargsList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRKwargsList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRKwargsList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRKwargsList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRKwargsList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRKwargsList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRKwargsList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRKwargsList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRKwargsList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRKwargsList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRKwargsList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRKwargsList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRKwargsList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRKwargsList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRKwargsList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRKwargsList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRKwargsList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRKwargsList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRKwargsList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRKwargsList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRKwargsList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRKwargsList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRKwargsList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRKwargsList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRKwargsList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRKwargsList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRKwargsList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargsList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRArgInfoList(_object):
    """List-like proxy for a C++ vector of ArgInfo entries.

    Supports the Python sequence protocol plus std::vector-style operations.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRArgInfoList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRArgInfoList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRArgInfoList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRArgInfoList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRArgInfoList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRArgInfoList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRArgInfoList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRArgInfoList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRArgInfoList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRArgInfoList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRArgInfoList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRArgInfoList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRArgInfoList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRArgInfoList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRArgInfoList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRArgInfoList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRArgInfoList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRArgInfoList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRArgInfoList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRArgInfoList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRArgInfoList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRStringList(_object):
    """List-like proxy for a C++ vector of strings.

    Supports the Python sequence protocol plus std::vector-style operations.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRStringList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRStringList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRStringList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRStringList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRStringList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRStringList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRStringList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRStringList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRStringList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRStringList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRStringList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRStringList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRStringList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRStringList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRStringList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRStringList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRStringList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRStringList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRStringList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRStringList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRStringList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRStringList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRStringList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRStringList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRStringList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRStringList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRStringList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRStringList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRStringList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRStringList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRStringList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRStringList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRStringList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRStringList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRStringList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRRangeList(_object):
    """List-like proxy for a C++ vector of Range objects.

    Supports the Python sequence protocol plus std::vector-style operations.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRRangeList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRRangeList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRRangeList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRRangeList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRRangeList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRRangeList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRRangeList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRRangeList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRRangeList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRRangeList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRRangeList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRRangeList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRRangeList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRRangeList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRRangeList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRRangeList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRRangeList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRRangeList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRRangeList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRRangeList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRRangeList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRRangeList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRRangeList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRRangeList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRRangeList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRRangeList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRRangeList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRRangeList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRRangeList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRRangeList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRRangeList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRRangeList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRRangeList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRRangeList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRRangeList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRSizeList(_object):
    """List-like proxy for a C++ vector of size values.

    Supports the Python sequence protocol plus std::vector-style operations.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRSizeList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRSizeList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRSizeList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRSizeList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRSizeList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRSizeList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRSizeList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRSizeList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRSizeList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRSizeList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRSizeList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRSizeList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRSizeList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRSizeList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRSizeList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRSizeList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRSizeList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRSizeList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRSizeList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRSizeList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRSizeList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRSizeList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRSizeList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRSizeList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRSizeList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRSizeList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRSizeList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRSizeList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRSizeList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRDoubleList(_object):
    """List-like proxy for a C++ vector of doubles.

    Supports the Python sequence protocol plus std::vector-style operations.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRDoubleList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRDoubleList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRDoubleList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRDoubleList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRDoubleList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRDoubleList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRDoubleList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRDoubleList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRDoubleList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRDoubleList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRDoubleList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRDoubleList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRDoubleList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRDoubleList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRDoubleList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRDoubleList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRDoubleList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRDoubleList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRDoubleList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRDoubleList(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRDoubleList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRDoubleList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRDoubleList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRDoubleList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRDoubleList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRDoubleList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRDoubleList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRDoubleList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRDoubleList
    __del__ = lambda self: None
__del__ = lambda self: None
<|reserved_special_token_0|>
class StreamResult(_object):
    """Proxy holding the result of a stream call.

    Exposes the fields ret, flags, timeNs and chanMask via SWIG property
    accessors; presumably ret is the element count or an error code —
    TODO confirm against the stream API documentation.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StreamResult, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StreamResult, name)
    __repr__ = _swig_repr
    def __init__(self):
        this = _SoapySDR.new_StreamResult()
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_setmethods__['ret'] = _SoapySDR.StreamResult_ret_set
    __swig_getmethods__['ret'] = _SoapySDR.StreamResult_ret_get
    if _newclass:
        ret = _swig_property(_SoapySDR.StreamResult_ret_get, _SoapySDR.StreamResult_ret_set)
    __swig_setmethods__['flags'] = _SoapySDR.StreamResult_flags_set
    __swig_getmethods__['flags'] = _SoapySDR.StreamResult_flags_get
    if _newclass:
        flags = _swig_property(_SoapySDR.StreamResult_flags_get, _SoapySDR.StreamResult_flags_set)
    __swig_setmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_set
    __swig_getmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_get
    if _newclass:
        timeNs = _swig_property(_SoapySDR.StreamResult_timeNs_get, _SoapySDR.StreamResult_timeNs_set)
    __swig_setmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_set
    __swig_getmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_get
    if _newclass:
        chanMask = _swig_property(_SoapySDR.StreamResult_chanMask_get, _SoapySDR.StreamResult_chanMask_set)
    def __str__(self):
        # chanMask is intentionally omitted from the string form.
        return 'ret=%s, flags=%s, timeNs=%s' % (self.ret, self.flags, self.timeNs)
    __swig_destroy__ = _SoapySDR.delete_StreamResult
    __del__ = lambda self: None
<|reserved_special_token_0|>
def SoapySDR_getAPIVersion():
    """Return the SoapySDR API version string (delegates to the native library)."""
    return _SoapySDR.SoapySDR_getAPIVersion()
<|reserved_special_token_0|>
def SoapySDR_getABIVersion():
    """Return the SoapySDR ABI version string (delegates to the native library)."""
    return _SoapySDR.SoapySDR_getABIVersion()
<|reserved_special_token_0|>
def loadModules():
    """Load SoapySDR support modules (delegates to the native library)."""
    return _SoapySDR.loadModules()
<|reserved_special_token_0|>
def formatToSize(*args):
    """Return the size associated with a stream format string (delegates to the native library)."""
    return _SoapySDR.formatToSize(*args)
<|reserved_special_token_0|>
def ticksToTimeNs(*args):
    """Convert a tick count to a time in nanoseconds (delegates to the native library)."""
    return _SoapySDR.ticksToTimeNs(*args)
<|reserved_special_token_0|>
def setLogLevel(*args):
    """Set the SoapySDR logging threshold (delegates to the native library)."""
    return _SoapySDR.setLogLevel(*args)
<|reserved_special_token_0|>
class Device(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Device,
name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Device, name)
def __init__(self, *args, **kwargs):
raise AttributeError('No constructor defined')
__repr__ = _swig_repr
__swig_destroy__ = _SoapySDR.delete_Device
__del__ = lambda self: None
__swig_getmethods__['enumerate'] = lambda x: _SoapySDR.Device_enumerate
if _newclass:
enumerate = staticmethod(_SoapySDR.Device_enumerate)
__swig_getmethods__['make'] = lambda x: _SoapySDR.Device_make
if _newclass:
make = staticmethod(_SoapySDR.Device_make)
__swig_getmethods__['unmake'] = lambda x: _SoapySDR.Device_unmake
if _newclass:
unmake = staticmethod(_SoapySDR.Device_unmake)
def getDriverKey(self):
return _SoapySDR.Device_getDriverKey(self)
def getHardwareKey(self):
return _SoapySDR.Device_getHardwareKey(self)
def getHardwareInfo(self):
return _SoapySDR.Device_getHardwareInfo(self)
def setFrontendMapping(self, *args):
return _SoapySDR.Device_setFrontendMapping(self, *args)
def getFrontendMapping(self, *args):
return _SoapySDR.Device_getFrontendMapping(self, *args)
def getNumChannels(self, *args):
return _SoapySDR.Device_getNumChannels(self, *args)
def getChannelInfo(self, *args):
return _SoapySDR.Device_getChannelInfo(self, *args)
def getFullDuplex(self, *args):
return _SoapySDR.Device_getFullDuplex(self, *args)
def getStreamFormats(self, *args):
return _SoapySDR.Device_getStreamFormats(self, *args)
def getNativeStreamFormat(self, *args):
return _SoapySDR.Device_getNativeStreamFormat(self, *args)
def getStreamArgsInfo(self, *args):
return _SoapySDR.Device_getStreamArgsInfo(self, *args)
def setupStream(self, *args):
return _SoapySDR.Device_setupStream(self, *args)
def closeStream(self, *args):
return _SoapySDR.Device_closeStream(self, *args)
def getStreamMTU(self, *args):
return _SoapySDR.Device_getStreamMTU(self, *args)
def activateStream(self, *args):
return _SoapySDR.Device_activateStream(self, *args)
def deactivateStream(self, *args):
return _SoapySDR.Device_deactivateStream(self, *args)
def readStream(self, *args):
return _SoapySDR.Device_readStream(self, *args)
def writeStream(self, *args):
return _SoapySDR.Device_writeStream(self, *args)
def readStreamStatus(self, *args):
return _SoapySDR.Device_readStreamStatus(self, *args)
def getNumDirectAccessBuffers(self, *args):
return _SoapySDR.Device_getNumDirectAccessBuffers(self, *args)
def getDirectAccessBufferAddrs(self, *args):
return _SoapySDR.Device_getDirectAccessBufferAddrs(self, *args)
def acquireReadBuffer(self, *args):
return _SoapySDR.Device_acquireReadBuffer(self, *args)
def releaseReadBuffer(self, *args):
return _SoapySDR.Device_releaseReadBuffer(self, *args)
    # Auto-generated SWIG delegators: every method below forwards directly to
    # the matching Device_* entry point in the _SoapySDR C extension module.
    # -- direct buffer access --
    def acquireWriteBuffer(self, *args):
        return _SoapySDR.Device_acquireWriteBuffer(self, *args)
    def releaseWriteBuffer(self, *args):
        return _SoapySDR.Device_releaseWriteBuffer(self, *args)
    # -- antenna selection --
    def listAntennas(self, *args):
        return _SoapySDR.Device_listAntennas(self, *args)
    def setAntenna(self, *args):
        return _SoapySDR.Device_setAntenna(self, *args)
    def getAntenna(self, *args):
        return _SoapySDR.Device_getAntenna(self, *args)
    # -- DC offset, IQ balance, frequency correction --
    def hasDCOffsetMode(self, *args):
        return _SoapySDR.Device_hasDCOffsetMode(self, *args)
    def setDCOffsetMode(self, *args):
        return _SoapySDR.Device_setDCOffsetMode(self, *args)
    def getDCOffsetMode(self, *args):
        return _SoapySDR.Device_getDCOffsetMode(self, *args)
    def hasDCOffset(self, *args):
        return _SoapySDR.Device_hasDCOffset(self, *args)
    def setDCOffset(self, *args):
        return _SoapySDR.Device_setDCOffset(self, *args)
    def getDCOffset(self, *args):
        return _SoapySDR.Device_getDCOffset(self, *args)
    def hasIQBalance(self, *args):
        return _SoapySDR.Device_hasIQBalance(self, *args)
    def setIQBalance(self, *args):
        return _SoapySDR.Device_setIQBalance(self, *args)
    def getIQBalance(self, *args):
        return _SoapySDR.Device_getIQBalance(self, *args)
    def hasFrequencyCorrection(self, *args):
        return _SoapySDR.Device_hasFrequencyCorrection(self, *args)
    def setFrequencyCorrection(self, *args):
        return _SoapySDR.Device_setFrequencyCorrection(self, *args)
    def getFrequencyCorrection(self, *args):
        return _SoapySDR.Device_getFrequencyCorrection(self, *args)
    # -- gain control --
    def listGains(self, *args):
        return _SoapySDR.Device_listGains(self, *args)
    def hasGainMode(self, *args):
        return _SoapySDR.Device_hasGainMode(self, *args)
    def setGainMode(self, *args):
        return _SoapySDR.Device_setGainMode(self, *args)
    def getGainMode(self, *args):
        return _SoapySDR.Device_getGainMode(self, *args)
    def setGain(self, *args):
        return _SoapySDR.Device_setGain(self, *args)
    def getGain(self, *args):
        return _SoapySDR.Device_getGain(self, *args)
    def getGainRange(self, *args):
        return _SoapySDR.Device_getGainRange(self, *args)
    # -- frequency tuning --
    def setFrequency(self, *args):
        return _SoapySDR.Device_setFrequency(self, *args)
    def getFrequency(self, *args):
        return _SoapySDR.Device_getFrequency(self, *args)
    def listFrequencies(self, *args):
        return _SoapySDR.Device_listFrequencies(self, *args)
    def getFrequencyRange(self, *args):
        return _SoapySDR.Device_getFrequencyRange(self, *args)
    def getFrequencyArgsInfo(self, *args):
        return _SoapySDR.Device_getFrequencyArgsInfo(self, *args)
    # -- sample rate and bandwidth --
    def setSampleRate(self, *args):
        return _SoapySDR.Device_setSampleRate(self, *args)
    def getSampleRate(self, *args):
        return _SoapySDR.Device_getSampleRate(self, *args)
    def listSampleRates(self, *args):
        return _SoapySDR.Device_listSampleRates(self, *args)
    def getSampleRateRange(self, *args):
        return _SoapySDR.Device_getSampleRateRange(self, *args)
    def setBandwidth(self, *args):
        return _SoapySDR.Device_setBandwidth(self, *args)
    def getBandwidth(self, *args):
        return _SoapySDR.Device_getBandwidth(self, *args)
    def listBandwidths(self, *args):
        return _SoapySDR.Device_listBandwidths(self, *args)
    def getBandwidthRange(self, *args):
        return _SoapySDR.Device_getBandwidthRange(self, *args)
    # -- clocking --
    def setMasterClockRate(self, *args):
        return _SoapySDR.Device_setMasterClockRate(self, *args)
    def getMasterClockRate(self):
        return _SoapySDR.Device_getMasterClockRate(self)
    def getMasterClockRates(self):
        return _SoapySDR.Device_getMasterClockRates(self)
    def listClockSources(self):
        return _SoapySDR.Device_listClockSources(self)
    def setClockSource(self, *args):
        return _SoapySDR.Device_setClockSource(self, *args)
    def getClockSource(self):
        return _SoapySDR.Device_getClockSource(self)
    # -- time sources and hardware time --
    def listTimeSources(self):
        return _SoapySDR.Device_listTimeSources(self)
    def setTimeSource(self, *args):
        return _SoapySDR.Device_setTimeSource(self, *args)
    def getTimeSource(self):
        return _SoapySDR.Device_getTimeSource(self)
    def hasHardwareTime(self, what=''):
        return _SoapySDR.Device_hasHardwareTime(self, what)
    def getHardwareTime(self, what=''):
        return _SoapySDR.Device_getHardwareTime(self, what)
    def setHardwareTime(self, *args):
        return _SoapySDR.Device_setHardwareTime(self, *args)
    def setCommandTime(self, *args):
        return _SoapySDR.Device_setCommandTime(self, *args)
    # -- sensors --
    def listSensors(self, *args):
        return _SoapySDR.Device_listSensors(self, *args)
    def getSensorInfo(self, *args):
        return _SoapySDR.Device_getSensorInfo(self, *args)
    def readSensor(self, *args):
        return _SoapySDR.Device_readSensor(self, *args)
    # -- low-level register access --
    def listRegisterInterfaces(self):
        return _SoapySDR.Device_listRegisterInterfaces(self)
    def writeRegister(self, *args):
        return _SoapySDR.Device_writeRegister(self, *args)
    def readRegister(self, *args):
        return _SoapySDR.Device_readRegister(self, *args)
    def writeRegisters(self, *args):
        return _SoapySDR.Device_writeRegisters(self, *args)
    def readRegisters(self, *args):
        return _SoapySDR.Device_readRegisters(self, *args)
    # -- arbitrary key/value settings --
    def getSettingInfo(self, *args):
        return _SoapySDR.Device_getSettingInfo(self, *args)
    def writeSetting(self, *args):
        return _SoapySDR.Device_writeSetting(self, *args)
    def readSetting(self, *args):
        return _SoapySDR.Device_readSetting(self, *args)
    # -- GPIO --
    def listGPIOBanks(self):
        return _SoapySDR.Device_listGPIOBanks(self)
    def writeGPIO(self, *args):
        return _SoapySDR.Device_writeGPIO(self, *args)
    def readGPIO(self, *args):
        return _SoapySDR.Device_readGPIO(self, *args)
    def writeGPIODir(self, *args):
        return _SoapySDR.Device_writeGPIODir(self, *args)
    def readGPIODir(self, *args):
        return _SoapySDR.Device_readGPIODir(self, *args)
    # -- I2C / SPI / UART --
    def writeI2C(self, *args):
        return _SoapySDR.Device_writeI2C(self, *args)
    def readI2C(self, *args):
        return _SoapySDR.Device_readI2C(self, *args)
    def transactSPI(self, *args):
        return _SoapySDR.Device_transactSPI(self, *args)
    def listUARTs(self):
        return _SoapySDR.Device_listUARTs(self)
    def writeUART(self, *args):
        return _SoapySDR.Device_writeUART(self, *args)
    def readUART(self, *args):
        return _SoapySDR.Device_readUART(self, *args)
    # -- raw stream calls (wrapped by the Python-level readStream/writeStream
    # -- convenience methods defined later in this class) --
    def readStream__(self, *args):
        return _SoapySDR.Device_readStream__(self, *args)
    def writeStream__(self, *args):
        return _SoapySDR.Device_writeStream__(self, *args)
    def readStreamStatus__(self, *args):
        return _SoapySDR.Device_readStreamStatus__(self, *args)
    def __del__(self):
        # Release the underlying device handle via the module-level factory;
        # unmake is the counterpart of the make() call used by construction.
        Device.unmake(self)
def __str__(self):
return '%s:%s' % (self.getDriverKey(), self.getHardwareKey())
def readStream(self, stream, buffs, numElems, flags=0, timeoutUs=100000):
ptrs = [extractBuffPointer(b) for b in buffs]
return self.readStream__(stream, ptrs, numElems, flags, timeoutUs)
def writeStream(self, stream, buffs, numElems, flags=0, timeNs=0,
timeoutUs=100000):
ptrs = [extractBuffPointer(b) for b in buffs]
return self.writeStream__(stream, ptrs, numElems, flags, timeNs,
timeoutUs)
    def readStreamStatus(self, stream, timeoutUs=100000):
        # Thin convenience wrapper over the raw readStreamStatus__ call.
        return self.readStreamStatus__(stream, timeoutUs)
<|reserved_special_token_0|>
class Device(Device):
    """Factory shim that rebinds the name ``Device``: calling ``Device(...)``
    constructs an instance through the inherited ``make`` factory instead of
    the normal ``__init__`` path."""
    def __new__(cls, *args, **kwargs):
        # Delegate construction to the SWIG-level make() factory.
        return cls.make(*args, **kwargs)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _swig_setattr(self, class_type, name, value):
    """Set *name* on a SWIG proxy; forwards to _swig_setattr_nondynamic with
    static_setting flag 0 (defined earlier in this module)."""
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if name == 'thisown':
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError(name)
def _swig_repr(self):
try:
strthis = 'proxy of ' + self.this.__repr__()
except:
strthis = ''
return '<%s.%s; %s >' % (self.__class__.__module__, self.__class__.
__name__, strthis)
<|reserved_special_token_0|>
class SwigPyIterator(_object):
    """Abstract SWIG iterator proxy; all operations delegate to the
    _SoapySDR C extension. Cannot be constructed directly."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
    def __init__(self, *args, **kwargs):
        # Abstract base: instances are only ever created by the C extension.
        raise AttributeError('No constructor defined - class is abstract')
    __repr__ = _swig_repr
    __swig_destroy__ = _SoapySDR.delete_SwigPyIterator
    __del__ = lambda self: None
    def value(self):
        return _SoapySDR.SwigPyIterator_value(self)
    def incr(self, n=1):
        return _SoapySDR.SwigPyIterator_incr(self, n)
    def decr(self, n=1):
        return _SoapySDR.SwigPyIterator_decr(self, n)
    def distance(self, *args):
        return _SoapySDR.SwigPyIterator_distance(self, *args)
    def equal(self, *args):
        return _SoapySDR.SwigPyIterator_equal(self, *args)
    def copy(self):
        return _SoapySDR.SwigPyIterator_copy(self)
    def next(self):
        return _SoapySDR.SwigPyIterator_next(self)
    def __next__(self):
        return _SoapySDR.SwigPyIterator___next__(self)
    def previous(self):
        return _SoapySDR.SwigPyIterator_previous(self)
    def advance(self, *args):
        return _SoapySDR.SwigPyIterator_advance(self, *args)
    def __eq__(self, *args):
        return _SoapySDR.SwigPyIterator___eq__(self, *args)
    def __ne__(self, *args):
        return _SoapySDR.SwigPyIterator___ne__(self, *args)
    def __iadd__(self, *args):
        return _SoapySDR.SwigPyIterator___iadd__(self, *args)
    def __isub__(self, *args):
        return _SoapySDR.SwigPyIterator___isub__(self, *args)
    def __add__(self, *args):
        return _SoapySDR.SwigPyIterator___add__(self, *args)
    def __sub__(self, *args):
        return _SoapySDR.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self
<|reserved_special_token_0|>
def KwargsToString(*args):
    """Forward to the C extension: _SoapySDR.KwargsToString."""
    return _SoapySDR.KwargsToString(*args)
<|reserved_special_token_0|>
class Range(_object):
    """SWIG proxy for a numeric range with minimum, maximum and step;
    all accessors delegate to the _SoapySDR C extension."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Range, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Range, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _SoapySDR.new_Range(*args)
        # Standard SWIG idiom: attach the new C pointer to this proxy.
        try:
            self.this.append(this)
        except:
            self.this = this
    def minimum(self):
        return _SoapySDR.Range_minimum(self)
    def maximum(self):
        return _SoapySDR.Range_maximum(self)
    def step(self):
        return _SoapySDR.Range_step(self)
    def __str__(self):
        # Render as "min, max" and include the step only when it is nonzero.
        fields = [self.minimum(), self.maximum()]
        if self.step() != 0.0:
            fields.append(self.step())
        return ', '.join([('%g' % f) for f in fields])
    __swig_destroy__ = _SoapySDR.delete_Range
    __del__ = lambda self: None
<|reserved_special_token_0|>
class ArgInfo(_object):
    """SWIG proxy describing one device argument: key/value, display name,
    description, units, type tag (BOOL/INT/FLOAT/STRING), numeric range and
    option lists. Attribute access is routed through the
    __swig_setmethods__/__swig_getmethods__ tables into the C extension."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, ArgInfo, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, ArgInfo, name)
    __repr__ = _swig_repr
    def __init__(self):
        this = _SoapySDR.new_ArgInfo()
        # Standard SWIG idiom: attach the new C pointer to this proxy.
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_setmethods__['key'] = _SoapySDR.ArgInfo_key_set
    __swig_getmethods__['key'] = _SoapySDR.ArgInfo_key_get
    if _newclass:
        key = _swig_property(_SoapySDR.ArgInfo_key_get, _SoapySDR.ArgInfo_key_set)
    __swig_setmethods__['value'] = _SoapySDR.ArgInfo_value_set
    __swig_getmethods__['value'] = _SoapySDR.ArgInfo_value_get
    if _newclass:
        value = _swig_property(_SoapySDR.ArgInfo_value_get, _SoapySDR.ArgInfo_value_set)
    __swig_setmethods__['name'] = _SoapySDR.ArgInfo_name_set
    __swig_getmethods__['name'] = _SoapySDR.ArgInfo_name_get
    if _newclass:
        name = _swig_property(_SoapySDR.ArgInfo_name_get, _SoapySDR.ArgInfo_name_set)
    __swig_setmethods__['description'] = _SoapySDR.ArgInfo_description_set
    __swig_getmethods__['description'] = _SoapySDR.ArgInfo_description_get
    if _newclass:
        description = _swig_property(_SoapySDR.ArgInfo_description_get, _SoapySDR.ArgInfo_description_set)
    __swig_setmethods__['units'] = _SoapySDR.ArgInfo_units_set
    __swig_getmethods__['units'] = _SoapySDR.ArgInfo_units_get
    if _newclass:
        units = _swig_property(_SoapySDR.ArgInfo_units_get, _SoapySDR.ArgInfo_units_set)
    # Type-tag constants mirrored from the C extension.
    BOOL = _SoapySDR.ArgInfo_BOOL
    INT = _SoapySDR.ArgInfo_INT
    FLOAT = _SoapySDR.ArgInfo_FLOAT
    STRING = _SoapySDR.ArgInfo_STRING
    __swig_setmethods__['type'] = _SoapySDR.ArgInfo_type_set
    __swig_getmethods__['type'] = _SoapySDR.ArgInfo_type_get
    if _newclass:
        type = _swig_property(_SoapySDR.ArgInfo_type_get, _SoapySDR.ArgInfo_type_set)
    __swig_setmethods__['range'] = _SoapySDR.ArgInfo_range_set
    __swig_getmethods__['range'] = _SoapySDR.ArgInfo_range_get
    if _newclass:
        range = _swig_property(_SoapySDR.ArgInfo_range_get, _SoapySDR.ArgInfo_range_set)
    __swig_setmethods__['options'] = _SoapySDR.ArgInfo_options_set
    __swig_getmethods__['options'] = _SoapySDR.ArgInfo_options_get
    if _newclass:
        options = _swig_property(_SoapySDR.ArgInfo_options_get, _SoapySDR.ArgInfo_options_set)
    __swig_setmethods__['optionNames'] = _SoapySDR.ArgInfo_optionNames_set
    __swig_getmethods__['optionNames'] = _SoapySDR.ArgInfo_optionNames_get
    if _newclass:
        optionNames = _swig_property(_SoapySDR.ArgInfo_optionNames_get, _SoapySDR.ArgInfo_optionNames_set)
    __swig_destroy__ = _SoapySDR.delete_ArgInfo
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRKwargs(_object):
    """SWIG-generated dict-like string map; every operation delegates to the
    _SoapySDR C extension. Supports the mapping protocol plus the wrapped
    container's native begin/end/find/count style accessors."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRKwargs, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargs, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRKwargs_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRKwargs___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRKwargs___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRKwargs___len__(self)
    # NOTE: this second __iter__ definition supersedes the one above, so
    # iteration yields keys (matching dict semantics), as SWIG generates.
    def __iter__(self):
        return self.key_iterator()
    def iterkeys(self):
        return self.key_iterator()
    def itervalues(self):
        return self.value_iterator()
    def iteritems(self):
        return self.iterator()
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRKwargs___getitem__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRKwargs___delitem__(self, *args)
    def has_key(self, *args):
        return _SoapySDR.SoapySDRKwargs_has_key(self, *args)
    def keys(self):
        return _SoapySDR.SoapySDRKwargs_keys(self)
    def values(self):
        return _SoapySDR.SoapySDRKwargs_values(self)
    def items(self):
        return _SoapySDR.SoapySDRKwargs_items(self)
    def __contains__(self, *args):
        return _SoapySDR.SoapySDRKwargs___contains__(self, *args)
    def key_iterator(self):
        return _SoapySDR.SoapySDRKwargs_key_iterator(self)
    def value_iterator(self):
        return _SoapySDR.SoapySDRKwargs_value_iterator(self)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRKwargs___setitem__(self, *args)
    def asdict(self):
        return _SoapySDR.SoapySDRKwargs_asdict(self)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRKwargs(*args)
        # Standard SWIG idiom: attach the new C pointer to this proxy.
        try:
            self.this.append(this)
        except:
            self.this = this
    def empty(self):
        return _SoapySDR.SoapySDRKwargs_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRKwargs_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRKwargs_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRKwargs_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRKwargs_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRKwargs_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRKwargs_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRKwargs_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRKwargs_rend(self)
    def count(self, *args):
        return _SoapySDR.SoapySDRKwargs_count(self, *args)
    def erase(self, *args):
        return _SoapySDR.SoapySDRKwargs_erase(self, *args)
    def find(self, *args):
        return _SoapySDR.SoapySDRKwargs_find(self, *args)
    def lower_bound(self, *args):
        return _SoapySDR.SoapySDRKwargs_lower_bound(self, *args)
    def upper_bound(self, *args):
        return _SoapySDR.SoapySDRKwargs_upper_bound(self, *args)
    def __str__(self):
        # Render as "{k=v, k=v, ...}".
        out = list()
        for k, v in self.iteritems():
            out.append('%s=%s' % (k, v))
        return '{' + ', '.join(out) + '}'
    __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargs
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRKwargsList(_object):
    """SWIG-generated list-like container of kwargs entries; every operation
    delegates to the _SoapySDR C extension. Exposes both the Python sequence
    protocol and the wrapped container's push_back/reserve-style API."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRKwargsList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargsList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRKwargsList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRKwargsList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRKwargsList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRKwargsList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRKwargsList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRKwargsList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRKwargsList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRKwargsList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRKwargsList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRKwargsList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRKwargsList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRKwargsList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRKwargsList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRKwargsList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRKwargsList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRKwargsList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRKwargsList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRKwargsList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRKwargsList(*args)
        # Standard SWIG idiom: attach the new C pointer to this proxy.
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRKwargsList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRKwargsList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRKwargsList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRKwargsList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRKwargsList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRKwargsList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRKwargsList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRKwargsList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargsList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRArgInfoList(_object):
    """SWIG-generated list-like container of ArgInfo entries; every operation
    delegates to the _SoapySDR C extension."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRArgInfoList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRArgInfoList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRArgInfoList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRArgInfoList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRArgInfoList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRArgInfoList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRArgInfoList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRArgInfoList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRArgInfoList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRArgInfoList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRArgInfoList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRArgInfoList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRArgInfoList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRArgInfoList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRArgInfoList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRArgInfoList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRArgInfoList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRArgInfoList(*args)
        # Standard SWIG idiom: attach the new C pointer to this proxy.
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRArgInfoList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRArgInfoList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRArgInfoList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRArgInfoList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRArgInfoList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRStringList(_object):
    """SWIG-generated list-like container of strings; every operation
    delegates to the _SoapySDR C extension."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRStringList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRStringList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRStringList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRStringList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRStringList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRStringList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRStringList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRStringList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRStringList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRStringList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRStringList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRStringList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRStringList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRStringList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRStringList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRStringList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRStringList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRStringList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRStringList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRStringList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRStringList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRStringList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRStringList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRStringList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRStringList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRStringList(*args)
        # Standard SWIG idiom: attach the new C pointer to this proxy.
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRStringList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRStringList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRStringList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRStringList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRStringList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRStringList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRStringList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRStringList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRStringList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRRangeList(_object):
    """SWIG-generated list-like container of Range entries; every operation
    delegates to the _SoapySDR C extension."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRRangeList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRRangeList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRRangeList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRRangeList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRRangeList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRRangeList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRRangeList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRRangeList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRRangeList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRRangeList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRRangeList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRRangeList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRRangeList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRRangeList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRRangeList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRRangeList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRRangeList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRRangeList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRRangeList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRRangeList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRRangeList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRRangeList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRRangeList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRRangeList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRRangeList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRRangeList(*args)
        # Standard SWIG idiom: attach the new C pointer to this proxy.
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRRangeList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRRangeList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRRangeList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRRangeList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRRangeList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRRangeList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRRangeList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRRangeList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRRangeList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRSizeList(_object):
    """SWIG-generated list-like container of size values; every operation
    delegates to the _SoapySDR C extension."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRSizeList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRSizeList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRSizeList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRSizeList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRSizeList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRSizeList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRSizeList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRSizeList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRSizeList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRSizeList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRSizeList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRSizeList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRSizeList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRSizeList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRSizeList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRSizeList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRSizeList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRSizeList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRSizeList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRSizeList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRSizeList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRSizeList(*args)
        # Standard SWIG idiom: attach the new C pointer to this proxy.
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRSizeList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRSizeList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRSizeList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRSizeList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRSizeList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRSizeList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRSizeList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRSizeList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRSizeList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class SoapySDRDoubleList(_object):
    """SWIG-generated list-like container of doubles; every operation
    delegates to the _SoapySDR C extension."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRDoubleList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRDoubleList, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _SoapySDR.SoapySDRDoubleList_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _SoapySDR.SoapySDRDoubleList___nonzero__(self)
    def __bool__(self):
        return _SoapySDR.SoapySDRDoubleList___bool__(self)
    def __len__(self):
        return _SoapySDR.SoapySDRDoubleList___len__(self)
    def pop(self):
        return _SoapySDR.SoapySDRDoubleList_pop(self)
    def __getslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___getslice__(self, *args)
    def __setslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___setslice__(self, *args)
    def __delslice__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___delslice__(self, *args)
    def __delitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___delitem__(self, *args)
    def __getitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___getitem__(self, *args)
    def __setitem__(self, *args):
        return _SoapySDR.SoapySDRDoubleList___setitem__(self, *args)
    def append(self, *args):
        return _SoapySDR.SoapySDRDoubleList_append(self, *args)
    def empty(self):
        return _SoapySDR.SoapySDRDoubleList_empty(self)
    def size(self):
        return _SoapySDR.SoapySDRDoubleList_size(self)
    def clear(self):
        return _SoapySDR.SoapySDRDoubleList_clear(self)
    def swap(self, *args):
        return _SoapySDR.SoapySDRDoubleList_swap(self, *args)
    def get_allocator(self):
        return _SoapySDR.SoapySDRDoubleList_get_allocator(self)
    def begin(self):
        return _SoapySDR.SoapySDRDoubleList_begin(self)
    def end(self):
        return _SoapySDR.SoapySDRDoubleList_end(self)
    def rbegin(self):
        return _SoapySDR.SoapySDRDoubleList_rbegin(self)
    def rend(self):
        return _SoapySDR.SoapySDRDoubleList_rend(self)
    def pop_back(self):
        return _SoapySDR.SoapySDRDoubleList_pop_back(self)
    def erase(self, *args):
        return _SoapySDR.SoapySDRDoubleList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRDoubleList(*args)
        # Standard SWIG idiom: attach the new C pointer to this proxy.
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, *args):
        return _SoapySDR.SoapySDRDoubleList_push_back(self, *args)
    def front(self):
        return _SoapySDR.SoapySDRDoubleList_front(self)
    def back(self):
        return _SoapySDR.SoapySDRDoubleList_back(self)
    def assign(self, *args):
        return _SoapySDR.SoapySDRDoubleList_assign(self, *args)
    def resize(self, *args):
        return _SoapySDR.SoapySDRDoubleList_resize(self, *args)
    def insert(self, *args):
        return _SoapySDR.SoapySDRDoubleList_insert(self, *args)
    def reserve(self, *args):
        return _SoapySDR.SoapySDRDoubleList_reserve(self, *args)
    def capacity(self):
        return _SoapySDR.SoapySDRDoubleList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRDoubleList
    __del__ = lambda self: None
<|reserved_special_token_0|>
class StreamResult(_object):
    """SWIG proxy for the result of a stream operation, exposing the fields
    ret, flags, timeNs and chanMask via the SWIG attribute tables."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StreamResult, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StreamResult, name)
    __repr__ = _swig_repr
    def __init__(self):
        this = _SoapySDR.new_StreamResult()
        # Standard SWIG idiom: attach the new C pointer to this proxy.
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_setmethods__['ret'] = _SoapySDR.StreamResult_ret_set
    __swig_getmethods__['ret'] = _SoapySDR.StreamResult_ret_get
    if _newclass:
        ret = _swig_property(_SoapySDR.StreamResult_ret_get, _SoapySDR.StreamResult_ret_set)
    __swig_setmethods__['flags'] = _SoapySDR.StreamResult_flags_set
    __swig_getmethods__['flags'] = _SoapySDR.StreamResult_flags_get
    if _newclass:
        flags = _swig_property(_SoapySDR.StreamResult_flags_get, _SoapySDR.StreamResult_flags_set)
    __swig_setmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_set
    __swig_getmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_get
    if _newclass:
        timeNs = _swig_property(_SoapySDR.StreamResult_timeNs_get, _SoapySDR.StreamResult_timeNs_set)
    __swig_setmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_set
    __swig_getmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_get
    if _newclass:
        chanMask = _swig_property(_SoapySDR.StreamResult_chanMask_get, _SoapySDR.StreamResult_chanMask_set)
    def __str__(self):
        # chanMask is intentionally omitted from the summary string.
        return 'ret=%s, flags=%s, timeNs=%s' % (self.ret, self.flags, self.timeNs)
    __swig_destroy__ = _SoapySDR.delete_StreamResult
    __del__ = lambda self: None
<|reserved_special_token_0|>
# Module-level wrapper functions generated by SWIG.  Each one simply
# forwards its arguments to the matching symbol in the _SoapySDR
# extension module.  Both the SoapySDR_-prefixed names and the short
# unprefixed names are emitted by the generator and forward the same way.
def SoapySDR_errToStr(*args):
    return _SoapySDR.SoapySDR_errToStr(*args)
<|reserved_special_token_0|>
def SoapySDR_getAPIVersion():
    return _SoapySDR.SoapySDR_getAPIVersion()
<|reserved_special_token_0|>
def SoapySDR_getABIVersion():
    return _SoapySDR.SoapySDR_getABIVersion()
<|reserved_special_token_0|>
def SoapySDR_getLibVersion():
    return _SoapySDR.SoapySDR_getLibVersion()
<|reserved_special_token_0|>
def SoapySDR_log(*args):
    return _SoapySDR.SoapySDR_log(*args)
<|reserved_special_token_0|>
def SoapySDR_setLogLevel(*args):
    return _SoapySDR.SoapySDR_setLogLevel(*args)
<|reserved_special_token_0|>
def errToStr(*args):
    return _SoapySDR.errToStr(*args)
<|reserved_special_token_0|>
def getAPIVersion():
    return _SoapySDR.getAPIVersion()
<|reserved_special_token_0|>
def getABIVersion():
    return _SoapySDR.getABIVersion()
<|reserved_special_token_0|>
def getLibVersion():
    return _SoapySDR.getLibVersion()
<|reserved_special_token_0|>
def getRootPath():
    return _SoapySDR.getRootPath()
<|reserved_special_token_0|>
def listSearchPaths():
    return _SoapySDR.listSearchPaths()
<|reserved_special_token_0|>
def listModules(*args):
    return _SoapySDR.listModules(*args)
<|reserved_special_token_0|>
def loadModule(*args):
    return _SoapySDR.loadModule(*args)
<|reserved_special_token_0|>
def getLoaderResult(*args):
    return _SoapySDR.getLoaderResult(*args)
<|reserved_special_token_0|>
def unloadModule(*args):
    return _SoapySDR.unloadModule(*args)
<|reserved_special_token_0|>
def loadModules():
    return _SoapySDR.loadModules()
<|reserved_special_token_0|>
def formatToSize(*args):
    return _SoapySDR.formatToSize(*args)
<|reserved_special_token_0|>
def ticksToTimeNs(*args):
    return _SoapySDR.ticksToTimeNs(*args)
<|reserved_special_token_0|>
def log(*args):
    return _SoapySDR.log(*args)
<|reserved_special_token_0|>
def setLogLevel(*args):
    return _SoapySDR.setLogLevel(*args)
<|reserved_special_token_0|>
class Device(_object):
    """SWIG proxy class for a SoapySDR device.

    Direct construction is disabled (``__init__`` raises); instances are
    produced by the ``make`` static method — see the ``Device`` factory
    subclass defined later in this module.  Almost every method is a thin
    forward into the ``_SoapySDR`` extension module; ``readStream``,
    ``writeStream`` and ``readStreamStatus`` are re-defined in Python at
    the bottom of the class to convert buffer objects into raw pointers
    before calling the double-underscore extension entry points.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Device,
        name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Device, name)
    def __init__(self, *args, **kwargs):
        # Constructor intentionally disabled by the generator.
        raise AttributeError('No constructor defined')
    __repr__ = _swig_repr
    __swig_destroy__ = _SoapySDR.delete_Device
    # NOTE: this lambda __del__ is shadowed by the def __del__ further down.
    __del__ = lambda self: None
    # enumerate/make/unmake become static methods when new-style classes
    # are available (_newclass); otherwise they resolve via __getattr__.
    __swig_getmethods__['enumerate'] = lambda x: _SoapySDR.Device_enumerate
    if _newclass:
        enumerate = staticmethod(_SoapySDR.Device_enumerate)
    __swig_getmethods__['make'] = lambda x: _SoapySDR.Device_make
    if _newclass:
        make = staticmethod(_SoapySDR.Device_make)
    __swig_getmethods__['unmake'] = lambda x: _SoapySDR.Device_unmake
    if _newclass:
        unmake = staticmethod(_SoapySDR.Device_unmake)
    # --- identification ---
    def getDriverKey(self):
        return _SoapySDR.Device_getDriverKey(self)
    def getHardwareKey(self):
        return _SoapySDR.Device_getHardwareKey(self)
    def getHardwareInfo(self):
        return _SoapySDR.Device_getHardwareInfo(self)
    # --- channel/frontend configuration ---
    def setFrontendMapping(self, *args):
        return _SoapySDR.Device_setFrontendMapping(self, *args)
    def getFrontendMapping(self, *args):
        return _SoapySDR.Device_getFrontendMapping(self, *args)
    def getNumChannels(self, *args):
        return _SoapySDR.Device_getNumChannels(self, *args)
    def getChannelInfo(self, *args):
        return _SoapySDR.Device_getChannelInfo(self, *args)
    def getFullDuplex(self, *args):
        return _SoapySDR.Device_getFullDuplex(self, *args)
    # --- stream API (raw extension entry points) ---
    def getStreamFormats(self, *args):
        return _SoapySDR.Device_getStreamFormats(self, *args)
    def getNativeStreamFormat(self, *args):
        return _SoapySDR.Device_getNativeStreamFormat(self, *args)
    def getStreamArgsInfo(self, *args):
        return _SoapySDR.Device_getStreamArgsInfo(self, *args)
    def setupStream(self, *args):
        return _SoapySDR.Device_setupStream(self, *args)
    def closeStream(self, *args):
        return _SoapySDR.Device_closeStream(self, *args)
    def getStreamMTU(self, *args):
        return _SoapySDR.Device_getStreamMTU(self, *args)
    def activateStream(self, *args):
        return _SoapySDR.Device_activateStream(self, *args)
    def deactivateStream(self, *args):
        return _SoapySDR.Device_deactivateStream(self, *args)
    # NOTE: the three methods below are shadowed by the Python-side
    # overrides defined at the end of this class.
    def readStream(self, *args):
        return _SoapySDR.Device_readStream(self, *args)
    def writeStream(self, *args):
        return _SoapySDR.Device_writeStream(self, *args)
    def readStreamStatus(self, *args):
        return _SoapySDR.Device_readStreamStatus(self, *args)
    def getNumDirectAccessBuffers(self, *args):
        return _SoapySDR.Device_getNumDirectAccessBuffers(self, *args)
    def getDirectAccessBufferAddrs(self, *args):
        return _SoapySDR.Device_getDirectAccessBufferAddrs(self, *args)
    def acquireReadBuffer(self, *args):
        return _SoapySDR.Device_acquireReadBuffer(self, *args)
    def releaseReadBuffer(self, *args):
        return _SoapySDR.Device_releaseReadBuffer(self, *args)
    def acquireWriteBuffer(self, *args):
        return _SoapySDR.Device_acquireWriteBuffer(self, *args)
    def releaseWriteBuffer(self, *args):
        return _SoapySDR.Device_releaseWriteBuffer(self, *args)
    # --- antenna ---
    def listAntennas(self, *args):
        return _SoapySDR.Device_listAntennas(self, *args)
    def setAntenna(self, *args):
        return _SoapySDR.Device_setAntenna(self, *args)
    def getAntenna(self, *args):
        return _SoapySDR.Device_getAntenna(self, *args)
    # --- frontend corrections ---
    def hasDCOffsetMode(self, *args):
        return _SoapySDR.Device_hasDCOffsetMode(self, *args)
    def setDCOffsetMode(self, *args):
        return _SoapySDR.Device_setDCOffsetMode(self, *args)
    def getDCOffsetMode(self, *args):
        return _SoapySDR.Device_getDCOffsetMode(self, *args)
    def hasDCOffset(self, *args):
        return _SoapySDR.Device_hasDCOffset(self, *args)
    def setDCOffset(self, *args):
        return _SoapySDR.Device_setDCOffset(self, *args)
    def getDCOffset(self, *args):
        return _SoapySDR.Device_getDCOffset(self, *args)
    def hasIQBalance(self, *args):
        return _SoapySDR.Device_hasIQBalance(self, *args)
    def setIQBalance(self, *args):
        return _SoapySDR.Device_setIQBalance(self, *args)
    def getIQBalance(self, *args):
        return _SoapySDR.Device_getIQBalance(self, *args)
    def hasFrequencyCorrection(self, *args):
        return _SoapySDR.Device_hasFrequencyCorrection(self, *args)
    def setFrequencyCorrection(self, *args):
        return _SoapySDR.Device_setFrequencyCorrection(self, *args)
    def getFrequencyCorrection(self, *args):
        return _SoapySDR.Device_getFrequencyCorrection(self, *args)
    # --- gain ---
    def listGains(self, *args):
        return _SoapySDR.Device_listGains(self, *args)
    def hasGainMode(self, *args):
        return _SoapySDR.Device_hasGainMode(self, *args)
    def setGainMode(self, *args):
        return _SoapySDR.Device_setGainMode(self, *args)
    def getGainMode(self, *args):
        return _SoapySDR.Device_getGainMode(self, *args)
    def setGain(self, *args):
        return _SoapySDR.Device_setGain(self, *args)
    def getGain(self, *args):
        return _SoapySDR.Device_getGain(self, *args)
    def getGainRange(self, *args):
        return _SoapySDR.Device_getGainRange(self, *args)
    # --- frequency ---
    def setFrequency(self, *args):
        return _SoapySDR.Device_setFrequency(self, *args)
    def getFrequency(self, *args):
        return _SoapySDR.Device_getFrequency(self, *args)
    def listFrequencies(self, *args):
        return _SoapySDR.Device_listFrequencies(self, *args)
    def getFrequencyRange(self, *args):
        return _SoapySDR.Device_getFrequencyRange(self, *args)
    def getFrequencyArgsInfo(self, *args):
        return _SoapySDR.Device_getFrequencyArgsInfo(self, *args)
    # --- sample rate / bandwidth ---
    def setSampleRate(self, *args):
        return _SoapySDR.Device_setSampleRate(self, *args)
    def getSampleRate(self, *args):
        return _SoapySDR.Device_getSampleRate(self, *args)
    def listSampleRates(self, *args):
        return _SoapySDR.Device_listSampleRates(self, *args)
    def getSampleRateRange(self, *args):
        return _SoapySDR.Device_getSampleRateRange(self, *args)
    def setBandwidth(self, *args):
        return _SoapySDR.Device_setBandwidth(self, *args)
    def getBandwidth(self, *args):
        return _SoapySDR.Device_getBandwidth(self, *args)
    def listBandwidths(self, *args):
        return _SoapySDR.Device_listBandwidths(self, *args)
    def getBandwidthRange(self, *args):
        return _SoapySDR.Device_getBandwidthRange(self, *args)
    # --- clocking / timing ---
    def setMasterClockRate(self, *args):
        return _SoapySDR.Device_setMasterClockRate(self, *args)
    def getMasterClockRate(self):
        return _SoapySDR.Device_getMasterClockRate(self)
    def getMasterClockRates(self):
        return _SoapySDR.Device_getMasterClockRates(self)
    def listClockSources(self):
        return _SoapySDR.Device_listClockSources(self)
    def setClockSource(self, *args):
        return _SoapySDR.Device_setClockSource(self, *args)
    def getClockSource(self):
        return _SoapySDR.Device_getClockSource(self)
    def listTimeSources(self):
        return _SoapySDR.Device_listTimeSources(self)
    def setTimeSource(self, *args):
        return _SoapySDR.Device_setTimeSource(self, *args)
    def getTimeSource(self):
        return _SoapySDR.Device_getTimeSource(self)
    def hasHardwareTime(self, what=''):
        return _SoapySDR.Device_hasHardwareTime(self, what)
    def getHardwareTime(self, what=''):
        return _SoapySDR.Device_getHardwareTime(self, what)
    def setHardwareTime(self, *args):
        return _SoapySDR.Device_setHardwareTime(self, *args)
    def setCommandTime(self, *args):
        return _SoapySDR.Device_setCommandTime(self, *args)
    # --- sensors / registers / settings ---
    def listSensors(self, *args):
        return _SoapySDR.Device_listSensors(self, *args)
    def getSensorInfo(self, *args):
        return _SoapySDR.Device_getSensorInfo(self, *args)
    def readSensor(self, *args):
        return _SoapySDR.Device_readSensor(self, *args)
    def listRegisterInterfaces(self):
        return _SoapySDR.Device_listRegisterInterfaces(self)
    def writeRegister(self, *args):
        return _SoapySDR.Device_writeRegister(self, *args)
    def readRegister(self, *args):
        return _SoapySDR.Device_readRegister(self, *args)
    def writeRegisters(self, *args):
        return _SoapySDR.Device_writeRegisters(self, *args)
    def readRegisters(self, *args):
        return _SoapySDR.Device_readRegisters(self, *args)
    def getSettingInfo(self, *args):
        return _SoapySDR.Device_getSettingInfo(self, *args)
    def writeSetting(self, *args):
        return _SoapySDR.Device_writeSetting(self, *args)
    def readSetting(self, *args):
        return _SoapySDR.Device_readSetting(self, *args)
    # --- GPIO / I2C / SPI / UART ---
    def listGPIOBanks(self):
        return _SoapySDR.Device_listGPIOBanks(self)
    def writeGPIO(self, *args):
        return _SoapySDR.Device_writeGPIO(self, *args)
    def readGPIO(self, *args):
        return _SoapySDR.Device_readGPIO(self, *args)
    def writeGPIODir(self, *args):
        return _SoapySDR.Device_writeGPIODir(self, *args)
    def readGPIODir(self, *args):
        return _SoapySDR.Device_readGPIODir(self, *args)
    def writeI2C(self, *args):
        return _SoapySDR.Device_writeI2C(self, *args)
    def readI2C(self, *args):
        return _SoapySDR.Device_readI2C(self, *args)
    def transactSPI(self, *args):
        return _SoapySDR.Device_transactSPI(self, *args)
    def listUARTs(self):
        return _SoapySDR.Device_listUARTs(self)
    def writeUART(self, *args):
        return _SoapySDR.Device_writeUART(self, *args)
    def readUART(self, *args):
        return _SoapySDR.Device_readUART(self, *args)
    # Pointer-based stream entry points used by the overrides below.
    def readStream__(self, *args):
        return _SoapySDR.Device_readStream__(self, *args)
    def writeStream__(self, *args):
        return _SoapySDR.Device_writeStream__(self, *args)
    def readStreamStatus__(self, *args):
        return _SoapySDR.Device_readStreamStatus__(self, *args)
    def __del__(self):
        # Overrides the lambda __del__ above: releases the device handle
        # through the unmake factory call when garbage collected.
        Device.unmake(self)
    def __str__(self):
        return '%s:%s' % (self.getDriverKey(), self.getHardwareKey())
    def readStream(self, stream, buffs, numElems, flags=0, timeoutUs=100000):
        """Read from a stream: converts each buffer to a raw pointer first."""
        ptrs = [extractBuffPointer(b) for b in buffs]
        return self.readStream__(stream, ptrs, numElems, flags, timeoutUs)
    def writeStream(self, stream, buffs, numElems, flags=0, timeNs=0,
        timeoutUs=100000):
        """Write to a stream: converts each buffer to a raw pointer first."""
        ptrs = [extractBuffPointer(b) for b in buffs]
        return self.writeStream__(stream, ptrs, numElems, flags, timeNs,
            timeoutUs)
    def readStreamStatus(self, stream, timeoutUs=100000):
        """Read stream status via the pointer-based extension entry point."""
        return self.readStreamStatus__(stream, timeoutUs)
<|reserved_special_token_0|>
# Module-level aliases for the Device static factory methods; each
# forwards directly to the _SoapySDR extension module.
def Device_enumerate(*args):
    return _SoapySDR.Device_enumerate(*args)
<|reserved_special_token_0|>
def Device_make(*args):
    return _SoapySDR.Device_make(*args)
<|reserved_special_token_0|>
def Device_unmake(*args):
    return _SoapySDR.Device_unmake(*args)
<|reserved_special_token_0|>
class Device(Device):
    """Factory subclass shadowing the SWIG proxy so that ``Device(args)``
    works: construction is routed through the ``make`` static method
    (the proxy's own ``__init__`` raises AttributeError)."""
    def __new__(cls, *args, **kwargs):
        # Delegate construction to the SWIG factory; make returns the
        # instance, so no further __init__ work is required.
        return cls.make(*args, **kwargs)
def extractBuffPointer(buff):
    """Return the memory address of *buff* as a plain integer.

    Supports numpy-style objects exposing ``__array_interface__`` (the
    data pointer is taken directly) and any object convertible to an
    integer address via ``__long__``/``__int__``.

    Raises:
        Exception: if no supported conversion is available for *buff*.
    """
    # Preferred path: array-protocol objects carry their data pointer.
    if hasattr(buff, '__array_interface__'):
        return buff.__array_interface__['data'][0]
    if hasattr(buff, '__long__'):
        # BUGFIX: the long builtin does not exist on Python 3, which made
        # this branch raise NameError there; fall back to int() while
        # preserving the original Python 2 behavior (long handles wide
        # pointer values).
        try:
            return long(buff)
        except NameError:
            return int(buff)
    if hasattr(buff, '__int__'):
        return int(buff)
    raise Exception('Unrecognized data format: ' + str(type(buff)))
<|reserved_special_token_1|>
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.



# Bootstrap loader for the _SoapySDR compiled extension module.
# NOTE(review): uses the imp module, which is deprecated on modern
# Python 3 — generated for Python 2-era SWIG.
from sys import version_info
if version_info >= (2,6,0):
    def swig_import_helper():
        """Locate and load _SoapySDR next to this file, falling back to
        a normal import if find_module raises ImportError."""
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_SoapySDR', [dirname(__file__)])
        except ImportError:
            import _SoapySDR
            return _SoapySDR
        if fp is not None:
            try:
                _mod = imp.load_module('_SoapySDR', fp, pathname, description)
            finally:
                # Always close the module file handle, even on load failure.
                fp.close()
            return _mod
    _SoapySDR = swig_import_helper()
    del swig_import_helper
else:
    import _SoapySDR
del version_info
# SWIG runtime helpers: attribute get/set dispatch through the per-class
# __swig_setmethods__/__swig_getmethods__ tables, plus compatibility
# shims for very old Python versions.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    """Set an attribute via the SWIG setter table; with static true,
    refuse to create attributes that have no registered setter."""
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Non-static variant: unknown names fall through to __dict__.
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    """Resolve an attribute via the SWIG getter table."""
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
    """repr showing the module, class name and underlying C proxy."""
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# New-style class detection (always true on any modern Python).
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
class SwigPyIterator(_object):
    """Abstract SWIG iterator over wrapped C++ containers; concrete
    iterators are produced by the containers' iterator() methods."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _SoapySDR.delete_SwigPyIterator
    __del__ = lambda self : None;
    def value(self): return _SoapySDR.SwigPyIterator_value(self)
    def incr(self, n=1): return _SoapySDR.SwigPyIterator_incr(self, n)
    def decr(self, n=1): return _SoapySDR.SwigPyIterator_decr(self, n)
    def distance(self, *args): return _SoapySDR.SwigPyIterator_distance(self, *args)
    def equal(self, *args): return _SoapySDR.SwigPyIterator_equal(self, *args)
    def copy(self): return _SoapySDR.SwigPyIterator_copy(self)
    # next/__next__ support both Python 2 and Python 3 iteration.
    def next(self): return _SoapySDR.SwigPyIterator_next(self)
    def __next__(self): return _SoapySDR.SwigPyIterator___next__(self)
    def previous(self): return _SoapySDR.SwigPyIterator_previous(self)
    def advance(self, *args): return _SoapySDR.SwigPyIterator_advance(self, *args)
    def __eq__(self, *args): return _SoapySDR.SwigPyIterator___eq__(self, *args)
    def __ne__(self, *args): return _SoapySDR.SwigPyIterator___ne__(self, *args)
    def __iadd__(self, *args): return _SoapySDR.SwigPyIterator___iadd__(self, *args)
    def __isub__(self, *args): return _SoapySDR.SwigPyIterator___isub__(self, *args)
    def __add__(self, *args): return _SoapySDR.SwigPyIterator___add__(self, *args)
    def __sub__(self, *args): return _SoapySDR.SwigPyIterator___sub__(self, *args)
    def __iter__(self): return self
SwigPyIterator_swigregister = _SoapySDR.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
# The def is generated, then immediately rebound to the extension
# function itself, so calls go straight to the C implementation.
def KwargsFromString(*args):
  return _SoapySDR.KwargsFromString(*args)
KwargsFromString = _SoapySDR.KwargsFromString

def KwargsToString(*args):
  return _SoapySDR.KwargsToString(*args)
KwargsToString = _SoapySDR.KwargsToString
class Range(_object):
    """SWIG proxy for a numeric range with minimum, maximum and step,
    all backed by getters in the _SoapySDR extension module."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Range, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Range, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _SoapySDR.new_Range(*args)
        try: self.this.append(this)
        except: self.this = this
    def minimum(self): return _SoapySDR.Range_minimum(self)
    def maximum(self): return _SoapySDR.Range_maximum(self)
    def step(self): return _SoapySDR.Range_step(self)
    def __str__(self):
        # Render "min, max" and append the step only when it is nonzero.
        fields = [self.minimum(), self.maximum()]
        if self.step() != 0.0: fields.append(self.step())
        return ', '.join(['%g'%f for f in fields])
    __swig_destroy__ = _SoapySDR.delete_Range
    __del__ = lambda self : None;
Range_swigregister = _SoapySDR.Range_swigregister
Range_swigregister(Range)
class ArgInfo(_object):
    """SWIG proxy describing one device argument: key/value, display
    name, description, units, a type tag (BOOL/INT/FLOAT/STRING), a
    value range and option lists."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, ArgInfo, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, ArgInfo, name)
    __repr__ = _swig_repr
    def __init__(self):
        this = _SoapySDR.new_ArgInfo()
        try: self.this.append(this)
        except: self.this = this
    __swig_setmethods__["key"] = _SoapySDR.ArgInfo_key_set
    __swig_getmethods__["key"] = _SoapySDR.ArgInfo_key_get
    if _newclass:key = _swig_property(_SoapySDR.ArgInfo_key_get, _SoapySDR.ArgInfo_key_set)
    __swig_setmethods__["value"] = _SoapySDR.ArgInfo_value_set
    __swig_getmethods__["value"] = _SoapySDR.ArgInfo_value_get
    if _newclass:value = _swig_property(_SoapySDR.ArgInfo_value_get, _SoapySDR.ArgInfo_value_set)
    __swig_setmethods__["name"] = _SoapySDR.ArgInfo_name_set
    __swig_getmethods__["name"] = _SoapySDR.ArgInfo_name_get
    if _newclass:name = _swig_property(_SoapySDR.ArgInfo_name_get, _SoapySDR.ArgInfo_name_set)
    __swig_setmethods__["description"] = _SoapySDR.ArgInfo_description_set
    __swig_getmethods__["description"] = _SoapySDR.ArgInfo_description_get
    if _newclass:description = _swig_property(_SoapySDR.ArgInfo_description_get, _SoapySDR.ArgInfo_description_set)
    __swig_setmethods__["units"] = _SoapySDR.ArgInfo_units_set
    __swig_getmethods__["units"] = _SoapySDR.ArgInfo_units_get
    if _newclass:units = _swig_property(_SoapySDR.ArgInfo_units_get, _SoapySDR.ArgInfo_units_set)
    # Type tag constants mirrored from the extension module.
    BOOL = _SoapySDR.ArgInfo_BOOL
    INT = _SoapySDR.ArgInfo_INT
    FLOAT = _SoapySDR.ArgInfo_FLOAT
    STRING = _SoapySDR.ArgInfo_STRING
    __swig_setmethods__["type"] = _SoapySDR.ArgInfo_type_set
    __swig_getmethods__["type"] = _SoapySDR.ArgInfo_type_get
    if _newclass:type = _swig_property(_SoapySDR.ArgInfo_type_get, _SoapySDR.ArgInfo_type_set)
    __swig_setmethods__["range"] = _SoapySDR.ArgInfo_range_set
    __swig_getmethods__["range"] = _SoapySDR.ArgInfo_range_get
    if _newclass:range = _swig_property(_SoapySDR.ArgInfo_range_get, _SoapySDR.ArgInfo_range_set)
    __swig_setmethods__["options"] = _SoapySDR.ArgInfo_options_set
    __swig_getmethods__["options"] = _SoapySDR.ArgInfo_options_get
    if _newclass:options = _swig_property(_SoapySDR.ArgInfo_options_get, _SoapySDR.ArgInfo_options_set)
    __swig_setmethods__["optionNames"] = _SoapySDR.ArgInfo_optionNames_set
    __swig_getmethods__["optionNames"] = _SoapySDR.ArgInfo_optionNames_get
    if _newclass:optionNames = _swig_property(_SoapySDR.ArgInfo_optionNames_get, _SoapySDR.ArgInfo_optionNames_set)
    __swig_destroy__ = _SoapySDR.delete_ArgInfo
    __del__ = lambda self : None;
ArgInfo_swigregister = _SoapySDR.ArgInfo_swigregister
ArgInfo_swigregister(ArgInfo)
class SoapySDRKwargs(_object):
    """SWIG proxy for a std::map of string key/value device arguments,
    exposing a dict-like Python interface."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRKwargs, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargs, name)
    __repr__ = _swig_repr
    def iterator(self): return _SoapySDR.SoapySDRKwargs_iterator(self)
    # NOTE: __iter__ is defined twice by the generator; the second
    # definition below wins, so iteration yields keys (dict-like).
    def __iter__(self): return self.iterator()
    def __nonzero__(self): return _SoapySDR.SoapySDRKwargs___nonzero__(self)
    def __bool__(self): return _SoapySDR.SoapySDRKwargs___bool__(self)
    def __len__(self): return _SoapySDR.SoapySDRKwargs___len__(self)
    def __iter__(self): return self.key_iterator()
    def iterkeys(self): return self.key_iterator()
    def itervalues(self): return self.value_iterator()
    def iteritems(self): return self.iterator()
    def __getitem__(self, *args): return _SoapySDR.SoapySDRKwargs___getitem__(self, *args)
    def __delitem__(self, *args): return _SoapySDR.SoapySDRKwargs___delitem__(self, *args)
    def has_key(self, *args): return _SoapySDR.SoapySDRKwargs_has_key(self, *args)
    def keys(self): return _SoapySDR.SoapySDRKwargs_keys(self)
    def values(self): return _SoapySDR.SoapySDRKwargs_values(self)
    def items(self): return _SoapySDR.SoapySDRKwargs_items(self)
    def __contains__(self, *args): return _SoapySDR.SoapySDRKwargs___contains__(self, *args)
    def key_iterator(self): return _SoapySDR.SoapySDRKwargs_key_iterator(self)
    def value_iterator(self): return _SoapySDR.SoapySDRKwargs_value_iterator(self)
    def __setitem__(self, *args): return _SoapySDR.SoapySDRKwargs___setitem__(self, *args)
    def asdict(self): return _SoapySDR.SoapySDRKwargs_asdict(self)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRKwargs(*args)
        try: self.this.append(this)
        except: self.this = this
    def empty(self): return _SoapySDR.SoapySDRKwargs_empty(self)
    def size(self): return _SoapySDR.SoapySDRKwargs_size(self)
    def clear(self): return _SoapySDR.SoapySDRKwargs_clear(self)
    def swap(self, *args): return _SoapySDR.SoapySDRKwargs_swap(self, *args)
    def get_allocator(self): return _SoapySDR.SoapySDRKwargs_get_allocator(self)
    def begin(self): return _SoapySDR.SoapySDRKwargs_begin(self)
    def end(self): return _SoapySDR.SoapySDRKwargs_end(self)
    def rbegin(self): return _SoapySDR.SoapySDRKwargs_rbegin(self)
    def rend(self): return _SoapySDR.SoapySDRKwargs_rend(self)
    def count(self, *args): return _SoapySDR.SoapySDRKwargs_count(self, *args)
    def erase(self, *args): return _SoapySDR.SoapySDRKwargs_erase(self, *args)
    def find(self, *args): return _SoapySDR.SoapySDRKwargs_find(self, *args)
    def lower_bound(self, *args): return _SoapySDR.SoapySDRKwargs_lower_bound(self, *args)
    def upper_bound(self, *args): return _SoapySDR.SoapySDRKwargs_upper_bound(self, *args)
    def __str__(self):
        # Render as "{k=v, k=v}" over all key/value pairs.
        out = list()
        for k, v in self.iteritems():
            out.append("%s=%s"%(k, v))
        return '{'+(', '.join(out))+'}'
    __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargs
    __del__ = lambda self : None;
SoapySDRKwargs_swigregister = _SoapySDR.SoapySDRKwargs_swigregister
SoapySDRKwargs_swigregister(SoapySDRKwargs)
class SoapySDRKwargsList(_object):
    """SWIG proxy for a std::vector of kwargs maps, exposing a
    list-like Python interface."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRKwargsList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargsList, name)
    __repr__ = _swig_repr
    def iterator(self): return _SoapySDR.SoapySDRKwargsList_iterator(self)
    def __iter__(self): return self.iterator()
    def __nonzero__(self): return _SoapySDR.SoapySDRKwargsList___nonzero__(self)
    def __bool__(self): return _SoapySDR.SoapySDRKwargsList___bool__(self)
    def __len__(self): return _SoapySDR.SoapySDRKwargsList___len__(self)
    def pop(self): return _SoapySDR.SoapySDRKwargsList_pop(self)
    def __getslice__(self, *args): return _SoapySDR.SoapySDRKwargsList___getslice__(self, *args)
    def __setslice__(self, *args): return _SoapySDR.SoapySDRKwargsList___setslice__(self, *args)
    def __delslice__(self, *args): return _SoapySDR.SoapySDRKwargsList___delslice__(self, *args)
    def __delitem__(self, *args): return _SoapySDR.SoapySDRKwargsList___delitem__(self, *args)
    def __getitem__(self, *args): return _SoapySDR.SoapySDRKwargsList___getitem__(self, *args)
    def __setitem__(self, *args): return _SoapySDR.SoapySDRKwargsList___setitem__(self, *args)
    def append(self, *args): return _SoapySDR.SoapySDRKwargsList_append(self, *args)
    def empty(self): return _SoapySDR.SoapySDRKwargsList_empty(self)
    def size(self): return _SoapySDR.SoapySDRKwargsList_size(self)
    def clear(self): return _SoapySDR.SoapySDRKwargsList_clear(self)
    def swap(self, *args): return _SoapySDR.SoapySDRKwargsList_swap(self, *args)
    def get_allocator(self): return _SoapySDR.SoapySDRKwargsList_get_allocator(self)
    def begin(self): return _SoapySDR.SoapySDRKwargsList_begin(self)
    def end(self): return _SoapySDR.SoapySDRKwargsList_end(self)
    def rbegin(self): return _SoapySDR.SoapySDRKwargsList_rbegin(self)
    def rend(self): return _SoapySDR.SoapySDRKwargsList_rend(self)
    def pop_back(self): return _SoapySDR.SoapySDRKwargsList_pop_back(self)
    def erase(self, *args): return _SoapySDR.SoapySDRKwargsList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRKwargsList(*args)
        try: self.this.append(this)
        except: self.this = this
    def push_back(self, *args): return _SoapySDR.SoapySDRKwargsList_push_back(self, *args)
    def front(self): return _SoapySDR.SoapySDRKwargsList_front(self)
    def back(self): return _SoapySDR.SoapySDRKwargsList_back(self)
    def assign(self, *args): return _SoapySDR.SoapySDRKwargsList_assign(self, *args)
    def resize(self, *args): return _SoapySDR.SoapySDRKwargsList_resize(self, *args)
    def insert(self, *args): return _SoapySDR.SoapySDRKwargsList_insert(self, *args)
    def reserve(self, *args): return _SoapySDR.SoapySDRKwargsList_reserve(self, *args)
    def capacity(self): return _SoapySDR.SoapySDRKwargsList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargsList
    __del__ = lambda self : None;
SoapySDRKwargsList_swigregister = _SoapySDR.SoapySDRKwargsList_swigregister
SoapySDRKwargsList_swigregister(SoapySDRKwargsList)
class SoapySDRArgInfoList(_object):
    """SWIG proxy for a std::vector of ArgInfo entries, exposing a
    list-like Python interface."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRArgInfoList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRArgInfoList, name)
    __repr__ = _swig_repr
    def iterator(self): return _SoapySDR.SoapySDRArgInfoList_iterator(self)
    def __iter__(self): return self.iterator()
    def __nonzero__(self): return _SoapySDR.SoapySDRArgInfoList___nonzero__(self)
    def __bool__(self): return _SoapySDR.SoapySDRArgInfoList___bool__(self)
    def __len__(self): return _SoapySDR.SoapySDRArgInfoList___len__(self)
    def pop(self): return _SoapySDR.SoapySDRArgInfoList_pop(self)
    def __getslice__(self, *args): return _SoapySDR.SoapySDRArgInfoList___getslice__(self, *args)
    def __setslice__(self, *args): return _SoapySDR.SoapySDRArgInfoList___setslice__(self, *args)
    def __delslice__(self, *args): return _SoapySDR.SoapySDRArgInfoList___delslice__(self, *args)
    def __delitem__(self, *args): return _SoapySDR.SoapySDRArgInfoList___delitem__(self, *args)
    def __getitem__(self, *args): return _SoapySDR.SoapySDRArgInfoList___getitem__(self, *args)
    def __setitem__(self, *args): return _SoapySDR.SoapySDRArgInfoList___setitem__(self, *args)
    def append(self, *args): return _SoapySDR.SoapySDRArgInfoList_append(self, *args)
    def empty(self): return _SoapySDR.SoapySDRArgInfoList_empty(self)
    def size(self): return _SoapySDR.SoapySDRArgInfoList_size(self)
    def clear(self): return _SoapySDR.SoapySDRArgInfoList_clear(self)
    def swap(self, *args): return _SoapySDR.SoapySDRArgInfoList_swap(self, *args)
    def get_allocator(self): return _SoapySDR.SoapySDRArgInfoList_get_allocator(self)
    def begin(self): return _SoapySDR.SoapySDRArgInfoList_begin(self)
    def end(self): return _SoapySDR.SoapySDRArgInfoList_end(self)
    def rbegin(self): return _SoapySDR.SoapySDRArgInfoList_rbegin(self)
    def rend(self): return _SoapySDR.SoapySDRArgInfoList_rend(self)
    def pop_back(self): return _SoapySDR.SoapySDRArgInfoList_pop_back(self)
    def erase(self, *args): return _SoapySDR.SoapySDRArgInfoList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRArgInfoList(*args)
        try: self.this.append(this)
        except: self.this = this
    def push_back(self, *args): return _SoapySDR.SoapySDRArgInfoList_push_back(self, *args)
    def front(self): return _SoapySDR.SoapySDRArgInfoList_front(self)
    def back(self): return _SoapySDR.SoapySDRArgInfoList_back(self)
    def assign(self, *args): return _SoapySDR.SoapySDRArgInfoList_assign(self, *args)
    def resize(self, *args): return _SoapySDR.SoapySDRArgInfoList_resize(self, *args)
    def insert(self, *args): return _SoapySDR.SoapySDRArgInfoList_insert(self, *args)
    def reserve(self, *args): return _SoapySDR.SoapySDRArgInfoList_reserve(self, *args)
    def capacity(self): return _SoapySDR.SoapySDRArgInfoList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRArgInfoList
    __del__ = lambda self : None;
SoapySDRArgInfoList_swigregister = _SoapySDR.SoapySDRArgInfoList_swigregister
SoapySDRArgInfoList_swigregister(SoapySDRArgInfoList)
class SoapySDRStringList(_object):
    """SWIG proxy for a std::vector of strings, exposing a list-like
    Python interface."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRStringList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRStringList, name)
    __repr__ = _swig_repr
    def iterator(self): return _SoapySDR.SoapySDRStringList_iterator(self)
    def __iter__(self): return self.iterator()
    def __nonzero__(self): return _SoapySDR.SoapySDRStringList___nonzero__(self)
    def __bool__(self): return _SoapySDR.SoapySDRStringList___bool__(self)
    def __len__(self): return _SoapySDR.SoapySDRStringList___len__(self)
    def pop(self): return _SoapySDR.SoapySDRStringList_pop(self)
    def __getslice__(self, *args): return _SoapySDR.SoapySDRStringList___getslice__(self, *args)
    def __setslice__(self, *args): return _SoapySDR.SoapySDRStringList___setslice__(self, *args)
    def __delslice__(self, *args): return _SoapySDR.SoapySDRStringList___delslice__(self, *args)
    def __delitem__(self, *args): return _SoapySDR.SoapySDRStringList___delitem__(self, *args)
    def __getitem__(self, *args): return _SoapySDR.SoapySDRStringList___getitem__(self, *args)
    def __setitem__(self, *args): return _SoapySDR.SoapySDRStringList___setitem__(self, *args)
    def append(self, *args): return _SoapySDR.SoapySDRStringList_append(self, *args)
    def empty(self): return _SoapySDR.SoapySDRStringList_empty(self)
    def size(self): return _SoapySDR.SoapySDRStringList_size(self)
    def clear(self): return _SoapySDR.SoapySDRStringList_clear(self)
    def swap(self, *args): return _SoapySDR.SoapySDRStringList_swap(self, *args)
    def get_allocator(self): return _SoapySDR.SoapySDRStringList_get_allocator(self)
    def begin(self): return _SoapySDR.SoapySDRStringList_begin(self)
    def end(self): return _SoapySDR.SoapySDRStringList_end(self)
    def rbegin(self): return _SoapySDR.SoapySDRStringList_rbegin(self)
    def rend(self): return _SoapySDR.SoapySDRStringList_rend(self)
    def pop_back(self): return _SoapySDR.SoapySDRStringList_pop_back(self)
    def erase(self, *args): return _SoapySDR.SoapySDRStringList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRStringList(*args)
        try: self.this.append(this)
        except: self.this = this
    def push_back(self, *args): return _SoapySDR.SoapySDRStringList_push_back(self, *args)
    def front(self): return _SoapySDR.SoapySDRStringList_front(self)
    def back(self): return _SoapySDR.SoapySDRStringList_back(self)
    def assign(self, *args): return _SoapySDR.SoapySDRStringList_assign(self, *args)
    def resize(self, *args): return _SoapySDR.SoapySDRStringList_resize(self, *args)
    def insert(self, *args): return _SoapySDR.SoapySDRStringList_insert(self, *args)
    def reserve(self, *args): return _SoapySDR.SoapySDRStringList_reserve(self, *args)
    def capacity(self): return _SoapySDR.SoapySDRStringList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRStringList
    __del__ = lambda self : None;
SoapySDRStringList_swigregister = _SoapySDR.SoapySDRStringList_swigregister
SoapySDRStringList_swigregister(SoapySDRStringList)
class SoapySDRRangeList(_object):
    """SWIG-generated sequence proxy; all operations delegate to _SoapySDR.

    Behaves like a Python list (iteration, indexing, slicing, len, bool).
    Presumably wraps a C++ vector of range values — confirm against the
    SWIG interface file.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRRangeList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRRangeList, name)
    __repr__ = _swig_repr
    def iterator(self): return _SoapySDR.SoapySDRRangeList_iterator(self)
    def __iter__(self): return self.iterator()
    def __nonzero__(self): return _SoapySDR.SoapySDRRangeList___nonzero__(self)
    def __bool__(self): return _SoapySDR.SoapySDRRangeList___bool__(self)
    def __len__(self): return _SoapySDR.SoapySDRRangeList___len__(self)
    def pop(self): return _SoapySDR.SoapySDRRangeList_pop(self)
    def __getslice__(self, *args): return _SoapySDR.SoapySDRRangeList___getslice__(self, *args)
    def __setslice__(self, *args): return _SoapySDR.SoapySDRRangeList___setslice__(self, *args)
    def __delslice__(self, *args): return _SoapySDR.SoapySDRRangeList___delslice__(self, *args)
    def __delitem__(self, *args): return _SoapySDR.SoapySDRRangeList___delitem__(self, *args)
    def __getitem__(self, *args): return _SoapySDR.SoapySDRRangeList___getitem__(self, *args)
    def __setitem__(self, *args): return _SoapySDR.SoapySDRRangeList___setitem__(self, *args)
    def append(self, *args): return _SoapySDR.SoapySDRRangeList_append(self, *args)
    def empty(self): return _SoapySDR.SoapySDRRangeList_empty(self)
    def size(self): return _SoapySDR.SoapySDRRangeList_size(self)
    def clear(self): return _SoapySDR.SoapySDRRangeList_clear(self)
    def swap(self, *args): return _SoapySDR.SoapySDRRangeList_swap(self, *args)
    def get_allocator(self): return _SoapySDR.SoapySDRRangeList_get_allocator(self)
    def begin(self): return _SoapySDR.SoapySDRRangeList_begin(self)
    def end(self): return _SoapySDR.SoapySDRRangeList_end(self)
    def rbegin(self): return _SoapySDR.SoapySDRRangeList_rbegin(self)
    def rend(self): return _SoapySDR.SoapySDRRangeList_rend(self)
    def pop_back(self): return _SoapySDR.SoapySDRRangeList_pop_back(self)
    def erase(self, *args): return _SoapySDR.SoapySDRRangeList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRRangeList(*args)
        # SWIG boilerplate: attach the newly created C object to this proxy.
        try: self.this.append(this)
        except: self.this = this
    def push_back(self, *args): return _SoapySDR.SoapySDRRangeList_push_back(self, *args)
    def front(self): return _SoapySDR.SoapySDRRangeList_front(self)
    def back(self): return _SoapySDR.SoapySDRRangeList_back(self)
    def assign(self, *args): return _SoapySDR.SoapySDRRangeList_assign(self, *args)
    def resize(self, *args): return _SoapySDR.SoapySDRRangeList_resize(self, *args)
    def insert(self, *args): return _SoapySDR.SoapySDRRangeList_insert(self, *args)
    def reserve(self, *args): return _SoapySDR.SoapySDRRangeList_reserve(self, *args)
    def capacity(self): return _SoapySDR.SoapySDRRangeList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRRangeList
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime type system.
SoapySDRRangeList_swigregister = _SoapySDR.SoapySDRRangeList_swigregister
SoapySDRRangeList_swigregister(SoapySDRRangeList)
class SoapySDRSizeList(_object):
    """SWIG-generated sequence proxy; all operations delegate to _SoapySDR.

    Behaves like a Python list (iteration, indexing, slicing, len, bool).
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRSizeList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRSizeList, name)
    __repr__ = _swig_repr
    def iterator(self): return _SoapySDR.SoapySDRSizeList_iterator(self)
    def __iter__(self): return self.iterator()
    def __nonzero__(self): return _SoapySDR.SoapySDRSizeList___nonzero__(self)
    def __bool__(self): return _SoapySDR.SoapySDRSizeList___bool__(self)
    def __len__(self): return _SoapySDR.SoapySDRSizeList___len__(self)
    def pop(self): return _SoapySDR.SoapySDRSizeList_pop(self)
    def __getslice__(self, *args): return _SoapySDR.SoapySDRSizeList___getslice__(self, *args)
    def __setslice__(self, *args): return _SoapySDR.SoapySDRSizeList___setslice__(self, *args)
    def __delslice__(self, *args): return _SoapySDR.SoapySDRSizeList___delslice__(self, *args)
    def __delitem__(self, *args): return _SoapySDR.SoapySDRSizeList___delitem__(self, *args)
    def __getitem__(self, *args): return _SoapySDR.SoapySDRSizeList___getitem__(self, *args)
    def __setitem__(self, *args): return _SoapySDR.SoapySDRSizeList___setitem__(self, *args)
    def append(self, *args): return _SoapySDR.SoapySDRSizeList_append(self, *args)
    def empty(self): return _SoapySDR.SoapySDRSizeList_empty(self)
    def size(self): return _SoapySDR.SoapySDRSizeList_size(self)
    def clear(self): return _SoapySDR.SoapySDRSizeList_clear(self)
    def swap(self, *args): return _SoapySDR.SoapySDRSizeList_swap(self, *args)
    def get_allocator(self): return _SoapySDR.SoapySDRSizeList_get_allocator(self)
    def begin(self): return _SoapySDR.SoapySDRSizeList_begin(self)
    def end(self): return _SoapySDR.SoapySDRSizeList_end(self)
    def rbegin(self): return _SoapySDR.SoapySDRSizeList_rbegin(self)
    def rend(self): return _SoapySDR.SoapySDRSizeList_rend(self)
    def pop_back(self): return _SoapySDR.SoapySDRSizeList_pop_back(self)
    def erase(self, *args): return _SoapySDR.SoapySDRSizeList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRSizeList(*args)
        # SWIG boilerplate: attach the newly created C object to this proxy.
        try: self.this.append(this)
        except: self.this = this
    def push_back(self, *args): return _SoapySDR.SoapySDRSizeList_push_back(self, *args)
    def front(self): return _SoapySDR.SoapySDRSizeList_front(self)
    def back(self): return _SoapySDR.SoapySDRSizeList_back(self)
    def assign(self, *args): return _SoapySDR.SoapySDRSizeList_assign(self, *args)
    def resize(self, *args): return _SoapySDR.SoapySDRSizeList_resize(self, *args)
    def insert(self, *args): return _SoapySDR.SoapySDRSizeList_insert(self, *args)
    def reserve(self, *args): return _SoapySDR.SoapySDRSizeList_reserve(self, *args)
    def capacity(self): return _SoapySDR.SoapySDRSizeList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRSizeList
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime type system.
SoapySDRSizeList_swigregister = _SoapySDR.SoapySDRSizeList_swigregister
SoapySDRSizeList_swigregister(SoapySDRSizeList)
class SoapySDRDoubleList(_object):
    """SWIG-generated sequence proxy; all operations delegate to _SoapySDR.

    Behaves like a Python list (iteration, indexing, slicing, len, bool).
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRDoubleList, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRDoubleList, name)
    __repr__ = _swig_repr
    def iterator(self): return _SoapySDR.SoapySDRDoubleList_iterator(self)
    def __iter__(self): return self.iterator()
    def __nonzero__(self): return _SoapySDR.SoapySDRDoubleList___nonzero__(self)
    def __bool__(self): return _SoapySDR.SoapySDRDoubleList___bool__(self)
    def __len__(self): return _SoapySDR.SoapySDRDoubleList___len__(self)
    def pop(self): return _SoapySDR.SoapySDRDoubleList_pop(self)
    def __getslice__(self, *args): return _SoapySDR.SoapySDRDoubleList___getslice__(self, *args)
    def __setslice__(self, *args): return _SoapySDR.SoapySDRDoubleList___setslice__(self, *args)
    def __delslice__(self, *args): return _SoapySDR.SoapySDRDoubleList___delslice__(self, *args)
    def __delitem__(self, *args): return _SoapySDR.SoapySDRDoubleList___delitem__(self, *args)
    def __getitem__(self, *args): return _SoapySDR.SoapySDRDoubleList___getitem__(self, *args)
    def __setitem__(self, *args): return _SoapySDR.SoapySDRDoubleList___setitem__(self, *args)
    def append(self, *args): return _SoapySDR.SoapySDRDoubleList_append(self, *args)
    def empty(self): return _SoapySDR.SoapySDRDoubleList_empty(self)
    def size(self): return _SoapySDR.SoapySDRDoubleList_size(self)
    def clear(self): return _SoapySDR.SoapySDRDoubleList_clear(self)
    def swap(self, *args): return _SoapySDR.SoapySDRDoubleList_swap(self, *args)
    def get_allocator(self): return _SoapySDR.SoapySDRDoubleList_get_allocator(self)
    def begin(self): return _SoapySDR.SoapySDRDoubleList_begin(self)
    def end(self): return _SoapySDR.SoapySDRDoubleList_end(self)
    def rbegin(self): return _SoapySDR.SoapySDRDoubleList_rbegin(self)
    def rend(self): return _SoapySDR.SoapySDRDoubleList_rend(self)
    def pop_back(self): return _SoapySDR.SoapySDRDoubleList_pop_back(self)
    def erase(self, *args): return _SoapySDR.SoapySDRDoubleList_erase(self, *args)
    def __init__(self, *args):
        this = _SoapySDR.new_SoapySDRDoubleList(*args)
        # SWIG boilerplate: attach the newly created C object to this proxy.
        try: self.this.append(this)
        except: self.this = this
    def push_back(self, *args): return _SoapySDR.SoapySDRDoubleList_push_back(self, *args)
    def front(self): return _SoapySDR.SoapySDRDoubleList_front(self)
    def back(self): return _SoapySDR.SoapySDRDoubleList_back(self)
    def assign(self, *args): return _SoapySDR.SoapySDRDoubleList_assign(self, *args)
    def resize(self, *args): return _SoapySDR.SoapySDRDoubleList_resize(self, *args)
    def insert(self, *args): return _SoapySDR.SoapySDRDoubleList_insert(self, *args)
    def reserve(self, *args): return _SoapySDR.SoapySDRDoubleList_reserve(self, *args)
    def capacity(self): return _SoapySDR.SoapySDRDoubleList_capacity(self)
    __swig_destroy__ = _SoapySDR.delete_SoapySDRDoubleList
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime type system.
SoapySDRDoubleList_swigregister = _SoapySDR.SoapySDRDoubleList_swigregister
SoapySDRDoubleList_swigregister(SoapySDRDoubleList)
class StreamResult(_object):
    """Result record for a stream operation.

    Attributes (SWIG properties backed by the C struct):
        ret      -- return code / element count
        flags    -- stream flag bits
        timeNs   -- timestamp in nanoseconds
        chanMask -- channel bit mask
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StreamResult, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StreamResult, name)
    __repr__ = _swig_repr
    def __init__(self):
        this = _SoapySDR.new_StreamResult()
        # SWIG boilerplate: attach the newly created C object to this proxy.
        try: self.this.append(this)
        except: self.this = this
    __swig_setmethods__["ret"] = _SoapySDR.StreamResult_ret_set
    __swig_getmethods__["ret"] = _SoapySDR.StreamResult_ret_get
    if _newclass:ret = _swig_property(_SoapySDR.StreamResult_ret_get, _SoapySDR.StreamResult_ret_set)
    __swig_setmethods__["flags"] = _SoapySDR.StreamResult_flags_set
    __swig_getmethods__["flags"] = _SoapySDR.StreamResult_flags_get
    if _newclass:flags = _swig_property(_SoapySDR.StreamResult_flags_get, _SoapySDR.StreamResult_flags_set)
    __swig_setmethods__["timeNs"] = _SoapySDR.StreamResult_timeNs_set
    __swig_getmethods__["timeNs"] = _SoapySDR.StreamResult_timeNs_get
    if _newclass:timeNs = _swig_property(_SoapySDR.StreamResult_timeNs_get, _SoapySDR.StreamResult_timeNs_set)
    __swig_setmethods__["chanMask"] = _SoapySDR.StreamResult_chanMask_set
    __swig_getmethods__["chanMask"] = _SoapySDR.StreamResult_chanMask_get
    if _newclass:chanMask = _swig_property(_SoapySDR.StreamResult_chanMask_get, _SoapySDR.StreamResult_chanMask_set)
    def __str__(self):
        # Human-readable summary; note chanMask is not included in the string.
        return "ret=%s, flags=%s, timeNs=%s"%(self.ret, self.flags, self.timeNs)
    __swig_destroy__ = _SoapySDR.delete_StreamResult
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime type system.
StreamResult_swigregister = _SoapySDR.StreamResult_swigregister
StreamResult_swigregister(StreamResult)
# Direction identifiers and per-call stream flag bits, re-exported from the
# compiled _SoapySDR extension module.
SOAPY_SDR_TX = _SoapySDR.SOAPY_SDR_TX
SOAPY_SDR_RX = _SoapySDR.SOAPY_SDR_RX
SOAPY_SDR_END_BURST = _SoapySDR.SOAPY_SDR_END_BURST
SOAPY_SDR_HAS_TIME = _SoapySDR.SOAPY_SDR_HAS_TIME
SOAPY_SDR_END_ABRUPT = _SoapySDR.SOAPY_SDR_END_ABRUPT
SOAPY_SDR_ONE_PACKET = _SoapySDR.SOAPY_SDR_ONE_PACKET
SOAPY_SDR_MORE_FRAGMENTS = _SoapySDR.SOAPY_SDR_MORE_FRAGMENTS
SOAPY_SDR_WAIT_TRIGGER = _SoapySDR.SOAPY_SDR_WAIT_TRIGGER
# ---------------------------------------------------------------------------
# Module-level constants and functions re-exported from the C extension.
# SWIG emits a Python 'def' for each function and then immediately rebinds
# the same name to the underlying C callable, so the defs themselves are
# inert placeholders. Do not remove the rebinding lines.
# ---------------------------------------------------------------------------
def SoapySDR_errToStr(*args):
    return _SoapySDR.SoapySDR_errToStr(*args)
SoapySDR_errToStr = _SoapySDR.SoapySDR_errToStr

# Error return codes.
SOAPY_SDR_TIMEOUT = _SoapySDR.SOAPY_SDR_TIMEOUT
SOAPY_SDR_STREAM_ERROR = _SoapySDR.SOAPY_SDR_STREAM_ERROR
SOAPY_SDR_CORRUPTION = _SoapySDR.SOAPY_SDR_CORRUPTION
SOAPY_SDR_OVERFLOW = _SoapySDR.SOAPY_SDR_OVERFLOW
SOAPY_SDR_NOT_SUPPORTED = _SoapySDR.SOAPY_SDR_NOT_SUPPORTED
SOAPY_SDR_TIME_ERROR = _SoapySDR.SOAPY_SDR_TIME_ERROR
SOAPY_SDR_UNDERFLOW = _SoapySDR.SOAPY_SDR_UNDERFLOW

# Build-time version identifiers and accessors.
SOAPY_SDR_API_VERSION = _SoapySDR.SOAPY_SDR_API_VERSION
SOAPY_SDR_ABI_VERSION = _SoapySDR.SOAPY_SDR_ABI_VERSION
def SoapySDR_getAPIVersion():
    return _SoapySDR.SoapySDR_getAPIVersion()
SoapySDR_getAPIVersion = _SoapySDR.SoapySDR_getAPIVersion
def SoapySDR_getABIVersion():
    return _SoapySDR.SoapySDR_getABIVersion()
SoapySDR_getABIVersion = _SoapySDR.SoapySDR_getABIVersion
def SoapySDR_getLibVersion():
    return _SoapySDR.SoapySDR_getLibVersion()
SoapySDR_getLibVersion = _SoapySDR.SoapySDR_getLibVersion

# Stream sample-format identifier strings.
SOAPY_SDR_CF64 = _SoapySDR.SOAPY_SDR_CF64
SOAPY_SDR_CF32 = _SoapySDR.SOAPY_SDR_CF32
SOAPY_SDR_CS32 = _SoapySDR.SOAPY_SDR_CS32
SOAPY_SDR_CU32 = _SoapySDR.SOAPY_SDR_CU32
SOAPY_SDR_CS16 = _SoapySDR.SOAPY_SDR_CS16
SOAPY_SDR_CU16 = _SoapySDR.SOAPY_SDR_CU16
SOAPY_SDR_CS12 = _SoapySDR.SOAPY_SDR_CS12
SOAPY_SDR_CU12 = _SoapySDR.SOAPY_SDR_CU12
SOAPY_SDR_CS8 = _SoapySDR.SOAPY_SDR_CS8
SOAPY_SDR_CU8 = _SoapySDR.SOAPY_SDR_CU8
SOAPY_SDR_CS4 = _SoapySDR.SOAPY_SDR_CS4
SOAPY_SDR_CU4 = _SoapySDR.SOAPY_SDR_CU4
SOAPY_SDR_F64 = _SoapySDR.SOAPY_SDR_F64
SOAPY_SDR_F32 = _SoapySDR.SOAPY_SDR_F32
SOAPY_SDR_S32 = _SoapySDR.SOAPY_SDR_S32
SOAPY_SDR_U32 = _SoapySDR.SOAPY_SDR_U32
SOAPY_SDR_S16 = _SoapySDR.SOAPY_SDR_S16
SOAPY_SDR_U16 = _SoapySDR.SOAPY_SDR_U16
SOAPY_SDR_S8 = _SoapySDR.SOAPY_SDR_S8
SOAPY_SDR_U8 = _SoapySDR.SOAPY_SDR_U8
def SoapySDR_formatToSize(*args):
    return _SoapySDR.SoapySDR_formatToSize(*args)
SoapySDR_formatToSize = _SoapySDR.SoapySDR_formatToSize

# Log severity levels and logging entry points.
SOAPY_SDR_FATAL = _SoapySDR.SOAPY_SDR_FATAL
SOAPY_SDR_CRITICAL = _SoapySDR.SOAPY_SDR_CRITICAL
SOAPY_SDR_ERROR = _SoapySDR.SOAPY_SDR_ERROR
SOAPY_SDR_WARNING = _SoapySDR.SOAPY_SDR_WARNING
SOAPY_SDR_NOTICE = _SoapySDR.SOAPY_SDR_NOTICE
SOAPY_SDR_INFO = _SoapySDR.SOAPY_SDR_INFO
SOAPY_SDR_DEBUG = _SoapySDR.SOAPY_SDR_DEBUG
SOAPY_SDR_TRACE = _SoapySDR.SOAPY_SDR_TRACE
SOAPY_SDR_SSI = _SoapySDR.SOAPY_SDR_SSI
def SoapySDR_log(*args):
    return _SoapySDR.SoapySDR_log(*args)
SoapySDR_log = _SoapySDR.SoapySDR_log
def SoapySDR_setLogLevel(*args):
    return _SoapySDR.SoapySDR_setLogLevel(*args)
SoapySDR_setLogLevel = _SoapySDR.SoapySDR_setLogLevel

# Short-name aliases for the same C entry points (without the SoapySDR_ prefix).
def errToStr(*args):
    return _SoapySDR.errToStr(*args)
errToStr = _SoapySDR.errToStr
def getAPIVersion():
    return _SoapySDR.getAPIVersion()
getAPIVersion = _SoapySDR.getAPIVersion
def getABIVersion():
    return _SoapySDR.getABIVersion()
getABIVersion = _SoapySDR.getABIVersion
def getLibVersion():
    return _SoapySDR.getLibVersion()
getLibVersion = _SoapySDR.getLibVersion
def getRootPath():
    return _SoapySDR.getRootPath()
getRootPath = _SoapySDR.getRootPath
def listSearchPaths():
    return _SoapySDR.listSearchPaths()
listSearchPaths = _SoapySDR.listSearchPaths
def listModules(*args):
    return _SoapySDR.listModules(*args)
listModules = _SoapySDR.listModules
def loadModule(*args):
    return _SoapySDR.loadModule(*args)
loadModule = _SoapySDR.loadModule
def getLoaderResult(*args):
    return _SoapySDR.getLoaderResult(*args)
getLoaderResult = _SoapySDR.getLoaderResult
def unloadModule(*args):
    return _SoapySDR.unloadModule(*args)
unloadModule = _SoapySDR.unloadModule
def loadModules():
    return _SoapySDR.loadModules()
loadModules = _SoapySDR.loadModules
def formatToSize(*args):
    return _SoapySDR.formatToSize(*args)
formatToSize = _SoapySDR.formatToSize
def ticksToTimeNs(*args):
    return _SoapySDR.ticksToTimeNs(*args)
ticksToTimeNs = _SoapySDR.ticksToTimeNs
def timeNsToTicks(*args):
    return _SoapySDR.timeNsToTicks(*args)
timeNsToTicks = _SoapySDR.timeNsToTicks
def log(*args):
    return _SoapySDR.log(*args)
log = _SoapySDR.log
def setLogLevel(*args):
    return _SoapySDR.setLogLevel(*args)
setLogLevel = _SoapySDR.setLogLevel
class Device(_object):
    """SWIG proxy for an SDR device.

    Exposes channels, streams, tuning, gain, antennas, corrections,
    clock/time sources, sensors, registers, settings, and GPIO/I2C/SPI/UART
    access. Every method delegates to the compiled _SoapySDR extension.
    Instances are produced via Device.make()/enumerate(); direct
    construction is disabled. The readStream/writeStream wrappers defined
    near the end of the class shadow the raw delegators and convert Python
    buffer objects to raw pointers first.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Device, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Device, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    __swig_destroy__ = _SoapySDR.delete_Device
    __del__ = lambda self : None;
    # Factory functions exposed as static methods.
    __swig_getmethods__["enumerate"] = lambda x: _SoapySDR.Device_enumerate
    if _newclass:enumerate = staticmethod(_SoapySDR.Device_enumerate)
    __swig_getmethods__["make"] = lambda x: _SoapySDR.Device_make
    if _newclass:make = staticmethod(_SoapySDR.Device_make)
    __swig_getmethods__["unmake"] = lambda x: _SoapySDR.Device_unmake
    if _newclass:unmake = staticmethod(_SoapySDR.Device_unmake)
    # Identification.
    def getDriverKey(self): return _SoapySDR.Device_getDriverKey(self)
    def getHardwareKey(self): return _SoapySDR.Device_getHardwareKey(self)
    def getHardwareInfo(self): return _SoapySDR.Device_getHardwareInfo(self)
    # Channels.
    def setFrontendMapping(self, *args): return _SoapySDR.Device_setFrontendMapping(self, *args)
    def getFrontendMapping(self, *args): return _SoapySDR.Device_getFrontendMapping(self, *args)
    def getNumChannels(self, *args): return _SoapySDR.Device_getNumChannels(self, *args)
    def getChannelInfo(self, *args): return _SoapySDR.Device_getChannelInfo(self, *args)
    def getFullDuplex(self, *args): return _SoapySDR.Device_getFullDuplex(self, *args)
    # Stream setup and raw stream API.
    def getStreamFormats(self, *args): return _SoapySDR.Device_getStreamFormats(self, *args)
    def getNativeStreamFormat(self, *args): return _SoapySDR.Device_getNativeStreamFormat(self, *args)
    def getStreamArgsInfo(self, *args): return _SoapySDR.Device_getStreamArgsInfo(self, *args)
    def setupStream(self, *args): return _SoapySDR.Device_setupStream(self, *args)
    def closeStream(self, *args): return _SoapySDR.Device_closeStream(self, *args)
    def getStreamMTU(self, *args): return _SoapySDR.Device_getStreamMTU(self, *args)
    def activateStream(self, *args): return _SoapySDR.Device_activateStream(self, *args)
    def deactivateStream(self, *args): return _SoapySDR.Device_deactivateStream(self, *args)
    def readStream(self, *args): return _SoapySDR.Device_readStream(self, *args)
    def writeStream(self, *args): return _SoapySDR.Device_writeStream(self, *args)
    def readStreamStatus(self, *args): return _SoapySDR.Device_readStreamStatus(self, *args)
    def getNumDirectAccessBuffers(self, *args): return _SoapySDR.Device_getNumDirectAccessBuffers(self, *args)
    def getDirectAccessBufferAddrs(self, *args): return _SoapySDR.Device_getDirectAccessBufferAddrs(self, *args)
    def acquireReadBuffer(self, *args): return _SoapySDR.Device_acquireReadBuffer(self, *args)
    def releaseReadBuffer(self, *args): return _SoapySDR.Device_releaseReadBuffer(self, *args)
    def acquireWriteBuffer(self, *args): return _SoapySDR.Device_acquireWriteBuffer(self, *args)
    def releaseWriteBuffer(self, *args): return _SoapySDR.Device_releaseWriteBuffer(self, *args)
    # Antennas and frontend corrections.
    def listAntennas(self, *args): return _SoapySDR.Device_listAntennas(self, *args)
    def setAntenna(self, *args): return _SoapySDR.Device_setAntenna(self, *args)
    def getAntenna(self, *args): return _SoapySDR.Device_getAntenna(self, *args)
    def hasDCOffsetMode(self, *args): return _SoapySDR.Device_hasDCOffsetMode(self, *args)
    def setDCOffsetMode(self, *args): return _SoapySDR.Device_setDCOffsetMode(self, *args)
    def getDCOffsetMode(self, *args): return _SoapySDR.Device_getDCOffsetMode(self, *args)
    def hasDCOffset(self, *args): return _SoapySDR.Device_hasDCOffset(self, *args)
    def setDCOffset(self, *args): return _SoapySDR.Device_setDCOffset(self, *args)
    def getDCOffset(self, *args): return _SoapySDR.Device_getDCOffset(self, *args)
    def hasIQBalance(self, *args): return _SoapySDR.Device_hasIQBalance(self, *args)
    def setIQBalance(self, *args): return _SoapySDR.Device_setIQBalance(self, *args)
    def getIQBalance(self, *args): return _SoapySDR.Device_getIQBalance(self, *args)
    def hasFrequencyCorrection(self, *args): return _SoapySDR.Device_hasFrequencyCorrection(self, *args)
    def setFrequencyCorrection(self, *args): return _SoapySDR.Device_setFrequencyCorrection(self, *args)
    def getFrequencyCorrection(self, *args): return _SoapySDR.Device_getFrequencyCorrection(self, *args)
    # Gain.
    def listGains(self, *args): return _SoapySDR.Device_listGains(self, *args)
    def hasGainMode(self, *args): return _SoapySDR.Device_hasGainMode(self, *args)
    def setGainMode(self, *args): return _SoapySDR.Device_setGainMode(self, *args)
    def getGainMode(self, *args): return _SoapySDR.Device_getGainMode(self, *args)
    def setGain(self, *args): return _SoapySDR.Device_setGain(self, *args)
    def getGain(self, *args): return _SoapySDR.Device_getGain(self, *args)
    def getGainRange(self, *args): return _SoapySDR.Device_getGainRange(self, *args)
    # Frequency, sample rate, bandwidth.
    def setFrequency(self, *args): return _SoapySDR.Device_setFrequency(self, *args)
    def getFrequency(self, *args): return _SoapySDR.Device_getFrequency(self, *args)
    def listFrequencies(self, *args): return _SoapySDR.Device_listFrequencies(self, *args)
    def getFrequencyRange(self, *args): return _SoapySDR.Device_getFrequencyRange(self, *args)
    def getFrequencyArgsInfo(self, *args): return _SoapySDR.Device_getFrequencyArgsInfo(self, *args)
    def setSampleRate(self, *args): return _SoapySDR.Device_setSampleRate(self, *args)
    def getSampleRate(self, *args): return _SoapySDR.Device_getSampleRate(self, *args)
    def listSampleRates(self, *args): return _SoapySDR.Device_listSampleRates(self, *args)
    def getSampleRateRange(self, *args): return _SoapySDR.Device_getSampleRateRange(self, *args)
    def setBandwidth(self, *args): return _SoapySDR.Device_setBandwidth(self, *args)
    def getBandwidth(self, *args): return _SoapySDR.Device_getBandwidth(self, *args)
    def listBandwidths(self, *args): return _SoapySDR.Device_listBandwidths(self, *args)
    def getBandwidthRange(self, *args): return _SoapySDR.Device_getBandwidthRange(self, *args)
    # Clocking and time.
    def setMasterClockRate(self, *args): return _SoapySDR.Device_setMasterClockRate(self, *args)
    def getMasterClockRate(self): return _SoapySDR.Device_getMasterClockRate(self)
    def getMasterClockRates(self): return _SoapySDR.Device_getMasterClockRates(self)
    def listClockSources(self): return _SoapySDR.Device_listClockSources(self)
    def setClockSource(self, *args): return _SoapySDR.Device_setClockSource(self, *args)
    def getClockSource(self): return _SoapySDR.Device_getClockSource(self)
    def listTimeSources(self): return _SoapySDR.Device_listTimeSources(self)
    def setTimeSource(self, *args): return _SoapySDR.Device_setTimeSource(self, *args)
    def getTimeSource(self): return _SoapySDR.Device_getTimeSource(self)
    def hasHardwareTime(self, what=""): return _SoapySDR.Device_hasHardwareTime(self, what)
    def getHardwareTime(self, what=""): return _SoapySDR.Device_getHardwareTime(self, what)
    def setHardwareTime(self, *args): return _SoapySDR.Device_setHardwareTime(self, *args)
    def setCommandTime(self, *args): return _SoapySDR.Device_setCommandTime(self, *args)
    # Sensors, registers, settings.
    def listSensors(self, *args): return _SoapySDR.Device_listSensors(self, *args)
    def getSensorInfo(self, *args): return _SoapySDR.Device_getSensorInfo(self, *args)
    def readSensor(self, *args): return _SoapySDR.Device_readSensor(self, *args)
    def listRegisterInterfaces(self): return _SoapySDR.Device_listRegisterInterfaces(self)
    def writeRegister(self, *args): return _SoapySDR.Device_writeRegister(self, *args)
    def readRegister(self, *args): return _SoapySDR.Device_readRegister(self, *args)
    def writeRegisters(self, *args): return _SoapySDR.Device_writeRegisters(self, *args)
    def readRegisters(self, *args): return _SoapySDR.Device_readRegisters(self, *args)
    def getSettingInfo(self, *args): return _SoapySDR.Device_getSettingInfo(self, *args)
    def writeSetting(self, *args): return _SoapySDR.Device_writeSetting(self, *args)
    def readSetting(self, *args): return _SoapySDR.Device_readSetting(self, *args)
    # Low-level peripheral access.
    def listGPIOBanks(self): return _SoapySDR.Device_listGPIOBanks(self)
    def writeGPIO(self, *args): return _SoapySDR.Device_writeGPIO(self, *args)
    def readGPIO(self, *args): return _SoapySDR.Device_readGPIO(self, *args)
    def writeGPIODir(self, *args): return _SoapySDR.Device_writeGPIODir(self, *args)
    def readGPIODir(self, *args): return _SoapySDR.Device_readGPIODir(self, *args)
    def writeI2C(self, *args): return _SoapySDR.Device_writeI2C(self, *args)
    def readI2C(self, *args): return _SoapySDR.Device_readI2C(self, *args)
    def transactSPI(self, *args): return _SoapySDR.Device_transactSPI(self, *args)
    def listUARTs(self): return _SoapySDR.Device_listUARTs(self)
    def writeUART(self, *args): return _SoapySDR.Device_writeUART(self, *args)
    def readUART(self, *args): return _SoapySDR.Device_readUART(self, *args)
    # Raw pointer-based stream calls used by the Python wrappers below.
    def readStream__(self, *args): return _SoapySDR.Device_readStream__(self, *args)
    def writeStream__(self, *args): return _SoapySDR.Device_writeStream__(self, *args)
    def readStreamStatus__(self, *args): return _SoapySDR.Device_readStreamStatus__(self, *args)
    # Call unmake from the custom deleter so the C device is released.
    def __del__(self):
        Device.unmake(self)
    def __str__(self):
        return "%s:%s"%(self.getDriverKey(), self.getHardwareKey())
    def readStream(self, stream, buffs, numElems, flags = 0, timeoutUs = 100000):
        # Convert Python buffer objects into raw addresses for the C API.
        ptrs = [extractBuffPointer(b) for b in buffs]
        return self.readStream__(stream, ptrs, numElems, flags, timeoutUs)
    def writeStream(self, stream, buffs, numElems, flags = 0, timeNs = 0, timeoutUs = 100000):
        # Convert Python buffer objects into raw addresses for the C API.
        ptrs = [extractBuffPointer(b) for b in buffs]
        return self.writeStream__(stream, ptrs, numElems, flags, timeNs, timeoutUs)
    def readStreamStatus(self, stream, timeoutUs = 100000):
        return self.readStreamStatus__(stream, timeoutUs)
# Register the proxy class with the SWIG runtime type system.
Device_swigregister = _SoapySDR.Device_swigregister
Device_swigregister(Device)
# Free-function forms of the Device factory calls; each def is immediately
# rebound to the C callable (standard SWIG output pattern).
def Device_enumerate(*args):
    return _SoapySDR.Device_enumerate(*args)
Device_enumerate = _SoapySDR.Device_enumerate
def Device_make(*args):
    return _SoapySDR.Device_make(*args)
Device_make = _SoapySDR.Device_make
def Device_unmake(*args):
    return _SoapySDR.Device_unmake(*args)
Device_unmake = _SoapySDR.Device_unmake
# Export every SOAPY_SDR_* constant defined above, in sorted order.
__all__ = sorted(name for name in globals() if name.startswith('SOAPY_SDR_'))

# Keep a handle on the SWIG-generated Device before it is re-declared below.
_Device = Device
class Device(Device):
    """User-facing Device class: constructing one invokes the make() factory."""
    def __new__(cls, *args, **kwargs):
        # Device(args...) returns whatever the factory produces (an instance
        # of the underlying SWIG Device proxy), not a plain instance of cls.
        return cls.make(*args, **kwargs)
def extractBuffPointer(buff):
    """Return the raw memory address of a buffer-like object.

    Accepted inputs, in order of preference:
      * objects exposing the numpy ``__array_interface__`` protocol --
        the address is taken from its ``data`` field;
      * objects convertible to an integer via ``__int__``;
      * legacy objects convertible only via ``__long__`` (Python 2 only).

    Raises Exception for anything else.
    """
    if hasattr(buff, '__array_interface__'):
        return buff.__array_interface__['data'][0]
    # Check __int__ before __long__: the original order tried the 'long'
    # builtin first, which no longer exists on Python 3 and would raise
    # NameError there.  On Python 2 int() auto-promotes values beyond
    # sys.maxint, so the returned address is unchanged.
    if hasattr(buff, '__int__'):
        return int(buff)
    if hasattr(buff, '__long__'):
        return long(buff)  # Python 2 only; unreachable on Python 3
    raise Exception("Unrecognized data format: " + str(type(buff)))
# This file is compatible with both classic and new-style classes.
|
flexible
|
{
"blob_id": "a6670d0d09f02b674bc31b770f42d4d8a01a4a4e",
"index": 9884,
"step-1": "<mask token>\n\n\nclass SoapySDRSizeList(_object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def iterator(self):\n return _SoapySDR.SoapySDRSizeList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRSizeList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRSizeList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRSizeList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRSizeList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___setitem__(self, *args)\n <mask token>\n\n def empty(self):\n return _SoapySDR.SoapySDRSizeList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRSizeList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRSizeList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRSizeList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRSizeList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRSizeList_begin(self)\n <mask token>\n\n def rbegin(self):\n return _SoapySDR.SoapySDRSizeList_rbegin(self)\n <mask token>\n\n def pop_back(self):\n return _SoapySDR.SoapySDRSizeList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRSizeList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRSizeList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def 
push_back(self, *args):\n return _SoapySDR.SoapySDRSizeList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRSizeList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRSizeList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRSizeList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRSizeList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRSizeList_insert(self, *args)\n <mask token>\n\n def capacity(self):\n return _SoapySDR.SoapySDRSizeList_capacity(self)\n <mask token>\n <mask token>\n\n\n<mask token>\n\n\nclass SoapySDRDoubleList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRDoubleList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRDoubleList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRDoubleList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRDoubleList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRDoubleList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRDoubleList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRDoubleList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___setitem__(self, *args)\n\n def append(self, *args):\n return 
_SoapySDR.SoapySDRDoubleList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRDoubleList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRDoubleList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRDoubleList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRDoubleList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRDoubleList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRDoubleList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRDoubleList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRDoubleList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRDoubleList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRDoubleList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRDoubleList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRDoubleList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRDoubleList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRDoubleList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRDoubleList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRDoubleList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRDoubleList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRDoubleList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRDoubleList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRDoubleList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRDoubleList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass StreamResult(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n StreamResult, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: 
_swig_getattr(self, StreamResult, name)\n __repr__ = _swig_repr\n\n def __init__(self):\n this = _SoapySDR.new_StreamResult()\n try:\n self.this.append(this)\n except:\n self.this = this\n __swig_setmethods__['ret'] = _SoapySDR.StreamResult_ret_set\n __swig_getmethods__['ret'] = _SoapySDR.StreamResult_ret_get\n if _newclass:\n ret = _swig_property(_SoapySDR.StreamResult_ret_get, _SoapySDR.\n StreamResult_ret_set)\n __swig_setmethods__['flags'] = _SoapySDR.StreamResult_flags_set\n __swig_getmethods__['flags'] = _SoapySDR.StreamResult_flags_get\n if _newclass:\n flags = _swig_property(_SoapySDR.StreamResult_flags_get, _SoapySDR.\n StreamResult_flags_set)\n __swig_setmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_set\n __swig_getmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_get\n if _newclass:\n timeNs = _swig_property(_SoapySDR.StreamResult_timeNs_get,\n _SoapySDR.StreamResult_timeNs_set)\n __swig_setmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_set\n __swig_getmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_get\n if _newclass:\n chanMask = _swig_property(_SoapySDR.StreamResult_chanMask_get,\n _SoapySDR.StreamResult_chanMask_set)\n\n def __str__(self):\n return 'ret=%s, flags=%s, timeNs=%s' % (self.ret, self.flags, self.\n timeNs)\n __swig_destroy__ = _SoapySDR.delete_StreamResult\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass Device(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, Device,\n name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, Device, name)\n\n def __init__(self, *args, **kwargs):\n raise AttributeError('No constructor defined')\n __repr__ = _swig_repr\n __swig_destroy__ = _SoapySDR.delete_Device\n __del__ = lambda self: None\n __swig_getmethods__['enumerate'] = lambda x: _SoapySDR.Device_enumerate\n if _newclass:\n enumerate = staticmethod(_SoapySDR.Device_enumerate)\n __swig_getmethods__['make'] = lambda x: 
_SoapySDR.Device_make\n if _newclass:\n make = staticmethod(_SoapySDR.Device_make)\n __swig_getmethods__['unmake'] = lambda x: _SoapySDR.Device_unmake\n if _newclass:\n unmake = staticmethod(_SoapySDR.Device_unmake)\n\n def getDriverKey(self):\n return _SoapySDR.Device_getDriverKey(self)\n\n def getHardwareKey(self):\n return _SoapySDR.Device_getHardwareKey(self)\n\n def getHardwareInfo(self):\n return _SoapySDR.Device_getHardwareInfo(self)\n\n def setFrontendMapping(self, *args):\n return _SoapySDR.Device_setFrontendMapping(self, *args)\n\n def getFrontendMapping(self, *args):\n return _SoapySDR.Device_getFrontendMapping(self, *args)\n\n def getNumChannels(self, *args):\n return _SoapySDR.Device_getNumChannels(self, *args)\n\n def getChannelInfo(self, *args):\n return _SoapySDR.Device_getChannelInfo(self, *args)\n\n def getFullDuplex(self, *args):\n return _SoapySDR.Device_getFullDuplex(self, *args)\n\n def getStreamFormats(self, *args):\n return _SoapySDR.Device_getStreamFormats(self, *args)\n\n def getNativeStreamFormat(self, *args):\n return _SoapySDR.Device_getNativeStreamFormat(self, *args)\n\n def getStreamArgsInfo(self, *args):\n return _SoapySDR.Device_getStreamArgsInfo(self, *args)\n\n def setupStream(self, *args):\n return _SoapySDR.Device_setupStream(self, *args)\n\n def closeStream(self, *args):\n return _SoapySDR.Device_closeStream(self, *args)\n\n def getStreamMTU(self, *args):\n return _SoapySDR.Device_getStreamMTU(self, *args)\n\n def activateStream(self, *args):\n return _SoapySDR.Device_activateStream(self, *args)\n\n def deactivateStream(self, *args):\n return _SoapySDR.Device_deactivateStream(self, *args)\n\n def readStream(self, *args):\n return _SoapySDR.Device_readStream(self, *args)\n\n def writeStream(self, *args):\n return _SoapySDR.Device_writeStream(self, *args)\n\n def readStreamStatus(self, *args):\n return _SoapySDR.Device_readStreamStatus(self, *args)\n\n def getNumDirectAccessBuffers(self, *args):\n return 
_SoapySDR.Device_getNumDirectAccessBuffers(self, *args)\n\n def getDirectAccessBufferAddrs(self, *args):\n return _SoapySDR.Device_getDirectAccessBufferAddrs(self, *args)\n\n def acquireReadBuffer(self, *args):\n return _SoapySDR.Device_acquireReadBuffer(self, *args)\n\n def releaseReadBuffer(self, *args):\n return _SoapySDR.Device_releaseReadBuffer(self, *args)\n\n def acquireWriteBuffer(self, *args):\n return _SoapySDR.Device_acquireWriteBuffer(self, *args)\n\n def releaseWriteBuffer(self, *args):\n return _SoapySDR.Device_releaseWriteBuffer(self, *args)\n\n def listAntennas(self, *args):\n return _SoapySDR.Device_listAntennas(self, *args)\n\n def setAntenna(self, *args):\n return _SoapySDR.Device_setAntenna(self, *args)\n\n def getAntenna(self, *args):\n return _SoapySDR.Device_getAntenna(self, *args)\n\n def hasDCOffsetMode(self, *args):\n return _SoapySDR.Device_hasDCOffsetMode(self, *args)\n\n def setDCOffsetMode(self, *args):\n return _SoapySDR.Device_setDCOffsetMode(self, *args)\n\n def getDCOffsetMode(self, *args):\n return _SoapySDR.Device_getDCOffsetMode(self, *args)\n\n def hasDCOffset(self, *args):\n return _SoapySDR.Device_hasDCOffset(self, *args)\n\n def setDCOffset(self, *args):\n return _SoapySDR.Device_setDCOffset(self, *args)\n\n def getDCOffset(self, *args):\n return _SoapySDR.Device_getDCOffset(self, *args)\n\n def hasIQBalance(self, *args):\n return _SoapySDR.Device_hasIQBalance(self, *args)\n\n def setIQBalance(self, *args):\n return _SoapySDR.Device_setIQBalance(self, *args)\n\n def getIQBalance(self, *args):\n return _SoapySDR.Device_getIQBalance(self, *args)\n\n def hasFrequencyCorrection(self, *args):\n return _SoapySDR.Device_hasFrequencyCorrection(self, *args)\n\n def setFrequencyCorrection(self, *args):\n return _SoapySDR.Device_setFrequencyCorrection(self, *args)\n\n def getFrequencyCorrection(self, *args):\n return _SoapySDR.Device_getFrequencyCorrection(self, *args)\n\n def listGains(self, *args):\n return 
_SoapySDR.Device_listGains(self, *args)\n\n def hasGainMode(self, *args):\n return _SoapySDR.Device_hasGainMode(self, *args)\n\n def setGainMode(self, *args):\n return _SoapySDR.Device_setGainMode(self, *args)\n\n def getGainMode(self, *args):\n return _SoapySDR.Device_getGainMode(self, *args)\n\n def setGain(self, *args):\n return _SoapySDR.Device_setGain(self, *args)\n\n def getGain(self, *args):\n return _SoapySDR.Device_getGain(self, *args)\n\n def getGainRange(self, *args):\n return _SoapySDR.Device_getGainRange(self, *args)\n\n def setFrequency(self, *args):\n return _SoapySDR.Device_setFrequency(self, *args)\n\n def getFrequency(self, *args):\n return _SoapySDR.Device_getFrequency(self, *args)\n\n def listFrequencies(self, *args):\n return _SoapySDR.Device_listFrequencies(self, *args)\n\n def getFrequencyRange(self, *args):\n return _SoapySDR.Device_getFrequencyRange(self, *args)\n\n def getFrequencyArgsInfo(self, *args):\n return _SoapySDR.Device_getFrequencyArgsInfo(self, *args)\n\n def setSampleRate(self, *args):\n return _SoapySDR.Device_setSampleRate(self, *args)\n\n def getSampleRate(self, *args):\n return _SoapySDR.Device_getSampleRate(self, *args)\n\n def listSampleRates(self, *args):\n return _SoapySDR.Device_listSampleRates(self, *args)\n\n def getSampleRateRange(self, *args):\n return _SoapySDR.Device_getSampleRateRange(self, *args)\n\n def setBandwidth(self, *args):\n return _SoapySDR.Device_setBandwidth(self, *args)\n\n def getBandwidth(self, *args):\n return _SoapySDR.Device_getBandwidth(self, *args)\n\n def listBandwidths(self, *args):\n return _SoapySDR.Device_listBandwidths(self, *args)\n\n def getBandwidthRange(self, *args):\n return _SoapySDR.Device_getBandwidthRange(self, *args)\n\n def setMasterClockRate(self, *args):\n return _SoapySDR.Device_setMasterClockRate(self, *args)\n\n def getMasterClockRate(self):\n return _SoapySDR.Device_getMasterClockRate(self)\n\n def getMasterClockRates(self):\n return 
_SoapySDR.Device_getMasterClockRates(self)\n\n def listClockSources(self):\n return _SoapySDR.Device_listClockSources(self)\n\n def setClockSource(self, *args):\n return _SoapySDR.Device_setClockSource(self, *args)\n\n def getClockSource(self):\n return _SoapySDR.Device_getClockSource(self)\n\n def listTimeSources(self):\n return _SoapySDR.Device_listTimeSources(self)\n\n def setTimeSource(self, *args):\n return _SoapySDR.Device_setTimeSource(self, *args)\n\n def getTimeSource(self):\n return _SoapySDR.Device_getTimeSource(self)\n\n def hasHardwareTime(self, what=''):\n return _SoapySDR.Device_hasHardwareTime(self, what)\n\n def getHardwareTime(self, what=''):\n return _SoapySDR.Device_getHardwareTime(self, what)\n\n def setHardwareTime(self, *args):\n return _SoapySDR.Device_setHardwareTime(self, *args)\n\n def setCommandTime(self, *args):\n return _SoapySDR.Device_setCommandTime(self, *args)\n\n def listSensors(self, *args):\n return _SoapySDR.Device_listSensors(self, *args)\n\n def getSensorInfo(self, *args):\n return _SoapySDR.Device_getSensorInfo(self, *args)\n\n def readSensor(self, *args):\n return _SoapySDR.Device_readSensor(self, *args)\n\n def listRegisterInterfaces(self):\n return _SoapySDR.Device_listRegisterInterfaces(self)\n\n def writeRegister(self, *args):\n return _SoapySDR.Device_writeRegister(self, *args)\n\n def readRegister(self, *args):\n return _SoapySDR.Device_readRegister(self, *args)\n\n def writeRegisters(self, *args):\n return _SoapySDR.Device_writeRegisters(self, *args)\n\n def readRegisters(self, *args):\n return _SoapySDR.Device_readRegisters(self, *args)\n\n def getSettingInfo(self, *args):\n return _SoapySDR.Device_getSettingInfo(self, *args)\n\n def writeSetting(self, *args):\n return _SoapySDR.Device_writeSetting(self, *args)\n\n def readSetting(self, *args):\n return _SoapySDR.Device_readSetting(self, *args)\n\n def listGPIOBanks(self):\n return _SoapySDR.Device_listGPIOBanks(self)\n\n def writeGPIO(self, *args):\n return 
_SoapySDR.Device_writeGPIO(self, *args)\n\n def readGPIO(self, *args):\n return _SoapySDR.Device_readGPIO(self, *args)\n\n def writeGPIODir(self, *args):\n return _SoapySDR.Device_writeGPIODir(self, *args)\n\n def readGPIODir(self, *args):\n return _SoapySDR.Device_readGPIODir(self, *args)\n\n def writeI2C(self, *args):\n return _SoapySDR.Device_writeI2C(self, *args)\n\n def readI2C(self, *args):\n return _SoapySDR.Device_readI2C(self, *args)\n\n def transactSPI(self, *args):\n return _SoapySDR.Device_transactSPI(self, *args)\n\n def listUARTs(self):\n return _SoapySDR.Device_listUARTs(self)\n\n def writeUART(self, *args):\n return _SoapySDR.Device_writeUART(self, *args)\n\n def readUART(self, *args):\n return _SoapySDR.Device_readUART(self, *args)\n\n def readStream__(self, *args):\n return _SoapySDR.Device_readStream__(self, *args)\n\n def writeStream__(self, *args):\n return _SoapySDR.Device_writeStream__(self, *args)\n\n def readStreamStatus__(self, *args):\n return _SoapySDR.Device_readStreamStatus__(self, *args)\n\n def __del__(self):\n Device.unmake(self)\n\n def __str__(self):\n return '%s:%s' % (self.getDriverKey(), self.getHardwareKey())\n\n def readStream(self, stream, buffs, numElems, flags=0, timeoutUs=100000):\n ptrs = [extractBuffPointer(b) for b in buffs]\n return self.readStream__(stream, ptrs, numElems, flags, timeoutUs)\n\n def writeStream(self, stream, buffs, numElems, flags=0, timeNs=0,\n timeoutUs=100000):\n ptrs = [extractBuffPointer(b) for b in buffs]\n return self.writeStream__(stream, ptrs, numElems, flags, timeNs,\n timeoutUs)\n\n def readStreamStatus(self, stream, timeoutUs=100000):\n return self.readStreamStatus__(stream, timeoutUs)\n\n\n<mask token>\n\n\nclass Device(Device):\n\n def __new__(cls, *args, **kwargs):\n return cls.make(*args, **kwargs)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SoapySDRKwargsList(_object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def iterator(self):\n return _SoapySDR.SoapySDRKwargsList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n <mask token>\n\n def __bool__(self):\n return _SoapySDR.SoapySDRKwargsList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRKwargsList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRKwargsList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___setslice__(self, *args)\n <mask token>\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRKwargsList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRKwargsList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRKwargsList_size(self)\n <mask token>\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRKwargsList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRKwargsList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRKwargsList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRKwargsList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRKwargsList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRKwargsList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRKwargsList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRKwargsList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRKwargsList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n 
<mask token>\n\n def front(self):\n return _SoapySDR.SoapySDRKwargsList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRKwargsList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRKwargsList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRKwargsList_resize(self, *args)\n <mask token>\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRKwargsList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRKwargsList_capacity(self)\n <mask token>\n <mask token>\n\n\n<mask token>\n\n\nclass SoapySDRArgInfoList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRArgInfoList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self,\n SoapySDRArgInfoList, name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRArgInfoList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRArgInfoList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRArgInfoList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRArgInfoList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRArgInfoList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_append(self, *args)\n\n def empty(self):\n 
return _SoapySDR.SoapySDRArgInfoList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRArgInfoList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRArgInfoList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRArgInfoList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRArgInfoList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRArgInfoList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRArgInfoList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRArgInfoList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRArgInfoList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRArgInfoList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRArgInfoList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRArgInfoList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRArgInfoList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRArgInfoList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRStringList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRStringList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRStringList,\n 
name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRStringList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRStringList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRStringList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRStringList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRStringList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRStringList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRStringList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRStringList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRStringList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRStringList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRStringList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRStringList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRStringList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRStringList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRStringList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRStringList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRStringList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRStringList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRStringList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRStringList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRStringList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRStringList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRStringList_erase(self, *args)\n\n def __init__(self, *args):\n this = 
_SoapySDR.new_SoapySDRStringList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRStringList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRStringList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRStringList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRStringList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRStringList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRStringList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRStringList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRStringList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRStringList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRRangeList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRRangeList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRRangeList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRRangeList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRRangeList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRRangeList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRRangeList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRRangeList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRRangeList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRRangeList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRRangeList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRRangeList___delitem__(self, *args)\n\n def __getitem__(self, 
*args):\n return _SoapySDR.SoapySDRRangeList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRRangeList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRRangeList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRRangeList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRRangeList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRRangeList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRRangeList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRRangeList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRRangeList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRRangeList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRRangeList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRRangeList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRRangeList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRRangeList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRRangeList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRRangeList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRRangeList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRRangeList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRRangeList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRRangeList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRRangeList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRRangeList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRRangeList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRRangeList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRSizeList(_object):\n 
__swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRSizeList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRSizeList, name\n )\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRSizeList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRSizeList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRSizeList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRSizeList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRSizeList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRSizeList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRSizeList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRSizeList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRSizeList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRSizeList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRSizeList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRSizeList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRSizeList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRSizeList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRSizeList_rend(self)\n\n def 
pop_back(self):\n return _SoapySDR.SoapySDRSizeList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRSizeList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRSizeList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRSizeList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRSizeList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRSizeList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRSizeList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRSizeList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRSizeList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRSizeList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRSizeList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRSizeList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRDoubleList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRDoubleList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRDoubleList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRDoubleList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRDoubleList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRDoubleList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRDoubleList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRDoubleList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n 
return _SoapySDR.SoapySDRDoubleList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRDoubleList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRDoubleList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRDoubleList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRDoubleList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRDoubleList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRDoubleList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRDoubleList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRDoubleList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRDoubleList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRDoubleList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRDoubleList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRDoubleList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRDoubleList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRDoubleList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRDoubleList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRDoubleList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRDoubleList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRDoubleList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRDoubleList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRDoubleList_reserve(self, *args)\n\n def 
capacity(self):\n return _SoapySDR.SoapySDRDoubleList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRDoubleList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass StreamResult(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n StreamResult, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, StreamResult, name)\n __repr__ = _swig_repr\n\n def __init__(self):\n this = _SoapySDR.new_StreamResult()\n try:\n self.this.append(this)\n except:\n self.this = this\n __swig_setmethods__['ret'] = _SoapySDR.StreamResult_ret_set\n __swig_getmethods__['ret'] = _SoapySDR.StreamResult_ret_get\n if _newclass:\n ret = _swig_property(_SoapySDR.StreamResult_ret_get, _SoapySDR.\n StreamResult_ret_set)\n __swig_setmethods__['flags'] = _SoapySDR.StreamResult_flags_set\n __swig_getmethods__['flags'] = _SoapySDR.StreamResult_flags_get\n if _newclass:\n flags = _swig_property(_SoapySDR.StreamResult_flags_get, _SoapySDR.\n StreamResult_flags_set)\n __swig_setmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_set\n __swig_getmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_get\n if _newclass:\n timeNs = _swig_property(_SoapySDR.StreamResult_timeNs_get,\n _SoapySDR.StreamResult_timeNs_set)\n __swig_setmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_set\n __swig_getmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_get\n if _newclass:\n chanMask = _swig_property(_SoapySDR.StreamResult_chanMask_get,\n _SoapySDR.StreamResult_chanMask_set)\n\n def __str__(self):\n return 'ret=%s, flags=%s, timeNs=%s' % (self.ret, self.flags, self.\n timeNs)\n __swig_destroy__ = _SoapySDR.delete_StreamResult\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass Device(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, Device,\n name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, Device, 
name)\n\n def __init__(self, *args, **kwargs):\n raise AttributeError('No constructor defined')\n __repr__ = _swig_repr\n __swig_destroy__ = _SoapySDR.delete_Device\n __del__ = lambda self: None\n __swig_getmethods__['enumerate'] = lambda x: _SoapySDR.Device_enumerate\n if _newclass:\n enumerate = staticmethod(_SoapySDR.Device_enumerate)\n __swig_getmethods__['make'] = lambda x: _SoapySDR.Device_make\n if _newclass:\n make = staticmethod(_SoapySDR.Device_make)\n __swig_getmethods__['unmake'] = lambda x: _SoapySDR.Device_unmake\n if _newclass:\n unmake = staticmethod(_SoapySDR.Device_unmake)\n\n def getDriverKey(self):\n return _SoapySDR.Device_getDriverKey(self)\n\n def getHardwareKey(self):\n return _SoapySDR.Device_getHardwareKey(self)\n\n def getHardwareInfo(self):\n return _SoapySDR.Device_getHardwareInfo(self)\n\n def setFrontendMapping(self, *args):\n return _SoapySDR.Device_setFrontendMapping(self, *args)\n\n def getFrontendMapping(self, *args):\n return _SoapySDR.Device_getFrontendMapping(self, *args)\n\n def getNumChannels(self, *args):\n return _SoapySDR.Device_getNumChannels(self, *args)\n\n def getChannelInfo(self, *args):\n return _SoapySDR.Device_getChannelInfo(self, *args)\n\n def getFullDuplex(self, *args):\n return _SoapySDR.Device_getFullDuplex(self, *args)\n\n def getStreamFormats(self, *args):\n return _SoapySDR.Device_getStreamFormats(self, *args)\n\n def getNativeStreamFormat(self, *args):\n return _SoapySDR.Device_getNativeStreamFormat(self, *args)\n\n def getStreamArgsInfo(self, *args):\n return _SoapySDR.Device_getStreamArgsInfo(self, *args)\n\n def setupStream(self, *args):\n return _SoapySDR.Device_setupStream(self, *args)\n\n def closeStream(self, *args):\n return _SoapySDR.Device_closeStream(self, *args)\n\n def getStreamMTU(self, *args):\n return _SoapySDR.Device_getStreamMTU(self, *args)\n\n def activateStream(self, *args):\n return _SoapySDR.Device_activateStream(self, *args)\n\n def deactivateStream(self, *args):\n return 
_SoapySDR.Device_deactivateStream(self, *args)\n\n def readStream(self, *args):\n return _SoapySDR.Device_readStream(self, *args)\n\n def writeStream(self, *args):\n return _SoapySDR.Device_writeStream(self, *args)\n\n def readStreamStatus(self, *args):\n return _SoapySDR.Device_readStreamStatus(self, *args)\n\n def getNumDirectAccessBuffers(self, *args):\n return _SoapySDR.Device_getNumDirectAccessBuffers(self, *args)\n\n def getDirectAccessBufferAddrs(self, *args):\n return _SoapySDR.Device_getDirectAccessBufferAddrs(self, *args)\n\n def acquireReadBuffer(self, *args):\n return _SoapySDR.Device_acquireReadBuffer(self, *args)\n\n def releaseReadBuffer(self, *args):\n return _SoapySDR.Device_releaseReadBuffer(self, *args)\n\n def acquireWriteBuffer(self, *args):\n return _SoapySDR.Device_acquireWriteBuffer(self, *args)\n\n def releaseWriteBuffer(self, *args):\n return _SoapySDR.Device_releaseWriteBuffer(self, *args)\n\n def listAntennas(self, *args):\n return _SoapySDR.Device_listAntennas(self, *args)\n\n def setAntenna(self, *args):\n return _SoapySDR.Device_setAntenna(self, *args)\n\n def getAntenna(self, *args):\n return _SoapySDR.Device_getAntenna(self, *args)\n\n def hasDCOffsetMode(self, *args):\n return _SoapySDR.Device_hasDCOffsetMode(self, *args)\n\n def setDCOffsetMode(self, *args):\n return _SoapySDR.Device_setDCOffsetMode(self, *args)\n\n def getDCOffsetMode(self, *args):\n return _SoapySDR.Device_getDCOffsetMode(self, *args)\n\n def hasDCOffset(self, *args):\n return _SoapySDR.Device_hasDCOffset(self, *args)\n\n def setDCOffset(self, *args):\n return _SoapySDR.Device_setDCOffset(self, *args)\n\n def getDCOffset(self, *args):\n return _SoapySDR.Device_getDCOffset(self, *args)\n\n def hasIQBalance(self, *args):\n return _SoapySDR.Device_hasIQBalance(self, *args)\n\n def setIQBalance(self, *args):\n return _SoapySDR.Device_setIQBalance(self, *args)\n\n def getIQBalance(self, *args):\n return _SoapySDR.Device_getIQBalance(self, *args)\n\n def 
hasFrequencyCorrection(self, *args):\n return _SoapySDR.Device_hasFrequencyCorrection(self, *args)\n\n def setFrequencyCorrection(self, *args):\n return _SoapySDR.Device_setFrequencyCorrection(self, *args)\n\n def getFrequencyCorrection(self, *args):\n return _SoapySDR.Device_getFrequencyCorrection(self, *args)\n\n def listGains(self, *args):\n return _SoapySDR.Device_listGains(self, *args)\n\n def hasGainMode(self, *args):\n return _SoapySDR.Device_hasGainMode(self, *args)\n\n def setGainMode(self, *args):\n return _SoapySDR.Device_setGainMode(self, *args)\n\n def getGainMode(self, *args):\n return _SoapySDR.Device_getGainMode(self, *args)\n\n def setGain(self, *args):\n return _SoapySDR.Device_setGain(self, *args)\n\n def getGain(self, *args):\n return _SoapySDR.Device_getGain(self, *args)\n\n def getGainRange(self, *args):\n return _SoapySDR.Device_getGainRange(self, *args)\n\n def setFrequency(self, *args):\n return _SoapySDR.Device_setFrequency(self, *args)\n\n def getFrequency(self, *args):\n return _SoapySDR.Device_getFrequency(self, *args)\n\n def listFrequencies(self, *args):\n return _SoapySDR.Device_listFrequencies(self, *args)\n\n def getFrequencyRange(self, *args):\n return _SoapySDR.Device_getFrequencyRange(self, *args)\n\n def getFrequencyArgsInfo(self, *args):\n return _SoapySDR.Device_getFrequencyArgsInfo(self, *args)\n\n def setSampleRate(self, *args):\n return _SoapySDR.Device_setSampleRate(self, *args)\n\n def getSampleRate(self, *args):\n return _SoapySDR.Device_getSampleRate(self, *args)\n\n def listSampleRates(self, *args):\n return _SoapySDR.Device_listSampleRates(self, *args)\n\n def getSampleRateRange(self, *args):\n return _SoapySDR.Device_getSampleRateRange(self, *args)\n\n def setBandwidth(self, *args):\n return _SoapySDR.Device_setBandwidth(self, *args)\n\n def getBandwidth(self, *args):\n return _SoapySDR.Device_getBandwidth(self, *args)\n\n def listBandwidths(self, *args):\n return _SoapySDR.Device_listBandwidths(self, *args)\n\n def 
getBandwidthRange(self, *args):\n return _SoapySDR.Device_getBandwidthRange(self, *args)\n\n def setMasterClockRate(self, *args):\n return _SoapySDR.Device_setMasterClockRate(self, *args)\n\n def getMasterClockRate(self):\n return _SoapySDR.Device_getMasterClockRate(self)\n\n def getMasterClockRates(self):\n return _SoapySDR.Device_getMasterClockRates(self)\n\n def listClockSources(self):\n return _SoapySDR.Device_listClockSources(self)\n\n def setClockSource(self, *args):\n return _SoapySDR.Device_setClockSource(self, *args)\n\n def getClockSource(self):\n return _SoapySDR.Device_getClockSource(self)\n\n def listTimeSources(self):\n return _SoapySDR.Device_listTimeSources(self)\n\n def setTimeSource(self, *args):\n return _SoapySDR.Device_setTimeSource(self, *args)\n\n def getTimeSource(self):\n return _SoapySDR.Device_getTimeSource(self)\n\n def hasHardwareTime(self, what=''):\n return _SoapySDR.Device_hasHardwareTime(self, what)\n\n def getHardwareTime(self, what=''):\n return _SoapySDR.Device_getHardwareTime(self, what)\n\n def setHardwareTime(self, *args):\n return _SoapySDR.Device_setHardwareTime(self, *args)\n\n def setCommandTime(self, *args):\n return _SoapySDR.Device_setCommandTime(self, *args)\n\n def listSensors(self, *args):\n return _SoapySDR.Device_listSensors(self, *args)\n\n def getSensorInfo(self, *args):\n return _SoapySDR.Device_getSensorInfo(self, *args)\n\n def readSensor(self, *args):\n return _SoapySDR.Device_readSensor(self, *args)\n\n def listRegisterInterfaces(self):\n return _SoapySDR.Device_listRegisterInterfaces(self)\n\n def writeRegister(self, *args):\n return _SoapySDR.Device_writeRegister(self, *args)\n\n def readRegister(self, *args):\n return _SoapySDR.Device_readRegister(self, *args)\n\n def writeRegisters(self, *args):\n return _SoapySDR.Device_writeRegisters(self, *args)\n\n def readRegisters(self, *args):\n return _SoapySDR.Device_readRegisters(self, *args)\n\n def getSettingInfo(self, *args):\n return 
_SoapySDR.Device_getSettingInfo(self, *args)\n\n def writeSetting(self, *args):\n return _SoapySDR.Device_writeSetting(self, *args)\n\n def readSetting(self, *args):\n return _SoapySDR.Device_readSetting(self, *args)\n\n def listGPIOBanks(self):\n return _SoapySDR.Device_listGPIOBanks(self)\n\n def writeGPIO(self, *args):\n return _SoapySDR.Device_writeGPIO(self, *args)\n\n def readGPIO(self, *args):\n return _SoapySDR.Device_readGPIO(self, *args)\n\n def writeGPIODir(self, *args):\n return _SoapySDR.Device_writeGPIODir(self, *args)\n\n def readGPIODir(self, *args):\n return _SoapySDR.Device_readGPIODir(self, *args)\n\n def writeI2C(self, *args):\n return _SoapySDR.Device_writeI2C(self, *args)\n\n def readI2C(self, *args):\n return _SoapySDR.Device_readI2C(self, *args)\n\n def transactSPI(self, *args):\n return _SoapySDR.Device_transactSPI(self, *args)\n\n def listUARTs(self):\n return _SoapySDR.Device_listUARTs(self)\n\n def writeUART(self, *args):\n return _SoapySDR.Device_writeUART(self, *args)\n\n def readUART(self, *args):\n return _SoapySDR.Device_readUART(self, *args)\n\n def readStream__(self, *args):\n return _SoapySDR.Device_readStream__(self, *args)\n\n def writeStream__(self, *args):\n return _SoapySDR.Device_writeStream__(self, *args)\n\n def readStreamStatus__(self, *args):\n return _SoapySDR.Device_readStreamStatus__(self, *args)\n\n def __del__(self):\n Device.unmake(self)\n\n def __str__(self):\n return '%s:%s' % (self.getDriverKey(), self.getHardwareKey())\n\n def readStream(self, stream, buffs, numElems, flags=0, timeoutUs=100000):\n ptrs = [extractBuffPointer(b) for b in buffs]\n return self.readStream__(stream, ptrs, numElems, flags, timeoutUs)\n\n def writeStream(self, stream, buffs, numElems, flags=0, timeNs=0,\n timeoutUs=100000):\n ptrs = [extractBuffPointer(b) for b in buffs]\n return self.writeStream__(stream, ptrs, numElems, flags, timeNs,\n timeoutUs)\n\n def readStreamStatus(self, stream, timeoutUs=100000):\n return 
self.readStreamStatus__(stream, timeoutUs)\n\n\n<mask token>\n\n\nclass Device(Device):\n\n def __new__(cls, *args, **kwargs):\n return cls.make(*args, **kwargs)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef _swig_setattr(self, class_type, name, value):\n return _swig_setattr_nondynamic(self, class_type, name, value, 0)\n\n\ndef _swig_getattr(self, class_type, name):\n if name == 'thisown':\n return self.this.own()\n method = class_type.__swig_getmethods__.get(name, None)\n if method:\n return method(self)\n raise AttributeError(name)\n\n\ndef _swig_repr(self):\n try:\n strthis = 'proxy of ' + self.this.__repr__()\n except:\n strthis = ''\n return '<%s.%s; %s >' % (self.__class__.__module__, self.__class__.\n __name__, strthis)\n\n\n<mask token>\n\n\nclass SwigPyIterator(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SwigPyIterator, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)\n\n def __init__(self, *args, **kwargs):\n raise AttributeError('No constructor defined - class is abstract')\n __repr__ = _swig_repr\n __swig_destroy__ = _SoapySDR.delete_SwigPyIterator\n __del__ = lambda self: None\n\n def value(self):\n return _SoapySDR.SwigPyIterator_value(self)\n\n def incr(self, n=1):\n return _SoapySDR.SwigPyIterator_incr(self, n)\n\n def decr(self, n=1):\n return _SoapySDR.SwigPyIterator_decr(self, n)\n\n def distance(self, *args):\n return _SoapySDR.SwigPyIterator_distance(self, *args)\n\n def equal(self, *args):\n return _SoapySDR.SwigPyIterator_equal(self, *args)\n\n def copy(self):\n return _SoapySDR.SwigPyIterator_copy(self)\n\n def next(self):\n return _SoapySDR.SwigPyIterator_next(self)\n\n def __next__(self):\n return _SoapySDR.SwigPyIterator___next__(self)\n\n def previous(self):\n return _SoapySDR.SwigPyIterator_previous(self)\n\n def advance(self, *args):\n return _SoapySDR.SwigPyIterator_advance(self, *args)\n\n def __eq__(self, *args):\n return _SoapySDR.SwigPyIterator___eq__(self, *args)\n\n def __ne__(self, *args):\n return _SoapySDR.SwigPyIterator___ne__(self, *args)\n\n def __iadd__(self, *args):\n 
return _SoapySDR.SwigPyIterator___iadd__(self, *args)\n\n def __isub__(self, *args):\n return _SoapySDR.SwigPyIterator___isub__(self, *args)\n\n def __add__(self, *args):\n return _SoapySDR.SwigPyIterator___add__(self, *args)\n\n def __sub__(self, *args):\n return _SoapySDR.SwigPyIterator___sub__(self, *args)\n\n def __iter__(self):\n return self\n\n\n<mask token>\n\n\ndef KwargsToString(*args):\n return _SoapySDR.KwargsToString(*args)\n\n\n<mask token>\n\n\nclass Range(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, Range, name,\n value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, Range, name)\n __repr__ = _swig_repr\n\n def __init__(self, *args):\n this = _SoapySDR.new_Range(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def minimum(self):\n return _SoapySDR.Range_minimum(self)\n\n def maximum(self):\n return _SoapySDR.Range_maximum(self)\n\n def step(self):\n return _SoapySDR.Range_step(self)\n\n def __str__(self):\n fields = [self.minimum(), self.maximum()]\n if self.step() != 0.0:\n fields.append(self.step())\n return ', '.join([('%g' % f) for f in fields])\n __swig_destroy__ = _SoapySDR.delete_Range\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass ArgInfo(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, ArgInfo,\n name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, ArgInfo, name)\n __repr__ = _swig_repr\n\n def __init__(self):\n this = _SoapySDR.new_ArgInfo()\n try:\n self.this.append(this)\n except:\n self.this = this\n __swig_setmethods__['key'] = _SoapySDR.ArgInfo_key_set\n __swig_getmethods__['key'] = _SoapySDR.ArgInfo_key_get\n if _newclass:\n key = _swig_property(_SoapySDR.ArgInfo_key_get, _SoapySDR.\n ArgInfo_key_set)\n __swig_setmethods__['value'] = _SoapySDR.ArgInfo_value_set\n __swig_getmethods__['value'] = _SoapySDR.ArgInfo_value_get\n 
if _newclass:\n value = _swig_property(_SoapySDR.ArgInfo_value_get, _SoapySDR.\n ArgInfo_value_set)\n __swig_setmethods__['name'] = _SoapySDR.ArgInfo_name_set\n __swig_getmethods__['name'] = _SoapySDR.ArgInfo_name_get\n if _newclass:\n name = _swig_property(_SoapySDR.ArgInfo_name_get, _SoapySDR.\n ArgInfo_name_set)\n __swig_setmethods__['description'] = _SoapySDR.ArgInfo_description_set\n __swig_getmethods__['description'] = _SoapySDR.ArgInfo_description_get\n if _newclass:\n description = _swig_property(_SoapySDR.ArgInfo_description_get,\n _SoapySDR.ArgInfo_description_set)\n __swig_setmethods__['units'] = _SoapySDR.ArgInfo_units_set\n __swig_getmethods__['units'] = _SoapySDR.ArgInfo_units_get\n if _newclass:\n units = _swig_property(_SoapySDR.ArgInfo_units_get, _SoapySDR.\n ArgInfo_units_set)\n BOOL = _SoapySDR.ArgInfo_BOOL\n INT = _SoapySDR.ArgInfo_INT\n FLOAT = _SoapySDR.ArgInfo_FLOAT\n STRING = _SoapySDR.ArgInfo_STRING\n __swig_setmethods__['type'] = _SoapySDR.ArgInfo_type_set\n __swig_getmethods__['type'] = _SoapySDR.ArgInfo_type_get\n if _newclass:\n type = _swig_property(_SoapySDR.ArgInfo_type_get, _SoapySDR.\n ArgInfo_type_set)\n __swig_setmethods__['range'] = _SoapySDR.ArgInfo_range_set\n __swig_getmethods__['range'] = _SoapySDR.ArgInfo_range_get\n if _newclass:\n range = _swig_property(_SoapySDR.ArgInfo_range_get, _SoapySDR.\n ArgInfo_range_set)\n __swig_setmethods__['options'] = _SoapySDR.ArgInfo_options_set\n __swig_getmethods__['options'] = _SoapySDR.ArgInfo_options_get\n if _newclass:\n options = _swig_property(_SoapySDR.ArgInfo_options_get, _SoapySDR.\n ArgInfo_options_set)\n __swig_setmethods__['optionNames'] = _SoapySDR.ArgInfo_optionNames_set\n __swig_getmethods__['optionNames'] = _SoapySDR.ArgInfo_optionNames_get\n if _newclass:\n optionNames = _swig_property(_SoapySDR.ArgInfo_optionNames_get,\n _SoapySDR.ArgInfo_optionNames_set)\n __swig_destroy__ = _SoapySDR.delete_ArgInfo\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass 
SoapySDRKwargs(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRKwargs, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargs, name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRKwargs_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRKwargs___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRKwargs___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRKwargs___len__(self)\n\n def __iter__(self):\n return self.key_iterator()\n\n def iterkeys(self):\n return self.key_iterator()\n\n def itervalues(self):\n return self.value_iterator()\n\n def iteritems(self):\n return self.iterator()\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRKwargs___getitem__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRKwargs___delitem__(self, *args)\n\n def has_key(self, *args):\n return _SoapySDR.SoapySDRKwargs_has_key(self, *args)\n\n def keys(self):\n return _SoapySDR.SoapySDRKwargs_keys(self)\n\n def values(self):\n return _SoapySDR.SoapySDRKwargs_values(self)\n\n def items(self):\n return _SoapySDR.SoapySDRKwargs_items(self)\n\n def __contains__(self, *args):\n return _SoapySDR.SoapySDRKwargs___contains__(self, *args)\n\n def key_iterator(self):\n return _SoapySDR.SoapySDRKwargs_key_iterator(self)\n\n def value_iterator(self):\n return _SoapySDR.SoapySDRKwargs_value_iterator(self)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRKwargs___setitem__(self, *args)\n\n def asdict(self):\n return _SoapySDR.SoapySDRKwargs_asdict(self)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRKwargs(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def empty(self):\n return _SoapySDR.SoapySDRKwargs_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRKwargs_size(self)\n\n 
def clear(self):\n return _SoapySDR.SoapySDRKwargs_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRKwargs_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRKwargs_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRKwargs_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRKwargs_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRKwargs_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRKwargs_rend(self)\n\n def count(self, *args):\n return _SoapySDR.SoapySDRKwargs_count(self, *args)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRKwargs_erase(self, *args)\n\n def find(self, *args):\n return _SoapySDR.SoapySDRKwargs_find(self, *args)\n\n def lower_bound(self, *args):\n return _SoapySDR.SoapySDRKwargs_lower_bound(self, *args)\n\n def upper_bound(self, *args):\n return _SoapySDR.SoapySDRKwargs_upper_bound(self, *args)\n\n def __str__(self):\n out = list()\n for k, v in self.iteritems():\n out.append('%s=%s' % (k, v))\n return '{' + ', '.join(out) + '}'\n __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargs\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRKwargsList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRKwargsList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargsList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRKwargsList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRKwargsList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRKwargsList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRKwargsList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRKwargsList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___getslice__(self, *args)\n\n def 
__setslice__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRKwargsList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRKwargsList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRKwargsList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRKwargsList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRKwargsList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRKwargsList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRKwargsList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRKwargsList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRKwargsList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRKwargsList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRKwargsList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRKwargsList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRKwargsList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRKwargsList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRKwargsList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRKwargsList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRKwargsList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRKwargsList_resize(self, *args)\n\n def insert(self, *args):\n return 
_SoapySDR.SoapySDRKwargsList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRKwargsList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRKwargsList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargsList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRArgInfoList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRArgInfoList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self,\n SoapySDRArgInfoList, name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRArgInfoList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRArgInfoList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRArgInfoList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRArgInfoList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRArgInfoList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRArgInfoList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRArgInfoList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRArgInfoList_clear(self)\n\n def swap(self, *args):\n return 
_SoapySDR.SoapySDRArgInfoList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRArgInfoList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRArgInfoList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRArgInfoList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRArgInfoList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRArgInfoList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRArgInfoList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRArgInfoList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRArgInfoList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRArgInfoList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRArgInfoList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRArgInfoList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRStringList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRStringList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRStringList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRStringList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return 
_SoapySDR.SoapySDRStringList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRStringList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRStringList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRStringList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRStringList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRStringList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRStringList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRStringList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRStringList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRStringList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRStringList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRStringList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRStringList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRStringList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRStringList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRStringList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRStringList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRStringList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRStringList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRStringList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRStringList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRStringList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRStringList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRStringList_push_back(self, 
*args)\n\n def front(self):\n return _SoapySDR.SoapySDRStringList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRStringList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRStringList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRStringList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRStringList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRStringList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRStringList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRStringList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRRangeList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRRangeList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRRangeList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRRangeList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRRangeList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRRangeList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRRangeList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRRangeList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRRangeList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRRangeList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRRangeList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRRangeList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRRangeList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRRangeList___setitem__(self, *args)\n\n def append(self, 
*args):\n return _SoapySDR.SoapySDRRangeList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRRangeList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRRangeList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRRangeList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRRangeList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRRangeList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRRangeList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRRangeList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRRangeList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRRangeList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRRangeList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRRangeList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRRangeList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRRangeList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRRangeList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRRangeList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRRangeList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRRangeList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRRangeList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRRangeList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRRangeList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRRangeList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRSizeList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRSizeList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: 
_swig_getattr(self, SoapySDRSizeList, name\n )\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRSizeList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRSizeList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRSizeList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRSizeList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRSizeList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRSizeList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRSizeList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRSizeList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRSizeList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRSizeList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRSizeList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRSizeList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRSizeList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRSizeList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRSizeList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRSizeList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRSizeList_erase(self, *args)\n\n def __init__(self, *args):\n this = 
_SoapySDR.new_SoapySDRSizeList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRSizeList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRSizeList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRSizeList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRSizeList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRSizeList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRSizeList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRSizeList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRSizeList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRSizeList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRDoubleList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRDoubleList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRDoubleList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRDoubleList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRDoubleList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRDoubleList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRDoubleList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRDoubleList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___delitem__(self, *args)\n\n def __getitem__(self, 
*args):\n return _SoapySDR.SoapySDRDoubleList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRDoubleList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRDoubleList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRDoubleList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRDoubleList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRDoubleList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRDoubleList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRDoubleList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRDoubleList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRDoubleList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRDoubleList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRDoubleList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRDoubleList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRDoubleList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRDoubleList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRDoubleList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRDoubleList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRDoubleList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRDoubleList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRDoubleList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRDoubleList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRDoubleList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRDoubleList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass 
StreamResult(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n StreamResult, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, StreamResult, name)\n __repr__ = _swig_repr\n\n def __init__(self):\n this = _SoapySDR.new_StreamResult()\n try:\n self.this.append(this)\n except:\n self.this = this\n __swig_setmethods__['ret'] = _SoapySDR.StreamResult_ret_set\n __swig_getmethods__['ret'] = _SoapySDR.StreamResult_ret_get\n if _newclass:\n ret = _swig_property(_SoapySDR.StreamResult_ret_get, _SoapySDR.\n StreamResult_ret_set)\n __swig_setmethods__['flags'] = _SoapySDR.StreamResult_flags_set\n __swig_getmethods__['flags'] = _SoapySDR.StreamResult_flags_get\n if _newclass:\n flags = _swig_property(_SoapySDR.StreamResult_flags_get, _SoapySDR.\n StreamResult_flags_set)\n __swig_setmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_set\n __swig_getmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_get\n if _newclass:\n timeNs = _swig_property(_SoapySDR.StreamResult_timeNs_get,\n _SoapySDR.StreamResult_timeNs_set)\n __swig_setmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_set\n __swig_getmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_get\n if _newclass:\n chanMask = _swig_property(_SoapySDR.StreamResult_chanMask_get,\n _SoapySDR.StreamResult_chanMask_set)\n\n def __str__(self):\n return 'ret=%s, flags=%s, timeNs=%s' % (self.ret, self.flags, self.\n timeNs)\n __swig_destroy__ = _SoapySDR.delete_StreamResult\n __del__ = lambda self: None\n\n\n<mask token>\n\n\ndef SoapySDR_getAPIVersion():\n return _SoapySDR.SoapySDR_getAPIVersion()\n\n\n<mask token>\n\n\ndef SoapySDR_getABIVersion():\n return _SoapySDR.SoapySDR_getABIVersion()\n\n\n<mask token>\n\n\ndef loadModules():\n return _SoapySDR.loadModules()\n\n\n<mask token>\n\n\ndef formatToSize(*args):\n return _SoapySDR.formatToSize(*args)\n\n\n<mask token>\n\n\ndef ticksToTimeNs(*args):\n return 
_SoapySDR.ticksToTimeNs(*args)\n\n\n<mask token>\n\n\ndef setLogLevel(*args):\n return _SoapySDR.setLogLevel(*args)\n\n\n<mask token>\n\n\nclass Device(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, Device,\n name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, Device, name)\n\n def __init__(self, *args, **kwargs):\n raise AttributeError('No constructor defined')\n __repr__ = _swig_repr\n __swig_destroy__ = _SoapySDR.delete_Device\n __del__ = lambda self: None\n __swig_getmethods__['enumerate'] = lambda x: _SoapySDR.Device_enumerate\n if _newclass:\n enumerate = staticmethod(_SoapySDR.Device_enumerate)\n __swig_getmethods__['make'] = lambda x: _SoapySDR.Device_make\n if _newclass:\n make = staticmethod(_SoapySDR.Device_make)\n __swig_getmethods__['unmake'] = lambda x: _SoapySDR.Device_unmake\n if _newclass:\n unmake = staticmethod(_SoapySDR.Device_unmake)\n\n def getDriverKey(self):\n return _SoapySDR.Device_getDriverKey(self)\n\n def getHardwareKey(self):\n return _SoapySDR.Device_getHardwareKey(self)\n\n def getHardwareInfo(self):\n return _SoapySDR.Device_getHardwareInfo(self)\n\n def setFrontendMapping(self, *args):\n return _SoapySDR.Device_setFrontendMapping(self, *args)\n\n def getFrontendMapping(self, *args):\n return _SoapySDR.Device_getFrontendMapping(self, *args)\n\n def getNumChannels(self, *args):\n return _SoapySDR.Device_getNumChannels(self, *args)\n\n def getChannelInfo(self, *args):\n return _SoapySDR.Device_getChannelInfo(self, *args)\n\n def getFullDuplex(self, *args):\n return _SoapySDR.Device_getFullDuplex(self, *args)\n\n def getStreamFormats(self, *args):\n return _SoapySDR.Device_getStreamFormats(self, *args)\n\n def getNativeStreamFormat(self, *args):\n return _SoapySDR.Device_getNativeStreamFormat(self, *args)\n\n def getStreamArgsInfo(self, *args):\n return _SoapySDR.Device_getStreamArgsInfo(self, *args)\n\n def setupStream(self, *args):\n return 
_SoapySDR.Device_setupStream(self, *args)\n\n def closeStream(self, *args):\n return _SoapySDR.Device_closeStream(self, *args)\n\n def getStreamMTU(self, *args):\n return _SoapySDR.Device_getStreamMTU(self, *args)\n\n def activateStream(self, *args):\n return _SoapySDR.Device_activateStream(self, *args)\n\n def deactivateStream(self, *args):\n return _SoapySDR.Device_deactivateStream(self, *args)\n\n def readStream(self, *args):\n return _SoapySDR.Device_readStream(self, *args)\n\n def writeStream(self, *args):\n return _SoapySDR.Device_writeStream(self, *args)\n\n def readStreamStatus(self, *args):\n return _SoapySDR.Device_readStreamStatus(self, *args)\n\n def getNumDirectAccessBuffers(self, *args):\n return _SoapySDR.Device_getNumDirectAccessBuffers(self, *args)\n\n def getDirectAccessBufferAddrs(self, *args):\n return _SoapySDR.Device_getDirectAccessBufferAddrs(self, *args)\n\n def acquireReadBuffer(self, *args):\n return _SoapySDR.Device_acquireReadBuffer(self, *args)\n\n def releaseReadBuffer(self, *args):\n return _SoapySDR.Device_releaseReadBuffer(self, *args)\n\n def acquireWriteBuffer(self, *args):\n return _SoapySDR.Device_acquireWriteBuffer(self, *args)\n\n def releaseWriteBuffer(self, *args):\n return _SoapySDR.Device_releaseWriteBuffer(self, *args)\n\n def listAntennas(self, *args):\n return _SoapySDR.Device_listAntennas(self, *args)\n\n def setAntenna(self, *args):\n return _SoapySDR.Device_setAntenna(self, *args)\n\n def getAntenna(self, *args):\n return _SoapySDR.Device_getAntenna(self, *args)\n\n def hasDCOffsetMode(self, *args):\n return _SoapySDR.Device_hasDCOffsetMode(self, *args)\n\n def setDCOffsetMode(self, *args):\n return _SoapySDR.Device_setDCOffsetMode(self, *args)\n\n def getDCOffsetMode(self, *args):\n return _SoapySDR.Device_getDCOffsetMode(self, *args)\n\n def hasDCOffset(self, *args):\n return _SoapySDR.Device_hasDCOffset(self, *args)\n\n def setDCOffset(self, *args):\n return _SoapySDR.Device_setDCOffset(self, *args)\n\n def 
getDCOffset(self, *args):\n return _SoapySDR.Device_getDCOffset(self, *args)\n\n def hasIQBalance(self, *args):\n return _SoapySDR.Device_hasIQBalance(self, *args)\n\n def setIQBalance(self, *args):\n return _SoapySDR.Device_setIQBalance(self, *args)\n\n def getIQBalance(self, *args):\n return _SoapySDR.Device_getIQBalance(self, *args)\n\n def hasFrequencyCorrection(self, *args):\n return _SoapySDR.Device_hasFrequencyCorrection(self, *args)\n\n def setFrequencyCorrection(self, *args):\n return _SoapySDR.Device_setFrequencyCorrection(self, *args)\n\n def getFrequencyCorrection(self, *args):\n return _SoapySDR.Device_getFrequencyCorrection(self, *args)\n\n def listGains(self, *args):\n return _SoapySDR.Device_listGains(self, *args)\n\n def hasGainMode(self, *args):\n return _SoapySDR.Device_hasGainMode(self, *args)\n\n def setGainMode(self, *args):\n return _SoapySDR.Device_setGainMode(self, *args)\n\n def getGainMode(self, *args):\n return _SoapySDR.Device_getGainMode(self, *args)\n\n def setGain(self, *args):\n return _SoapySDR.Device_setGain(self, *args)\n\n def getGain(self, *args):\n return _SoapySDR.Device_getGain(self, *args)\n\n def getGainRange(self, *args):\n return _SoapySDR.Device_getGainRange(self, *args)\n\n def setFrequency(self, *args):\n return _SoapySDR.Device_setFrequency(self, *args)\n\n def getFrequency(self, *args):\n return _SoapySDR.Device_getFrequency(self, *args)\n\n def listFrequencies(self, *args):\n return _SoapySDR.Device_listFrequencies(self, *args)\n\n def getFrequencyRange(self, *args):\n return _SoapySDR.Device_getFrequencyRange(self, *args)\n\n def getFrequencyArgsInfo(self, *args):\n return _SoapySDR.Device_getFrequencyArgsInfo(self, *args)\n\n def setSampleRate(self, *args):\n return _SoapySDR.Device_setSampleRate(self, *args)\n\n def getSampleRate(self, *args):\n return _SoapySDR.Device_getSampleRate(self, *args)\n\n def listSampleRates(self, *args):\n return _SoapySDR.Device_listSampleRates(self, *args)\n\n def 
getSampleRateRange(self, *args):\n return _SoapySDR.Device_getSampleRateRange(self, *args)\n\n def setBandwidth(self, *args):\n return _SoapySDR.Device_setBandwidth(self, *args)\n\n def getBandwidth(self, *args):\n return _SoapySDR.Device_getBandwidth(self, *args)\n\n def listBandwidths(self, *args):\n return _SoapySDR.Device_listBandwidths(self, *args)\n\n def getBandwidthRange(self, *args):\n return _SoapySDR.Device_getBandwidthRange(self, *args)\n\n def setMasterClockRate(self, *args):\n return _SoapySDR.Device_setMasterClockRate(self, *args)\n\n def getMasterClockRate(self):\n return _SoapySDR.Device_getMasterClockRate(self)\n\n def getMasterClockRates(self):\n return _SoapySDR.Device_getMasterClockRates(self)\n\n def listClockSources(self):\n return _SoapySDR.Device_listClockSources(self)\n\n def setClockSource(self, *args):\n return _SoapySDR.Device_setClockSource(self, *args)\n\n def getClockSource(self):\n return _SoapySDR.Device_getClockSource(self)\n\n def listTimeSources(self):\n return _SoapySDR.Device_listTimeSources(self)\n\n def setTimeSource(self, *args):\n return _SoapySDR.Device_setTimeSource(self, *args)\n\n def getTimeSource(self):\n return _SoapySDR.Device_getTimeSource(self)\n\n def hasHardwareTime(self, what=''):\n return _SoapySDR.Device_hasHardwareTime(self, what)\n\n def getHardwareTime(self, what=''):\n return _SoapySDR.Device_getHardwareTime(self, what)\n\n def setHardwareTime(self, *args):\n return _SoapySDR.Device_setHardwareTime(self, *args)\n\n def setCommandTime(self, *args):\n return _SoapySDR.Device_setCommandTime(self, *args)\n\n def listSensors(self, *args):\n return _SoapySDR.Device_listSensors(self, *args)\n\n def getSensorInfo(self, *args):\n return _SoapySDR.Device_getSensorInfo(self, *args)\n\n def readSensor(self, *args):\n return _SoapySDR.Device_readSensor(self, *args)\n\n def listRegisterInterfaces(self):\n return _SoapySDR.Device_listRegisterInterfaces(self)\n\n def writeRegister(self, *args):\n return 
_SoapySDR.Device_writeRegister(self, *args)\n\n def readRegister(self, *args):\n return _SoapySDR.Device_readRegister(self, *args)\n\n def writeRegisters(self, *args):\n return _SoapySDR.Device_writeRegisters(self, *args)\n\n def readRegisters(self, *args):\n return _SoapySDR.Device_readRegisters(self, *args)\n\n def getSettingInfo(self, *args):\n return _SoapySDR.Device_getSettingInfo(self, *args)\n\n def writeSetting(self, *args):\n return _SoapySDR.Device_writeSetting(self, *args)\n\n def readSetting(self, *args):\n return _SoapySDR.Device_readSetting(self, *args)\n\n def listGPIOBanks(self):\n return _SoapySDR.Device_listGPIOBanks(self)\n\n def writeGPIO(self, *args):\n return _SoapySDR.Device_writeGPIO(self, *args)\n\n def readGPIO(self, *args):\n return _SoapySDR.Device_readGPIO(self, *args)\n\n def writeGPIODir(self, *args):\n return _SoapySDR.Device_writeGPIODir(self, *args)\n\n def readGPIODir(self, *args):\n return _SoapySDR.Device_readGPIODir(self, *args)\n\n def writeI2C(self, *args):\n return _SoapySDR.Device_writeI2C(self, *args)\n\n def readI2C(self, *args):\n return _SoapySDR.Device_readI2C(self, *args)\n\n def transactSPI(self, *args):\n return _SoapySDR.Device_transactSPI(self, *args)\n\n def listUARTs(self):\n return _SoapySDR.Device_listUARTs(self)\n\n def writeUART(self, *args):\n return _SoapySDR.Device_writeUART(self, *args)\n\n def readUART(self, *args):\n return _SoapySDR.Device_readUART(self, *args)\n\n def readStream__(self, *args):\n return _SoapySDR.Device_readStream__(self, *args)\n\n def writeStream__(self, *args):\n return _SoapySDR.Device_writeStream__(self, *args)\n\n def readStreamStatus__(self, *args):\n return _SoapySDR.Device_readStreamStatus__(self, *args)\n\n def __del__(self):\n Device.unmake(self)\n\n def __str__(self):\n return '%s:%s' % (self.getDriverKey(), self.getHardwareKey())\n\n def readStream(self, stream, buffs, numElems, flags=0, timeoutUs=100000):\n ptrs = [extractBuffPointer(b) for b in buffs]\n return 
self.readStream__(stream, ptrs, numElems, flags, timeoutUs)\n\n def writeStream(self, stream, buffs, numElems, flags=0, timeNs=0,\n timeoutUs=100000):\n ptrs = [extractBuffPointer(b) for b in buffs]\n return self.writeStream__(stream, ptrs, numElems, flags, timeNs,\n timeoutUs)\n\n def readStreamStatus(self, stream, timeoutUs=100000):\n return self.readStreamStatus__(stream, timeoutUs)\n\n\n<mask token>\n\n\nclass Device(Device):\n\n def __new__(cls, *args, **kwargs):\n return cls.make(*args, **kwargs)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef _swig_setattr(self, class_type, name, value):\n return _swig_setattr_nondynamic(self, class_type, name, value, 0)\n\n\ndef _swig_getattr(self, class_type, name):\n if name == 'thisown':\n return self.this.own()\n method = class_type.__swig_getmethods__.get(name, None)\n if method:\n return method(self)\n raise AttributeError(name)\n\n\ndef _swig_repr(self):\n try:\n strthis = 'proxy of ' + self.this.__repr__()\n except:\n strthis = ''\n return '<%s.%s; %s >' % (self.__class__.__module__, self.__class__.\n __name__, strthis)\n\n\n<mask token>\n\n\nclass SwigPyIterator(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SwigPyIterator, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)\n\n def __init__(self, *args, **kwargs):\n raise AttributeError('No constructor defined - class is abstract')\n __repr__ = _swig_repr\n __swig_destroy__ = _SoapySDR.delete_SwigPyIterator\n __del__ = lambda self: None\n\n def value(self):\n return _SoapySDR.SwigPyIterator_value(self)\n\n def incr(self, n=1):\n return _SoapySDR.SwigPyIterator_incr(self, n)\n\n def decr(self, n=1):\n return _SoapySDR.SwigPyIterator_decr(self, n)\n\n def distance(self, *args):\n return _SoapySDR.SwigPyIterator_distance(self, *args)\n\n def equal(self, *args):\n return _SoapySDR.SwigPyIterator_equal(self, *args)\n\n def copy(self):\n return _SoapySDR.SwigPyIterator_copy(self)\n\n def next(self):\n return _SoapySDR.SwigPyIterator_next(self)\n\n def __next__(self):\n return _SoapySDR.SwigPyIterator___next__(self)\n\n def previous(self):\n return _SoapySDR.SwigPyIterator_previous(self)\n\n def advance(self, *args):\n return _SoapySDR.SwigPyIterator_advance(self, *args)\n\n def __eq__(self, *args):\n return _SoapySDR.SwigPyIterator___eq__(self, *args)\n\n def __ne__(self, *args):\n return _SoapySDR.SwigPyIterator___ne__(self, *args)\n\n def __iadd__(self, *args):\n 
return _SoapySDR.SwigPyIterator___iadd__(self, *args)\n\n def __isub__(self, *args):\n return _SoapySDR.SwigPyIterator___isub__(self, *args)\n\n def __add__(self, *args):\n return _SoapySDR.SwigPyIterator___add__(self, *args)\n\n def __sub__(self, *args):\n return _SoapySDR.SwigPyIterator___sub__(self, *args)\n\n def __iter__(self):\n return self\n\n\n<mask token>\n\n\ndef KwargsToString(*args):\n return _SoapySDR.KwargsToString(*args)\n\n\n<mask token>\n\n\nclass Range(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, Range, name,\n value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, Range, name)\n __repr__ = _swig_repr\n\n def __init__(self, *args):\n this = _SoapySDR.new_Range(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def minimum(self):\n return _SoapySDR.Range_minimum(self)\n\n def maximum(self):\n return _SoapySDR.Range_maximum(self)\n\n def step(self):\n return _SoapySDR.Range_step(self)\n\n def __str__(self):\n fields = [self.minimum(), self.maximum()]\n if self.step() != 0.0:\n fields.append(self.step())\n return ', '.join([('%g' % f) for f in fields])\n __swig_destroy__ = _SoapySDR.delete_Range\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass ArgInfo(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, ArgInfo,\n name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, ArgInfo, name)\n __repr__ = _swig_repr\n\n def __init__(self):\n this = _SoapySDR.new_ArgInfo()\n try:\n self.this.append(this)\n except:\n self.this = this\n __swig_setmethods__['key'] = _SoapySDR.ArgInfo_key_set\n __swig_getmethods__['key'] = _SoapySDR.ArgInfo_key_get\n if _newclass:\n key = _swig_property(_SoapySDR.ArgInfo_key_get, _SoapySDR.\n ArgInfo_key_set)\n __swig_setmethods__['value'] = _SoapySDR.ArgInfo_value_set\n __swig_getmethods__['value'] = _SoapySDR.ArgInfo_value_get\n 
if _newclass:\n value = _swig_property(_SoapySDR.ArgInfo_value_get, _SoapySDR.\n ArgInfo_value_set)\n __swig_setmethods__['name'] = _SoapySDR.ArgInfo_name_set\n __swig_getmethods__['name'] = _SoapySDR.ArgInfo_name_get\n if _newclass:\n name = _swig_property(_SoapySDR.ArgInfo_name_get, _SoapySDR.\n ArgInfo_name_set)\n __swig_setmethods__['description'] = _SoapySDR.ArgInfo_description_set\n __swig_getmethods__['description'] = _SoapySDR.ArgInfo_description_get\n if _newclass:\n description = _swig_property(_SoapySDR.ArgInfo_description_get,\n _SoapySDR.ArgInfo_description_set)\n __swig_setmethods__['units'] = _SoapySDR.ArgInfo_units_set\n __swig_getmethods__['units'] = _SoapySDR.ArgInfo_units_get\n if _newclass:\n units = _swig_property(_SoapySDR.ArgInfo_units_get, _SoapySDR.\n ArgInfo_units_set)\n BOOL = _SoapySDR.ArgInfo_BOOL\n INT = _SoapySDR.ArgInfo_INT\n FLOAT = _SoapySDR.ArgInfo_FLOAT\n STRING = _SoapySDR.ArgInfo_STRING\n __swig_setmethods__['type'] = _SoapySDR.ArgInfo_type_set\n __swig_getmethods__['type'] = _SoapySDR.ArgInfo_type_get\n if _newclass:\n type = _swig_property(_SoapySDR.ArgInfo_type_get, _SoapySDR.\n ArgInfo_type_set)\n __swig_setmethods__['range'] = _SoapySDR.ArgInfo_range_set\n __swig_getmethods__['range'] = _SoapySDR.ArgInfo_range_get\n if _newclass:\n range = _swig_property(_SoapySDR.ArgInfo_range_get, _SoapySDR.\n ArgInfo_range_set)\n __swig_setmethods__['options'] = _SoapySDR.ArgInfo_options_set\n __swig_getmethods__['options'] = _SoapySDR.ArgInfo_options_get\n if _newclass:\n options = _swig_property(_SoapySDR.ArgInfo_options_get, _SoapySDR.\n ArgInfo_options_set)\n __swig_setmethods__['optionNames'] = _SoapySDR.ArgInfo_optionNames_set\n __swig_getmethods__['optionNames'] = _SoapySDR.ArgInfo_optionNames_get\n if _newclass:\n optionNames = _swig_property(_SoapySDR.ArgInfo_optionNames_get,\n _SoapySDR.ArgInfo_optionNames_set)\n __swig_destroy__ = _SoapySDR.delete_ArgInfo\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass 
SoapySDRKwargs(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRKwargs, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargs, name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRKwargs_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRKwargs___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRKwargs___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRKwargs___len__(self)\n\n def __iter__(self):\n return self.key_iterator()\n\n def iterkeys(self):\n return self.key_iterator()\n\n def itervalues(self):\n return self.value_iterator()\n\n def iteritems(self):\n return self.iterator()\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRKwargs___getitem__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRKwargs___delitem__(self, *args)\n\n def has_key(self, *args):\n return _SoapySDR.SoapySDRKwargs_has_key(self, *args)\n\n def keys(self):\n return _SoapySDR.SoapySDRKwargs_keys(self)\n\n def values(self):\n return _SoapySDR.SoapySDRKwargs_values(self)\n\n def items(self):\n return _SoapySDR.SoapySDRKwargs_items(self)\n\n def __contains__(self, *args):\n return _SoapySDR.SoapySDRKwargs___contains__(self, *args)\n\n def key_iterator(self):\n return _SoapySDR.SoapySDRKwargs_key_iterator(self)\n\n def value_iterator(self):\n return _SoapySDR.SoapySDRKwargs_value_iterator(self)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRKwargs___setitem__(self, *args)\n\n def asdict(self):\n return _SoapySDR.SoapySDRKwargs_asdict(self)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRKwargs(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def empty(self):\n return _SoapySDR.SoapySDRKwargs_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRKwargs_size(self)\n\n 
def clear(self):\n return _SoapySDR.SoapySDRKwargs_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRKwargs_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRKwargs_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRKwargs_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRKwargs_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRKwargs_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRKwargs_rend(self)\n\n def count(self, *args):\n return _SoapySDR.SoapySDRKwargs_count(self, *args)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRKwargs_erase(self, *args)\n\n def find(self, *args):\n return _SoapySDR.SoapySDRKwargs_find(self, *args)\n\n def lower_bound(self, *args):\n return _SoapySDR.SoapySDRKwargs_lower_bound(self, *args)\n\n def upper_bound(self, *args):\n return _SoapySDR.SoapySDRKwargs_upper_bound(self, *args)\n\n def __str__(self):\n out = list()\n for k, v in self.iteritems():\n out.append('%s=%s' % (k, v))\n return '{' + ', '.join(out) + '}'\n __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargs\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRKwargsList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRKwargsList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargsList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRKwargsList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRKwargsList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRKwargsList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRKwargsList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRKwargsList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___getslice__(self, *args)\n\n def 
__setslice__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRKwargsList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRKwargsList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRKwargsList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRKwargsList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRKwargsList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRKwargsList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRKwargsList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRKwargsList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRKwargsList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRKwargsList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRKwargsList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRKwargsList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRKwargsList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRKwargsList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRKwargsList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRKwargsList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRKwargsList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRKwargsList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRKwargsList_resize(self, *args)\n\n def insert(self, *args):\n return 
_SoapySDR.SoapySDRKwargsList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRKwargsList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRKwargsList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargsList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRArgInfoList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRArgInfoList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self,\n SoapySDRArgInfoList, name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRArgInfoList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRArgInfoList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRArgInfoList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRArgInfoList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRArgInfoList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRArgInfoList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRArgInfoList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRArgInfoList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRArgInfoList_clear(self)\n\n def swap(self, *args):\n return 
_SoapySDR.SoapySDRArgInfoList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRArgInfoList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRArgInfoList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRArgInfoList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRArgInfoList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRArgInfoList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRArgInfoList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRArgInfoList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRArgInfoList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRArgInfoList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRArgInfoList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRArgInfoList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRArgInfoList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRStringList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRStringList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRStringList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRStringList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return 
_SoapySDR.SoapySDRStringList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRStringList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRStringList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRStringList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRStringList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRStringList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRStringList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRStringList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRStringList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRStringList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRStringList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRStringList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRStringList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRStringList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRStringList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRStringList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRStringList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRStringList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRStringList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRStringList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRStringList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRStringList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRStringList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRStringList_push_back(self, 
*args)\n\n def front(self):\n return _SoapySDR.SoapySDRStringList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRStringList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRStringList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRStringList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRStringList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRStringList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRStringList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRStringList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRRangeList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRRangeList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRRangeList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRRangeList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRRangeList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRRangeList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRRangeList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRRangeList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRRangeList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRRangeList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRRangeList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRRangeList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRRangeList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRRangeList___setitem__(self, *args)\n\n def append(self, 
*args):\n return _SoapySDR.SoapySDRRangeList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRRangeList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRRangeList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRRangeList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRRangeList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRRangeList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRRangeList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRRangeList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRRangeList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRRangeList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRRangeList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRRangeList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRRangeList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRRangeList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRRangeList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRRangeList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRRangeList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRRangeList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRRangeList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRRangeList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRRangeList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRRangeList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRSizeList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRSizeList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: 
_swig_getattr(self, SoapySDRSizeList, name\n )\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRSizeList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRSizeList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRSizeList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRSizeList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRSizeList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRSizeList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___delitem__(self, *args)\n\n def __getitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRSizeList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRSizeList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRSizeList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRSizeList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRSizeList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRSizeList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRSizeList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRSizeList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRSizeList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRSizeList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRSizeList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRSizeList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRSizeList_erase(self, *args)\n\n def __init__(self, *args):\n this = 
_SoapySDR.new_SoapySDRSizeList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRSizeList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRSizeList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRSizeList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRSizeList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRSizeList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRSizeList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRSizeList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRSizeList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRSizeList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass SoapySDRDoubleList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n SoapySDRDoubleList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRDoubleList,\n name)\n __repr__ = _swig_repr\n\n def iterator(self):\n return _SoapySDR.SoapySDRDoubleList_iterator(self)\n\n def __iter__(self):\n return self.iterator()\n\n def __nonzero__(self):\n return _SoapySDR.SoapySDRDoubleList___nonzero__(self)\n\n def __bool__(self):\n return _SoapySDR.SoapySDRDoubleList___bool__(self)\n\n def __len__(self):\n return _SoapySDR.SoapySDRDoubleList___len__(self)\n\n def pop(self):\n return _SoapySDR.SoapySDRDoubleList_pop(self)\n\n def __getslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___getslice__(self, *args)\n\n def __setslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___setslice__(self, *args)\n\n def __delslice__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___delslice__(self, *args)\n\n def __delitem__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___delitem__(self, *args)\n\n def __getitem__(self, 
*args):\n return _SoapySDR.SoapySDRDoubleList___getitem__(self, *args)\n\n def __setitem__(self, *args):\n return _SoapySDR.SoapySDRDoubleList___setitem__(self, *args)\n\n def append(self, *args):\n return _SoapySDR.SoapySDRDoubleList_append(self, *args)\n\n def empty(self):\n return _SoapySDR.SoapySDRDoubleList_empty(self)\n\n def size(self):\n return _SoapySDR.SoapySDRDoubleList_size(self)\n\n def clear(self):\n return _SoapySDR.SoapySDRDoubleList_clear(self)\n\n def swap(self, *args):\n return _SoapySDR.SoapySDRDoubleList_swap(self, *args)\n\n def get_allocator(self):\n return _SoapySDR.SoapySDRDoubleList_get_allocator(self)\n\n def begin(self):\n return _SoapySDR.SoapySDRDoubleList_begin(self)\n\n def end(self):\n return _SoapySDR.SoapySDRDoubleList_end(self)\n\n def rbegin(self):\n return _SoapySDR.SoapySDRDoubleList_rbegin(self)\n\n def rend(self):\n return _SoapySDR.SoapySDRDoubleList_rend(self)\n\n def pop_back(self):\n return _SoapySDR.SoapySDRDoubleList_pop_back(self)\n\n def erase(self, *args):\n return _SoapySDR.SoapySDRDoubleList_erase(self, *args)\n\n def __init__(self, *args):\n this = _SoapySDR.new_SoapySDRDoubleList(*args)\n try:\n self.this.append(this)\n except:\n self.this = this\n\n def push_back(self, *args):\n return _SoapySDR.SoapySDRDoubleList_push_back(self, *args)\n\n def front(self):\n return _SoapySDR.SoapySDRDoubleList_front(self)\n\n def back(self):\n return _SoapySDR.SoapySDRDoubleList_back(self)\n\n def assign(self, *args):\n return _SoapySDR.SoapySDRDoubleList_assign(self, *args)\n\n def resize(self, *args):\n return _SoapySDR.SoapySDRDoubleList_resize(self, *args)\n\n def insert(self, *args):\n return _SoapySDR.SoapySDRDoubleList_insert(self, *args)\n\n def reserve(self, *args):\n return _SoapySDR.SoapySDRDoubleList_reserve(self, *args)\n\n def capacity(self):\n return _SoapySDR.SoapySDRDoubleList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRDoubleList\n __del__ = lambda self: None\n\n\n<mask token>\n\n\nclass 
StreamResult(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self,\n StreamResult, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, StreamResult, name)\n __repr__ = _swig_repr\n\n def __init__(self):\n this = _SoapySDR.new_StreamResult()\n try:\n self.this.append(this)\n except:\n self.this = this\n __swig_setmethods__['ret'] = _SoapySDR.StreamResult_ret_set\n __swig_getmethods__['ret'] = _SoapySDR.StreamResult_ret_get\n if _newclass:\n ret = _swig_property(_SoapySDR.StreamResult_ret_get, _SoapySDR.\n StreamResult_ret_set)\n __swig_setmethods__['flags'] = _SoapySDR.StreamResult_flags_set\n __swig_getmethods__['flags'] = _SoapySDR.StreamResult_flags_get\n if _newclass:\n flags = _swig_property(_SoapySDR.StreamResult_flags_get, _SoapySDR.\n StreamResult_flags_set)\n __swig_setmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_set\n __swig_getmethods__['timeNs'] = _SoapySDR.StreamResult_timeNs_get\n if _newclass:\n timeNs = _swig_property(_SoapySDR.StreamResult_timeNs_get,\n _SoapySDR.StreamResult_timeNs_set)\n __swig_setmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_set\n __swig_getmethods__['chanMask'] = _SoapySDR.StreamResult_chanMask_get\n if _newclass:\n chanMask = _swig_property(_SoapySDR.StreamResult_chanMask_get,\n _SoapySDR.StreamResult_chanMask_set)\n\n def __str__(self):\n return 'ret=%s, flags=%s, timeNs=%s' % (self.ret, self.flags, self.\n timeNs)\n __swig_destroy__ = _SoapySDR.delete_StreamResult\n __del__ = lambda self: None\n\n\n<mask token>\n\n\ndef SoapySDR_errToStr(*args):\n return _SoapySDR.SoapySDR_errToStr(*args)\n\n\n<mask token>\n\n\ndef SoapySDR_getAPIVersion():\n return _SoapySDR.SoapySDR_getAPIVersion()\n\n\n<mask token>\n\n\ndef SoapySDR_getABIVersion():\n return _SoapySDR.SoapySDR_getABIVersion()\n\n\n<mask token>\n\n\ndef SoapySDR_getLibVersion():\n return _SoapySDR.SoapySDR_getLibVersion()\n\n\n<mask token>\n\n\ndef SoapySDR_log(*args):\n 
return _SoapySDR.SoapySDR_log(*args)\n\n\n<mask token>\n\n\ndef SoapySDR_setLogLevel(*args):\n return _SoapySDR.SoapySDR_setLogLevel(*args)\n\n\n<mask token>\n\n\ndef errToStr(*args):\n return _SoapySDR.errToStr(*args)\n\n\n<mask token>\n\n\ndef getAPIVersion():\n return _SoapySDR.getAPIVersion()\n\n\n<mask token>\n\n\ndef getABIVersion():\n return _SoapySDR.getABIVersion()\n\n\n<mask token>\n\n\ndef getLibVersion():\n return _SoapySDR.getLibVersion()\n\n\n<mask token>\n\n\ndef getRootPath():\n return _SoapySDR.getRootPath()\n\n\n<mask token>\n\n\ndef listSearchPaths():\n return _SoapySDR.listSearchPaths()\n\n\n<mask token>\n\n\ndef listModules(*args):\n return _SoapySDR.listModules(*args)\n\n\n<mask token>\n\n\ndef loadModule(*args):\n return _SoapySDR.loadModule(*args)\n\n\n<mask token>\n\n\ndef getLoaderResult(*args):\n return _SoapySDR.getLoaderResult(*args)\n\n\n<mask token>\n\n\ndef unloadModule(*args):\n return _SoapySDR.unloadModule(*args)\n\n\n<mask token>\n\n\ndef loadModules():\n return _SoapySDR.loadModules()\n\n\n<mask token>\n\n\ndef formatToSize(*args):\n return _SoapySDR.formatToSize(*args)\n\n\n<mask token>\n\n\ndef ticksToTimeNs(*args):\n return _SoapySDR.ticksToTimeNs(*args)\n\n\n<mask token>\n\n\ndef log(*args):\n return _SoapySDR.log(*args)\n\n\n<mask token>\n\n\ndef setLogLevel(*args):\n return _SoapySDR.setLogLevel(*args)\n\n\n<mask token>\n\n\nclass Device(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, Device,\n name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, Device, name)\n\n def __init__(self, *args, **kwargs):\n raise AttributeError('No constructor defined')\n __repr__ = _swig_repr\n __swig_destroy__ = _SoapySDR.delete_Device\n __del__ = lambda self: None\n __swig_getmethods__['enumerate'] = lambda x: _SoapySDR.Device_enumerate\n if _newclass:\n enumerate = staticmethod(_SoapySDR.Device_enumerate)\n __swig_getmethods__['make'] = lambda x: 
_SoapySDR.Device_make\n if _newclass:\n make = staticmethod(_SoapySDR.Device_make)\n __swig_getmethods__['unmake'] = lambda x: _SoapySDR.Device_unmake\n if _newclass:\n unmake = staticmethod(_SoapySDR.Device_unmake)\n\n def getDriverKey(self):\n return _SoapySDR.Device_getDriverKey(self)\n\n def getHardwareKey(self):\n return _SoapySDR.Device_getHardwareKey(self)\n\n def getHardwareInfo(self):\n return _SoapySDR.Device_getHardwareInfo(self)\n\n def setFrontendMapping(self, *args):\n return _SoapySDR.Device_setFrontendMapping(self, *args)\n\n def getFrontendMapping(self, *args):\n return _SoapySDR.Device_getFrontendMapping(self, *args)\n\n def getNumChannels(self, *args):\n return _SoapySDR.Device_getNumChannels(self, *args)\n\n def getChannelInfo(self, *args):\n return _SoapySDR.Device_getChannelInfo(self, *args)\n\n def getFullDuplex(self, *args):\n return _SoapySDR.Device_getFullDuplex(self, *args)\n\n def getStreamFormats(self, *args):\n return _SoapySDR.Device_getStreamFormats(self, *args)\n\n def getNativeStreamFormat(self, *args):\n return _SoapySDR.Device_getNativeStreamFormat(self, *args)\n\n def getStreamArgsInfo(self, *args):\n return _SoapySDR.Device_getStreamArgsInfo(self, *args)\n\n def setupStream(self, *args):\n return _SoapySDR.Device_setupStream(self, *args)\n\n def closeStream(self, *args):\n return _SoapySDR.Device_closeStream(self, *args)\n\n def getStreamMTU(self, *args):\n return _SoapySDR.Device_getStreamMTU(self, *args)\n\n def activateStream(self, *args):\n return _SoapySDR.Device_activateStream(self, *args)\n\n def deactivateStream(self, *args):\n return _SoapySDR.Device_deactivateStream(self, *args)\n\n def readStream(self, *args):\n return _SoapySDR.Device_readStream(self, *args)\n\n def writeStream(self, *args):\n return _SoapySDR.Device_writeStream(self, *args)\n\n def readStreamStatus(self, *args):\n return _SoapySDR.Device_readStreamStatus(self, *args)\n\n def getNumDirectAccessBuffers(self, *args):\n return 
_SoapySDR.Device_getNumDirectAccessBuffers(self, *args)\n\n def getDirectAccessBufferAddrs(self, *args):\n return _SoapySDR.Device_getDirectAccessBufferAddrs(self, *args)\n\n def acquireReadBuffer(self, *args):\n return _SoapySDR.Device_acquireReadBuffer(self, *args)\n\n def releaseReadBuffer(self, *args):\n return _SoapySDR.Device_releaseReadBuffer(self, *args)\n\n def acquireWriteBuffer(self, *args):\n return _SoapySDR.Device_acquireWriteBuffer(self, *args)\n\n def releaseWriteBuffer(self, *args):\n return _SoapySDR.Device_releaseWriteBuffer(self, *args)\n\n def listAntennas(self, *args):\n return _SoapySDR.Device_listAntennas(self, *args)\n\n def setAntenna(self, *args):\n return _SoapySDR.Device_setAntenna(self, *args)\n\n def getAntenna(self, *args):\n return _SoapySDR.Device_getAntenna(self, *args)\n\n def hasDCOffsetMode(self, *args):\n return _SoapySDR.Device_hasDCOffsetMode(self, *args)\n\n def setDCOffsetMode(self, *args):\n return _SoapySDR.Device_setDCOffsetMode(self, *args)\n\n def getDCOffsetMode(self, *args):\n return _SoapySDR.Device_getDCOffsetMode(self, *args)\n\n def hasDCOffset(self, *args):\n return _SoapySDR.Device_hasDCOffset(self, *args)\n\n def setDCOffset(self, *args):\n return _SoapySDR.Device_setDCOffset(self, *args)\n\n def getDCOffset(self, *args):\n return _SoapySDR.Device_getDCOffset(self, *args)\n\n def hasIQBalance(self, *args):\n return _SoapySDR.Device_hasIQBalance(self, *args)\n\n def setIQBalance(self, *args):\n return _SoapySDR.Device_setIQBalance(self, *args)\n\n def getIQBalance(self, *args):\n return _SoapySDR.Device_getIQBalance(self, *args)\n\n def hasFrequencyCorrection(self, *args):\n return _SoapySDR.Device_hasFrequencyCorrection(self, *args)\n\n def setFrequencyCorrection(self, *args):\n return _SoapySDR.Device_setFrequencyCorrection(self, *args)\n\n def getFrequencyCorrection(self, *args):\n return _SoapySDR.Device_getFrequencyCorrection(self, *args)\n\n def listGains(self, *args):\n return 
_SoapySDR.Device_listGains(self, *args)\n\n def hasGainMode(self, *args):\n return _SoapySDR.Device_hasGainMode(self, *args)\n\n def setGainMode(self, *args):\n return _SoapySDR.Device_setGainMode(self, *args)\n\n def getGainMode(self, *args):\n return _SoapySDR.Device_getGainMode(self, *args)\n\n def setGain(self, *args):\n return _SoapySDR.Device_setGain(self, *args)\n\n def getGain(self, *args):\n return _SoapySDR.Device_getGain(self, *args)\n\n def getGainRange(self, *args):\n return _SoapySDR.Device_getGainRange(self, *args)\n\n def setFrequency(self, *args):\n return _SoapySDR.Device_setFrequency(self, *args)\n\n def getFrequency(self, *args):\n return _SoapySDR.Device_getFrequency(self, *args)\n\n def listFrequencies(self, *args):\n return _SoapySDR.Device_listFrequencies(self, *args)\n\n def getFrequencyRange(self, *args):\n return _SoapySDR.Device_getFrequencyRange(self, *args)\n\n def getFrequencyArgsInfo(self, *args):\n return _SoapySDR.Device_getFrequencyArgsInfo(self, *args)\n\n def setSampleRate(self, *args):\n return _SoapySDR.Device_setSampleRate(self, *args)\n\n def getSampleRate(self, *args):\n return _SoapySDR.Device_getSampleRate(self, *args)\n\n def listSampleRates(self, *args):\n return _SoapySDR.Device_listSampleRates(self, *args)\n\n def getSampleRateRange(self, *args):\n return _SoapySDR.Device_getSampleRateRange(self, *args)\n\n def setBandwidth(self, *args):\n return _SoapySDR.Device_setBandwidth(self, *args)\n\n def getBandwidth(self, *args):\n return _SoapySDR.Device_getBandwidth(self, *args)\n\n def listBandwidths(self, *args):\n return _SoapySDR.Device_listBandwidths(self, *args)\n\n def getBandwidthRange(self, *args):\n return _SoapySDR.Device_getBandwidthRange(self, *args)\n\n def setMasterClockRate(self, *args):\n return _SoapySDR.Device_setMasterClockRate(self, *args)\n\n def getMasterClockRate(self):\n return _SoapySDR.Device_getMasterClockRate(self)\n\n def getMasterClockRates(self):\n return 
_SoapySDR.Device_getMasterClockRates(self)\n\n def listClockSources(self):\n return _SoapySDR.Device_listClockSources(self)\n\n def setClockSource(self, *args):\n return _SoapySDR.Device_setClockSource(self, *args)\n\n def getClockSource(self):\n return _SoapySDR.Device_getClockSource(self)\n\n def listTimeSources(self):\n return _SoapySDR.Device_listTimeSources(self)\n\n def setTimeSource(self, *args):\n return _SoapySDR.Device_setTimeSource(self, *args)\n\n def getTimeSource(self):\n return _SoapySDR.Device_getTimeSource(self)\n\n def hasHardwareTime(self, what=''):\n return _SoapySDR.Device_hasHardwareTime(self, what)\n\n def getHardwareTime(self, what=''):\n return _SoapySDR.Device_getHardwareTime(self, what)\n\n def setHardwareTime(self, *args):\n return _SoapySDR.Device_setHardwareTime(self, *args)\n\n def setCommandTime(self, *args):\n return _SoapySDR.Device_setCommandTime(self, *args)\n\n def listSensors(self, *args):\n return _SoapySDR.Device_listSensors(self, *args)\n\n def getSensorInfo(self, *args):\n return _SoapySDR.Device_getSensorInfo(self, *args)\n\n def readSensor(self, *args):\n return _SoapySDR.Device_readSensor(self, *args)\n\n def listRegisterInterfaces(self):\n return _SoapySDR.Device_listRegisterInterfaces(self)\n\n def writeRegister(self, *args):\n return _SoapySDR.Device_writeRegister(self, *args)\n\n def readRegister(self, *args):\n return _SoapySDR.Device_readRegister(self, *args)\n\n def writeRegisters(self, *args):\n return _SoapySDR.Device_writeRegisters(self, *args)\n\n def readRegisters(self, *args):\n return _SoapySDR.Device_readRegisters(self, *args)\n\n def getSettingInfo(self, *args):\n return _SoapySDR.Device_getSettingInfo(self, *args)\n\n def writeSetting(self, *args):\n return _SoapySDR.Device_writeSetting(self, *args)\n\n def readSetting(self, *args):\n return _SoapySDR.Device_readSetting(self, *args)\n\n def listGPIOBanks(self):\n return _SoapySDR.Device_listGPIOBanks(self)\n\n def writeGPIO(self, *args):\n return 
_SoapySDR.Device_writeGPIO(self, *args)\n\n def readGPIO(self, *args):\n return _SoapySDR.Device_readGPIO(self, *args)\n\n def writeGPIODir(self, *args):\n return _SoapySDR.Device_writeGPIODir(self, *args)\n\n def readGPIODir(self, *args):\n return _SoapySDR.Device_readGPIODir(self, *args)\n\n def writeI2C(self, *args):\n return _SoapySDR.Device_writeI2C(self, *args)\n\n def readI2C(self, *args):\n return _SoapySDR.Device_readI2C(self, *args)\n\n def transactSPI(self, *args):\n return _SoapySDR.Device_transactSPI(self, *args)\n\n def listUARTs(self):\n return _SoapySDR.Device_listUARTs(self)\n\n def writeUART(self, *args):\n return _SoapySDR.Device_writeUART(self, *args)\n\n def readUART(self, *args):\n return _SoapySDR.Device_readUART(self, *args)\n\n def readStream__(self, *args):\n return _SoapySDR.Device_readStream__(self, *args)\n\n def writeStream__(self, *args):\n return _SoapySDR.Device_writeStream__(self, *args)\n\n def readStreamStatus__(self, *args):\n return _SoapySDR.Device_readStreamStatus__(self, *args)\n\n def __del__(self):\n Device.unmake(self)\n\n def __str__(self):\n return '%s:%s' % (self.getDriverKey(), self.getHardwareKey())\n\n def readStream(self, stream, buffs, numElems, flags=0, timeoutUs=100000):\n ptrs = [extractBuffPointer(b) for b in buffs]\n return self.readStream__(stream, ptrs, numElems, flags, timeoutUs)\n\n def writeStream(self, stream, buffs, numElems, flags=0, timeNs=0,\n timeoutUs=100000):\n ptrs = [extractBuffPointer(b) for b in buffs]\n return self.writeStream__(stream, ptrs, numElems, flags, timeNs,\n timeoutUs)\n\n def readStreamStatus(self, stream, timeoutUs=100000):\n return self.readStreamStatus__(stream, timeoutUs)\n\n\n<mask token>\n\n\ndef Device_enumerate(*args):\n return _SoapySDR.Device_enumerate(*args)\n\n\n<mask token>\n\n\ndef Device_make(*args):\n return _SoapySDR.Device_make(*args)\n\n\n<mask token>\n\n\ndef Device_unmake(*args):\n return _SoapySDR.Device_unmake(*args)\n\n\n<mask token>\n\n\nclass 
Device(Device):\n\n def __new__(cls, *args, **kwargs):\n return cls.make(*args, **kwargs)\n\n\ndef extractBuffPointer(buff):\n if hasattr(buff, '__array_interface__'):\n return buff.__array_interface__['data'][0]\n if hasattr(buff, '__long__'):\n return long(buff)\n if hasattr(buff, '__int__'):\n return int(buff)\n raise Exception('Unrecognized data format: ' + str(type(buff)))\n",
"step-5": "# This file was automatically generated by SWIG (http://www.swig.org).\n# Version 2.0.12\n#\n# Do not make changes to this file unless you know what you are doing--modify\n# the SWIG interface file instead.\n\n\n\n\n\nfrom sys import version_info\nif version_info >= (2,6,0):\n def swig_import_helper():\n from os.path import dirname\n import imp\n fp = None\n try:\n fp, pathname, description = imp.find_module('_SoapySDR', [dirname(__file__)])\n except ImportError:\n import _SoapySDR\n return _SoapySDR\n if fp is not None:\n try:\n _mod = imp.load_module('_SoapySDR', fp, pathname, description)\n finally:\n fp.close()\n return _mod\n _SoapySDR = swig_import_helper()\n del swig_import_helper\nelse:\n import _SoapySDR\ndel version_info\ntry:\n _swig_property = property\nexcept NameError:\n pass # Python < 2.2 doesn't have 'property'.\ndef _swig_setattr_nondynamic(self,class_type,name,value,static=1):\n if (name == \"thisown\"): return self.this.own(value)\n if (name == \"this\"):\n if type(value).__name__ == 'SwigPyObject':\n self.__dict__[name] = value\n return\n method = class_type.__swig_setmethods__.get(name,None)\n if method: return method(self,value)\n if (not static):\n self.__dict__[name] = value\n else:\n raise AttributeError(\"You cannot add attributes to %s\" % self)\n\ndef _swig_setattr(self,class_type,name,value):\n return _swig_setattr_nondynamic(self,class_type,name,value,0)\n\ndef _swig_getattr(self,class_type,name):\n if (name == \"thisown\"): return self.this.own()\n method = class_type.__swig_getmethods__.get(name,None)\n if method: return method(self)\n raise AttributeError(name)\n\ndef _swig_repr(self):\n try: strthis = \"proxy of \" + self.this.__repr__()\n except: strthis = \"\"\n return \"<%s.%s; %s >\" % (self.__class__.__module__, self.__class__.__name__, strthis,)\n\ntry:\n _object = object\n _newclass = 1\nexcept AttributeError:\n class _object : pass\n _newclass = 0\n\n\nclass SwigPyIterator(_object):\n __swig_setmethods__ = {}\n 
__setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)\n def __init__(self, *args, **kwargs): raise AttributeError(\"No constructor defined - class is abstract\")\n __repr__ = _swig_repr\n __swig_destroy__ = _SoapySDR.delete_SwigPyIterator\n __del__ = lambda self : None;\n def value(self): return _SoapySDR.SwigPyIterator_value(self)\n def incr(self, n=1): return _SoapySDR.SwigPyIterator_incr(self, n)\n def decr(self, n=1): return _SoapySDR.SwigPyIterator_decr(self, n)\n def distance(self, *args): return _SoapySDR.SwigPyIterator_distance(self, *args)\n def equal(self, *args): return _SoapySDR.SwigPyIterator_equal(self, *args)\n def copy(self): return _SoapySDR.SwigPyIterator_copy(self)\n def next(self): return _SoapySDR.SwigPyIterator_next(self)\n def __next__(self): return _SoapySDR.SwigPyIterator___next__(self)\n def previous(self): return _SoapySDR.SwigPyIterator_previous(self)\n def advance(self, *args): return _SoapySDR.SwigPyIterator_advance(self, *args)\n def __eq__(self, *args): return _SoapySDR.SwigPyIterator___eq__(self, *args)\n def __ne__(self, *args): return _SoapySDR.SwigPyIterator___ne__(self, *args)\n def __iadd__(self, *args): return _SoapySDR.SwigPyIterator___iadd__(self, *args)\n def __isub__(self, *args): return _SoapySDR.SwigPyIterator___isub__(self, *args)\n def __add__(self, *args): return _SoapySDR.SwigPyIterator___add__(self, *args)\n def __sub__(self, *args): return _SoapySDR.SwigPyIterator___sub__(self, *args)\n def __iter__(self): return self\nSwigPyIterator_swigregister = _SoapySDR.SwigPyIterator_swigregister\nSwigPyIterator_swigregister(SwigPyIterator)\n\n\ndef KwargsFromString(*args):\n return _SoapySDR.KwargsFromString(*args)\nKwargsFromString = _SoapySDR.KwargsFromString\n\ndef KwargsToString(*args):\n return _SoapySDR.KwargsToString(*args)\nKwargsToString = _SoapySDR.KwargsToString\nclass 
Range(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, Range, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, Range, name)\n __repr__ = _swig_repr\n def __init__(self, *args): \n this = _SoapySDR.new_Range(*args)\n try: self.this.append(this)\n except: self.this = this\n def minimum(self): return _SoapySDR.Range_minimum(self)\n def maximum(self): return _SoapySDR.Range_maximum(self)\n def step(self): return _SoapySDR.Range_step(self)\n def __str__(self):\n fields = [self.minimum(), self.maximum()]\n if self.step() != 0.0: fields.append(self.step())\n return ', '.join(['%g'%f for f in fields])\n\n __swig_destroy__ = _SoapySDR.delete_Range\n __del__ = lambda self : None;\nRange_swigregister = _SoapySDR.Range_swigregister\nRange_swigregister(Range)\n\nclass ArgInfo(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, ArgInfo, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, ArgInfo, name)\n __repr__ = _swig_repr\n def __init__(self): \n this = _SoapySDR.new_ArgInfo()\n try: self.this.append(this)\n except: self.this = this\n __swig_setmethods__[\"key\"] = _SoapySDR.ArgInfo_key_set\n __swig_getmethods__[\"key\"] = _SoapySDR.ArgInfo_key_get\n if _newclass:key = _swig_property(_SoapySDR.ArgInfo_key_get, _SoapySDR.ArgInfo_key_set)\n __swig_setmethods__[\"value\"] = _SoapySDR.ArgInfo_value_set\n __swig_getmethods__[\"value\"] = _SoapySDR.ArgInfo_value_get\n if _newclass:value = _swig_property(_SoapySDR.ArgInfo_value_get, _SoapySDR.ArgInfo_value_set)\n __swig_setmethods__[\"name\"] = _SoapySDR.ArgInfo_name_set\n __swig_getmethods__[\"name\"] = _SoapySDR.ArgInfo_name_get\n if _newclass:name = _swig_property(_SoapySDR.ArgInfo_name_get, _SoapySDR.ArgInfo_name_set)\n __swig_setmethods__[\"description\"] = _SoapySDR.ArgInfo_description_set\n __swig_getmethods__[\"description\"] = 
_SoapySDR.ArgInfo_description_get\n if _newclass:description = _swig_property(_SoapySDR.ArgInfo_description_get, _SoapySDR.ArgInfo_description_set)\n __swig_setmethods__[\"units\"] = _SoapySDR.ArgInfo_units_set\n __swig_getmethods__[\"units\"] = _SoapySDR.ArgInfo_units_get\n if _newclass:units = _swig_property(_SoapySDR.ArgInfo_units_get, _SoapySDR.ArgInfo_units_set)\n BOOL = _SoapySDR.ArgInfo_BOOL\n INT = _SoapySDR.ArgInfo_INT\n FLOAT = _SoapySDR.ArgInfo_FLOAT\n STRING = _SoapySDR.ArgInfo_STRING\n __swig_setmethods__[\"type\"] = _SoapySDR.ArgInfo_type_set\n __swig_getmethods__[\"type\"] = _SoapySDR.ArgInfo_type_get\n if _newclass:type = _swig_property(_SoapySDR.ArgInfo_type_get, _SoapySDR.ArgInfo_type_set)\n __swig_setmethods__[\"range\"] = _SoapySDR.ArgInfo_range_set\n __swig_getmethods__[\"range\"] = _SoapySDR.ArgInfo_range_get\n if _newclass:range = _swig_property(_SoapySDR.ArgInfo_range_get, _SoapySDR.ArgInfo_range_set)\n __swig_setmethods__[\"options\"] = _SoapySDR.ArgInfo_options_set\n __swig_getmethods__[\"options\"] = _SoapySDR.ArgInfo_options_get\n if _newclass:options = _swig_property(_SoapySDR.ArgInfo_options_get, _SoapySDR.ArgInfo_options_set)\n __swig_setmethods__[\"optionNames\"] = _SoapySDR.ArgInfo_optionNames_set\n __swig_getmethods__[\"optionNames\"] = _SoapySDR.ArgInfo_optionNames_get\n if _newclass:optionNames = _swig_property(_SoapySDR.ArgInfo_optionNames_get, _SoapySDR.ArgInfo_optionNames_set)\n __swig_destroy__ = _SoapySDR.delete_ArgInfo\n __del__ = lambda self : None;\nArgInfo_swigregister = _SoapySDR.ArgInfo_swigregister\nArgInfo_swigregister(ArgInfo)\n\nclass SoapySDRKwargs(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRKwargs, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargs, name)\n __repr__ = _swig_repr\n def iterator(self): return _SoapySDR.SoapySDRKwargs_iterator(self)\n def __iter__(self): return self.iterator()\n 
def __nonzero__(self): return _SoapySDR.SoapySDRKwargs___nonzero__(self)\n def __bool__(self): return _SoapySDR.SoapySDRKwargs___bool__(self)\n def __len__(self): return _SoapySDR.SoapySDRKwargs___len__(self)\n def __iter__(self): return self.key_iterator()\n def iterkeys(self): return self.key_iterator()\n def itervalues(self): return self.value_iterator()\n def iteritems(self): return self.iterator()\n def __getitem__(self, *args): return _SoapySDR.SoapySDRKwargs___getitem__(self, *args)\n def __delitem__(self, *args): return _SoapySDR.SoapySDRKwargs___delitem__(self, *args)\n def has_key(self, *args): return _SoapySDR.SoapySDRKwargs_has_key(self, *args)\n def keys(self): return _SoapySDR.SoapySDRKwargs_keys(self)\n def values(self): return _SoapySDR.SoapySDRKwargs_values(self)\n def items(self): return _SoapySDR.SoapySDRKwargs_items(self)\n def __contains__(self, *args): return _SoapySDR.SoapySDRKwargs___contains__(self, *args)\n def key_iterator(self): return _SoapySDR.SoapySDRKwargs_key_iterator(self)\n def value_iterator(self): return _SoapySDR.SoapySDRKwargs_value_iterator(self)\n def __setitem__(self, *args): return _SoapySDR.SoapySDRKwargs___setitem__(self, *args)\n def asdict(self): return _SoapySDR.SoapySDRKwargs_asdict(self)\n def __init__(self, *args): \n this = _SoapySDR.new_SoapySDRKwargs(*args)\n try: self.this.append(this)\n except: self.this = this\n def empty(self): return _SoapySDR.SoapySDRKwargs_empty(self)\n def size(self): return _SoapySDR.SoapySDRKwargs_size(self)\n def clear(self): return _SoapySDR.SoapySDRKwargs_clear(self)\n def swap(self, *args): return _SoapySDR.SoapySDRKwargs_swap(self, *args)\n def get_allocator(self): return _SoapySDR.SoapySDRKwargs_get_allocator(self)\n def begin(self): return _SoapySDR.SoapySDRKwargs_begin(self)\n def end(self): return _SoapySDR.SoapySDRKwargs_end(self)\n def rbegin(self): return _SoapySDR.SoapySDRKwargs_rbegin(self)\n def rend(self): return _SoapySDR.SoapySDRKwargs_rend(self)\n def count(self, 
*args): return _SoapySDR.SoapySDRKwargs_count(self, *args)\n def erase(self, *args): return _SoapySDR.SoapySDRKwargs_erase(self, *args)\n def find(self, *args): return _SoapySDR.SoapySDRKwargs_find(self, *args)\n def lower_bound(self, *args): return _SoapySDR.SoapySDRKwargs_lower_bound(self, *args)\n def upper_bound(self, *args): return _SoapySDR.SoapySDRKwargs_upper_bound(self, *args)\n def __str__(self):\n out = list()\n for k, v in self.iteritems():\n out.append(\"%s=%s\"%(k, v))\n return '{'+(', '.join(out))+'}'\n\n __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargs\n __del__ = lambda self : None;\nSoapySDRKwargs_swigregister = _SoapySDR.SoapySDRKwargs_swigregister\nSoapySDRKwargs_swigregister(SoapySDRKwargs)\n\nclass SoapySDRKwargsList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRKwargsList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRKwargsList, name)\n __repr__ = _swig_repr\n def iterator(self): return _SoapySDR.SoapySDRKwargsList_iterator(self)\n def __iter__(self): return self.iterator()\n def __nonzero__(self): return _SoapySDR.SoapySDRKwargsList___nonzero__(self)\n def __bool__(self): return _SoapySDR.SoapySDRKwargsList___bool__(self)\n def __len__(self): return _SoapySDR.SoapySDRKwargsList___len__(self)\n def pop(self): return _SoapySDR.SoapySDRKwargsList_pop(self)\n def __getslice__(self, *args): return _SoapySDR.SoapySDRKwargsList___getslice__(self, *args)\n def __setslice__(self, *args): return _SoapySDR.SoapySDRKwargsList___setslice__(self, *args)\n def __delslice__(self, *args): return _SoapySDR.SoapySDRKwargsList___delslice__(self, *args)\n def __delitem__(self, *args): return _SoapySDR.SoapySDRKwargsList___delitem__(self, *args)\n def __getitem__(self, *args): return _SoapySDR.SoapySDRKwargsList___getitem__(self, *args)\n def __setitem__(self, *args): return _SoapySDR.SoapySDRKwargsList___setitem__(self, *args)\n def 
append(self, *args): return _SoapySDR.SoapySDRKwargsList_append(self, *args)\n def empty(self): return _SoapySDR.SoapySDRKwargsList_empty(self)\n def size(self): return _SoapySDR.SoapySDRKwargsList_size(self)\n def clear(self): return _SoapySDR.SoapySDRKwargsList_clear(self)\n def swap(self, *args): return _SoapySDR.SoapySDRKwargsList_swap(self, *args)\n def get_allocator(self): return _SoapySDR.SoapySDRKwargsList_get_allocator(self)\n def begin(self): return _SoapySDR.SoapySDRKwargsList_begin(self)\n def end(self): return _SoapySDR.SoapySDRKwargsList_end(self)\n def rbegin(self): return _SoapySDR.SoapySDRKwargsList_rbegin(self)\n def rend(self): return _SoapySDR.SoapySDRKwargsList_rend(self)\n def pop_back(self): return _SoapySDR.SoapySDRKwargsList_pop_back(self)\n def erase(self, *args): return _SoapySDR.SoapySDRKwargsList_erase(self, *args)\n def __init__(self, *args): \n this = _SoapySDR.new_SoapySDRKwargsList(*args)\n try: self.this.append(this)\n except: self.this = this\n def push_back(self, *args): return _SoapySDR.SoapySDRKwargsList_push_back(self, *args)\n def front(self): return _SoapySDR.SoapySDRKwargsList_front(self)\n def back(self): return _SoapySDR.SoapySDRKwargsList_back(self)\n def assign(self, *args): return _SoapySDR.SoapySDRKwargsList_assign(self, *args)\n def resize(self, *args): return _SoapySDR.SoapySDRKwargsList_resize(self, *args)\n def insert(self, *args): return _SoapySDR.SoapySDRKwargsList_insert(self, *args)\n def reserve(self, *args): return _SoapySDR.SoapySDRKwargsList_reserve(self, *args)\n def capacity(self): return _SoapySDR.SoapySDRKwargsList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRKwargsList\n __del__ = lambda self : None;\nSoapySDRKwargsList_swigregister = _SoapySDR.SoapySDRKwargsList_swigregister\nSoapySDRKwargsList_swigregister(SoapySDRKwargsList)\n\nclass SoapySDRArgInfoList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRArgInfoList, name, 
value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRArgInfoList, name)\n __repr__ = _swig_repr\n def iterator(self): return _SoapySDR.SoapySDRArgInfoList_iterator(self)\n def __iter__(self): return self.iterator()\n def __nonzero__(self): return _SoapySDR.SoapySDRArgInfoList___nonzero__(self)\n def __bool__(self): return _SoapySDR.SoapySDRArgInfoList___bool__(self)\n def __len__(self): return _SoapySDR.SoapySDRArgInfoList___len__(self)\n def pop(self): return _SoapySDR.SoapySDRArgInfoList_pop(self)\n def __getslice__(self, *args): return _SoapySDR.SoapySDRArgInfoList___getslice__(self, *args)\n def __setslice__(self, *args): return _SoapySDR.SoapySDRArgInfoList___setslice__(self, *args)\n def __delslice__(self, *args): return _SoapySDR.SoapySDRArgInfoList___delslice__(self, *args)\n def __delitem__(self, *args): return _SoapySDR.SoapySDRArgInfoList___delitem__(self, *args)\n def __getitem__(self, *args): return _SoapySDR.SoapySDRArgInfoList___getitem__(self, *args)\n def __setitem__(self, *args): return _SoapySDR.SoapySDRArgInfoList___setitem__(self, *args)\n def append(self, *args): return _SoapySDR.SoapySDRArgInfoList_append(self, *args)\n def empty(self): return _SoapySDR.SoapySDRArgInfoList_empty(self)\n def size(self): return _SoapySDR.SoapySDRArgInfoList_size(self)\n def clear(self): return _SoapySDR.SoapySDRArgInfoList_clear(self)\n def swap(self, *args): return _SoapySDR.SoapySDRArgInfoList_swap(self, *args)\n def get_allocator(self): return _SoapySDR.SoapySDRArgInfoList_get_allocator(self)\n def begin(self): return _SoapySDR.SoapySDRArgInfoList_begin(self)\n def end(self): return _SoapySDR.SoapySDRArgInfoList_end(self)\n def rbegin(self): return _SoapySDR.SoapySDRArgInfoList_rbegin(self)\n def rend(self): return _SoapySDR.SoapySDRArgInfoList_rend(self)\n def pop_back(self): return _SoapySDR.SoapySDRArgInfoList_pop_back(self)\n def erase(self, *args): return _SoapySDR.SoapySDRArgInfoList_erase(self, *args)\n def 
__init__(self, *args): \n this = _SoapySDR.new_SoapySDRArgInfoList(*args)\n try: self.this.append(this)\n except: self.this = this\n def push_back(self, *args): return _SoapySDR.SoapySDRArgInfoList_push_back(self, *args)\n def front(self): return _SoapySDR.SoapySDRArgInfoList_front(self)\n def back(self): return _SoapySDR.SoapySDRArgInfoList_back(self)\n def assign(self, *args): return _SoapySDR.SoapySDRArgInfoList_assign(self, *args)\n def resize(self, *args): return _SoapySDR.SoapySDRArgInfoList_resize(self, *args)\n def insert(self, *args): return _SoapySDR.SoapySDRArgInfoList_insert(self, *args)\n def reserve(self, *args): return _SoapySDR.SoapySDRArgInfoList_reserve(self, *args)\n def capacity(self): return _SoapySDR.SoapySDRArgInfoList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRArgInfoList\n __del__ = lambda self : None;\nSoapySDRArgInfoList_swigregister = _SoapySDR.SoapySDRArgInfoList_swigregister\nSoapySDRArgInfoList_swigregister(SoapySDRArgInfoList)\n\nclass SoapySDRStringList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRStringList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRStringList, name)\n __repr__ = _swig_repr\n def iterator(self): return _SoapySDR.SoapySDRStringList_iterator(self)\n def __iter__(self): return self.iterator()\n def __nonzero__(self): return _SoapySDR.SoapySDRStringList___nonzero__(self)\n def __bool__(self): return _SoapySDR.SoapySDRStringList___bool__(self)\n def __len__(self): return _SoapySDR.SoapySDRStringList___len__(self)\n def pop(self): return _SoapySDR.SoapySDRStringList_pop(self)\n def __getslice__(self, *args): return _SoapySDR.SoapySDRStringList___getslice__(self, *args)\n def __setslice__(self, *args): return _SoapySDR.SoapySDRStringList___setslice__(self, *args)\n def __delslice__(self, *args): return _SoapySDR.SoapySDRStringList___delslice__(self, *args)\n def __delitem__(self, 
*args): return _SoapySDR.SoapySDRStringList___delitem__(self, *args)\n def __getitem__(self, *args): return _SoapySDR.SoapySDRStringList___getitem__(self, *args)\n def __setitem__(self, *args): return _SoapySDR.SoapySDRStringList___setitem__(self, *args)\n def append(self, *args): return _SoapySDR.SoapySDRStringList_append(self, *args)\n def empty(self): return _SoapySDR.SoapySDRStringList_empty(self)\n def size(self): return _SoapySDR.SoapySDRStringList_size(self)\n def clear(self): return _SoapySDR.SoapySDRStringList_clear(self)\n def swap(self, *args): return _SoapySDR.SoapySDRStringList_swap(self, *args)\n def get_allocator(self): return _SoapySDR.SoapySDRStringList_get_allocator(self)\n def begin(self): return _SoapySDR.SoapySDRStringList_begin(self)\n def end(self): return _SoapySDR.SoapySDRStringList_end(self)\n def rbegin(self): return _SoapySDR.SoapySDRStringList_rbegin(self)\n def rend(self): return _SoapySDR.SoapySDRStringList_rend(self)\n def pop_back(self): return _SoapySDR.SoapySDRStringList_pop_back(self)\n def erase(self, *args): return _SoapySDR.SoapySDRStringList_erase(self, *args)\n def __init__(self, *args): \n this = _SoapySDR.new_SoapySDRStringList(*args)\n try: self.this.append(this)\n except: self.this = this\n def push_back(self, *args): return _SoapySDR.SoapySDRStringList_push_back(self, *args)\n def front(self): return _SoapySDR.SoapySDRStringList_front(self)\n def back(self): return _SoapySDR.SoapySDRStringList_back(self)\n def assign(self, *args): return _SoapySDR.SoapySDRStringList_assign(self, *args)\n def resize(self, *args): return _SoapySDR.SoapySDRStringList_resize(self, *args)\n def insert(self, *args): return _SoapySDR.SoapySDRStringList_insert(self, *args)\n def reserve(self, *args): return _SoapySDR.SoapySDRStringList_reserve(self, *args)\n def capacity(self): return _SoapySDR.SoapySDRStringList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRStringList\n __del__ = lambda self : 
None;\nSoapySDRStringList_swigregister = _SoapySDR.SoapySDRStringList_swigregister\nSoapySDRStringList_swigregister(SoapySDRStringList)\n\nclass SoapySDRRangeList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRRangeList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRRangeList, name)\n __repr__ = _swig_repr\n def iterator(self): return _SoapySDR.SoapySDRRangeList_iterator(self)\n def __iter__(self): return self.iterator()\n def __nonzero__(self): return _SoapySDR.SoapySDRRangeList___nonzero__(self)\n def __bool__(self): return _SoapySDR.SoapySDRRangeList___bool__(self)\n def __len__(self): return _SoapySDR.SoapySDRRangeList___len__(self)\n def pop(self): return _SoapySDR.SoapySDRRangeList_pop(self)\n def __getslice__(self, *args): return _SoapySDR.SoapySDRRangeList___getslice__(self, *args)\n def __setslice__(self, *args): return _SoapySDR.SoapySDRRangeList___setslice__(self, *args)\n def __delslice__(self, *args): return _SoapySDR.SoapySDRRangeList___delslice__(self, *args)\n def __delitem__(self, *args): return _SoapySDR.SoapySDRRangeList___delitem__(self, *args)\n def __getitem__(self, *args): return _SoapySDR.SoapySDRRangeList___getitem__(self, *args)\n def __setitem__(self, *args): return _SoapySDR.SoapySDRRangeList___setitem__(self, *args)\n def append(self, *args): return _SoapySDR.SoapySDRRangeList_append(self, *args)\n def empty(self): return _SoapySDR.SoapySDRRangeList_empty(self)\n def size(self): return _SoapySDR.SoapySDRRangeList_size(self)\n def clear(self): return _SoapySDR.SoapySDRRangeList_clear(self)\n def swap(self, *args): return _SoapySDR.SoapySDRRangeList_swap(self, *args)\n def get_allocator(self): return _SoapySDR.SoapySDRRangeList_get_allocator(self)\n def begin(self): return _SoapySDR.SoapySDRRangeList_begin(self)\n def end(self): return _SoapySDR.SoapySDRRangeList_end(self)\n def rbegin(self): return 
_SoapySDR.SoapySDRRangeList_rbegin(self)\n def rend(self): return _SoapySDR.SoapySDRRangeList_rend(self)\n def pop_back(self): return _SoapySDR.SoapySDRRangeList_pop_back(self)\n def erase(self, *args): return _SoapySDR.SoapySDRRangeList_erase(self, *args)\n def __init__(self, *args): \n this = _SoapySDR.new_SoapySDRRangeList(*args)\n try: self.this.append(this)\n except: self.this = this\n def push_back(self, *args): return _SoapySDR.SoapySDRRangeList_push_back(self, *args)\n def front(self): return _SoapySDR.SoapySDRRangeList_front(self)\n def back(self): return _SoapySDR.SoapySDRRangeList_back(self)\n def assign(self, *args): return _SoapySDR.SoapySDRRangeList_assign(self, *args)\n def resize(self, *args): return _SoapySDR.SoapySDRRangeList_resize(self, *args)\n def insert(self, *args): return _SoapySDR.SoapySDRRangeList_insert(self, *args)\n def reserve(self, *args): return _SoapySDR.SoapySDRRangeList_reserve(self, *args)\n def capacity(self): return _SoapySDR.SoapySDRRangeList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRRangeList\n __del__ = lambda self : None;\nSoapySDRRangeList_swigregister = _SoapySDR.SoapySDRRangeList_swigregister\nSoapySDRRangeList_swigregister(SoapySDRRangeList)\n\nclass SoapySDRSizeList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRSizeList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRSizeList, name)\n __repr__ = _swig_repr\n def iterator(self): return _SoapySDR.SoapySDRSizeList_iterator(self)\n def __iter__(self): return self.iterator()\n def __nonzero__(self): return _SoapySDR.SoapySDRSizeList___nonzero__(self)\n def __bool__(self): return _SoapySDR.SoapySDRSizeList___bool__(self)\n def __len__(self): return _SoapySDR.SoapySDRSizeList___len__(self)\n def pop(self): return _SoapySDR.SoapySDRSizeList_pop(self)\n def __getslice__(self, *args): return _SoapySDR.SoapySDRSizeList___getslice__(self, 
*args)\n def __setslice__(self, *args): return _SoapySDR.SoapySDRSizeList___setslice__(self, *args)\n def __delslice__(self, *args): return _SoapySDR.SoapySDRSizeList___delslice__(self, *args)\n def __delitem__(self, *args): return _SoapySDR.SoapySDRSizeList___delitem__(self, *args)\n def __getitem__(self, *args): return _SoapySDR.SoapySDRSizeList___getitem__(self, *args)\n def __setitem__(self, *args): return _SoapySDR.SoapySDRSizeList___setitem__(self, *args)\n def append(self, *args): return _SoapySDR.SoapySDRSizeList_append(self, *args)\n def empty(self): return _SoapySDR.SoapySDRSizeList_empty(self)\n def size(self): return _SoapySDR.SoapySDRSizeList_size(self)\n def clear(self): return _SoapySDR.SoapySDRSizeList_clear(self)\n def swap(self, *args): return _SoapySDR.SoapySDRSizeList_swap(self, *args)\n def get_allocator(self): return _SoapySDR.SoapySDRSizeList_get_allocator(self)\n def begin(self): return _SoapySDR.SoapySDRSizeList_begin(self)\n def end(self): return _SoapySDR.SoapySDRSizeList_end(self)\n def rbegin(self): return _SoapySDR.SoapySDRSizeList_rbegin(self)\n def rend(self): return _SoapySDR.SoapySDRSizeList_rend(self)\n def pop_back(self): return _SoapySDR.SoapySDRSizeList_pop_back(self)\n def erase(self, *args): return _SoapySDR.SoapySDRSizeList_erase(self, *args)\n def __init__(self, *args): \n this = _SoapySDR.new_SoapySDRSizeList(*args)\n try: self.this.append(this)\n except: self.this = this\n def push_back(self, *args): return _SoapySDR.SoapySDRSizeList_push_back(self, *args)\n def front(self): return _SoapySDR.SoapySDRSizeList_front(self)\n def back(self): return _SoapySDR.SoapySDRSizeList_back(self)\n def assign(self, *args): return _SoapySDR.SoapySDRSizeList_assign(self, *args)\n def resize(self, *args): return _SoapySDR.SoapySDRSizeList_resize(self, *args)\n def insert(self, *args): return _SoapySDR.SoapySDRSizeList_insert(self, *args)\n def reserve(self, *args): return _SoapySDR.SoapySDRSizeList_reserve(self, *args)\n def 
capacity(self): return _SoapySDR.SoapySDRSizeList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRSizeList\n __del__ = lambda self : None;\nSoapySDRSizeList_swigregister = _SoapySDR.SoapySDRSizeList_swigregister\nSoapySDRSizeList_swigregister(SoapySDRSizeList)\n\nclass SoapySDRDoubleList(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, SoapySDRDoubleList, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, SoapySDRDoubleList, name)\n __repr__ = _swig_repr\n def iterator(self): return _SoapySDR.SoapySDRDoubleList_iterator(self)\n def __iter__(self): return self.iterator()\n def __nonzero__(self): return _SoapySDR.SoapySDRDoubleList___nonzero__(self)\n def __bool__(self): return _SoapySDR.SoapySDRDoubleList___bool__(self)\n def __len__(self): return _SoapySDR.SoapySDRDoubleList___len__(self)\n def pop(self): return _SoapySDR.SoapySDRDoubleList_pop(self)\n def __getslice__(self, *args): return _SoapySDR.SoapySDRDoubleList___getslice__(self, *args)\n def __setslice__(self, *args): return _SoapySDR.SoapySDRDoubleList___setslice__(self, *args)\n def __delslice__(self, *args): return _SoapySDR.SoapySDRDoubleList___delslice__(self, *args)\n def __delitem__(self, *args): return _SoapySDR.SoapySDRDoubleList___delitem__(self, *args)\n def __getitem__(self, *args): return _SoapySDR.SoapySDRDoubleList___getitem__(self, *args)\n def __setitem__(self, *args): return _SoapySDR.SoapySDRDoubleList___setitem__(self, *args)\n def append(self, *args): return _SoapySDR.SoapySDRDoubleList_append(self, *args)\n def empty(self): return _SoapySDR.SoapySDRDoubleList_empty(self)\n def size(self): return _SoapySDR.SoapySDRDoubleList_size(self)\n def clear(self): return _SoapySDR.SoapySDRDoubleList_clear(self)\n def swap(self, *args): return _SoapySDR.SoapySDRDoubleList_swap(self, *args)\n def get_allocator(self): return _SoapySDR.SoapySDRDoubleList_get_allocator(self)\n def begin(self): 
return _SoapySDR.SoapySDRDoubleList_begin(self)\n def end(self): return _SoapySDR.SoapySDRDoubleList_end(self)\n def rbegin(self): return _SoapySDR.SoapySDRDoubleList_rbegin(self)\n def rend(self): return _SoapySDR.SoapySDRDoubleList_rend(self)\n def pop_back(self): return _SoapySDR.SoapySDRDoubleList_pop_back(self)\n def erase(self, *args): return _SoapySDR.SoapySDRDoubleList_erase(self, *args)\n def __init__(self, *args): \n this = _SoapySDR.new_SoapySDRDoubleList(*args)\n try: self.this.append(this)\n except: self.this = this\n def push_back(self, *args): return _SoapySDR.SoapySDRDoubleList_push_back(self, *args)\n def front(self): return _SoapySDR.SoapySDRDoubleList_front(self)\n def back(self): return _SoapySDR.SoapySDRDoubleList_back(self)\n def assign(self, *args): return _SoapySDR.SoapySDRDoubleList_assign(self, *args)\n def resize(self, *args): return _SoapySDR.SoapySDRDoubleList_resize(self, *args)\n def insert(self, *args): return _SoapySDR.SoapySDRDoubleList_insert(self, *args)\n def reserve(self, *args): return _SoapySDR.SoapySDRDoubleList_reserve(self, *args)\n def capacity(self): return _SoapySDR.SoapySDRDoubleList_capacity(self)\n __swig_destroy__ = _SoapySDR.delete_SoapySDRDoubleList\n __del__ = lambda self : None;\nSoapySDRDoubleList_swigregister = _SoapySDR.SoapySDRDoubleList_swigregister\nSoapySDRDoubleList_swigregister(SoapySDRDoubleList)\n\nclass StreamResult(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, StreamResult, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda self, name: _swig_getattr(self, StreamResult, name)\n __repr__ = _swig_repr\n def __init__(self): \n this = _SoapySDR.new_StreamResult()\n try: self.this.append(this)\n except: self.this = this\n __swig_setmethods__[\"ret\"] = _SoapySDR.StreamResult_ret_set\n __swig_getmethods__[\"ret\"] = _SoapySDR.StreamResult_ret_get\n if _newclass:ret = _swig_property(_SoapySDR.StreamResult_ret_get, 
_SoapySDR.StreamResult_ret_set)\n __swig_setmethods__[\"flags\"] = _SoapySDR.StreamResult_flags_set\n __swig_getmethods__[\"flags\"] = _SoapySDR.StreamResult_flags_get\n if _newclass:flags = _swig_property(_SoapySDR.StreamResult_flags_get, _SoapySDR.StreamResult_flags_set)\n __swig_setmethods__[\"timeNs\"] = _SoapySDR.StreamResult_timeNs_set\n __swig_getmethods__[\"timeNs\"] = _SoapySDR.StreamResult_timeNs_get\n if _newclass:timeNs = _swig_property(_SoapySDR.StreamResult_timeNs_get, _SoapySDR.StreamResult_timeNs_set)\n __swig_setmethods__[\"chanMask\"] = _SoapySDR.StreamResult_chanMask_set\n __swig_getmethods__[\"chanMask\"] = _SoapySDR.StreamResult_chanMask_get\n if _newclass:chanMask = _swig_property(_SoapySDR.StreamResult_chanMask_get, _SoapySDR.StreamResult_chanMask_set)\n def __str__(self):\n return \"ret=%s, flags=%s, timeNs=%s\"%(self.ret, self.flags, self.timeNs)\n\n __swig_destroy__ = _SoapySDR.delete_StreamResult\n __del__ = lambda self : None;\nStreamResult_swigregister = _SoapySDR.StreamResult_swigregister\nStreamResult_swigregister(StreamResult)\n\nSOAPY_SDR_TX = _SoapySDR.SOAPY_SDR_TX\nSOAPY_SDR_RX = _SoapySDR.SOAPY_SDR_RX\nSOAPY_SDR_END_BURST = _SoapySDR.SOAPY_SDR_END_BURST\nSOAPY_SDR_HAS_TIME = _SoapySDR.SOAPY_SDR_HAS_TIME\nSOAPY_SDR_END_ABRUPT = _SoapySDR.SOAPY_SDR_END_ABRUPT\nSOAPY_SDR_ONE_PACKET = _SoapySDR.SOAPY_SDR_ONE_PACKET\nSOAPY_SDR_MORE_FRAGMENTS = _SoapySDR.SOAPY_SDR_MORE_FRAGMENTS\nSOAPY_SDR_WAIT_TRIGGER = _SoapySDR.SOAPY_SDR_WAIT_TRIGGER\n\ndef SoapySDR_errToStr(*args):\n return _SoapySDR.SoapySDR_errToStr(*args)\nSoapySDR_errToStr = _SoapySDR.SoapySDR_errToStr\nSOAPY_SDR_TIMEOUT = _SoapySDR.SOAPY_SDR_TIMEOUT\nSOAPY_SDR_STREAM_ERROR = _SoapySDR.SOAPY_SDR_STREAM_ERROR\nSOAPY_SDR_CORRUPTION = _SoapySDR.SOAPY_SDR_CORRUPTION\nSOAPY_SDR_OVERFLOW = _SoapySDR.SOAPY_SDR_OVERFLOW\nSOAPY_SDR_NOT_SUPPORTED = _SoapySDR.SOAPY_SDR_NOT_SUPPORTED\nSOAPY_SDR_TIME_ERROR = _SoapySDR.SOAPY_SDR_TIME_ERROR\nSOAPY_SDR_UNDERFLOW = 
_SoapySDR.SOAPY_SDR_UNDERFLOW\nSOAPY_SDR_API_VERSION = _SoapySDR.SOAPY_SDR_API_VERSION\nSOAPY_SDR_ABI_VERSION = _SoapySDR.SOAPY_SDR_ABI_VERSION\n\ndef SoapySDR_getAPIVersion():\n return _SoapySDR.SoapySDR_getAPIVersion()\nSoapySDR_getAPIVersion = _SoapySDR.SoapySDR_getAPIVersion\n\ndef SoapySDR_getABIVersion():\n return _SoapySDR.SoapySDR_getABIVersion()\nSoapySDR_getABIVersion = _SoapySDR.SoapySDR_getABIVersion\n\ndef SoapySDR_getLibVersion():\n return _SoapySDR.SoapySDR_getLibVersion()\nSoapySDR_getLibVersion = _SoapySDR.SoapySDR_getLibVersion\nSOAPY_SDR_CF64 = _SoapySDR.SOAPY_SDR_CF64\nSOAPY_SDR_CF32 = _SoapySDR.SOAPY_SDR_CF32\nSOAPY_SDR_CS32 = _SoapySDR.SOAPY_SDR_CS32\nSOAPY_SDR_CU32 = _SoapySDR.SOAPY_SDR_CU32\nSOAPY_SDR_CS16 = _SoapySDR.SOAPY_SDR_CS16\nSOAPY_SDR_CU16 = _SoapySDR.SOAPY_SDR_CU16\nSOAPY_SDR_CS12 = _SoapySDR.SOAPY_SDR_CS12\nSOAPY_SDR_CU12 = _SoapySDR.SOAPY_SDR_CU12\nSOAPY_SDR_CS8 = _SoapySDR.SOAPY_SDR_CS8\nSOAPY_SDR_CU8 = _SoapySDR.SOAPY_SDR_CU8\nSOAPY_SDR_CS4 = _SoapySDR.SOAPY_SDR_CS4\nSOAPY_SDR_CU4 = _SoapySDR.SOAPY_SDR_CU4\nSOAPY_SDR_F64 = _SoapySDR.SOAPY_SDR_F64\nSOAPY_SDR_F32 = _SoapySDR.SOAPY_SDR_F32\nSOAPY_SDR_S32 = _SoapySDR.SOAPY_SDR_S32\nSOAPY_SDR_U32 = _SoapySDR.SOAPY_SDR_U32\nSOAPY_SDR_S16 = _SoapySDR.SOAPY_SDR_S16\nSOAPY_SDR_U16 = _SoapySDR.SOAPY_SDR_U16\nSOAPY_SDR_S8 = _SoapySDR.SOAPY_SDR_S8\nSOAPY_SDR_U8 = _SoapySDR.SOAPY_SDR_U8\n\ndef SoapySDR_formatToSize(*args):\n return _SoapySDR.SoapySDR_formatToSize(*args)\nSoapySDR_formatToSize = _SoapySDR.SoapySDR_formatToSize\nSOAPY_SDR_FATAL = _SoapySDR.SOAPY_SDR_FATAL\nSOAPY_SDR_CRITICAL = _SoapySDR.SOAPY_SDR_CRITICAL\nSOAPY_SDR_ERROR = _SoapySDR.SOAPY_SDR_ERROR\nSOAPY_SDR_WARNING = _SoapySDR.SOAPY_SDR_WARNING\nSOAPY_SDR_NOTICE = _SoapySDR.SOAPY_SDR_NOTICE\nSOAPY_SDR_INFO = _SoapySDR.SOAPY_SDR_INFO\nSOAPY_SDR_DEBUG = _SoapySDR.SOAPY_SDR_DEBUG\nSOAPY_SDR_TRACE = _SoapySDR.SOAPY_SDR_TRACE\nSOAPY_SDR_SSI = _SoapySDR.SOAPY_SDR_SSI\n\ndef SoapySDR_log(*args):\n return 
_SoapySDR.SoapySDR_log(*args)\nSoapySDR_log = _SoapySDR.SoapySDR_log\n\ndef SoapySDR_setLogLevel(*args):\n return _SoapySDR.SoapySDR_setLogLevel(*args)\nSoapySDR_setLogLevel = _SoapySDR.SoapySDR_setLogLevel\n\ndef errToStr(*args):\n return _SoapySDR.errToStr(*args)\nerrToStr = _SoapySDR.errToStr\n\ndef getAPIVersion():\n return _SoapySDR.getAPIVersion()\ngetAPIVersion = _SoapySDR.getAPIVersion\n\ndef getABIVersion():\n return _SoapySDR.getABIVersion()\ngetABIVersion = _SoapySDR.getABIVersion\n\ndef getLibVersion():\n return _SoapySDR.getLibVersion()\ngetLibVersion = _SoapySDR.getLibVersion\n\ndef getRootPath():\n return _SoapySDR.getRootPath()\ngetRootPath = _SoapySDR.getRootPath\n\ndef listSearchPaths():\n return _SoapySDR.listSearchPaths()\nlistSearchPaths = _SoapySDR.listSearchPaths\n\ndef listModules(*args):\n return _SoapySDR.listModules(*args)\nlistModules = _SoapySDR.listModules\n\ndef loadModule(*args):\n return _SoapySDR.loadModule(*args)\nloadModule = _SoapySDR.loadModule\n\ndef getLoaderResult(*args):\n return _SoapySDR.getLoaderResult(*args)\ngetLoaderResult = _SoapySDR.getLoaderResult\n\ndef unloadModule(*args):\n return _SoapySDR.unloadModule(*args)\nunloadModule = _SoapySDR.unloadModule\n\ndef loadModules():\n return _SoapySDR.loadModules()\nloadModules = _SoapySDR.loadModules\n\ndef formatToSize(*args):\n return _SoapySDR.formatToSize(*args)\nformatToSize = _SoapySDR.formatToSize\n\ndef ticksToTimeNs(*args):\n return _SoapySDR.ticksToTimeNs(*args)\nticksToTimeNs = _SoapySDR.ticksToTimeNs\n\ndef timeNsToTicks(*args):\n return _SoapySDR.timeNsToTicks(*args)\ntimeNsToTicks = _SoapySDR.timeNsToTicks\n\ndef log(*args):\n return _SoapySDR.log(*args)\nlog = _SoapySDR.log\n\ndef setLogLevel(*args):\n return _SoapySDR.setLogLevel(*args)\nsetLogLevel = _SoapySDR.setLogLevel\nclass Device(_object):\n __swig_setmethods__ = {}\n __setattr__ = lambda self, name, value: _swig_setattr(self, Device, name, value)\n __swig_getmethods__ = {}\n __getattr__ = lambda 
self, name: _swig_getattr(self, Device, name)\n def __init__(self, *args, **kwargs): raise AttributeError(\"No constructor defined\")\n __repr__ = _swig_repr\n __swig_destroy__ = _SoapySDR.delete_Device\n __del__ = lambda self : None;\n __swig_getmethods__[\"enumerate\"] = lambda x: _SoapySDR.Device_enumerate\n if _newclass:enumerate = staticmethod(_SoapySDR.Device_enumerate)\n __swig_getmethods__[\"make\"] = lambda x: _SoapySDR.Device_make\n if _newclass:make = staticmethod(_SoapySDR.Device_make)\n __swig_getmethods__[\"unmake\"] = lambda x: _SoapySDR.Device_unmake\n if _newclass:unmake = staticmethod(_SoapySDR.Device_unmake)\n def getDriverKey(self): return _SoapySDR.Device_getDriverKey(self)\n def getHardwareKey(self): return _SoapySDR.Device_getHardwareKey(self)\n def getHardwareInfo(self): return _SoapySDR.Device_getHardwareInfo(self)\n def setFrontendMapping(self, *args): return _SoapySDR.Device_setFrontendMapping(self, *args)\n def getFrontendMapping(self, *args): return _SoapySDR.Device_getFrontendMapping(self, *args)\n def getNumChannels(self, *args): return _SoapySDR.Device_getNumChannels(self, *args)\n def getChannelInfo(self, *args): return _SoapySDR.Device_getChannelInfo(self, *args)\n def getFullDuplex(self, *args): return _SoapySDR.Device_getFullDuplex(self, *args)\n def getStreamFormats(self, *args): return _SoapySDR.Device_getStreamFormats(self, *args)\n def getNativeStreamFormat(self, *args): return _SoapySDR.Device_getNativeStreamFormat(self, *args)\n def getStreamArgsInfo(self, *args): return _SoapySDR.Device_getStreamArgsInfo(self, *args)\n def setupStream(self, *args): return _SoapySDR.Device_setupStream(self, *args)\n def closeStream(self, *args): return _SoapySDR.Device_closeStream(self, *args)\n def getStreamMTU(self, *args): return _SoapySDR.Device_getStreamMTU(self, *args)\n def activateStream(self, *args): return _SoapySDR.Device_activateStream(self, *args)\n def deactivateStream(self, *args): return 
_SoapySDR.Device_deactivateStream(self, *args)\n def readStream(self, *args): return _SoapySDR.Device_readStream(self, *args)\n def writeStream(self, *args): return _SoapySDR.Device_writeStream(self, *args)\n def readStreamStatus(self, *args): return _SoapySDR.Device_readStreamStatus(self, *args)\n def getNumDirectAccessBuffers(self, *args): return _SoapySDR.Device_getNumDirectAccessBuffers(self, *args)\n def getDirectAccessBufferAddrs(self, *args): return _SoapySDR.Device_getDirectAccessBufferAddrs(self, *args)\n def acquireReadBuffer(self, *args): return _SoapySDR.Device_acquireReadBuffer(self, *args)\n def releaseReadBuffer(self, *args): return _SoapySDR.Device_releaseReadBuffer(self, *args)\n def acquireWriteBuffer(self, *args): return _SoapySDR.Device_acquireWriteBuffer(self, *args)\n def releaseWriteBuffer(self, *args): return _SoapySDR.Device_releaseWriteBuffer(self, *args)\n def listAntennas(self, *args): return _SoapySDR.Device_listAntennas(self, *args)\n def setAntenna(self, *args): return _SoapySDR.Device_setAntenna(self, *args)\n def getAntenna(self, *args): return _SoapySDR.Device_getAntenna(self, *args)\n def hasDCOffsetMode(self, *args): return _SoapySDR.Device_hasDCOffsetMode(self, *args)\n def setDCOffsetMode(self, *args): return _SoapySDR.Device_setDCOffsetMode(self, *args)\n def getDCOffsetMode(self, *args): return _SoapySDR.Device_getDCOffsetMode(self, *args)\n def hasDCOffset(self, *args): return _SoapySDR.Device_hasDCOffset(self, *args)\n def setDCOffset(self, *args): return _SoapySDR.Device_setDCOffset(self, *args)\n def getDCOffset(self, *args): return _SoapySDR.Device_getDCOffset(self, *args)\n def hasIQBalance(self, *args): return _SoapySDR.Device_hasIQBalance(self, *args)\n def setIQBalance(self, *args): return _SoapySDR.Device_setIQBalance(self, *args)\n def getIQBalance(self, *args): return _SoapySDR.Device_getIQBalance(self, *args)\n def hasFrequencyCorrection(self, *args): return _SoapySDR.Device_hasFrequencyCorrection(self, *args)\n 
def setFrequencyCorrection(self, *args): return _SoapySDR.Device_setFrequencyCorrection(self, *args)\n def getFrequencyCorrection(self, *args): return _SoapySDR.Device_getFrequencyCorrection(self, *args)\n def listGains(self, *args): return _SoapySDR.Device_listGains(self, *args)\n def hasGainMode(self, *args): return _SoapySDR.Device_hasGainMode(self, *args)\n def setGainMode(self, *args): return _SoapySDR.Device_setGainMode(self, *args)\n def getGainMode(self, *args): return _SoapySDR.Device_getGainMode(self, *args)\n def setGain(self, *args): return _SoapySDR.Device_setGain(self, *args)\n def getGain(self, *args): return _SoapySDR.Device_getGain(self, *args)\n def getGainRange(self, *args): return _SoapySDR.Device_getGainRange(self, *args)\n def setFrequency(self, *args): return _SoapySDR.Device_setFrequency(self, *args)\n def getFrequency(self, *args): return _SoapySDR.Device_getFrequency(self, *args)\n def listFrequencies(self, *args): return _SoapySDR.Device_listFrequencies(self, *args)\n def getFrequencyRange(self, *args): return _SoapySDR.Device_getFrequencyRange(self, *args)\n def getFrequencyArgsInfo(self, *args): return _SoapySDR.Device_getFrequencyArgsInfo(self, *args)\n def setSampleRate(self, *args): return _SoapySDR.Device_setSampleRate(self, *args)\n def getSampleRate(self, *args): return _SoapySDR.Device_getSampleRate(self, *args)\n def listSampleRates(self, *args): return _SoapySDR.Device_listSampleRates(self, *args)\n def getSampleRateRange(self, *args): return _SoapySDR.Device_getSampleRateRange(self, *args)\n def setBandwidth(self, *args): return _SoapySDR.Device_setBandwidth(self, *args)\n def getBandwidth(self, *args): return _SoapySDR.Device_getBandwidth(self, *args)\n def listBandwidths(self, *args): return _SoapySDR.Device_listBandwidths(self, *args)\n def getBandwidthRange(self, *args): return _SoapySDR.Device_getBandwidthRange(self, *args)\n def setMasterClockRate(self, *args): return _SoapySDR.Device_setMasterClockRate(self, *args)\n 
def getMasterClockRate(self): return _SoapySDR.Device_getMasterClockRate(self)\n def getMasterClockRates(self): return _SoapySDR.Device_getMasterClockRates(self)\n def listClockSources(self): return _SoapySDR.Device_listClockSources(self)\n def setClockSource(self, *args): return _SoapySDR.Device_setClockSource(self, *args)\n def getClockSource(self): return _SoapySDR.Device_getClockSource(self)\n def listTimeSources(self): return _SoapySDR.Device_listTimeSources(self)\n def setTimeSource(self, *args): return _SoapySDR.Device_setTimeSource(self, *args)\n def getTimeSource(self): return _SoapySDR.Device_getTimeSource(self)\n def hasHardwareTime(self, what=\"\"): return _SoapySDR.Device_hasHardwareTime(self, what)\n def getHardwareTime(self, what=\"\"): return _SoapySDR.Device_getHardwareTime(self, what)\n def setHardwareTime(self, *args): return _SoapySDR.Device_setHardwareTime(self, *args)\n def setCommandTime(self, *args): return _SoapySDR.Device_setCommandTime(self, *args)\n def listSensors(self, *args): return _SoapySDR.Device_listSensors(self, *args)\n def getSensorInfo(self, *args): return _SoapySDR.Device_getSensorInfo(self, *args)\n def readSensor(self, *args): return _SoapySDR.Device_readSensor(self, *args)\n def listRegisterInterfaces(self): return _SoapySDR.Device_listRegisterInterfaces(self)\n def writeRegister(self, *args): return _SoapySDR.Device_writeRegister(self, *args)\n def readRegister(self, *args): return _SoapySDR.Device_readRegister(self, *args)\n def writeRegisters(self, *args): return _SoapySDR.Device_writeRegisters(self, *args)\n def readRegisters(self, *args): return _SoapySDR.Device_readRegisters(self, *args)\n def getSettingInfo(self, *args): return _SoapySDR.Device_getSettingInfo(self, *args)\n def writeSetting(self, *args): return _SoapySDR.Device_writeSetting(self, *args)\n def readSetting(self, *args): return _SoapySDR.Device_readSetting(self, *args)\n def listGPIOBanks(self): return _SoapySDR.Device_listGPIOBanks(self)\n def 
writeGPIO(self, *args): return _SoapySDR.Device_writeGPIO(self, *args)\n def readGPIO(self, *args): return _SoapySDR.Device_readGPIO(self, *args)\n def writeGPIODir(self, *args): return _SoapySDR.Device_writeGPIODir(self, *args)\n def readGPIODir(self, *args): return _SoapySDR.Device_readGPIODir(self, *args)\n def writeI2C(self, *args): return _SoapySDR.Device_writeI2C(self, *args)\n def readI2C(self, *args): return _SoapySDR.Device_readI2C(self, *args)\n def transactSPI(self, *args): return _SoapySDR.Device_transactSPI(self, *args)\n def listUARTs(self): return _SoapySDR.Device_listUARTs(self)\n def writeUART(self, *args): return _SoapySDR.Device_writeUART(self, *args)\n def readUART(self, *args): return _SoapySDR.Device_readUART(self, *args)\n def readStream__(self, *args): return _SoapySDR.Device_readStream__(self, *args)\n def writeStream__(self, *args): return _SoapySDR.Device_writeStream__(self, *args)\n def readStreamStatus__(self, *args): return _SoapySDR.Device_readStreamStatus__(self, *args)\n #call unmake from custom deleter\n def __del__(self):\n Device.unmake(self)\n\n def __str__(self):\n return \"%s:%s\"%(self.getDriverKey(), self.getHardwareKey())\n\n def readStream(self, stream, buffs, numElems, flags = 0, timeoutUs = 100000):\n ptrs = [extractBuffPointer(b) for b in buffs]\n return self.readStream__(stream, ptrs, numElems, flags, timeoutUs)\n\n def writeStream(self, stream, buffs, numElems, flags = 0, timeNs = 0, timeoutUs = 100000):\n ptrs = [extractBuffPointer(b) for b in buffs]\n return self.writeStream__(stream, ptrs, numElems, flags, timeNs, timeoutUs)\n\n def readStreamStatus(self, stream, timeoutUs = 100000):\n return self.readStreamStatus__(stream, timeoutUs)\n\nDevice_swigregister = _SoapySDR.Device_swigregister\nDevice_swigregister(Device)\n\ndef Device_enumerate(*args):\n return _SoapySDR.Device_enumerate(*args)\nDevice_enumerate = _SoapySDR.Device_enumerate\n\ndef Device_make(*args):\n return _SoapySDR.Device_make(*args)\nDevice_make = 
_SoapySDR.Device_make\n\ndef Device_unmake(*args):\n return _SoapySDR.Device_unmake(*args)\nDevice_unmake = _SoapySDR.Device_unmake\n\n__all__ = list()\nfor key in sorted(globals().keys()):\n if key.startswith('SOAPY_SDR_'):\n __all__.append(key)\n\n_Device = Device\nclass Device(Device):\n def __new__(cls, *args, **kwargs):\n return cls.make(*args, **kwargs)\n\ndef extractBuffPointer(buff):\n if hasattr(buff, '__array_interface__'): return buff.__array_interface__['data'][0]\n if hasattr(buff, '__long__'): return long(buff)\n if hasattr(buff, '__int__'): return int(buff)\n raise Exception(\"Unrecognized data format: \" + str(type(buff)))\n\n# This file is compatible with both classic and new-style classes.\n\n\n",
"step-ids": [
177,
316,
400,
419,
427
]
}
|
[
177,
316,
400,
419,
427
] |
#!/usr/bin/python
import argparse
import string
import numpy
def gen_ft_parser():
    """Build the CLI argument parser for the feature-table generator.

    Returns:
        argparse.ArgumentParser: parser with two positional arguments,
        ``alphabet_file`` (file of characters to include) and
        ``save_file`` (output path for the saved table).
    """
    ft_parser = argparse.ArgumentParser(
        description='Generate a Character-Feature Translation Table')
    # Fix: help text typo "contianing" -> "containing".
    ft_parser.add_argument('alphabet_file', metavar='alphabet_file',
        type=str, help='A file containing all the characters that will '
        'appear in the translation table.')
    ft_parser.add_argument('save_file', metavar='save_path',
        type=str, help='The feature table filename.')
    return ft_parser
def construct_alphabet(alpha_string):
    """Return a sorted numpy array of the printable characters present in *alpha_string*."""
    present = set(alpha_string)
    kept = [ch for ch in string.printable if ch in present]
    return numpy.array(sorted(kept))
def load_alphabet(alphabet_file):
    """Read up to 100000 characters from *alphabet_file* (newlines become
    spaces) and build the alphabet array from them."""
    with open(alphabet_file) as handle:
        text = handle.read(100000).replace('\n', ' ')
    return construct_alphabet(text)
def gen_row(c, key):
    """Build the feature row for character *c*: a one-hot flag at the index of
    its lowercase form in *key*, plus a trailing uppercase flag."""
    width = len(key) + 1
    features = [False for _ in range(width)]
    features[key[c.lower()]] = True
    features[-1] = c.isupper()
    return features
def build_table(alphabet):
    """Build the character-by-feature translation matrix for *alphabet*.

    Each row one-hot encodes the character's lowercase identity and carries a
    final uppercase-flag column (the row-building helper is inlined here).
    """
    lowered = sorted(set(''.join(alphabet).lower()))
    key = {ch: idx for idx, ch in enumerate(lowered)}
    table = numpy.zeros((len(alphabet), len(key) + 1))
    for row_idx, ch in enumerate(alphabet):
        table[row_idx, key[ch.lower()]] = 1
        table[row_idx, -1] = ch.isupper()
    return table
def main(args):
    """Generate the feature table from the alphabet file and save it to disk."""
    alphabet = load_alphabet(args.alphabet_file)
    numpy.save(args.save_file, build_table(alphabet))
# Script entry point: parse command-line arguments and run the generator.
if __name__ == "__main__":
    main(gen_ft_parser().parse_args())
|
normal
|
{
"blob_id": "f4d4be174bed2704c0ad12eea2f0cd64eaaa0aaa",
"index": 1973,
"step-1": "<mask token>\n\n\ndef gen_ft_parser():\n ft_parser = argparse.ArgumentParser(description=\n 'Generate a Character-Feature Translation Table')\n ft_parser.add_argument('alphabet_file', metavar='alphabet_file', type=\n str, help=\n 'A file contianing all the characters that will appear in the translation table.'\n )\n ft_parser.add_argument('save_file', metavar='save_path', type=str, help\n ='The feature table filename.')\n return ft_parser\n\n\ndef construct_alphabet(alpha_string):\n symbols = set(alpha_string)\n alphabet = ''.join(sorted(c for c in string.printable if c in symbols))\n return numpy.array(list(alphabet))\n\n\ndef load_alphabet(alphabet_file):\n with open(alphabet_file) as alphabet:\n alphabet = alphabet.read(100000).replace('\\n', ' ')\n return construct_alphabet(alphabet)\n\n\ndef gen_row(c, key):\n row = [False] * (len(key) + 1)\n row[key[c.lower()]] = True\n row[-1] = c.isupper()\n return row\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef gen_ft_parser():\n ft_parser = argparse.ArgumentParser(description=\n 'Generate a Character-Feature Translation Table')\n ft_parser.add_argument('alphabet_file', metavar='alphabet_file', type=\n str, help=\n 'A file contianing all the characters that will appear in the translation table.'\n )\n ft_parser.add_argument('save_file', metavar='save_path', type=str, help\n ='The feature table filename.')\n return ft_parser\n\n\ndef construct_alphabet(alpha_string):\n symbols = set(alpha_string)\n alphabet = ''.join(sorted(c for c in string.printable if c in symbols))\n return numpy.array(list(alphabet))\n\n\ndef load_alphabet(alphabet_file):\n with open(alphabet_file) as alphabet:\n alphabet = alphabet.read(100000).replace('\\n', ' ')\n return construct_alphabet(alphabet)\n\n\ndef gen_row(c, key):\n row = [False] * (len(key) + 1)\n row[key[c.lower()]] = True\n row[-1] = c.isupper()\n return row\n\n\ndef build_table(alphabet):\n code = ''.join(sorted(set(''.join(alphabet).lower())))\n key = {c: i for i, c in enumerate(code)}\n table = numpy.zeros((len(alphabet), len(key) + 1))\n for i, c in enumerate(alphabet):\n table[i] = gen_row(c, key)\n return table\n\n\ndef main(args):\n table = build_table(load_alphabet(args.alphabet_file))\n numpy.save(args.save_file, table)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef gen_ft_parser():\n ft_parser = argparse.ArgumentParser(description=\n 'Generate a Character-Feature Translation Table')\n ft_parser.add_argument('alphabet_file', metavar='alphabet_file', type=\n str, help=\n 'A file contianing all the characters that will appear in the translation table.'\n )\n ft_parser.add_argument('save_file', metavar='save_path', type=str, help\n ='The feature table filename.')\n return ft_parser\n\n\ndef construct_alphabet(alpha_string):\n symbols = set(alpha_string)\n alphabet = ''.join(sorted(c for c in string.printable if c in symbols))\n return numpy.array(list(alphabet))\n\n\ndef load_alphabet(alphabet_file):\n with open(alphabet_file) as alphabet:\n alphabet = alphabet.read(100000).replace('\\n', ' ')\n return construct_alphabet(alphabet)\n\n\ndef gen_row(c, key):\n row = [False] * (len(key) + 1)\n row[key[c.lower()]] = True\n row[-1] = c.isupper()\n return row\n\n\ndef build_table(alphabet):\n code = ''.join(sorted(set(''.join(alphabet).lower())))\n key = {c: i for i, c in enumerate(code)}\n table = numpy.zeros((len(alphabet), len(key) + 1))\n for i, c in enumerate(alphabet):\n table[i] = gen_row(c, key)\n return table\n\n\ndef main(args):\n table = build_table(load_alphabet(args.alphabet_file))\n numpy.save(args.save_file, table)\n\n\nif __name__ == '__main__':\n main(gen_ft_parser().parse_args())\n",
"step-4": "import argparse\nimport string\nimport numpy\n\n\ndef gen_ft_parser():\n ft_parser = argparse.ArgumentParser(description=\n 'Generate a Character-Feature Translation Table')\n ft_parser.add_argument('alphabet_file', metavar='alphabet_file', type=\n str, help=\n 'A file contianing all the characters that will appear in the translation table.'\n )\n ft_parser.add_argument('save_file', metavar='save_path', type=str, help\n ='The feature table filename.')\n return ft_parser\n\n\ndef construct_alphabet(alpha_string):\n symbols = set(alpha_string)\n alphabet = ''.join(sorted(c for c in string.printable if c in symbols))\n return numpy.array(list(alphabet))\n\n\ndef load_alphabet(alphabet_file):\n with open(alphabet_file) as alphabet:\n alphabet = alphabet.read(100000).replace('\\n', ' ')\n return construct_alphabet(alphabet)\n\n\ndef gen_row(c, key):\n row = [False] * (len(key) + 1)\n row[key[c.lower()]] = True\n row[-1] = c.isupper()\n return row\n\n\ndef build_table(alphabet):\n code = ''.join(sorted(set(''.join(alphabet).lower())))\n key = {c: i for i, c in enumerate(code)}\n table = numpy.zeros((len(alphabet), len(key) + 1))\n for i, c in enumerate(alphabet):\n table[i] = gen_row(c, key)\n return table\n\n\ndef main(args):\n table = build_table(load_alphabet(args.alphabet_file))\n numpy.save(args.save_file, table)\n\n\nif __name__ == '__main__':\n main(gen_ft_parser().parse_args())\n",
"step-5": "#!/usr/bin/python\n\nimport argparse\nimport string\nimport numpy\n\n\ndef gen_ft_parser():\n ft_parser = argparse.ArgumentParser(\n description='Generate a Character-Feature Translation Table')\n ft_parser.add_argument('alphabet_file', metavar='alphabet_file', \n type=str, help='A file contianing all the characters that will '\n 'appear in the translation table.')\n ft_parser.add_argument('save_file', metavar='save_path',\n type=str, help='The feature table filename.')\n return ft_parser\n\ndef construct_alphabet(alpha_string):\n symbols = set(alpha_string)\n alphabet = ''.join(sorted(c for c in string.printable if c in symbols))\n return numpy.array(list(alphabet))\n\ndef load_alphabet(alphabet_file):\n with open(alphabet_file) as alphabet:\n alphabet = alphabet.read(100000).replace('\\n', ' ')\n return construct_alphabet(alphabet)\n\ndef gen_row(c, key):\n row = [False] * (len(key) + 1)\n row[key[c.lower()]] = True\n row[-1] = c.isupper()\n return row\n\ndef build_table(alphabet):\n code = ''.join(sorted(set(''.join(alphabet).lower())))\n key = {c:i for i, c in enumerate(code)}\n table = numpy.zeros((len(alphabet), len(key) + 1))\n for i, c in enumerate(alphabet):\n table[i] = gen_row(c, key)\n return table\n\ndef main(args):\n table = build_table(load_alphabet(args.alphabet_file))\n numpy.save(args.save_file, table)\n\nif __name__ == \"__main__\":\n main(gen_ft_parser().parse_args())\n\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if a == b:
print(a)
else:
while True:
if i // a > 0 and i % a == 0 and i // b > 0 and i % b == 0:
print(i)
break
else:
i += 1
<|reserved_special_token_1|>
a = int(input())
b = int(input())
i = 1
if a == b:
print(a)
else:
while True:
if i // a > 0 and i % a == 0 and i // b > 0 and i % b == 0:
print(i)
break
else:
i += 1
<|reserved_special_token_1|>
# Read two integers from stdin and print their least common multiple by
# scanning candidates upward from 1 (same linear search as before).
a = int(input())
b = int(input())

candidate = 1
if a == b:
    print(a)
else:
    while not (candidate // a > 0 and candidate % a == 0
               and candidate // b > 0 and candidate % b == 0):
        candidate += 1
    print(candidate)
|
flexible
|
{
"blob_id": "af5ebdcd818fdf9c607240733b7b5dbb793cf55e",
"index": 7328,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif a == b:\n print(a)\nelse:\n while True:\n if i // a > 0 and i % a == 0 and i // b > 0 and i % b == 0:\n print(i)\n break\n else:\n i += 1\n",
"step-3": "a = int(input())\nb = int(input())\ni = 1\nif a == b:\n print(a)\nelse:\n while True:\n if i // a > 0 and i % a == 0 and i // b > 0 and i % b == 0:\n print(i)\n break\n else:\n i += 1\n",
"step-4": "# put your python code here\na = int(input())\nb = int(input())\n\n# and\ni = 1\nif a == b:\n print(a)\nelse:\n while True:\n if i // a > 0 and i % a == 0 and i // b > 0 and i % b == 0:\n print(i)\n break\n else:\n i += 1\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
class people:
    """One participant in a shared-expense split: tracks purchases, the debt
    balance versus the group's per-person share, and settlement payments."""

    def __init__(self, name):
        self.name = name                  # participant's display name
        self.purchase_descrip = []        # purchase descriptions only
        self.purchase_price_descrip = []  # combined "$price description" strings
        self.purchases = []               # raw purchase amounts (str or number)
        self.total_spent = 0              # sum of purchases; recomputed by set_total()
        self.debt = 0                     # spent minus fair share; set by set_debt()
        self.debt_temp = 0                # working copy of debt used during settlement
        self.pay = []                     # payment amounts
        self.pay_out = []                 # reserved for outgoing payments
        self.pay_who = []                 # counterparty names for payments

    def add_purchase(self, purchase):
        """Record a purchase amount (stored as given; converted later)."""
        self.purchases.append(purchase)

    def add_description(self, description):
        """Record a purchase description."""
        self.purchase_descrip.append(description)

    def get_purchase(self):
        """Return the list of recorded purchase amounts."""
        return self.purchases

    def get_description(self):
        """Return the list of purchase descriptions."""
        return self.purchase_descrip

    def set_total(self):
        """Recompute ``total_spent`` as the float sum of all purchases."""
        self.total_spent = 0
        for item in self.purchases:
            self.total_spent = self.total_spent + float(item)

    def get_total(self):
        """Return the most recently computed total spent."""
        return self.total_spent

    def get_name(self):
        """Return the participant's name."""
        return self.name

    def add_purchase_descrip(self, price, description):
        """Record a combined "$price description" entry."""
        self.purchase_price_descrip.append("$" + str(price) + " " + description)

    def get_purchase_descrip(self):
        """Return the combined price/description entries."""
        return self.purchase_price_descrip

    def set_debt(self, cost_per_person):
        """Set ``debt`` to amount spent minus the per-person share.

        Positive means this person is owed money; negative means they owe.
        """
        self.debt = float(self.total_spent) - cost_per_person

    def get_debt(self):
        """Return the current debt balance."""
        return self.debt

    def add_payment(self, payment):
        """Record a payment amount."""
        self.pay.append(payment)

    def get_pay(self):
        """Return the list of payment amounts."""
        return self.pay

    def add_pay_who(self, who_to_pay):
        """Record the counterparty for a payment."""
        self.pay_who.append(who_to_pay)

    def get_pay_who(self):
        """Return the list of payment counterparties."""
        return self.pay_who

    def set_debt_temp(self):
        """Reset the working debt balance to the current debt."""
        self.debt_temp = self.debt

    def get_temp_debt(self):
        """Return the working debt balance."""
        return self.debt_temp

    def update_temp_debt(self, payment):
        """Subtract *payment* from the working debt balance."""
        self.debt_temp = self.debt_temp - payment

    def pay_temp_debt(self, payment):
        """Apply *payment* against the working debt balance.

        Bug fix: the original computed ``self.debt_temp - payment`` and
        discarded the result, leaving the balance unchanged.
        """
        self.debt_temp = self.debt_temp - payment

    def round_payments(self):
        """Round every recorded payment to 2 decimal places in place."""
        for idx in range(len(self.pay)):
            self.pay[idx] = round(self.pay[idx], 2)

    def round_purchases(self):
        """Round every recorded purchase to 2 decimal places in place (as float)."""
        for idx in range(len(self.purchases)):
            self.purchases[idx] = round(float(self.purchases[idx]), 2)
|
normal
|
{
"blob_id": "bdda42665acfefccad45a2b49f5436a186140579",
"index": 8576,
"step-1": "class people:\n <mask token>\n\n def add_purchase(self, purchase):\n self.purchases.append(purchase)\n\n def add_description(self, description):\n self.purchase_descrip.append(description)\n <mask token>\n <mask token>\n\n def set_total(self):\n self.total_spent = 0\n for items in self.purchases:\n self.total_spent = self.total_spent + float(items)\n <mask token>\n <mask token>\n\n def add_purchase_descrip(self, price, description):\n self.purchase_price_descrip.append('$' + str(price) + ' ' +\n description)\n\n def get_purchase_descrip(self):\n return self.purchase_price_descrip\n\n def set_debt(self, cost_per_person):\n self.debt = float(self.total_spent) - cost_per_person\n\n def get_debt(self):\n return self.debt\n <mask token>\n <mask token>\n <mask token>\n\n def get_pay_who(self):\n return self.pay_who\n\n def set_debt_temp(self):\n self.debt_temp = self.debt\n\n def get_temp_debt(self):\n return self.debt_temp\n <mask token>\n\n def pay_temp_debt(self, payment):\n self.debt_temp - payment\n\n def round_payments(self):\n for x in range(0, len(self.pay)):\n self.pay[x] = round(self.pay[x], 2)\n <mask token>\n",
"step-2": "class people:\n <mask token>\n\n def add_purchase(self, purchase):\n self.purchases.append(purchase)\n\n def add_description(self, description):\n self.purchase_descrip.append(description)\n\n def get_purchase(self):\n return self.purchases\n <mask token>\n\n def set_total(self):\n self.total_spent = 0\n for items in self.purchases:\n self.total_spent = self.total_spent + float(items)\n\n def get_total(self):\n return self.total_spent\n <mask token>\n\n def add_purchase_descrip(self, price, description):\n self.purchase_price_descrip.append('$' + str(price) + ' ' +\n description)\n\n def get_purchase_descrip(self):\n return self.purchase_price_descrip\n\n def set_debt(self, cost_per_person):\n self.debt = float(self.total_spent) - cost_per_person\n\n def get_debt(self):\n return self.debt\n\n def add_payment(self, payment):\n self.pay.append(payment)\n <mask token>\n\n def add_pay_who(self, who_to_pay):\n self.pay_who.append(who_to_pay)\n\n def get_pay_who(self):\n return self.pay_who\n\n def set_debt_temp(self):\n self.debt_temp = self.debt\n\n def get_temp_debt(self):\n return self.debt_temp\n\n def update_temp_debt(self, payment):\n self.debt_temp = self.debt_temp + payment * -1\n\n def pay_temp_debt(self, payment):\n self.debt_temp - payment\n\n def round_payments(self):\n for x in range(0, len(self.pay)):\n self.pay[x] = round(self.pay[x], 2)\n <mask token>\n",
"step-3": "class people:\n\n def __init__(self, name):\n self.name = name\n self.purchase_descrip = []\n self.purchase_price_descrip = []\n self.purchases = []\n self.total_spent = 0\n self.debt = 0\n self.debt_temp = 0\n self.pay = []\n self.pay_out = []\n self.pay_who = []\n\n def add_purchase(self, purchase):\n self.purchases.append(purchase)\n\n def add_description(self, description):\n self.purchase_descrip.append(description)\n\n def get_purchase(self):\n return self.purchases\n <mask token>\n\n def set_total(self):\n self.total_spent = 0\n for items in self.purchases:\n self.total_spent = self.total_spent + float(items)\n\n def get_total(self):\n return self.total_spent\n <mask token>\n\n def add_purchase_descrip(self, price, description):\n self.purchase_price_descrip.append('$' + str(price) + ' ' +\n description)\n\n def get_purchase_descrip(self):\n return self.purchase_price_descrip\n\n def set_debt(self, cost_per_person):\n self.debt = float(self.total_spent) - cost_per_person\n\n def get_debt(self):\n return self.debt\n\n def add_payment(self, payment):\n self.pay.append(payment)\n <mask token>\n\n def add_pay_who(self, who_to_pay):\n self.pay_who.append(who_to_pay)\n\n def get_pay_who(self):\n return self.pay_who\n\n def set_debt_temp(self):\n self.debt_temp = self.debt\n\n def get_temp_debt(self):\n return self.debt_temp\n\n def update_temp_debt(self, payment):\n self.debt_temp = self.debt_temp + payment * -1\n\n def pay_temp_debt(self, payment):\n self.debt_temp - payment\n\n def round_payments(self):\n for x in range(0, len(self.pay)):\n self.pay[x] = round(self.pay[x], 2)\n\n def round_purchases(self):\n for x in range(0, len(self.purchases)):\n self.purchases[x] = round(float(self.purchases[x]), 2)\n",
"step-4": "class people:\n\n def __init__(self, name):\n self.name = name\n self.purchase_descrip = []\n self.purchase_price_descrip = []\n self.purchases = []\n self.total_spent = 0\n self.debt = 0\n self.debt_temp = 0\n self.pay = []\n self.pay_out = []\n self.pay_who = []\n\n def add_purchase(self, purchase):\n self.purchases.append(purchase)\n\n def add_description(self, description):\n self.purchase_descrip.append(description)\n\n def get_purchase(self):\n return self.purchases\n <mask token>\n\n def set_total(self):\n self.total_spent = 0\n for items in self.purchases:\n self.total_spent = self.total_spent + float(items)\n\n def get_total(self):\n return self.total_spent\n\n def get_name(self):\n return self.name\n\n def add_purchase_descrip(self, price, description):\n self.purchase_price_descrip.append('$' + str(price) + ' ' +\n description)\n\n def get_purchase_descrip(self):\n return self.purchase_price_descrip\n\n def set_debt(self, cost_per_person):\n self.debt = float(self.total_spent) - cost_per_person\n\n def get_debt(self):\n return self.debt\n\n def add_payment(self, payment):\n self.pay.append(payment)\n\n def get_pay(self):\n return self.pay\n\n def add_pay_who(self, who_to_pay):\n self.pay_who.append(who_to_pay)\n\n def get_pay_who(self):\n return self.pay_who\n\n def set_debt_temp(self):\n self.debt_temp = self.debt\n\n def get_temp_debt(self):\n return self.debt_temp\n\n def update_temp_debt(self, payment):\n self.debt_temp = self.debt_temp + payment * -1\n\n def pay_temp_debt(self, payment):\n self.debt_temp - payment\n\n def round_payments(self):\n for x in range(0, len(self.pay)):\n self.pay[x] = round(self.pay[x], 2)\n\n def round_purchases(self):\n for x in range(0, len(self.purchases)):\n self.purchases[x] = round(float(self.purchases[x]), 2)\n",
"step-5": "class people:\n\n def __init__(self, name):\n self.name = name\n self.purchase_descrip = []\n self.purchase_price_descrip = []\n self.purchases = []\n self.total_spent = 0\n self.debt = 0\n self.debt_temp = 0\n self.pay = []\n self.pay_out = []\n self.pay_who = []\n\n def add_purchase(self, purchase):\n self.purchases.append(purchase)\n\n def add_description(self, description):\n self.purchase_descrip.append(description)\n\n def get_purchase(self):\n return self.purchases\n\n def get_description(self):\n return self.purchase_descrip\n\n def set_total(self):\n self.total_spent = 0\n for items in self.purchases:\n self.total_spent = self.total_spent+float(items)\n\n def get_total(self):\n return self.total_spent\n\n def get_name(self):\n return self.name\n\n def add_purchase_descrip(self, price, description):\n self.purchase_price_descrip.append(\"$\"+str(price)+\" \"+description)\n\n def get_purchase_descrip(self):\n return self.purchase_price_descrip\n\n def set_debt(self, cost_per_person):\n self.debt = float(self.total_spent)-cost_per_person\n\n def get_debt(self):\n return self.debt\n\n def add_payment(self, payment):\n self.pay.append(payment)\n\n def get_pay(self):\n return self.pay\n\n def add_pay_who(self, who_to_pay):\n self.pay_who.append(who_to_pay)\n\n def get_pay_who(self):\n return self.pay_who\n\n def set_debt_temp(self):\n self.debt_temp = self.debt\n\n def get_temp_debt(self):\n return self.debt_temp\n\n def update_temp_debt(self, payment):\n self.debt_temp = self.debt_temp+payment*-1\n\n def pay_temp_debt(self, payment):\n self.debt_temp-payment\n\n def round_payments(self):\n for x in range(0, len(self.pay)):\n self.pay[x] = round(self.pay[x], 2)\n\n def round_purchases(self):\n for x in range(0, len(self.purchases)):\n self.purchases[x] = round(float(self.purchases[x]), 2)\n\n\n\n",
"step-ids": [
13,
18,
20,
22,
24
]
}
|
[
13,
18,
20,
22,
24
] |
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import re
import sys
import time
import os
# Create a "download" folder under the current working directory.
dirPath = "download"
try:
    if not (os.path.isdir(dirPath)):
        os.makedirs(os.path.join(dirPath))
except OSError as e:
    print("{0} Failed to create directory!!!!!".format(dirPath))
# Configure Chrome so downloads land in that folder without prompting.
# NOTE(review): variable name "donwload_loc" is a typo for "download_loc"
# (kept as-is to avoid behavior changes elsewhere).
donwload_loc = "{0}\\download".format(os.getcwd())
options = webdriver.ChromeOptions()
options.add_experimental_option("prefs", {
    "download.default_directory": donwload_loc,
    "download.prompt_for_download": False,
    "download.directory_upgrade": True,
    "safebrowsing_for_trusted_sources_enabled": False,
    "safebrowsing.enabled": False
})
# Path to the Chrome driver executable.
# NOTE(review): "chrome_options=" is deprecated in newer Selenium; modern
# versions use "options=" — confirm the pinned Selenium version.
chromedriver = '../chromedriver/chromedriver.exe'
driver = webdriver.Chrome(chromedriver, chrome_options=options)
# The very first URL of the crawl (kept for reference):
# url = 'http://www.k-heritage.tv/brd/board/909/L/CATEGORY/911/menu/901?brdType=R&thisPage=1&bbIdx=20474&searchField=&searchText='
# Intermediate URL to resume the crawl from:
url = 'http://www.k-heritage.tv/brd/board/909/L/CATEGORY/911/menu/901?brdType=R&thisPage=1&bbIdx=17438&searchField=&searchText='
# Follow the "next" link on each page until it points at '#n' (end of list).
while '#n' not in url:
    # Load the current page.
    driver.get(url)
    # Wait (up to 2s) until the .media_wrap element is present in the DOM.
    mediaWrap = WebDriverWait(driver, 2).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, ".media_wrap")))
    # Save the next page's URL before doing anything that may navigate away.
    next_url = driver.find_elements_by_css_selector('div.thumList>dl>dd>a')[0].get_attribute('href')
    try:
        # If .hwp attachments are listed, download each of them.
        aTagList = driver.find_elements_by_css_selector("dl.b_file dd li a")
        file_list = []
        for aTag in aTagList:
            if ".hwp" in aTag.text:
                file_list.append(aTag.get_attribute('href'))
        for file_url in file_list:
            driver.get(file_url)
            time.sleep(1.3)
        # No .hwp attachment: save a specific HTML element's markup as XML.
        if len(file_list) == 0:
            title = driver.find_elements_by_css_selector('p.sub_tit2 em.sub_mode')[0].text
            # Strip characters Windows forbids in file names.
            title = re.sub(r"[\\|\/|\:|\*|\?|\"|\<|\>|\|]",'',title)
            # NOTE(review): xml_file is never closed — consider a `with` block.
            xml_file = open('download\\{0}.xml'.format(title),mode="wt", encoding="utf-8")
            xml_file.write(driver.find_elements_by_css_selector('div.type_cont')[0].get_attribute('innerHTML'))
    except IndexError:
        # Occasionally the download URL fails (selector finds nothing);
        # reload the page and scrape the content instead.
        driver.get(url)
        mediaWrap = WebDriverWait(driver, 2).until(
            EC.presence_of_element_located((By.CSS_SELECTOR, ".media_wrap")))
        title = driver.find_elements_by_css_selector('p.sub_tit2 em.sub_mode')[0].text
        title = re.sub(r"[\\|\/|\:|\*|\?|\"|\<|\>|\|]",'',title)
        xml_file = open('download\\{0}.xml'.format(title),mode="wt", encoding="utf-8")
        # Determine which of the known page layouts this article uses and
        # save the first matching container's markup.
        info_agree = driver.find_elements_by_css_selector('div.info_agree')
        type_cont = driver.find_elements_by_css_selector('.media_wrap .type_cont')
        card_cont_info = driver.find_elements_by_css_selector('.media_wrap .card_cont_info')
        if len(info_agree) > 0:
            xml_file.write(info_agree[0].get_attribute('innerHTML'))
        elif len(type_cont) > 0:
            xml_file.write(type_cont[0].get_attribute('innerHTML'))
        elif len(card_cont_info) > 0:
            xml_file.write(card_cont_info[0].get_attribute('innerHTML'))
    except:
        # NOTE(review): bare except swallows everything (incl. KeyboardInterrupt);
        # it only logs so the crawl can continue — consider `except Exception`.
        print("Unexpected error:", sys.exc_info()[0])
        print('{0} error '.format(driver.current_url))
    finally:
        # Always advance to the next page, even after an error.
        url = next_url
driver.quit()
|
normal
|
{
"blob_id": "5f022b7f20b8aef1e3538a6b1e69dc302752cdc7",
"index": 7640,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntry:\n if not os.path.isdir(dirPath):\n os.makedirs(os.path.join(dirPath))\nexcept OSError as e:\n print('{0} Failed to create directory!!!!!'.format(dirPath))\n<mask token>\noptions.add_experimental_option('prefs', {'download.default_directory':\n donwload_loc, 'download.prompt_for_download': False,\n 'download.directory_upgrade': True,\n 'safebrowsing_for_trusted_sources_enabled': False,\n 'safebrowsing.enabled': False})\n<mask token>\nwhile '#n' not in url:\n driver.get(url)\n mediaWrap = WebDriverWait(driver, 2).until(EC.\n presence_of_element_located((By.CSS_SELECTOR, '.media_wrap')))\n next_url = driver.find_elements_by_css_selector('div.thumList>dl>dd>a')[0\n ].get_attribute('href')\n try:\n aTagList = driver.find_elements_by_css_selector('dl.b_file dd li a')\n file_list = []\n for aTag in aTagList:\n if '.hwp' in aTag.text:\n file_list.append(aTag.get_attribute('href'))\n for file_url in file_list:\n driver.get(file_url)\n time.sleep(1.3)\n if len(file_list) == 0:\n title = driver.find_elements_by_css_selector(\n 'p.sub_tit2 em.sub_mode')[0].text\n title = re.sub('[\\\\\\\\|\\\\/|\\\\:|\\\\*|\\\\?|\\\\\"|\\\\<|\\\\>|\\\\|]', '', title)\n xml_file = open('download\\\\{0}.xml'.format(title), mode='wt',\n encoding='utf-8')\n xml_file.write(driver.find_elements_by_css_selector(\n 'div.type_cont')[0].get_attribute('innerHTML'))\n except IndexError:\n driver.get(url)\n mediaWrap = WebDriverWait(driver, 2).until(EC.\n presence_of_element_located((By.CSS_SELECTOR, '.media_wrap')))\n title = driver.find_elements_by_css_selector('p.sub_tit2 em.sub_mode')[\n 0].text\n title = re.sub('[\\\\\\\\|\\\\/|\\\\:|\\\\*|\\\\?|\\\\\"|\\\\<|\\\\>|\\\\|]', '', title)\n xml_file = open('download\\\\{0}.xml'.format(title), mode='wt',\n encoding='utf-8')\n info_agree = driver.find_elements_by_css_selector('div.info_agree')\n type_cont = driver.find_elements_by_css_selector(\n '.media_wrap .type_cont')\n card_cont_info = 
driver.find_elements_by_css_selector(\n '.media_wrap .card_cont_info')\n if len(info_agree) > 0:\n xml_file.write(info_agree[0].get_attribute('innerHTML'))\n elif len(type_cont) > 0:\n xml_file.write(type_cont[0].get_attribute('innerHTML'))\n elif len(card_cont_info) > 0:\n xml_file.write(card_cont_info[0].get_attribute('innerHTML'))\n except:\n print('Unexpected error:', sys.exc_info()[0])\n print('{0} error '.format(driver.current_url))\n finally:\n url = next_url\ndriver.quit()\n",
"step-3": "<mask token>\ndirPath = 'download'\ntry:\n if not os.path.isdir(dirPath):\n os.makedirs(os.path.join(dirPath))\nexcept OSError as e:\n print('{0} Failed to create directory!!!!!'.format(dirPath))\ndonwload_loc = '{0}\\\\download'.format(os.getcwd())\noptions = webdriver.ChromeOptions()\noptions.add_experimental_option('prefs', {'download.default_directory':\n donwload_loc, 'download.prompt_for_download': False,\n 'download.directory_upgrade': True,\n 'safebrowsing_for_trusted_sources_enabled': False,\n 'safebrowsing.enabled': False})\nchromedriver = '../chromedriver/chromedriver.exe'\ndriver = webdriver.Chrome(chromedriver, chrome_options=options)\nurl = (\n 'http://www.k-heritage.tv/brd/board/909/L/CATEGORY/911/menu/901?brdType=R&thisPage=1&bbIdx=17438&searchField=&searchText='\n )\nwhile '#n' not in url:\n driver.get(url)\n mediaWrap = WebDriverWait(driver, 2).until(EC.\n presence_of_element_located((By.CSS_SELECTOR, '.media_wrap')))\n next_url = driver.find_elements_by_css_selector('div.thumList>dl>dd>a')[0\n ].get_attribute('href')\n try:\n aTagList = driver.find_elements_by_css_selector('dl.b_file dd li a')\n file_list = []\n for aTag in aTagList:\n if '.hwp' in aTag.text:\n file_list.append(aTag.get_attribute('href'))\n for file_url in file_list:\n driver.get(file_url)\n time.sleep(1.3)\n if len(file_list) == 0:\n title = driver.find_elements_by_css_selector(\n 'p.sub_tit2 em.sub_mode')[0].text\n title = re.sub('[\\\\\\\\|\\\\/|\\\\:|\\\\*|\\\\?|\\\\\"|\\\\<|\\\\>|\\\\|]', '', title)\n xml_file = open('download\\\\{0}.xml'.format(title), mode='wt',\n encoding='utf-8')\n xml_file.write(driver.find_elements_by_css_selector(\n 'div.type_cont')[0].get_attribute('innerHTML'))\n except IndexError:\n driver.get(url)\n mediaWrap = WebDriverWait(driver, 2).until(EC.\n presence_of_element_located((By.CSS_SELECTOR, '.media_wrap')))\n title = driver.find_elements_by_css_selector('p.sub_tit2 em.sub_mode')[\n 0].text\n title = 
re.sub('[\\\\\\\\|\\\\/|\\\\:|\\\\*|\\\\?|\\\\\"|\\\\<|\\\\>|\\\\|]', '', title)\n xml_file = open('download\\\\{0}.xml'.format(title), mode='wt',\n encoding='utf-8')\n info_agree = driver.find_elements_by_css_selector('div.info_agree')\n type_cont = driver.find_elements_by_css_selector(\n '.media_wrap .type_cont')\n card_cont_info = driver.find_elements_by_css_selector(\n '.media_wrap .card_cont_info')\n if len(info_agree) > 0:\n xml_file.write(info_agree[0].get_attribute('innerHTML'))\n elif len(type_cont) > 0:\n xml_file.write(type_cont[0].get_attribute('innerHTML'))\n elif len(card_cont_info) > 0:\n xml_file.write(card_cont_info[0].get_attribute('innerHTML'))\n except:\n print('Unexpected error:', sys.exc_info()[0])\n print('{0} error '.format(driver.current_url))\n finally:\n url = next_url\ndriver.quit()\n",
"step-4": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nimport re\nimport sys\nimport time\nimport os\ndirPath = 'download'\ntry:\n if not os.path.isdir(dirPath):\n os.makedirs(os.path.join(dirPath))\nexcept OSError as e:\n print('{0} Failed to create directory!!!!!'.format(dirPath))\ndonwload_loc = '{0}\\\\download'.format(os.getcwd())\noptions = webdriver.ChromeOptions()\noptions.add_experimental_option('prefs', {'download.default_directory':\n donwload_loc, 'download.prompt_for_download': False,\n 'download.directory_upgrade': True,\n 'safebrowsing_for_trusted_sources_enabled': False,\n 'safebrowsing.enabled': False})\nchromedriver = '../chromedriver/chromedriver.exe'\ndriver = webdriver.Chrome(chromedriver, chrome_options=options)\nurl = (\n 'http://www.k-heritage.tv/brd/board/909/L/CATEGORY/911/menu/901?brdType=R&thisPage=1&bbIdx=17438&searchField=&searchText='\n )\nwhile '#n' not in url:\n driver.get(url)\n mediaWrap = WebDriverWait(driver, 2).until(EC.\n presence_of_element_located((By.CSS_SELECTOR, '.media_wrap')))\n next_url = driver.find_elements_by_css_selector('div.thumList>dl>dd>a')[0\n ].get_attribute('href')\n try:\n aTagList = driver.find_elements_by_css_selector('dl.b_file dd li a')\n file_list = []\n for aTag in aTagList:\n if '.hwp' in aTag.text:\n file_list.append(aTag.get_attribute('href'))\n for file_url in file_list:\n driver.get(file_url)\n time.sleep(1.3)\n if len(file_list) == 0:\n title = driver.find_elements_by_css_selector(\n 'p.sub_tit2 em.sub_mode')[0].text\n title = re.sub('[\\\\\\\\|\\\\/|\\\\:|\\\\*|\\\\?|\\\\\"|\\\\<|\\\\>|\\\\|]', '', title)\n xml_file = open('download\\\\{0}.xml'.format(title), mode='wt',\n encoding='utf-8')\n xml_file.write(driver.find_elements_by_css_selector(\n 'div.type_cont')[0].get_attribute('innerHTML'))\n except IndexError:\n driver.get(url)\n 
mediaWrap = WebDriverWait(driver, 2).until(EC.\n presence_of_element_located((By.CSS_SELECTOR, '.media_wrap')))\n title = driver.find_elements_by_css_selector('p.sub_tit2 em.sub_mode')[\n 0].text\n title = re.sub('[\\\\\\\\|\\\\/|\\\\:|\\\\*|\\\\?|\\\\\"|\\\\<|\\\\>|\\\\|]', '', title)\n xml_file = open('download\\\\{0}.xml'.format(title), mode='wt',\n encoding='utf-8')\n info_agree = driver.find_elements_by_css_selector('div.info_agree')\n type_cont = driver.find_elements_by_css_selector(\n '.media_wrap .type_cont')\n card_cont_info = driver.find_elements_by_css_selector(\n '.media_wrap .card_cont_info')\n if len(info_agree) > 0:\n xml_file.write(info_agree[0].get_attribute('innerHTML'))\n elif len(type_cont) > 0:\n xml_file.write(type_cont[0].get_attribute('innerHTML'))\n elif len(card_cont_info) > 0:\n xml_file.write(card_cont_info[0].get_attribute('innerHTML'))\n except:\n print('Unexpected error:', sys.exc_info()[0])\n print('{0} error '.format(driver.current_url))\n finally:\n url = next_url\ndriver.quit()\n",
"step-5": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nimport re\nimport sys\nimport time\nimport os\n\n# directory 현재 경로에 download폴더 생성\ndirPath = \"download\"\ntry:\n\tif not (os.path.isdir(dirPath)):\n\t\tos.makedirs(os.path.join(dirPath))\nexcept OSError as e:\n\tprint(\"{0} Failed to create directory!!!!!\".format(dirPath))\n\n# chrome download folder option\ndonwload_loc = \"{0}\\\\download\".format(os.getcwd())\noptions = webdriver.ChromeOptions()\noptions.add_experimental_option(\"prefs\", {\n\t\"download.default_directory\": donwload_loc,\n\t\"download.prompt_for_download\": False,\n\t\"download.directory_upgrade\": True,\n\t\"safebrowsing_for_trusted_sources_enabled\": False,\n\t\"safebrowsing.enabled\": False\n})\n\n# chrome driver 경로\nchromedriver = '../chromedriver/chromedriver.exe'\ndriver = webdriver.Chrome(chromedriver, chrome_options=options)\n\n# 제일 맨처음 URL\n# url = 'http://www.k-heritage.tv/brd/board/909/L/CATEGORY/911/menu/901?brdType=R&thisPage=1&bbIdx=20474&searchField=&searchText='\n# 중간 URL\nurl = 'http://www.k-heritage.tv/brd/board/909/L/CATEGORY/911/menu/901?brdType=R&thisPage=1&bbIdx=17438&searchField=&searchText='\nwhile '#n' not in url:\n\t#url 호출\n\tdriver.get(url)\n\t# 해당 tag가 생성될 때 까지 기다림\n\tmediaWrap = WebDriverWait(driver, 2).until(\n\t\tEC.presence_of_element_located((By.CSS_SELECTOR, \".media_wrap\")))\n\t# 다음페이지 url 저장\n\tnext_url = driver.find_elements_by_css_selector('div.thumList>dl>dd>a')[0].get_attribute('href')\n\ttry:\n\t\t# hwp 파일이 있는경우 저장\n\t\taTagList = driver.find_elements_by_css_selector(\"dl.b_file dd li a\")\n\t\tfile_list = []\n\t\tfor aTag in aTagList:\n\t\t\tif \".hwp\" in aTag.text:\n\t\t\t\tfile_list.append(aTag.get_attribute('href'))\n\n\t\tfor file_url in file_list:\n\t\t\tdriver.get(file_url)\n\t\t\ttime.sleep(1.3)\n\n\t\t# hwp 파일이 없으면 html 특정 태그를 xml로 
저장\n\t\tif len(file_list) == 0:\n\t\t\ttitle = driver.find_elements_by_css_selector('p.sub_tit2 em.sub_mode')[0].text\n\t\t\t# 윈도우에서 파일명에 저장할 수 없는 문자 제거\n\t\t\ttitle = re.sub(r\"[\\\\|\\/|\\:|\\*|\\?|\\\"|\\<|\\>|\\|]\",'',title)\n\t\t\txml_file = open('download\\\\{0}.xml'.format(title),mode=\"wt\", encoding=\"utf-8\")\n\t\t\txml_file.write(driver.find_elements_by_css_selector('div.type_cont')[0].get_attribute('innerHTML'))\n\texcept IndexError:\n\t\t# 간혹 파일 다운로드 url이 오류가 발생하는 경우가 있음\n\t\tdriver.get(url)\n\t\tmediaWrap = WebDriverWait(driver, 2).until(\n\t\t\tEC.presence_of_element_located((By.CSS_SELECTOR, \".media_wrap\")))\n\t\ttitle = driver.find_elements_by_css_selector('p.sub_tit2 em.sub_mode')[0].text\n\t\ttitle = re.sub(r\"[\\\\|\\/|\\:|\\*|\\?|\\\"|\\<|\\>|\\|]\",'',title)\n\t\txml_file = open('download\\\\{0}.xml'.format(title),mode=\"wt\", encoding=\"utf-8\")\n\t\t# 유형 확인\n\t\tinfo_agree = driver.find_elements_by_css_selector('div.info_agree')\n\t\ttype_cont = driver.find_elements_by_css_selector('.media_wrap .type_cont')\n\t\tcard_cont_info = driver.find_elements_by_css_selector('.media_wrap .card_cont_info')\n\t\tif len(info_agree) > 0:\n\t\t\txml_file.write(info_agree[0].get_attribute('innerHTML'))\n\t\telif len(type_cont) > 0:\n\t\t\txml_file.write(type_cont[0].get_attribute('innerHTML'))\n\t\telif len(card_cont_info) > 0:\n\t\t\txml_file.write(card_cont_info[0].get_attribute('innerHTML'))\n\texcept:\n\t\tprint(\"Unexpected error:\", sys.exc_info()[0])\n\t\tprint('{0} error '.format(driver.current_url))\n\tfinally:\n\t\turl = next_url\ndriver.quit()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Torus(POVRayElement):
""""""
def render_scene(filename, object_type, color, location, rotation):
assert object_type in object_types
assert color in colors
color = colors[color]
size = 2
radius = size / 2
attributes = Texture(Pigment('color', color)), Finish('ambient', 0.7
), 'rotate', (0, rotation, 0)
if object_type == 'box':
location.insert(1, size / 2)
obj = Box([(x - size / 2) for x in location], [(x + size / 2) for x in
location], *attributes)
if object_type == 'sphere':
location.insert(1, radius)
obj = Sphere(location, radius, *attributes)
if object_type == 'torus':
location.insert(1, radius / 2)
obj = Torus(radius, radius / 2, 'translate', location, *attributes)
if object_type == 'ellipsoid':
location.insert(1, radius)
obj = Sphere(location, radius, 'scale', (0.75, 0.45, 1.5), *attributes)
if object_type == 'cylinder':
location.insert(1, 0)
location2 = list(location)
location2[1] = size * 2
obj = Cylinder(location, location2, radius, *attributes)
camera = Camera('location', [0, 8, 7], 'look_at', [0, 0, 0])
light = LightSource([0, 10, 0], 'color', [1, 1, 1])
chessboard = Plane([0, 1, 0], 0, 'hollow', Texture(Pigment('checker',
'color', [0.47, 0.6, 0.74], 'color', [0.34, 0.48, 0.6]), 'scale', 4
), Finish('ambient', 0.5))
scene = Scene(camera, objects=[light, obj, chessboard])
scene.render(filename, width=128, height=128, antialiasing=1.0)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Torus(POVRayElement):
""""""
def render_scene(filename, object_type, color, location, rotation):
assert object_type in object_types
assert color in colors
color = colors[color]
size = 2
radius = size / 2
attributes = Texture(Pigment('color', color)), Finish('ambient', 0.7
), 'rotate', (0, rotation, 0)
if object_type == 'box':
location.insert(1, size / 2)
obj = Box([(x - size / 2) for x in location], [(x + size / 2) for x in
location], *attributes)
if object_type == 'sphere':
location.insert(1, radius)
obj = Sphere(location, radius, *attributes)
if object_type == 'torus':
location.insert(1, radius / 2)
obj = Torus(radius, radius / 2, 'translate', location, *attributes)
if object_type == 'ellipsoid':
location.insert(1, radius)
obj = Sphere(location, radius, 'scale', (0.75, 0.45, 1.5), *attributes)
if object_type == 'cylinder':
location.insert(1, 0)
location2 = list(location)
location2[1] = size * 2
obj = Cylinder(location, location2, radius, *attributes)
camera = Camera('location', [0, 8, 7], 'look_at', [0, 0, 0])
light = LightSource([0, 10, 0], 'color', [1, 1, 1])
chessboard = Plane([0, 1, 0], 0, 'hollow', Texture(Pigment('checker',
'color', [0.47, 0.6, 0.74], 'color', [0.34, 0.48, 0.6]), 'scale', 4
), Finish('ambient', 0.5))
scene = Scene(camera, objects=[light, obj, chessboard])
scene.render(filename, width=128, height=128, antialiasing=1.0)
<|reserved_special_token_0|>
parser.add_argument('--n_samples', type=int, default=100)
parser.add_argument('--seed', type=int, default=2018)
<|reserved_special_token_0|>
random.seed(args.seed)
os.makedirs('assets', exist_ok=True)
print('Rendering scenes...')
for color in colors:
for object_type in object_types:
for i in range(args.n_samples):
filename = 'assets/%s-%s-%d' % (color, object_type, i)
if os.path.exists(filename):
print('%s exists, skipping' % filename)
continue
location = [random.uniform(-3, 3), random.uniform(-3, 3)]
rotation = random.uniform(0, 360)
render_scene(filename, object_type, color, location, rotation)
print('Finished')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Torus(POVRayElement):
""""""
def render_scene(filename, object_type, color, location, rotation):
assert object_type in object_types
assert color in colors
color = colors[color]
size = 2
radius = size / 2
attributes = Texture(Pigment('color', color)), Finish('ambient', 0.7
), 'rotate', (0, rotation, 0)
if object_type == 'box':
location.insert(1, size / 2)
obj = Box([(x - size / 2) for x in location], [(x + size / 2) for x in
location], *attributes)
if object_type == 'sphere':
location.insert(1, radius)
obj = Sphere(location, radius, *attributes)
if object_type == 'torus':
location.insert(1, radius / 2)
obj = Torus(radius, radius / 2, 'translate', location, *attributes)
if object_type == 'ellipsoid':
location.insert(1, radius)
obj = Sphere(location, radius, 'scale', (0.75, 0.45, 1.5), *attributes)
if object_type == 'cylinder':
location.insert(1, 0)
location2 = list(location)
location2[1] = size * 2
obj = Cylinder(location, location2, radius, *attributes)
camera = Camera('location', [0, 8, 7], 'look_at', [0, 0, 0])
light = LightSource([0, 10, 0], 'color', [1, 1, 1])
chessboard = Plane([0, 1, 0], 0, 'hollow', Texture(Pigment('checker',
'color', [0.47, 0.6, 0.74], 'color', [0.34, 0.48, 0.6]), 'scale', 4
), Finish('ambient', 0.5))
scene = Scene(camera, objects=[light, obj, chessboard])
scene.render(filename, width=128, height=128, antialiasing=1.0)
parser = argparse.ArgumentParser()
parser.add_argument('--n_samples', type=int, default=100)
parser.add_argument('--seed', type=int, default=2018)
args = parser.parse_args()
random.seed(args.seed)
os.makedirs('assets', exist_ok=True)
print('Rendering scenes...')
for color in colors:
for object_type in object_types:
for i in range(args.n_samples):
filename = 'assets/%s-%s-%d' % (color, object_type, i)
if os.path.exists(filename):
print('%s exists, skipping' % filename)
continue
location = [random.uniform(-3, 3), random.uniform(-3, 3)]
rotation = random.uniform(0, 360)
render_scene(filename, object_type, color, location, rotation)
print('Finished')
<|reserved_special_token_1|>
import os
import random
import argparse
from vapory import *
from data import colors, object_types
class Torus(POVRayElement):
""""""
def render_scene(filename, object_type, color, location, rotation):
assert object_type in object_types
assert color in colors
color = colors[color]
size = 2
radius = size / 2
attributes = Texture(Pigment('color', color)), Finish('ambient', 0.7
), 'rotate', (0, rotation, 0)
if object_type == 'box':
location.insert(1, size / 2)
obj = Box([(x - size / 2) for x in location], [(x + size / 2) for x in
location], *attributes)
if object_type == 'sphere':
location.insert(1, radius)
obj = Sphere(location, radius, *attributes)
if object_type == 'torus':
location.insert(1, radius / 2)
obj = Torus(radius, radius / 2, 'translate', location, *attributes)
if object_type == 'ellipsoid':
location.insert(1, radius)
obj = Sphere(location, radius, 'scale', (0.75, 0.45, 1.5), *attributes)
if object_type == 'cylinder':
location.insert(1, 0)
location2 = list(location)
location2[1] = size * 2
obj = Cylinder(location, location2, radius, *attributes)
camera = Camera('location', [0, 8, 7], 'look_at', [0, 0, 0])
light = LightSource([0, 10, 0], 'color', [1, 1, 1])
chessboard = Plane([0, 1, 0], 0, 'hollow', Texture(Pigment('checker',
'color', [0.47, 0.6, 0.74], 'color', [0.34, 0.48, 0.6]), 'scale', 4
), Finish('ambient', 0.5))
scene = Scene(camera, objects=[light, obj, chessboard])
scene.render(filename, width=128, height=128, antialiasing=1.0)
parser = argparse.ArgumentParser()
parser.add_argument('--n_samples', type=int, default=100)
parser.add_argument('--seed', type=int, default=2018)
args = parser.parse_args()
random.seed(args.seed)
os.makedirs('assets', exist_ok=True)
print('Rendering scenes...')
for color in colors:
for object_type in object_types:
for i in range(args.n_samples):
filename = 'assets/%s-%s-%d' % (color, object_type, i)
if os.path.exists(filename):
print('%s exists, skipping' % filename)
continue
location = [random.uniform(-3, 3), random.uniform(-3, 3)]
rotation = random.uniform(0, 360)
render_scene(filename, object_type, color, location, rotation)
print('Finished')
<|reserved_special_token_1|>
import os
import random
import argparse
from vapory import *
from data import colors, object_types
# Custom vapory element: POVRayElement presumably derives the POV-Ray
# 'torus' keyword from the class name, so no body is needed beyond the
# (intentionally empty) docstring — TODO confirm against vapory docs.
class Torus(POVRayElement):
    """"""
def render_scene(filename, object_type, color, location, rotation):
    """Render a single colored object standing on a checkered plane.

    Parameters
    ----------
    filename : str
        Output image path passed to ``Scene.render``.
    object_type : str
        One of the shapes in ``object_types``: 'box', 'sphere', 'torus',
        'ellipsoid' or 'cylinder'.
    color : str
        Key into the ``colors`` mapping; resolved to a vapory color value here.
    location : sequence of 2 floats
        Ground-plane (x, z) position; the vertical (y) coordinate is inserted
        per shape so the object rests on the plane.
    rotation : float
        Rotation about the vertical axis (POV-Ray 'rotate' y component;
        callers pass values in [0, 360), i.e. degrees).
    """
    assert object_type in object_types
    assert color in colors

    rgb = colors[color]
    size = 2
    radius = size / 2
    # Surface/orientation attributes shared by every shape.
    attributes = Texture(Pigment('color', rgb)), Finish('ambient', 0.7), 'rotate', (0, rotation, 0)

    # Fix: work on a copy so the caller's (x, z) list is not mutated in place,
    # and use elif — the shape branches are mutually exclusive.
    location = list(location)
    if object_type == 'box':
        location.insert(1, size / 2)
        obj = Box([x - size / 2 for x in location],
                  [x + size / 2 for x in location], *attributes)
    elif object_type == 'sphere':
        location.insert(1, radius)
        obj = Sphere(location, radius, *attributes)
    elif object_type == 'torus':
        location.insert(1, radius / 2)
        obj = Torus(radius, radius / 2, 'translate', location, *attributes)
    elif object_type == 'ellipsoid':
        # An ellipsoid is a sphere scaled non-uniformly along each axis.
        location.insert(1, radius)
        obj = Sphere(location, radius, 'scale', (0.75, 0.45, 1.5), *attributes)
    elif object_type == 'cylinder':
        # Cylinder runs from ground level straight up to height size*2.
        location.insert(1, 0)
        location2 = list(location)
        location2[1] = size * 2
        obj = Cylinder(location, location2, radius, *attributes)

    camera = Camera('location', [0, 8, 7], 'look_at', [0, 0, 0])
    light = LightSource([0, 10, 0], 'color', [1, 1, 1])

    # Checkered ground plane (normal +y through the origin).
    chessboard = Plane([0, 1, 0], 0, 'hollow',
                       Texture(Pigment('checker',
                                       'color', [0.47, 0.6, 0.74],
                                       'color', [0.34, 0.48, 0.6]),
                               'scale', 4), Finish('ambient', 0.5))

    scene = Scene(camera, objects=[light, obj, chessboard])
    scene.render(filename, width=128, height=128, antialiasing=1.0)
# Command-line options: samples per (color, shape) pair, and the RNG seed.
parser = argparse.ArgumentParser()
for flag, default in (('--n_samples', 100), ('--seed', 2018)):
    parser.add_argument(flag, type=int, default=default)
args = parser.parse_args()

random.seed(args.seed)
os.makedirs('assets', exist_ok=True)

print("Rendering scenes...")
for color in colors:
    for object_type in object_types:
        for sample_idx in range(args.n_samples):
            out_name = 'assets/%s-%s-%d' % (color, object_type, sample_idx)
            if os.path.exists(out_name):
                print("%s exists, skipping" % out_name)
                continue
            # Random ground position and heading; the draw order matches the
            # original script so a fixed seed reproduces the same scenes.
            position = [random.uniform(-3, 3), random.uniform(-3, 3)]
            heading = random.uniform(0, 360)
            render_scene(out_name, object_type, color, position, heading)
print("Finished")
|
flexible
|
{
"blob_id": "f8972067fa88e7e74e05cdcc7bdec184116dec4a",
"index": 7771,
"step-1": "<mask token>\n\n\nclass Torus(POVRayElement):\n \"\"\"\"\"\"\n\n\ndef render_scene(filename, object_type, color, location, rotation):\n assert object_type in object_types\n assert color in colors\n color = colors[color]\n size = 2\n radius = size / 2\n attributes = Texture(Pigment('color', color)), Finish('ambient', 0.7\n ), 'rotate', (0, rotation, 0)\n if object_type == 'box':\n location.insert(1, size / 2)\n obj = Box([(x - size / 2) for x in location], [(x + size / 2) for x in\n location], *attributes)\n if object_type == 'sphere':\n location.insert(1, radius)\n obj = Sphere(location, radius, *attributes)\n if object_type == 'torus':\n location.insert(1, radius / 2)\n obj = Torus(radius, radius / 2, 'translate', location, *attributes)\n if object_type == 'ellipsoid':\n location.insert(1, radius)\n obj = Sphere(location, radius, 'scale', (0.75, 0.45, 1.5), *attributes)\n if object_type == 'cylinder':\n location.insert(1, 0)\n location2 = list(location)\n location2[1] = size * 2\n obj = Cylinder(location, location2, radius, *attributes)\n camera = Camera('location', [0, 8, 7], 'look_at', [0, 0, 0])\n light = LightSource([0, 10, 0], 'color', [1, 1, 1])\n chessboard = Plane([0, 1, 0], 0, 'hollow', Texture(Pigment('checker',\n 'color', [0.47, 0.6, 0.74], 'color', [0.34, 0.48, 0.6]), 'scale', 4\n ), Finish('ambient', 0.5))\n scene = Scene(camera, objects=[light, obj, chessboard])\n scene.render(filename, width=128, height=128, antialiasing=1.0)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Torus(POVRayElement):\n \"\"\"\"\"\"\n\n\ndef render_scene(filename, object_type, color, location, rotation):\n assert object_type in object_types\n assert color in colors\n color = colors[color]\n size = 2\n radius = size / 2\n attributes = Texture(Pigment('color', color)), Finish('ambient', 0.7\n ), 'rotate', (0, rotation, 0)\n if object_type == 'box':\n location.insert(1, size / 2)\n obj = Box([(x - size / 2) for x in location], [(x + size / 2) for x in\n location], *attributes)\n if object_type == 'sphere':\n location.insert(1, radius)\n obj = Sphere(location, radius, *attributes)\n if object_type == 'torus':\n location.insert(1, radius / 2)\n obj = Torus(radius, radius / 2, 'translate', location, *attributes)\n if object_type == 'ellipsoid':\n location.insert(1, radius)\n obj = Sphere(location, radius, 'scale', (0.75, 0.45, 1.5), *attributes)\n if object_type == 'cylinder':\n location.insert(1, 0)\n location2 = list(location)\n location2[1] = size * 2\n obj = Cylinder(location, location2, radius, *attributes)\n camera = Camera('location', [0, 8, 7], 'look_at', [0, 0, 0])\n light = LightSource([0, 10, 0], 'color', [1, 1, 1])\n chessboard = Plane([0, 1, 0], 0, 'hollow', Texture(Pigment('checker',\n 'color', [0.47, 0.6, 0.74], 'color', [0.34, 0.48, 0.6]), 'scale', 4\n ), Finish('ambient', 0.5))\n scene = Scene(camera, objects=[light, obj, chessboard])\n scene.render(filename, width=128, height=128, antialiasing=1.0)\n\n\n<mask token>\nparser.add_argument('--n_samples', type=int, default=100)\nparser.add_argument('--seed', type=int, default=2018)\n<mask token>\nrandom.seed(args.seed)\nos.makedirs('assets', exist_ok=True)\nprint('Rendering scenes...')\nfor color in colors:\n for object_type in object_types:\n for i in range(args.n_samples):\n filename = 'assets/%s-%s-%d' % (color, object_type, i)\n if os.path.exists(filename):\n print('%s exists, skipping' % filename)\n continue\n location = [random.uniform(-3, 3), 
random.uniform(-3, 3)]\n rotation = random.uniform(0, 360)\n render_scene(filename, object_type, color, location, rotation)\nprint('Finished')\n",
"step-3": "<mask token>\n\n\nclass Torus(POVRayElement):\n \"\"\"\"\"\"\n\n\ndef render_scene(filename, object_type, color, location, rotation):\n assert object_type in object_types\n assert color in colors\n color = colors[color]\n size = 2\n radius = size / 2\n attributes = Texture(Pigment('color', color)), Finish('ambient', 0.7\n ), 'rotate', (0, rotation, 0)\n if object_type == 'box':\n location.insert(1, size / 2)\n obj = Box([(x - size / 2) for x in location], [(x + size / 2) for x in\n location], *attributes)\n if object_type == 'sphere':\n location.insert(1, radius)\n obj = Sphere(location, radius, *attributes)\n if object_type == 'torus':\n location.insert(1, radius / 2)\n obj = Torus(radius, radius / 2, 'translate', location, *attributes)\n if object_type == 'ellipsoid':\n location.insert(1, radius)\n obj = Sphere(location, radius, 'scale', (0.75, 0.45, 1.5), *attributes)\n if object_type == 'cylinder':\n location.insert(1, 0)\n location2 = list(location)\n location2[1] = size * 2\n obj = Cylinder(location, location2, radius, *attributes)\n camera = Camera('location', [0, 8, 7], 'look_at', [0, 0, 0])\n light = LightSource([0, 10, 0], 'color', [1, 1, 1])\n chessboard = Plane([0, 1, 0], 0, 'hollow', Texture(Pigment('checker',\n 'color', [0.47, 0.6, 0.74], 'color', [0.34, 0.48, 0.6]), 'scale', 4\n ), Finish('ambient', 0.5))\n scene = Scene(camera, objects=[light, obj, chessboard])\n scene.render(filename, width=128, height=128, antialiasing=1.0)\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument('--n_samples', type=int, default=100)\nparser.add_argument('--seed', type=int, default=2018)\nargs = parser.parse_args()\nrandom.seed(args.seed)\nos.makedirs('assets', exist_ok=True)\nprint('Rendering scenes...')\nfor color in colors:\n for object_type in object_types:\n for i in range(args.n_samples):\n filename = 'assets/%s-%s-%d' % (color, object_type, i)\n if os.path.exists(filename):\n print('%s exists, skipping' % filename)\n continue\n location = 
[random.uniform(-3, 3), random.uniform(-3, 3)]\n rotation = random.uniform(0, 360)\n render_scene(filename, object_type, color, location, rotation)\nprint('Finished')\n",
"step-4": "import os\nimport random\nimport argparse\nfrom vapory import *\nfrom data import colors, object_types\n\n\nclass Torus(POVRayElement):\n \"\"\"\"\"\"\n\n\ndef render_scene(filename, object_type, color, location, rotation):\n assert object_type in object_types\n assert color in colors\n color = colors[color]\n size = 2\n radius = size / 2\n attributes = Texture(Pigment('color', color)), Finish('ambient', 0.7\n ), 'rotate', (0, rotation, 0)\n if object_type == 'box':\n location.insert(1, size / 2)\n obj = Box([(x - size / 2) for x in location], [(x + size / 2) for x in\n location], *attributes)\n if object_type == 'sphere':\n location.insert(1, radius)\n obj = Sphere(location, radius, *attributes)\n if object_type == 'torus':\n location.insert(1, radius / 2)\n obj = Torus(radius, radius / 2, 'translate', location, *attributes)\n if object_type == 'ellipsoid':\n location.insert(1, radius)\n obj = Sphere(location, radius, 'scale', (0.75, 0.45, 1.5), *attributes)\n if object_type == 'cylinder':\n location.insert(1, 0)\n location2 = list(location)\n location2[1] = size * 2\n obj = Cylinder(location, location2, radius, *attributes)\n camera = Camera('location', [0, 8, 7], 'look_at', [0, 0, 0])\n light = LightSource([0, 10, 0], 'color', [1, 1, 1])\n chessboard = Plane([0, 1, 0], 0, 'hollow', Texture(Pigment('checker',\n 'color', [0.47, 0.6, 0.74], 'color', [0.34, 0.48, 0.6]), 'scale', 4\n ), Finish('ambient', 0.5))\n scene = Scene(camera, objects=[light, obj, chessboard])\n scene.render(filename, width=128, height=128, antialiasing=1.0)\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument('--n_samples', type=int, default=100)\nparser.add_argument('--seed', type=int, default=2018)\nargs = parser.parse_args()\nrandom.seed(args.seed)\nos.makedirs('assets', exist_ok=True)\nprint('Rendering scenes...')\nfor color in colors:\n for object_type in object_types:\n for i in range(args.n_samples):\n filename = 'assets/%s-%s-%d' % (color, object_type, i)\n if 
os.path.exists(filename):\n print('%s exists, skipping' % filename)\n continue\n location = [random.uniform(-3, 3), random.uniform(-3, 3)]\n rotation = random.uniform(0, 360)\n render_scene(filename, object_type, color, location, rotation)\nprint('Finished')\n",
"step-5": "import os\nimport random\nimport argparse\n\nfrom vapory import *\n\nfrom data import colors, object_types\n\n\nclass Torus(POVRayElement):\n \"\"\"\"\"\"\n\n\ndef render_scene(filename, object_type, color, location, rotation):\n assert (object_type in object_types)\n assert (color in colors)\n\n color = colors[color]\n size = 2\n radius = size/2\n attributes = Texture(Pigment('color', color)), Finish('ambient', 0.7), 'rotate', (0, rotation, 0)\n if object_type == 'box':\n location.insert(1, size/2)\n obj = Box([x - size/2 for x in location], [x + size/2 for x in location], *attributes)\n if object_type == 'sphere':\n location.insert(1, radius)\n obj = Sphere(location, radius, *attributes)\n if object_type == 'torus':\n location.insert(1, radius/2)\n obj = Torus(radius, radius/2, 'translate', location, *attributes)\n if object_type == 'ellipsoid':\n location.insert(1, radius)\n obj = Sphere(location, radius, 'scale', (0.75, 0.45, 1.5), *attributes)\n if object_type == 'cylinder':\n location.insert(1, 0)\n location2 = list(location)\n location2[1] = size*2\n obj = Cylinder(location, location2, radius, *attributes)\n\n camera = Camera('location', [0, 8, 7], 'look_at', [0, 0, 0])\n light = LightSource([0, 10, 0], 'color', [1, 1, 1])\n\n chessboard = Plane([0, 1, 0], 0, 'hollow',\n Texture(Pigment('checker',\n 'color', [.47, .6, .74],\n 'color', [.34, 0.48, 0.6]),\n 'scale', 4), Finish('ambient', 0.5))\n\n scene = Scene(camera, objects=[light, obj, chessboard])\n scene.render(filename, width=128, height=128, antialiasing=1.0)\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument('--n_samples', type=int, default=100)\nparser.add_argument('--seed', type=int, default=2018)\nargs = parser.parse_args()\n\nrandom.seed(args.seed)\n\nos.makedirs('assets', exist_ok=True)\n\nprint(\"Rendering scenes...\")\nfor color in colors:\n for object_type in object_types:\n for i in range(args.n_samples):\n filename = 'assets/%s-%s-%d' % (color, object_type, i)\n if 
os.path.exists(filename):\n print(\"%s exists, skipping\" % filename)\n continue\n location = [random.uniform(-3, 3), random.uniform(-3, 3)]\n rotation = random.uniform(0, 360)\n render_scene(filename, object_type, color, location, rotation)\n\nprint(\"Finished\")\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
api_key = 'your_key'
<|reserved_special_token_1|>
api_key = "your_key"
|
flexible
|
{
"blob_id": "f024b0736f5fcdebede8d5b0985cf9d7170db8fc",
"index": 7401,
"step-1": "<mask token>\n",
"step-2": "api_key = 'your_key'\n",
"step-3": "api_key = \"your_key\"\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# Print one greeting line per name.
names = ["Madan", "Mohan", "Reddy", "Govindu"]
for person in names:
    print("My name includes " + person)

# Multiplication tables (kept for reference, disabled):
# for i in range(1, 11):
#     for j in range(1, 11):
#         print("{0} * {1} = {2}".format(i,j, i*j))
#     print("\n")

# `break` demo: the inner loop stops as soon as j reaches 5, so only
# the value 4 is printed on each of the two outer iterations.
for outer in range(1, 3):
    for j in range(4, 7):
        if j == 5:
            break
        print(j)
|
normal
|
{
"blob_id": "c0376d94b34ea43e562e68cd65d4e5d2c5b04fb3",
"index": 6657,
"step-1": "<mask token>\n",
"step-2": "for name in ['Madan', 'Mohan', 'Reddy', 'Govindu']:\n print('My name includes ' + name)\nfor i in range(1, 3):\n for j in range(4, 7):\n if j == 5:\n break\n print(j)\n",
"step-3": "for name in [\"Madan\", \"Mohan\", \"Reddy\", \"Govindu\"]:\n print(\"My name includes \"+name)\n\n# Tables\n# for i in range(1, 11):\n# for j in range(1, 11):\n# print(\"{0} * {1} = {2}\".format(i,j, i*j))\n# print(\"\\n\")\n\n\nfor i in range(1, 3):\n for j in range(4, 7):\n if j==5:\n break\n print(j)",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(word_dict)
<|reserved_special_token_0|>
print(multiple_list)
<|reserved_special_token_0|>
print(final_list)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
word_list = ['Tree', 'Apple', 'Snake', 'flowers']
word_dict = {word: word[::-1] for word in word_list}
print(word_dict)
<|reserved_special_token_0|>
use_range = range(1, 101)
multiple_list = [i for i in use_range if i % 2 == 0]
print(multiple_list)
<|reserved_special_token_0|>
list_above = [[1, 2, 3, 4], [5, 6, 7, 8]]
final_list = [[(bottom * 2) for bottom in top] for top in list_above]
print(final_list)
<|reserved_special_token_1|>
"""
Create a list of words and with it, create a new dictionary
in which the key is the word and the value is the same word
reversed.
"""
word_list = ['Tree','Apple','Snake','flowers']
word_dict = {word:word[::-1] for word in word_list}
print(word_dict)
#Output: {'Tree': 'eerT', 'Apple': 'elppA', 'Snake': 'ekanS', 'flowers': 'srewolf'}
"""
Let's try this one again:
Using the range function, create a sequence of numbers
from 1 to 100, and using the comprehension to return only
those that are multiplies of 2.
"""
use_range = range(1,101)
multiple_list = [i for i in use_range if i%2==0]
print(multiple_list)
"""
[[1, 2, 3, 4], [5, 6, 7, 8]]
Use the list above and create nested comprehensions so that
the final value is a new list like the following
[[2, 4, 6, 8], [10, 12, 14, 16]] The number multiplied by 2
"""
list_above = [[1, 2, 3, 4], [5, 6, 7, 8]]
final_list = [[bottom*2 for bottom in top] for top in list_above]
print(final_list)
|
flexible
|
{
"blob_id": "5ac489a2d30155bb92767184ad546247817e28ea",
"index": 1478,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(word_dict)\n<mask token>\nprint(multiple_list)\n<mask token>\nprint(final_list)\n",
"step-3": "<mask token>\nword_list = ['Tree', 'Apple', 'Snake', 'flowers']\nword_dict = {word: word[::-1] for word in word_list}\nprint(word_dict)\n<mask token>\nuse_range = range(1, 101)\nmultiple_list = [i for i in use_range if i % 2 == 0]\nprint(multiple_list)\n<mask token>\nlist_above = [[1, 2, 3, 4], [5, 6, 7, 8]]\nfinal_list = [[(bottom * 2) for bottom in top] for top in list_above]\nprint(final_list)\n",
"step-4": "\"\"\"\nCreate a list of words and with it, create a new dictionary\nin which the key is the word and the value is the same word\nreversed.\n\"\"\"\n\nword_list = ['Tree','Apple','Snake','flowers']\nword_dict = {word:word[::-1] for word in word_list}\nprint(word_dict)\n#Output: {'Tree': 'eerT', 'Apple': 'elppA', 'Snake': 'ekanS', 'flowers': 'srewolf'}\n\n\"\"\"\nLet's try this one again:\nUsing the range function, create a sequence of numbers\nfrom 1 to 100, and using the comprehension to return only\nthose that are multiplies of 2.\n\"\"\"\nuse_range = range(1,101)\nmultiple_list = [i for i in use_range if i%2==0]\nprint(multiple_list)\n\n\n\"\"\"\n[[1, 2, 3, 4], [5, 6, 7, 8]]\nUse the list above and create nested comprehensions so that\nthe final value is a new list like the following\n[[2, 4, 6, 8], [10, 12, 14, 16]] The number multiplied by 2\n\"\"\"\nlist_above = [[1, 2, 3, 4], [5, 6, 7, 8]]\n\nfinal_list = [[bottom*2 for bottom in top] for top in list_above]\nprint(final_list)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from libtbx.program_template import ProgramTemplate
from mmtbx import pdbtools
from libtbx import Auto
import os
import mmtbx.pdbtools
from cctbx import uctbx
class Program(ProgramTemplate):
  """Command-line driver applying mmtbx.pdbtools manipulations to one model.

  Reads exactly one model file through the DataManager, applies the edits
  described by the ``modify`` PHIL scope, and writes the modified model to a
  file whose name is derived from the input (or from ``output.prefix``).
  """
  description = '''
phenix.pdbtools tools for PDB model manipulations.
Usage examples:
  phenix.pdbtools model.pdb sites.shake=0.4
  phenix.pdbtools model.cif remove="element H"
'''
  # Kinds of command-line inputs this program accepts.
  datatypes = ['model', 'phil']
  # PHIL parameter definitions; this string is parsed at runtime and must
  # remain valid PHIL syntax (do not edit casually).
  master_phil_str = """\
include scope mmtbx.pdbtools.master_params
output {
  prefix = None
    .type = str
  suffix = _modified
    .type = str
  serial = None
    .type = int
  overwrite = True
    .type = bool
}
# temporary GUI PHIL
include scope libtbx.phil.interface.tracking_params
gui
  .help = "GUI-specific parameter required for output directory"
{
  output_dir = None
  .type = path
  .style = output_dir
}
"""
  def validate(self):
    """Validate inputs: exactly one model must have been supplied.

    Raises a Sorry (via the data manager) when the count is wrong.
    """
    print('Validating inputs', file=self.logger)
    self.data_manager.has_models(
      raise_sorry = True,
      expected_n = 1,
      exact_count = True)
  def run(self):
    """Apply the requested manipulations and write the modified model.

    Stores the output file name in ``self.result`` (see ``get_results``).
    """
    self.model = self.data_manager.get_model()
    cs = self.model.crystal_symmetry()
    if(cs is None or cs.is_empty() or cs.is_nonsense()):
      # No usable symmetry: synthesize a P1 box with a 5 A buffer around the
      # atomic sites so downstream code has a defined unit cell.
      print("Crystal symmetry undefined, creating fake P1 box.")
      box_crystal_symmetry = \
        uctbx.non_crystallographic_unit_cell_with_the_sites_in_its_center(
          sites_cart = self.model.get_sites_cart(),
          buffer_layer = 5).crystal_symmetry()
      self.model.set_crystal_symmetry(crystal_symmetry = box_crystal_symmetry)
    print('Performing manipulations', file=self.logger)
    self.model = mmtbx.pdbtools.modify(
      model = self.model,
      params = self.params.modify,
      log = self.logger).get_results().model
    # Write output model file; the extension mirrors the input format.
    # [:-4] strips the 4-character extension (".pdb"/".cif") from the name.
    # NOTE(review): if the input is neither CIF nor PDB, 'extension' below
    # would be unbound -- presumably the model reader guarantees one of the
    # two formats; confirm before relying on other formats.
    input_file_name_base = os.path.basename(
      self.data_manager.get_default_model_name())[:-4]
    if( self.model.input_model_format_cif()): extension = ".cif"
    elif(self.model.input_model_format_pdb()): extension = ".pdb"
    if(self.params.output.prefix is not None):
      output_file_name = self.params.output.prefix
      if(self.params.output.suffix is not None):
        output_file_name = output_file_name + self.params.output.suffix
    else:
      output_file_name = input_file_name_base + self.params.output.suffix
    output_file_name = output_file_name + extension
    ofn = self.get_default_output_filename(
      prefix=output_file_name,
      suffix=None,
      serial=Auto)
    print('Writing output model', file=self.logger)
    # Omit symmetry from the output only when the input model had none at
    # all (cs captured before the fake-box substitution above).
    output_cs=True
    if(cs is None): output_cs = False
    self.data_manager.write_model_file(self.model.model_as_str(
      output_cs=output_cs), ofn)
    self.result = ofn
  def get_results(self):
    """Return the path of the model file written by ``run``."""
    return self.result
# Re-export master_phil_str at module level so callers can import it
# directly without going through the Program class.
master_phil_str = Program.master_phil_str
|
normal
|
{
"blob_id": "e1228f5e17bae6632f8decd114f72723dbbce944",
"index": 6186,
"step-1": "<mask token>\n\n\nclass Program(ProgramTemplate):\n <mask token>\n <mask token>\n <mask token>\n\n def validate(self):\n print('Validating inputs', file=self.logger)\n self.data_manager.has_models(raise_sorry=True, expected_n=1,\n exact_count=True)\n <mask token>\n\n def get_results(self):\n return self.result\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Program(ProgramTemplate):\n description = \"\"\"\nphenix.pdbtools tools for PDB model manipulations.\n\nUsage examples:\n phenix.pdbtools model.pdb sites.shake=0.4\n phenix.pdbtools model.cif remove=\"element H\"\n \"\"\"\n datatypes = ['model', 'phil']\n master_phil_str = \"\"\"include scope mmtbx.pdbtools.master_params\n\noutput {\n prefix = None\n .type = str\n suffix = _modified\n .type = str\n serial = None\n .type = int\n overwrite = True\n .type = bool\n}\n# temporary GUI PHIL\ninclude scope libtbx.phil.interface.tracking_params\ngui\n .help = \"GUI-specific parameter required for output directory\"\n{\n output_dir = None\n .type = path\n .style = output_dir\n}\n\"\"\"\n\n def validate(self):\n print('Validating inputs', file=self.logger)\n self.data_manager.has_models(raise_sorry=True, expected_n=1,\n exact_count=True)\n\n def run(self):\n self.model = self.data_manager.get_model()\n cs = self.model.crystal_symmetry()\n if cs is None or cs.is_empty() or cs.is_nonsense():\n print('Crystal symmetry undefined, creating fake P1 box.')\n box_crystal_symmetry = (uctbx.\n non_crystallographic_unit_cell_with_the_sites_in_its_center\n (sites_cart=self.model.get_sites_cart(), buffer_layer=5).\n crystal_symmetry())\n self.model.set_crystal_symmetry(crystal_symmetry=\n box_crystal_symmetry)\n print('Performing manipulations', file=self.logger)\n self.model = mmtbx.pdbtools.modify(model=self.model, params=self.\n params.modify, log=self.logger).get_results().model\n input_file_name_base = os.path.basename(self.data_manager.\n get_default_model_name())[:-4]\n if self.model.input_model_format_cif():\n extension = '.cif'\n elif self.model.input_model_format_pdb():\n extension = '.pdb'\n if self.params.output.prefix is not None:\n output_file_name = self.params.output.prefix\n if self.params.output.suffix is not None:\n output_file_name = output_file_name + self.params.output.suffix\n else:\n output_file_name = input_file_name_base + 
self.params.output.suffix\n output_file_name = output_file_name + extension\n ofn = self.get_default_output_filename(prefix=output_file_name,\n suffix=None, serial=Auto)\n print('Writing output model', file=self.logger)\n output_cs = True\n if cs is None:\n output_cs = False\n self.data_manager.write_model_file(self.model.model_as_str(\n output_cs=output_cs), ofn)\n self.result = ofn\n\n def get_results(self):\n return self.result\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Program(ProgramTemplate):\n description = \"\"\"\nphenix.pdbtools tools for PDB model manipulations.\n\nUsage examples:\n phenix.pdbtools model.pdb sites.shake=0.4\n phenix.pdbtools model.cif remove=\"element H\"\n \"\"\"\n datatypes = ['model', 'phil']\n master_phil_str = \"\"\"include scope mmtbx.pdbtools.master_params\n\noutput {\n prefix = None\n .type = str\n suffix = _modified\n .type = str\n serial = None\n .type = int\n overwrite = True\n .type = bool\n}\n# temporary GUI PHIL\ninclude scope libtbx.phil.interface.tracking_params\ngui\n .help = \"GUI-specific parameter required for output directory\"\n{\n output_dir = None\n .type = path\n .style = output_dir\n}\n\"\"\"\n\n def validate(self):\n print('Validating inputs', file=self.logger)\n self.data_manager.has_models(raise_sorry=True, expected_n=1,\n exact_count=True)\n\n def run(self):\n self.model = self.data_manager.get_model()\n cs = self.model.crystal_symmetry()\n if cs is None or cs.is_empty() or cs.is_nonsense():\n print('Crystal symmetry undefined, creating fake P1 box.')\n box_crystal_symmetry = (uctbx.\n non_crystallographic_unit_cell_with_the_sites_in_its_center\n (sites_cart=self.model.get_sites_cart(), buffer_layer=5).\n crystal_symmetry())\n self.model.set_crystal_symmetry(crystal_symmetry=\n box_crystal_symmetry)\n print('Performing manipulations', file=self.logger)\n self.model = mmtbx.pdbtools.modify(model=self.model, params=self.\n params.modify, log=self.logger).get_results().model\n input_file_name_base = os.path.basename(self.data_manager.\n get_default_model_name())[:-4]\n if self.model.input_model_format_cif():\n extension = '.cif'\n elif self.model.input_model_format_pdb():\n extension = '.pdb'\n if self.params.output.prefix is not None:\n output_file_name = self.params.output.prefix\n if self.params.output.suffix is not None:\n output_file_name = output_file_name + self.params.output.suffix\n else:\n output_file_name = input_file_name_base + 
self.params.output.suffix\n output_file_name = output_file_name + extension\n ofn = self.get_default_output_filename(prefix=output_file_name,\n suffix=None, serial=Auto)\n print('Writing output model', file=self.logger)\n output_cs = True\n if cs is None:\n output_cs = False\n self.data_manager.write_model_file(self.model.model_as_str(\n output_cs=output_cs), ofn)\n self.result = ofn\n\n def get_results(self):\n return self.result\n\n\nmaster_phil_str = Program.master_phil_str\n",
"step-4": "from __future__ import absolute_import, division, print_function\nfrom libtbx.program_template import ProgramTemplate\nfrom mmtbx import pdbtools\nfrom libtbx import Auto\nimport os\nimport mmtbx.pdbtools\nfrom cctbx import uctbx\n\n\nclass Program(ProgramTemplate):\n description = \"\"\"\nphenix.pdbtools tools for PDB model manipulations.\n\nUsage examples:\n phenix.pdbtools model.pdb sites.shake=0.4\n phenix.pdbtools model.cif remove=\"element H\"\n \"\"\"\n datatypes = ['model', 'phil']\n master_phil_str = \"\"\"include scope mmtbx.pdbtools.master_params\n\noutput {\n prefix = None\n .type = str\n suffix = _modified\n .type = str\n serial = None\n .type = int\n overwrite = True\n .type = bool\n}\n# temporary GUI PHIL\ninclude scope libtbx.phil.interface.tracking_params\ngui\n .help = \"GUI-specific parameter required for output directory\"\n{\n output_dir = None\n .type = path\n .style = output_dir\n}\n\"\"\"\n\n def validate(self):\n print('Validating inputs', file=self.logger)\n self.data_manager.has_models(raise_sorry=True, expected_n=1,\n exact_count=True)\n\n def run(self):\n self.model = self.data_manager.get_model()\n cs = self.model.crystal_symmetry()\n if cs is None or cs.is_empty() or cs.is_nonsense():\n print('Crystal symmetry undefined, creating fake P1 box.')\n box_crystal_symmetry = (uctbx.\n non_crystallographic_unit_cell_with_the_sites_in_its_center\n (sites_cart=self.model.get_sites_cart(), buffer_layer=5).\n crystal_symmetry())\n self.model.set_crystal_symmetry(crystal_symmetry=\n box_crystal_symmetry)\n print('Performing manipulations', file=self.logger)\n self.model = mmtbx.pdbtools.modify(model=self.model, params=self.\n params.modify, log=self.logger).get_results().model\n input_file_name_base = os.path.basename(self.data_manager.\n get_default_model_name())[:-4]\n if self.model.input_model_format_cif():\n extension = '.cif'\n elif self.model.input_model_format_pdb():\n extension = '.pdb'\n if self.params.output.prefix is not 
None:\n output_file_name = self.params.output.prefix\n if self.params.output.suffix is not None:\n output_file_name = output_file_name + self.params.output.suffix\n else:\n output_file_name = input_file_name_base + self.params.output.suffix\n output_file_name = output_file_name + extension\n ofn = self.get_default_output_filename(prefix=output_file_name,\n suffix=None, serial=Auto)\n print('Writing output model', file=self.logger)\n output_cs = True\n if cs is None:\n output_cs = False\n self.data_manager.write_model_file(self.model.model_as_str(\n output_cs=output_cs), ofn)\n self.result = ofn\n\n def get_results(self):\n return self.result\n\n\nmaster_phil_str = Program.master_phil_str\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import, division, print_function\nfrom libtbx.program_template import ProgramTemplate\nfrom mmtbx import pdbtools\nfrom libtbx import Auto\nimport os\nimport mmtbx.pdbtools\nfrom cctbx import uctbx\n\nclass Program(ProgramTemplate):\n\n description = '''\nphenix.pdbtools tools for PDB model manipulations.\n\nUsage examples:\n phenix.pdbtools model.pdb sites.shake=0.4\n phenix.pdbtools model.cif remove=\"element H\"\n '''\n\n datatypes = ['model', 'phil']\n\n master_phil_str = \"\"\"\\\ninclude scope mmtbx.pdbtools.master_params\n\noutput {\n prefix = None\n .type = str\n suffix = _modified\n .type = str\n serial = None\n .type = int\n overwrite = True\n .type = bool\n}\n# temporary GUI PHIL\ninclude scope libtbx.phil.interface.tracking_params\ngui\n .help = \"GUI-specific parameter required for output directory\"\n{\n output_dir = None\n .type = path\n .style = output_dir\n}\n\"\"\"\n\n def validate(self):\n print('Validating inputs', file=self.logger)\n self.data_manager.has_models(\n raise_sorry = True,\n expected_n = 1,\n exact_count = True)\n\n def run(self):\n self.model = self.data_manager.get_model()\n cs = self.model.crystal_symmetry()\n if(cs is None or cs.is_empty() or cs.is_nonsense()):\n print(\"Crystal symmetry undefined, creating fake P1 box.\")\n box_crystal_symmetry = \\\n uctbx.non_crystallographic_unit_cell_with_the_sites_in_its_center(\n sites_cart = self.model.get_sites_cart(),\n buffer_layer = 5).crystal_symmetry()\n self.model.set_crystal_symmetry(crystal_symmetry = box_crystal_symmetry)\n print('Performing manipulations', file=self.logger)\n self.model = mmtbx.pdbtools.modify(\n model = self.model,\n params = self.params.modify,\n log = self.logger).get_results().model\n # Write output model file\n input_file_name_base = os.path.basename(\n self.data_manager.get_default_model_name())[:-4]\n if( self.model.input_model_format_cif()): extension = \".cif\"\n 
elif(self.model.input_model_format_pdb()): extension = \".pdb\"\n if(self.params.output.prefix is not None):\n output_file_name = self.params.output.prefix\n if(self.params.output.suffix is not None):\n output_file_name = output_file_name + self.params.output.suffix\n else:\n output_file_name = input_file_name_base + self.params.output.suffix\n output_file_name = output_file_name + extension\n ofn = self.get_default_output_filename(\n prefix=output_file_name,\n suffix=None,\n serial=Auto)\n print('Writing output model', file=self.logger)\n output_cs=True\n if(cs is None): output_cs = False\n self.data_manager.write_model_file(self.model.model_as_str(\n output_cs=output_cs), ofn)\n self.result = ofn\n\n def get_results(self):\n return self.result\n\n# So master_phil_str can be called\nmaster_phil_str = Program.master_phil_str\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
# Read the element count, the index sequence, and the two score tables.
n = int(input())
order = list(map(int, input().split()))
base = list(map(int, input().split()))
bonus = list(map(int, input().split()))
# Every entry order[i] contributes its base score base[order[i]-1].
total = sum(base[order[i] - 1] for i in range(n))
# Additionally, bonus[order[i]-1] is added whenever two adjacent entries
# are consecutive integers (order[i] + 1 == order[i+1]).
total += sum(
    bonus[order[i] - 1]
    for i in range(n - 1)
    if order[i] + 1 == order[i + 1]
)
print(total)
|
normal
|
{
"blob_id": "cc160b1b0478446ba0daec4a0fe9e63453df3d96",
"index": 5029,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(N):\n ans += B_list[A_list[i] - 1]\n if i < N - 1:\n if A_list[i] + 1 == A_list[i + 1]:\n ans += C_list[A_list[i] - 1]\nprint(ans)\n",
"step-3": "N = int(input())\nA_list = list(map(int, input().split()))\nB_list = list(map(int, input().split()))\nC_list = list(map(int, input().split()))\nans = 0\nfor i in range(N):\n ans += B_list[A_list[i] - 1]\n if i < N - 1:\n if A_list[i] + 1 == A_list[i + 1]:\n ans += C_list[A_list[i] - 1]\nprint(ans)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class wxBitmapWidget(wx.Panel):
<|reserved_special_token_0|>
def __init__(self, parent):
""" Initialize a wxBitmapWidget.
Parameters
----------
parent : wx.Window
The wx.Window object which serves as the widget parent.
"""
super(wxBitmapWidget, self).__init__(parent)
self._bitmap = None
self._scaled_contents = False
self._preserve_aspect_ratio = False
self._allow_upscaling = False
self._resize_timer = None
self._resizing = False
self.Bind(wx.EVT_PAINT, self.OnPaint)
    def OnPaint(self, event):
        """ The paint event handler for the widget.

        Centers the bitmap in the widget and, when scaled contents are
        enabled, rescales it according to the aspect-ratio and upscaling
        flags before drawing.
        """
        bmp = self._bitmap
        # Nothing to draw without a bitmap of nonzero size.
        if bmp is None:
            return
        bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()
        if bmp_width == 0 or bmp_height == 0:
            return
        evt_x = 0
        evt_y = 0
        evt_width, evt_height = self.GetSize().asTuple()
        if not self._scaled_contents:
            # Unscaled: draw at natural size, centered (clamped to >= 0 so a
            # bitmap larger than the widget is anchored at the top-left).
            paint_x = max(0, int(evt_width / 2.0 - bmp_width / 2.0 + evt_x))
            paint_y = max(0, int(evt_height / 2.0 - bmp_height / 2.0 + evt_y))
            paint_width = bmp_width
            paint_height = bmp_height
        else:
            if self._preserve_aspect_ratio:
                # Fit the bitmap inside the widget while keeping its aspect
                # ratio: the "wider" dimension (relative to the bitmap's
                # ratio) is constrained and the other derived from it.
                bmp_ratio = float(bmp_width) / bmp_height
                evt_ratio = float(evt_width) / evt_height
                if evt_ratio >= bmp_ratio:
                    # Widget is proportionally wider: height constrains.
                    if self._allow_upscaling:
                        paint_height = evt_height
                    else:
                        paint_height = min(bmp_height, evt_height)
                    paint_width = int(paint_height * bmp_ratio)
                else:
                    # Widget is proportionally taller: width constrains.
                    if self._allow_upscaling:
                        paint_width = evt_width
                    else:
                        paint_width = min(bmp_width, evt_width)
                    paint_height = int(paint_width / bmp_ratio)
            elif self._allow_upscaling:
                # Free scaling: stretch to fill the widget exactly.
                paint_height = evt_height
                paint_width = evt_width
            else:
                # Shrink-only scaling in each dimension independently.
                paint_height = min(bmp_height, evt_height)
                paint_width = min(bmp_width, evt_width)
            # Center the (possibly resized) bitmap in the widget.
            paint_x = int(evt_width / 2.0 - paint_width / 2.0 + evt_x)
            paint_y = int(evt_height / 2.0 - paint_height / 2.0 + evt_y)
        if paint_width != bmp_width or paint_height != bmp_height:
            img = bmp.ConvertToImage()
            # Use a cheap rescale while a live resize is in progress; the
            # resize-end timer triggers a final high-quality repaint.
            if self._resizing:
                quality = wx.IMAGE_QUALITY_NORMAL
            else:
                quality = wx.IMAGE_QUALITY_HIGH
            img.Rescale(paint_width, paint_height, quality)
            bmp = wx.BitmapFromImage(img)
        dc = wx.PaintDC(self)
        dc.DrawBitmap(bmp, paint_x, paint_y)
def OnResize(self, event):
""" The resize event handler for the widget.
This method is only bound and called when content scaling is
enabled. It starts(restarts) a timer to perform a high quality
scaled repaint when resizing is finished.
"""
self._resizing = True
self._resize_timer.Start(60, True)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def SetAllowUpscaling(self, allow):
""" Set whether or not to allow the image to be scaled beyond
its natural size.
Parameters
----------
allow : bool
If True, then the image may be scaled larger than its
natural if it is scaled to fit. If False, the image will
never be scaled larger than its natural size. In either
case, the image may be scaled smaller.
"""
self._allow_upscaling = allow
self.Refresh()
class WXImageView(WXControl, AbstractTkImageView):
    """ A Wx implementation of ImageView.

    Bridges the shell-level ImageView attributes (image, scale_to_fit,
    preserve_aspect_ratio, allow_upscaling) onto a wxBitmapWidget.
    """
    # Last size hint reported to the shell; used in set_image to detect when
    # a new bitmap changes the preferred size so layout can be re-run.
    _cached_size_hint = None
    def create(self, parent):
        """ Creates the underlying wxBitmapWidget control.
        """
        self.widget = wxBitmapWidget(parent)
    def initialize(self):
        """ Initializes the attributes on the underlying control from the
        shell object's current values.
        """
        super(WXImageView, self).initialize()
        shell = self.shell_obj
        self.set_image(shell.image)
        self.set_scale_to_fit(shell.scale_to_fit)
        self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)
        self.set_allow_upscaling(shell.allow_upscaling)
    def shell_image_changed(self, image):
        """ The change handler for the 'image' attribute on the shell
        component.
        """
        self.set_image(image)
    def shell_scale_to_fit_changed(self, scale_to_fit):
        """ The change handler for the 'scale_to_fit' attribute on the
        shell component.
        """
        self.set_scale_to_fit(scale_to_fit)
    def shell_preserve_aspect_ratio_changed(self, preserve):
        """ The change handler for the 'preserve_aspect_ratio' attribute
        on the shell component.
        """
        self.set_preserve_aspect_ratio(preserve)
    def shell_allow_upscaling_changed(self, allow):
        """ The change handler for the 'allow_upscaling' attribute on
        the shell component.
        """
        self.set_allow_upscaling(allow)
    def set_image(self, image):
        """ Sets the image on the underlying wxBitmapWidget.
        """
        bmp = image.as_wxBitmap() if image is not None else None
        self.widget.SetBitmap(bmp)
        # A new bitmap can change the widget's preferred size; notify the
        # shell only when the size hint actually differs from the cached one.
        cached = self._cached_size_hint
        hint = self._cached_size_hint = self.size_hint()
        if cached != hint:
            self.shell_obj.size_hint_updated()
    def set_scale_to_fit(self, scale_to_fit):
        """ Sets whether or not the image scales with the underlying
        control.
        """
        self.widget.SetScaledContents(scale_to_fit)
    def set_preserve_aspect_ratio(self, preserve):
        """ Sets whether or not to preserve the aspect ratio of the
        image when scaling.
        """
        self.widget.SetPreserveAspectRatio(preserve)
    def set_allow_upscaling(self, allow):
        """ Sets whether or not the image will scale beyond its natural
        size.
        """
        self.widget.SetAllowUpscaling(allow)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class wxBitmapWidget(wx.Panel):
<|reserved_special_token_0|>
def __init__(self, parent):
""" Initialize a wxBitmapWidget.
Parameters
----------
parent : wx.Window
The wx.Window object which serves as the widget parent.
"""
super(wxBitmapWidget, self).__init__(parent)
self._bitmap = None
self._scaled_contents = False
self._preserve_aspect_ratio = False
self._allow_upscaling = False
self._resize_timer = None
self._resizing = False
self.Bind(wx.EVT_PAINT, self.OnPaint)
def OnPaint(self, event):
""" The paint event handler for the widget.
"""
bmp = self._bitmap
if bmp is None:
return
bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()
if bmp_width == 0 or bmp_height == 0:
return
evt_x = 0
evt_y = 0
evt_width, evt_height = self.GetSize().asTuple()
if not self._scaled_contents:
paint_x = max(0, int(evt_width / 2.0 - bmp_width / 2.0 + evt_x))
paint_y = max(0, int(evt_height / 2.0 - bmp_height / 2.0 + evt_y))
paint_width = bmp_width
paint_height = bmp_height
else:
if self._preserve_aspect_ratio:
bmp_ratio = float(bmp_width) / bmp_height
evt_ratio = float(evt_width) / evt_height
if evt_ratio >= bmp_ratio:
if self._allow_upscaling:
paint_height = evt_height
else:
paint_height = min(bmp_height, evt_height)
paint_width = int(paint_height * bmp_ratio)
else:
if self._allow_upscaling:
paint_width = evt_width
else:
paint_width = min(bmp_width, evt_width)
paint_height = int(paint_width / bmp_ratio)
elif self._allow_upscaling:
paint_height = evt_height
paint_width = evt_width
else:
paint_height = min(bmp_height, evt_height)
paint_width = min(bmp_width, evt_width)
paint_x = int(evt_width / 2.0 - paint_width / 2.0 + evt_x)
paint_y = int(evt_height / 2.0 - paint_height / 2.0 + evt_y)
if paint_width != bmp_width or paint_height != bmp_height:
img = bmp.ConvertToImage()
if self._resizing:
quality = wx.IMAGE_QUALITY_NORMAL
else:
quality = wx.IMAGE_QUALITY_HIGH
img.Rescale(paint_width, paint_height, quality)
bmp = wx.BitmapFromImage(img)
dc = wx.PaintDC(self)
dc.DrawBitmap(bmp, paint_x, paint_y)
def OnResize(self, event):
""" The resize event handler for the widget.
This method is only bound and called when content scaling is
enabled. It starts(restarts) a timer to perform a high quality
scaled repaint when resizing is finished.
"""
self._resizing = True
self._resize_timer.Start(60, True)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def GetBitmap(self, bitmap):
""" Get the underlying wx.Bitmap used to paint the control.
Returns
-------
result : wx.Bitmap or None
The bitmap being used to paint the control, or None if
no bitmap has been supplied.
"""
return self._bitmap
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def SetPreserveAspectRatio(self, preserve):
""" Set whether or not to preserve the image aspect ratio.
Parameters
----------
preserve : bool
If True then the aspect ratio of the image will be preserved
if it is scaled to fit. Otherwise, the aspect ratio will be
ignored.
"""
self._preserve_aspect_ratio = preserve
self.Refresh()
<|reserved_special_token_0|>
def SetAllowUpscaling(self, allow):
""" Set whether or not to allow the image to be scaled beyond
its natural size.
Parameters
----------
allow : bool
If True, then the image may be scaled larger than its
natural if it is scaled to fit. If False, the image will
never be scaled larger than its natural size. In either
case, the image may be scaled smaller.
"""
self._allow_upscaling = allow
self.Refresh()
class WXImageView(WXControl, AbstractTkImageView):
""" A Wx implementation of ImageView.
"""
_cached_size_hint = None
def create(self, parent):
""" Creates the underlying wxBitmapWidget control.
"""
self.widget = wxBitmapWidget(parent)
def initialize(self):
""" Initializes the attributes on the underlying control.
"""
super(WXImageView, self).initialize()
shell = self.shell_obj
self.set_image(shell.image)
self.set_scale_to_fit(shell.scale_to_fit)
self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)
self.set_allow_upscaling(shell.allow_upscaling)
def shell_image_changed(self, image):
""" The change handler for the 'image' attribute on the shell
component.
"""
self.set_image(image)
def shell_scale_to_fit_changed(self, scale_to_fit):
""" The change handler for the 'scale_to_fit' attribute on the
shell component.
"""
self.set_scale_to_fit(scale_to_fit)
def shell_preserve_aspect_ratio_changed(self, preserve):
""" The change handler for the 'preserve_aspect_ratio' attribute
on the shell component.
"""
self.set_preserve_aspect_ratio(preserve)
def shell_allow_upscaling_changed(self, allow):
""" The change handler for the 'allow_upscaling' attribute on
the shell component.
"""
self.set_allow_upscaling(allow)
def set_image(self, image):
""" Sets the image on the underlying wxBitmapWidget.
"""
bmp = image.as_wxBitmap() if image is not None else None
self.widget.SetBitmap(bmp)
cached = self._cached_size_hint
hint = self._cached_size_hint = self.size_hint()
if cached != hint:
self.shell_obj.size_hint_updated()
def set_scale_to_fit(self, scale_to_fit):
""" Sets whether or not the image scales with the underlying
control.
"""
self.widget.SetScaledContents(scale_to_fit)
def set_preserve_aspect_ratio(self, preserve):
""" Sets whether or not to preserve the aspect ratio of the
image when scaling.
"""
self.widget.SetPreserveAspectRatio(preserve)
def set_allow_upscaling(self, allow):
""" Sets whether or not the image will scale beyond its natural
size.
"""
self.widget.SetAllowUpscaling(allow)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class wxBitmapWidget(wx.Panel):
<|reserved_special_token_0|>
def __init__(self, parent):
""" Initialize a wxBitmapWidget.
Parameters
----------
parent : wx.Window
The wx.Window object which serves as the widget parent.
"""
super(wxBitmapWidget, self).__init__(parent)
self._bitmap = None
self._scaled_contents = False
self._preserve_aspect_ratio = False
self._allow_upscaling = False
self._resize_timer = None
self._resizing = False
self.Bind(wx.EVT_PAINT, self.OnPaint)
def OnPaint(self, event):
""" The paint event handler for the widget.
"""
bmp = self._bitmap
if bmp is None:
return
bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()
if bmp_width == 0 or bmp_height == 0:
return
evt_x = 0
evt_y = 0
evt_width, evt_height = self.GetSize().asTuple()
if not self._scaled_contents:
paint_x = max(0, int(evt_width / 2.0 - bmp_width / 2.0 + evt_x))
paint_y = max(0, int(evt_height / 2.0 - bmp_height / 2.0 + evt_y))
paint_width = bmp_width
paint_height = bmp_height
else:
if self._preserve_aspect_ratio:
bmp_ratio = float(bmp_width) / bmp_height
evt_ratio = float(evt_width) / evt_height
if evt_ratio >= bmp_ratio:
if self._allow_upscaling:
paint_height = evt_height
else:
paint_height = min(bmp_height, evt_height)
paint_width = int(paint_height * bmp_ratio)
else:
if self._allow_upscaling:
paint_width = evt_width
else:
paint_width = min(bmp_width, evt_width)
paint_height = int(paint_width / bmp_ratio)
elif self._allow_upscaling:
paint_height = evt_height
paint_width = evt_width
else:
paint_height = min(bmp_height, evt_height)
paint_width = min(bmp_width, evt_width)
paint_x = int(evt_width / 2.0 - paint_width / 2.0 + evt_x)
paint_y = int(evt_height / 2.0 - paint_height / 2.0 + evt_y)
if paint_width != bmp_width or paint_height != bmp_height:
img = bmp.ConvertToImage()
if self._resizing:
quality = wx.IMAGE_QUALITY_NORMAL
else:
quality = wx.IMAGE_QUALITY_HIGH
img.Rescale(paint_width, paint_height, quality)
bmp = wx.BitmapFromImage(img)
dc = wx.PaintDC(self)
dc.DrawBitmap(bmp, paint_x, paint_y)
def OnResize(self, event):
""" The resize event handler for the widget.
This method is only bound and called when content scaling is
enabled. It starts(restarts) a timer to perform a high quality
scaled repaint when resizing is finished.
"""
self._resizing = True
self._resize_timer.Start(60, True)
<|reserved_special_token_0|>
def GetBestSize(self):
""" Overridden method to return the size of the bitmap as the
best size for the widget.
"""
bmp = self._bitmap
return wx.Size(bmp.GetWidth(), bmp.GetHeight())
def GetBestSizeTuple(self):
""" Overridden method to return the size of the bitmap as the
best size for the widget.
"""
return self.GetBestSize().asTuple()
def GetBitmap(self, bitmap):
""" Get the underlying wx.Bitmap used to paint the control.
Returns
-------
result : wx.Bitmap or None
The bitmap being used to paint the control, or None if
no bitmap has been supplied.
"""
return self._bitmap
def SetBitmap(self, bitmap):
""" Set the underlying wx.Bitmap and refresh the widget.
Parameters
----------
bitmap : wx.Bitmap
The bitmap to paint on the widget.
"""
self._bitmap = bitmap
self.Refresh()
def GetScaledContents(self):
""" Whether or not the bitmap is scaled to fit the bounds.
Returns
-------
result : bool
Whether or not the bitmap is scaled to fit the bounds of
the widget.
"""
return self._scaled_contents
<|reserved_special_token_0|>
def GetPreserveAspectRatio(self):
""" Returns whether or not the aspect ratio of the image is
maintained during a resize.
"""
return self._preserve_aspect_ratio
def SetPreserveAspectRatio(self, preserve):
""" Set whether or not to preserve the image aspect ratio.
Parameters
----------
preserve : bool
If True then the aspect ratio of the image will be preserved
if it is scaled to fit. Otherwise, the aspect ratio will be
ignored.
"""
self._preserve_aspect_ratio = preserve
self.Refresh()
<|reserved_special_token_0|>
def SetAllowUpscaling(self, allow):
""" Set whether or not to allow the image to be scaled beyond
its natural size.
Parameters
----------
allow : bool
If True, then the image may be scaled larger than its
natural if it is scaled to fit. If False, the image will
never be scaled larger than its natural size. In either
case, the image may be scaled smaller.
"""
self._allow_upscaling = allow
self.Refresh()
class WXImageView(WXControl, AbstractTkImageView):
""" A Wx implementation of ImageView.
"""
_cached_size_hint = None
def create(self, parent):
""" Creates the underlying wxBitmapWidget control.
"""
self.widget = wxBitmapWidget(parent)
def initialize(self):
""" Initializes the attributes on the underlying control.
"""
super(WXImageView, self).initialize()
shell = self.shell_obj
self.set_image(shell.image)
self.set_scale_to_fit(shell.scale_to_fit)
self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)
self.set_allow_upscaling(shell.allow_upscaling)
def shell_image_changed(self, image):
""" The change handler for the 'image' attribute on the shell
component.
"""
self.set_image(image)
def shell_scale_to_fit_changed(self, scale_to_fit):
""" The change handler for the 'scale_to_fit' attribute on the
shell component.
"""
self.set_scale_to_fit(scale_to_fit)
def shell_preserve_aspect_ratio_changed(self, preserve):
""" The change handler for the 'preserve_aspect_ratio' attribute
on the shell component.
"""
self.set_preserve_aspect_ratio(preserve)
def shell_allow_upscaling_changed(self, allow):
""" The change handler for the 'allow_upscaling' attribute on
the shell component.
"""
self.set_allow_upscaling(allow)
def set_image(self, image):
""" Sets the image on the underlying wxBitmapWidget.
"""
bmp = image.as_wxBitmap() if image is not None else None
self.widget.SetBitmap(bmp)
cached = self._cached_size_hint
hint = self._cached_size_hint = self.size_hint()
if cached != hint:
self.shell_obj.size_hint_updated()
def set_scale_to_fit(self, scale_to_fit):
""" Sets whether or not the image scales with the underlying
control.
"""
self.widget.SetScaledContents(scale_to_fit)
def set_preserve_aspect_ratio(self, preserve):
""" Sets whether or not to preserve the aspect ratio of the
image when scaling.
"""
self.widget.SetPreserveAspectRatio(preserve)
def set_allow_upscaling(self, allow):
""" Sets whether or not the image will scale beyond its natural
size.
"""
self.widget.SetAllowUpscaling(allow)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class wxBitmapWidget(wx.Panel):
    """ A wx.Panel subclass which paints a provided wx.Bitmap.

    This differs from wx.StaticBitmap in that it provides the option to
    scale the provided bitmap to the bounds of the widget. If the widget
    is set to scale its contents, low quality scaling will occur during
    resize, with a high quality pass performed once resizing is
    finished.

    """
    def __init__(self, parent):
        """ Initialize a wxBitmapWidget.

        Parameters
        ----------
        parent : wx.Window
            The wx.Window object which serves as the widget parent.

        """
        super(wxBitmapWidget, self).__init__(parent)
        # The wx.Bitmap to paint, or None if no bitmap has been set.
        self._bitmap = None
        # Whether the bitmap is scaled to fill the widget bounds.
        self._scaled_contents = False
        # Whether scaling preserves the bitmap's aspect ratio.
        self._preserve_aspect_ratio = False
        # Whether the bitmap may be scaled beyond its natural size.
        self._allow_upscaling = False
        # A one-shot wx.Timer used to trigger a high quality repaint
        # once interactive resizing has finished. Created lazily when
        # content scaling is enabled.
        self._resize_timer = None
        # True while an interactive resize is in progress; selects the
        # faster, lower quality scaling path in OnPaint.
        self._resizing = False
        self.Bind(wx.EVT_PAINT, self.OnPaint)

    #--------------------------------------------------------------------------
    # Private API
    #--------------------------------------------------------------------------
    def OnPaint(self, event):
        """ The paint event handler for the widget.

        Computes the paint rectangle for the bitmap according to the
        scaling flags, rescales the bitmap if required, and draws it.

        """
        bmp = self._bitmap
        if bmp is None:
            return
        bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()
        if bmp_width == 0 or bmp_height == 0:
            return
        evt_x = 0
        evt_y = 0
        evt_width, evt_height = self.GetSize().asTuple()
        if not self._scaled_contents:
            # If the image isn't scaled, it is centered if possible.
            # Otherwise, it's painted at the origin and clipped.
            paint_x = max(0, int((evt_width / 2. - bmp_width / 2.) + evt_x))
            paint_y = max(0, int((evt_height / 2. - bmp_height / 2.) + evt_y))
            paint_width = bmp_width
            paint_height = bmp_height
        else:
            # If the image *is* scaled, its scaled size depends on the
            # size of the paint area as well as the other scaling flags.
            if self._preserve_aspect_ratio:
                bmp_ratio = float(bmp_width) / bmp_height
                evt_ratio = float(evt_width) / evt_height
                if evt_ratio >= bmp_ratio:
                    # The paint area is proportionally wider than the
                    # bitmap, so height is the constraining dimension.
                    if self._allow_upscaling:
                        paint_height = evt_height
                    else:
                        paint_height = min(bmp_height, evt_height)
                    paint_width = int(paint_height * bmp_ratio)
                else:
                    # The paint area is proportionally taller than the
                    # bitmap, so width is the constraining dimension.
                    if self._allow_upscaling:
                        paint_width = evt_width
                    else:
                        paint_width = min(bmp_width, evt_width)
                    paint_height = int(paint_width / bmp_ratio)
            else:
                if self._allow_upscaling:
                    paint_height = evt_height
                    paint_width = evt_width
                else:
                    paint_height = min(bmp_height, evt_height)
                    paint_width = min(bmp_width, evt_width)
            # In all cases of scaling, we know that the scaled image is
            # no larger than the paint area, and can thus be centered.
            paint_x = int((evt_width / 2. - paint_width / 2.) + evt_x)
            paint_y = int((evt_height / 2. - paint_height / 2.) + evt_y)
        # Scale the bitmap if needed, using a faster method if the
        # image is currently being resized.
        if paint_width != bmp_width or paint_height != bmp_height:
            img = bmp.ConvertToImage()
            if self._resizing:
                quality = wx.IMAGE_QUALITY_NORMAL
            else:
                quality = wx.IMAGE_QUALITY_HIGH
            img.Rescale(paint_width, paint_height, quality)
            bmp = wx.BitmapFromImage(img)
        # Finally, draw the bitmap into the computed location.
        dc = wx.PaintDC(self)
        dc.DrawBitmap(bmp, paint_x, paint_y)

    def OnResize(self, event):
        """ The resize event handler for the widget.

        This method is only bound and called when content scaling is
        enabled. It starts (restarts) a one-shot timer to perform a
        high quality scaled repaint when resizing is finished.

        """
        self._resizing = True
        self._resize_timer.Start(60, True)

    def OnResizeEnd(self, event):
        """ The repaint timer event handler.

        This method is only bound and called when content scaling is
        enabled and resizing has completed. It triggers a high quality
        repaint.

        """
        self._resizing = False
        self.Refresh()

    #--------------------------------------------------------------------------
    # Public API
    #--------------------------------------------------------------------------
    def GetBestSize(self):
        """ Overridden method to return the size of the bitmap as the
        best size for the widget.

        Returns a zero size when no bitmap has been supplied, which
        avoids an AttributeError if the layout system queries the
        widget before SetBitmap has been called.

        """
        bmp = self._bitmap
        if bmp is None:
            return wx.Size(0, 0)
        return wx.Size(bmp.GetWidth(), bmp.GetHeight())

    def GetBestSizeTuple(self):
        """ Overridden method to return the size of the bitmap as the
        best size for the widget, as a (width, height) tuple.

        """
        return self.GetBestSize().asTuple()

    def GetBitmap(self, bitmap=None):
        """ Get the underlying wx.Bitmap used to paint the control.

        Parameters
        ----------
        bitmap : object, optional
            Unused. Retained only for backward compatibility with the
            original (accidental) signature of this getter.

        Returns
        -------
        result : wx.Bitmap or None
            The bitmap being used to paint the control, or None if
            no bitmap has been supplied.

        """
        return self._bitmap

    def SetBitmap(self, bitmap):
        """ Set the underlying wx.Bitmap and refresh the widget.

        Parameters
        ----------
        bitmap : wx.Bitmap
            The bitmap to paint on the widget.

        """
        self._bitmap = bitmap
        self.Refresh()

    def GetScaledContents(self):
        """ Whether or not the bitmap is scaled to fit the bounds.

        Returns
        -------
        result : bool
            Whether or not the bitmap is scaled to fit the bounds of
            the widget.

        """
        return self._scaled_contents

    def SetScaledContents(self, scaled):
        """ Set whether or not the bitmap should be scaled to fit the
        bounds of the widget.

        Parameters
        ----------
        scaled : bool
            Whether or not to scale the bitmap to fit the bounds of
            the widget.

        """
        if scaled:
            if not self._scaled_contents:
                self._scaled_contents = True
                self._resize_timer = wx.Timer(self)
                self.Bind(wx.EVT_TIMER, self.OnResizeEnd)
                self.Bind(wx.EVT_SIZE, self.OnResize)
        else:
            if self._scaled_contents:
                self._scaled_contents = False
                # Stop and clear the resize timer. The original code
                # assigned to a nonexistent '_timer' attribute here,
                # which leaked the timer and never released it.
                if self._resize_timer is not None:
                    self._resize_timer.Stop()
                self._resize_timer = None
                self.Unbind(wx.EVT_TIMER, handler=self.OnResizeEnd)
                self.Unbind(wx.EVT_SIZE, handler=self.OnResize)
        self.Refresh()

    def GetPreserveAspectRatio(self):
        """ Returns whether or not the aspect ratio of the image is
        maintained during a resize.

        """
        return self._preserve_aspect_ratio

    def SetPreserveAspectRatio(self, preserve):
        """ Set whether or not to preserve the image aspect ratio.

        Parameters
        ----------
        preserve : bool
            If True then the aspect ratio of the image will be
            preserved if it is scaled to fit. Otherwise, the aspect
            ratio will be ignored.

        """
        self._preserve_aspect_ratio = preserve
        self.Refresh()

    def GetAllowUpscaling(self):
        """ Returns whether or not the image can be scaled greater
        than its natural size.

        """
        return self._allow_upscaling

    def SetAllowUpscaling(self, allow):
        """ Set whether or not to allow the image to be scaled beyond
        its natural size.

        Parameters
        ----------
        allow : bool
            If True, then the image may be scaled larger than its
            natural size if it is scaled to fit. If False, the image
            will never be scaled larger than its natural size. In
            either case, the image may be scaled smaller.

        """
        self._allow_upscaling = allow
        self.Refresh()
class WXImageView(WXControl, AbstractTkImageView):
    """ A Wx implementation of ImageView.

    Bridges the toolkit-independent shell object to the underlying
    wxBitmapWidget, forwarding shell attribute changes to the widget.

    """
    #: The internal cached size hint which is used to determine whether
    #: or not a size hint updated event should be emitted when the
    #: image changes.
    _cached_size_hint = None
    def create(self, parent):
        """ Creates the underlying wxBitmapWidget control.
        """
        self.widget = wxBitmapWidget(parent)
    def initialize(self):
        """ Initializes the attributes on the underlying control.
        """
        super(WXImageView, self).initialize()
        shell = self.shell_obj
        self.set_image(shell.image)
        self.set_scale_to_fit(shell.scale_to_fit)
        self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)
        self.set_allow_upscaling(shell.allow_upscaling)
    def shell_image_changed(self, image):
        """ The change handler for the 'image' attribute on the shell
        component.
        """
        self.set_image(image)
    def shell_scale_to_fit_changed(self, scale_to_fit):
        """ The change handler for the 'scale_to_fit' attribute on the
        shell component.
        """
        self.set_scale_to_fit(scale_to_fit)
    def shell_preserve_aspect_ratio_changed(self, preserve):
        """ The change handler for the 'preserve_aspect_ratio' attribute
        on the shell component.
        """
        self.set_preserve_aspect_ratio(preserve)
    def shell_allow_upscaling_changed(self, allow):
        """ The change handler for the 'allow_upscaling' attribute on
        the shell component.
        """
        self.set_allow_upscaling(allow)
    def set_image(self, image):
        """ Sets the image on the underlying wxBitmapWidget.
        """
        bmp = image.as_wxBitmap() if image is not None else None
        self.widget.SetBitmap(bmp)
        # The size hint depends on the bitmap, so it may have changed.
        # Recompute it here (after the widget has been updated) and
        # notify the shell only when it actually differs from the
        # cached value, so constraint updates occur only when needed.
        cached = self._cached_size_hint
        hint = self._cached_size_hint = self.size_hint()
        if cached != hint:
            self.shell_obj.size_hint_updated()
    def set_scale_to_fit(self, scale_to_fit):
        """ Sets whether or not the image scales with the underlying
        control.
        """
        self.widget.SetScaledContents(scale_to_fit)
    def set_preserve_aspect_ratio(self, preserve):
        """ Sets whether or not to preserve the aspect ratio of the
        image when scaling.
        """
        self.widget.SetPreserveAspectRatio(preserve)
    def set_allow_upscaling(self, allow):
        """ Sets whether or not the image will scale beyond its natural
        size.
        """
        self.widget.SetAllowUpscaling(allow)
<|reserved_special_token_1|>
#------------------------------------------------------------------------------
# Copyright (c) 2011, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
import wx
from .wx_control import WXControl
from ...components.image_view import AbstractTkImageView
class wxBitmapWidget(wx.Panel):
    """ A wx.Panel subclass which paints a provided wx.Bitmap.

    This differs from wx.StaticBitmap in that it provides the option to
    scale the provided bitmap to the bounds of the widget. If the widget
    is set to scale its contents, low quality scaling will occur during
    resize, with a high quality pass performed once resizing is
    finished.

    """
    def __init__(self, parent):
        """ Initialize a wxBitmapWidget.

        Parameters
        ----------
        parent : wx.Window
            The wx.Window object which serves as the widget parent.

        """
        super(wxBitmapWidget, self).__init__(parent)
        # The wx.Bitmap to paint, or None if no bitmap has been set.
        self._bitmap = None
        # Whether the bitmap is scaled to fill the widget bounds.
        self._scaled_contents = False
        # Whether scaling preserves the bitmap's aspect ratio.
        self._preserve_aspect_ratio = False
        # Whether the bitmap may be scaled beyond its natural size.
        self._allow_upscaling = False
        # A one-shot wx.Timer used to trigger a high quality repaint
        # once interactive resizing has finished. Created lazily when
        # content scaling is enabled.
        self._resize_timer = None
        # True while an interactive resize is in progress; selects the
        # faster, lower quality scaling path in OnPaint.
        self._resizing = False
        self.Bind(wx.EVT_PAINT, self.OnPaint)

    #--------------------------------------------------------------------------
    # Private API
    #--------------------------------------------------------------------------
    def OnPaint(self, event):
        """ The paint event handler for the widget.

        Computes the paint rectangle for the bitmap according to the
        scaling flags, rescales the bitmap if required, and draws it.

        """
        bmp = self._bitmap
        if bmp is None:
            return
        bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()
        if bmp_width == 0 or bmp_height == 0:
            return
        evt_x = 0
        evt_y = 0
        evt_width, evt_height = self.GetSize().asTuple()
        if not self._scaled_contents:
            # If the image isn't scaled, it is centered if possible.
            # Otherwise, it's painted at the origin and clipped.
            paint_x = max(0, int((evt_width / 2. - bmp_width / 2.) + evt_x))
            paint_y = max(0, int((evt_height / 2. - bmp_height / 2.) + evt_y))
            paint_width = bmp_width
            paint_height = bmp_height
        else:
            # If the image *is* scaled, its scaled size depends on the
            # size of the paint area as well as the other scaling flags.
            if self._preserve_aspect_ratio:
                bmp_ratio = float(bmp_width) / bmp_height
                evt_ratio = float(evt_width) / evt_height
                if evt_ratio >= bmp_ratio:
                    # The paint area is proportionally wider than the
                    # bitmap, so height is the constraining dimension.
                    if self._allow_upscaling:
                        paint_height = evt_height
                    else:
                        paint_height = min(bmp_height, evt_height)
                    paint_width = int(paint_height * bmp_ratio)
                else:
                    # The paint area is proportionally taller than the
                    # bitmap, so width is the constraining dimension.
                    if self._allow_upscaling:
                        paint_width = evt_width
                    else:
                        paint_width = min(bmp_width, evt_width)
                    paint_height = int(paint_width / bmp_ratio)
            else:
                if self._allow_upscaling:
                    paint_height = evt_height
                    paint_width = evt_width
                else:
                    paint_height = min(bmp_height, evt_height)
                    paint_width = min(bmp_width, evt_width)
            # In all cases of scaling, we know that the scaled image is
            # no larger than the paint area, and can thus be centered.
            paint_x = int((evt_width / 2. - paint_width / 2.) + evt_x)
            paint_y = int((evt_height / 2. - paint_height / 2.) + evt_y)
        # Scale the bitmap if needed, using a faster method if the
        # image is currently being resized.
        if paint_width != bmp_width or paint_height != bmp_height:
            img = bmp.ConvertToImage()
            if self._resizing:
                quality = wx.IMAGE_QUALITY_NORMAL
            else:
                quality = wx.IMAGE_QUALITY_HIGH
            img.Rescale(paint_width, paint_height, quality)
            bmp = wx.BitmapFromImage(img)
        # Finally, draw the bitmap into the computed location.
        dc = wx.PaintDC(self)
        dc.DrawBitmap(bmp, paint_x, paint_y)

    def OnResize(self, event):
        """ The resize event handler for the widget.

        This method is only bound and called when content scaling is
        enabled. It starts (restarts) a one-shot timer to perform a
        high quality scaled repaint when resizing is finished.

        """
        self._resizing = True
        self._resize_timer.Start(60, True)

    def OnResizeEnd(self, event):
        """ The repaint timer event handler.

        This method is only bound and called when content scaling is
        enabled and resizing has completed. It triggers a high quality
        repaint.

        """
        self._resizing = False
        self.Refresh()

    #--------------------------------------------------------------------------
    # Public API
    #--------------------------------------------------------------------------
    def GetBestSize(self):
        """ Overridden method to return the size of the bitmap as the
        best size for the widget.

        Returns a zero size when no bitmap has been supplied, which
        avoids an AttributeError if the layout system queries the
        widget before SetBitmap has been called.

        """
        bmp = self._bitmap
        if bmp is None:
            return wx.Size(0, 0)
        return wx.Size(bmp.GetWidth(), bmp.GetHeight())

    def GetBestSizeTuple(self):
        """ Overridden method to return the size of the bitmap as the
        best size for the widget, as a (width, height) tuple.

        """
        return self.GetBestSize().asTuple()

    def GetBitmap(self, bitmap=None):
        """ Get the underlying wx.Bitmap used to paint the control.

        Parameters
        ----------
        bitmap : object, optional
            Unused. Retained only for backward compatibility with the
            original (accidental) signature of this getter.

        Returns
        -------
        result : wx.Bitmap or None
            The bitmap being used to paint the control, or None if
            no bitmap has been supplied.

        """
        return self._bitmap

    def SetBitmap(self, bitmap):
        """ Set the underlying wx.Bitmap and refresh the widget.

        Parameters
        ----------
        bitmap : wx.Bitmap
            The bitmap to paint on the widget.

        """
        self._bitmap = bitmap
        self.Refresh()

    def GetScaledContents(self):
        """ Whether or not the bitmap is scaled to fit the bounds.

        Returns
        -------
        result : bool
            Whether or not the bitmap is scaled to fit the bounds of
            the widget.

        """
        return self._scaled_contents

    def SetScaledContents(self, scaled):
        """ Set whether or not the bitmap should be scaled to fit the
        bounds of the widget.

        Parameters
        ----------
        scaled : bool
            Whether or not to scale the bitmap to fit the bounds of
            the widget.

        """
        if scaled:
            if not self._scaled_contents:
                self._scaled_contents = True
                self._resize_timer = wx.Timer(self)
                self.Bind(wx.EVT_TIMER, self.OnResizeEnd)
                self.Bind(wx.EVT_SIZE, self.OnResize)
        else:
            if self._scaled_contents:
                self._scaled_contents = False
                # Stop and clear the resize timer. The original code
                # assigned to a nonexistent '_timer' attribute here,
                # which leaked the timer and never released it.
                if self._resize_timer is not None:
                    self._resize_timer.Stop()
                self._resize_timer = None
                self.Unbind(wx.EVT_TIMER, handler=self.OnResizeEnd)
                self.Unbind(wx.EVT_SIZE, handler=self.OnResize)
        self.Refresh()

    def GetPreserveAspectRatio(self):
        """ Returns whether or not the aspect ratio of the image is
        maintained during a resize.

        """
        return self._preserve_aspect_ratio

    def SetPreserveAspectRatio(self, preserve):
        """ Set whether or not to preserve the image aspect ratio.

        Parameters
        ----------
        preserve : bool
            If True then the aspect ratio of the image will be
            preserved if it is scaled to fit. Otherwise, the aspect
            ratio will be ignored.

        """
        self._preserve_aspect_ratio = preserve
        self.Refresh()

    def GetAllowUpscaling(self):
        """ Returns whether or not the image can be scaled greater
        than its natural size.

        """
        return self._allow_upscaling

    def SetAllowUpscaling(self, allow):
        """ Set whether or not to allow the image to be scaled beyond
        its natural size.

        Parameters
        ----------
        allow : bool
            If True, then the image may be scaled larger than its
            natural size if it is scaled to fit. If False, the image
            will never be scaled larger than its natural size. In
            either case, the image may be scaled smaller.

        """
        self._allow_upscaling = allow
        self.Refresh()
class WXImageView(WXControl, AbstractTkImageView):
    """ A Wx implementation of ImageView.

    Bridges the toolkit-independent shell object to the underlying
    wxBitmapWidget, forwarding shell attribute changes to the widget.

    """
    #: Cache of the most recently computed size hint. Used to decide
    #: whether a size_hint_updated event needs to be emitted when the
    #: image changes.
    _cached_size_hint = None

    #--------------------------------------------------------------------------
    # Setup methods
    #--------------------------------------------------------------------------
    def create(self, parent):
        """ Create the underlying wxBitmapWidget control.

        """
        self.widget = wxBitmapWidget(parent)

    def initialize(self):
        """ Initialize the attributes on the underlying control from
        the shell object.

        """
        super(WXImageView, self).initialize()
        shell = self.shell_obj
        self.set_image(shell.image)
        self.set_scale_to_fit(shell.scale_to_fit)
        self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)
        self.set_allow_upscaling(shell.allow_upscaling)

    #--------------------------------------------------------------------------
    # Implementation
    #--------------------------------------------------------------------------
    def shell_image_changed(self, image):
        """ Handle a change to the 'image' attribute on the shell
        component.

        """
        self.set_image(image)

    def shell_scale_to_fit_changed(self, scale_to_fit):
        """ Handle a change to the 'scale_to_fit' attribute on the
        shell component.

        """
        self.set_scale_to_fit(scale_to_fit)

    def shell_preserve_aspect_ratio_changed(self, preserve):
        """ Handle a change to the 'preserve_aspect_ratio' attribute
        on the shell component.

        """
        self.set_preserve_aspect_ratio(preserve)

    def shell_allow_upscaling_changed(self, allow):
        """ Handle a change to the 'allow_upscaling' attribute on the
        shell component.

        """
        self.set_allow_upscaling(allow)

    #--------------------------------------------------------------------------
    # Widget Update Methods
    #--------------------------------------------------------------------------
    def set_image(self, image):
        """ Set the image on the underlying wxBitmapWidget.

        """
        if image is None:
            bmp = None
        else:
            bmp = image.as_wxBitmap()
        self.widget.SetBitmap(bmp)
        # The size hint depends on the bitmap, so it may have changed.
        # Recompute it now that the widget has been updated, and notify
        # the shell only when the hint actually differs from the cached
        # value. Doing the comparison here (rather than on the shell)
        # guarantees the widget reflects the new image before the hint
        # is computed, and avoids needless constraint updates.
        old_hint = self._cached_size_hint
        new_hint = self.size_hint()
        self._cached_size_hint = new_hint
        if new_hint != old_hint:
            self.shell_obj.size_hint_updated()

    def set_scale_to_fit(self, scale_to_fit):
        """ Set whether or not the image scales with the underlying
        control.

        """
        self.widget.SetScaledContents(scale_to_fit)

    def set_preserve_aspect_ratio(self, preserve):
        """ Set whether or not to preserve the aspect ratio of the
        image when scaling.

        """
        self.widget.SetPreserveAspectRatio(preserve)

    def set_allow_upscaling(self, allow):
        """ Set whether or not the image will scale beyond its natural
        size.

        """
        self.widget.SetAllowUpscaling(allow)
|
flexible
|
{
"blob_id": "d4198c2c3706e03ba1bce3e31c5139f01248a184",
"index": 5161,
"step-1": "<mask token>\n\n\nclass wxBitmapWidget(wx.Panel):\n <mask token>\n\n def __init__(self, parent):\n \"\"\" Initialize a wxBitmapWidget.\n\n Parameters\n ----------\n parent : wx.Window\n The wx.Window object which serves as the widget parent.\n \n \"\"\"\n super(wxBitmapWidget, self).__init__(parent)\n self._bitmap = None\n self._scaled_contents = False\n self._preserve_aspect_ratio = False\n self._allow_upscaling = False\n self._resize_timer = None\n self._resizing = False\n self.Bind(wx.EVT_PAINT, self.OnPaint)\n\n def OnPaint(self, event):\n \"\"\" The paint event handler for the widget.\n\n \"\"\"\n bmp = self._bitmap\n if bmp is None:\n return\n bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()\n if bmp_width == 0 or bmp_height == 0:\n return\n evt_x = 0\n evt_y = 0\n evt_width, evt_height = self.GetSize().asTuple()\n if not self._scaled_contents:\n paint_x = max(0, int(evt_width / 2.0 - bmp_width / 2.0 + evt_x))\n paint_y = max(0, int(evt_height / 2.0 - bmp_height / 2.0 + evt_y))\n paint_width = bmp_width\n paint_height = bmp_height\n else:\n if self._preserve_aspect_ratio:\n bmp_ratio = float(bmp_width) / bmp_height\n evt_ratio = float(evt_width) / evt_height\n if evt_ratio >= bmp_ratio:\n if self._allow_upscaling:\n paint_height = evt_height\n else:\n paint_height = min(bmp_height, evt_height)\n paint_width = int(paint_height * bmp_ratio)\n else:\n if self._allow_upscaling:\n paint_width = evt_width\n else:\n paint_width = min(bmp_width, evt_width)\n paint_height = int(paint_width / bmp_ratio)\n elif self._allow_upscaling:\n paint_height = evt_height\n paint_width = evt_width\n else:\n paint_height = min(bmp_height, evt_height)\n paint_width = min(bmp_width, evt_width)\n paint_x = int(evt_width / 2.0 - paint_width / 2.0 + evt_x)\n paint_y = int(evt_height / 2.0 - paint_height / 2.0 + evt_y)\n if paint_width != bmp_width or paint_height != bmp_height:\n img = bmp.ConvertToImage()\n if self._resizing:\n quality = wx.IMAGE_QUALITY_NORMAL\n 
else:\n quality = wx.IMAGE_QUALITY_HIGH\n img.Rescale(paint_width, paint_height, quality)\n bmp = wx.BitmapFromImage(img)\n dc = wx.PaintDC(self)\n dc.DrawBitmap(bmp, paint_x, paint_y)\n\n def OnResize(self, event):\n \"\"\" The resize event handler for the widget.\n\n This method is only bound and called when content scaling is\n enabled. It starts(restarts) a timer to perform a high quality\n scaled repaint when resizing is finished.\n\n \"\"\"\n self._resizing = True\n self._resize_timer.Start(60, True)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def SetAllowUpscaling(self, allow):\n \"\"\" Set whether or not to allow the image to be scaled beyond\n its natural size.\n\n Parameters\n ----------\n allow : bool\n If True, then the image may be scaled larger than its \n natural if it is scaled to fit. If False, the image will\n never be scaled larger than its natural size. In either\n case, the image may be scaled smaller.\n\n \"\"\"\n self._allow_upscaling = allow\n self.Refresh()\n\n\nclass WXImageView(WXControl, AbstractTkImageView):\n \"\"\" A Wx implementation of ImageView.\n\n \"\"\"\n _cached_size_hint = None\n\n def create(self, parent):\n \"\"\" Creates the underlying wxBitmapWidget control.\n\n \"\"\"\n self.widget = wxBitmapWidget(parent)\n\n def initialize(self):\n \"\"\" Initializes the attributes on the underlying control.\n\n \"\"\"\n super(WXImageView, self).initialize()\n shell = self.shell_obj\n self.set_image(shell.image)\n self.set_scale_to_fit(shell.scale_to_fit)\n self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)\n self.set_allow_upscaling(shell.allow_upscaling)\n\n def shell_image_changed(self, image):\n \"\"\" The change handler for the 'image' attribute on the shell \n component.\n\n \"\"\"\n self.set_image(image)\n\n def shell_scale_to_fit_changed(self, scale_to_fit):\n \"\"\" The change handler for the 'scale_to_fit' 
attribute on the \n shell component.\n\n \"\"\"\n self.set_scale_to_fit(scale_to_fit)\n\n def shell_preserve_aspect_ratio_changed(self, preserve):\n \"\"\" The change handler for the 'preserve_aspect_ratio' attribute\n on the shell component.\n\n \"\"\"\n self.set_preserve_aspect_ratio(preserve)\n\n def shell_allow_upscaling_changed(self, allow):\n \"\"\" The change handler for the 'allow_upscaling' attribute on \n the shell component.\n\n \"\"\"\n self.set_allow_upscaling(allow)\n\n def set_image(self, image):\n \"\"\" Sets the image on the underlying wxBitmapWidget.\n\n \"\"\"\n bmp = image.as_wxBitmap() if image is not None else None\n self.widget.SetBitmap(bmp)\n cached = self._cached_size_hint\n hint = self._cached_size_hint = self.size_hint()\n if cached != hint:\n self.shell_obj.size_hint_updated()\n\n def set_scale_to_fit(self, scale_to_fit):\n \"\"\" Sets whether or not the image scales with the underlying \n control.\n\n \"\"\"\n self.widget.SetScaledContents(scale_to_fit)\n\n def set_preserve_aspect_ratio(self, preserve):\n \"\"\" Sets whether or not to preserve the aspect ratio of the \n image when scaling.\n\n \"\"\"\n self.widget.SetPreserveAspectRatio(preserve)\n\n def set_allow_upscaling(self, allow):\n \"\"\" Sets whether or not the image will scale beyond its natural\n size.\n\n \"\"\"\n self.widget.SetAllowUpscaling(allow)\n",
"step-2": "<mask token>\n\n\nclass wxBitmapWidget(wx.Panel):\n <mask token>\n\n def __init__(self, parent):\n \"\"\" Initialize a wxBitmapWidget.\n\n Parameters\n ----------\n parent : wx.Window\n The wx.Window object which serves as the widget parent.\n \n \"\"\"\n super(wxBitmapWidget, self).__init__(parent)\n self._bitmap = None\n self._scaled_contents = False\n self._preserve_aspect_ratio = False\n self._allow_upscaling = False\n self._resize_timer = None\n self._resizing = False\n self.Bind(wx.EVT_PAINT, self.OnPaint)\n\n def OnPaint(self, event):\n \"\"\" The paint event handler for the widget.\n\n \"\"\"\n bmp = self._bitmap\n if bmp is None:\n return\n bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()\n if bmp_width == 0 or bmp_height == 0:\n return\n evt_x = 0\n evt_y = 0\n evt_width, evt_height = self.GetSize().asTuple()\n if not self._scaled_contents:\n paint_x = max(0, int(evt_width / 2.0 - bmp_width / 2.0 + evt_x))\n paint_y = max(0, int(evt_height / 2.0 - bmp_height / 2.0 + evt_y))\n paint_width = bmp_width\n paint_height = bmp_height\n else:\n if self._preserve_aspect_ratio:\n bmp_ratio = float(bmp_width) / bmp_height\n evt_ratio = float(evt_width) / evt_height\n if evt_ratio >= bmp_ratio:\n if self._allow_upscaling:\n paint_height = evt_height\n else:\n paint_height = min(bmp_height, evt_height)\n paint_width = int(paint_height * bmp_ratio)\n else:\n if self._allow_upscaling:\n paint_width = evt_width\n else:\n paint_width = min(bmp_width, evt_width)\n paint_height = int(paint_width / bmp_ratio)\n elif self._allow_upscaling:\n paint_height = evt_height\n paint_width = evt_width\n else:\n paint_height = min(bmp_height, evt_height)\n paint_width = min(bmp_width, evt_width)\n paint_x = int(evt_width / 2.0 - paint_width / 2.0 + evt_x)\n paint_y = int(evt_height / 2.0 - paint_height / 2.0 + evt_y)\n if paint_width != bmp_width or paint_height != bmp_height:\n img = bmp.ConvertToImage()\n if self._resizing:\n quality = wx.IMAGE_QUALITY_NORMAL\n 
else:\n quality = wx.IMAGE_QUALITY_HIGH\n img.Rescale(paint_width, paint_height, quality)\n bmp = wx.BitmapFromImage(img)\n dc = wx.PaintDC(self)\n dc.DrawBitmap(bmp, paint_x, paint_y)\n\n def OnResize(self, event):\n \"\"\" The resize event handler for the widget.\n\n This method is only bound and called when content scaling is\n enabled. It starts(restarts) a timer to perform a high quality\n scaled repaint when resizing is finished.\n\n \"\"\"\n self._resizing = True\n self._resize_timer.Start(60, True)\n <mask token>\n <mask token>\n <mask token>\n\n def GetBitmap(self, bitmap):\n \"\"\" Get the underlying wx.Bitmap used to paint the control.\n\n Returns\n -------\n result : wx.Bitmap or None\n The bitmap being used to paint the control, or None if\n no bitmap has been supplied.\n\n \"\"\"\n return self._bitmap\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def SetPreserveAspectRatio(self, preserve):\n \"\"\" Set whether or not to preserve the image aspect ratio.\n\n Parameters\n ----------\n preserve : bool\n If True then the aspect ratio of the image will be preserved\n if it is scaled to fit. Otherwise, the aspect ratio will be\n ignored.\n\n \"\"\"\n self._preserve_aspect_ratio = preserve\n self.Refresh()\n <mask token>\n\n def SetAllowUpscaling(self, allow):\n \"\"\" Set whether or not to allow the image to be scaled beyond\n its natural size.\n\n Parameters\n ----------\n allow : bool\n If True, then the image may be scaled larger than its \n natural if it is scaled to fit. If False, the image will\n never be scaled larger than its natural size. 
In either\n case, the image may be scaled smaller.\n\n \"\"\"\n self._allow_upscaling = allow\n self.Refresh()\n\n\nclass WXImageView(WXControl, AbstractTkImageView):\n \"\"\" A Wx implementation of ImageView.\n\n \"\"\"\n _cached_size_hint = None\n\n def create(self, parent):\n \"\"\" Creates the underlying wxBitmapWidget control.\n\n \"\"\"\n self.widget = wxBitmapWidget(parent)\n\n def initialize(self):\n \"\"\" Initializes the attributes on the underlying control.\n\n \"\"\"\n super(WXImageView, self).initialize()\n shell = self.shell_obj\n self.set_image(shell.image)\n self.set_scale_to_fit(shell.scale_to_fit)\n self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)\n self.set_allow_upscaling(shell.allow_upscaling)\n\n def shell_image_changed(self, image):\n \"\"\" The change handler for the 'image' attribute on the shell \n component.\n\n \"\"\"\n self.set_image(image)\n\n def shell_scale_to_fit_changed(self, scale_to_fit):\n \"\"\" The change handler for the 'scale_to_fit' attribute on the \n shell component.\n\n \"\"\"\n self.set_scale_to_fit(scale_to_fit)\n\n def shell_preserve_aspect_ratio_changed(self, preserve):\n \"\"\" The change handler for the 'preserve_aspect_ratio' attribute\n on the shell component.\n\n \"\"\"\n self.set_preserve_aspect_ratio(preserve)\n\n def shell_allow_upscaling_changed(self, allow):\n \"\"\" The change handler for the 'allow_upscaling' attribute on \n the shell component.\n\n \"\"\"\n self.set_allow_upscaling(allow)\n\n def set_image(self, image):\n \"\"\" Sets the image on the underlying wxBitmapWidget.\n\n \"\"\"\n bmp = image.as_wxBitmap() if image is not None else None\n self.widget.SetBitmap(bmp)\n cached = self._cached_size_hint\n hint = self._cached_size_hint = self.size_hint()\n if cached != hint:\n self.shell_obj.size_hint_updated()\n\n def set_scale_to_fit(self, scale_to_fit):\n \"\"\" Sets whether or not the image scales with the underlying \n control.\n\n \"\"\"\n 
self.widget.SetScaledContents(scale_to_fit)\n\n def set_preserve_aspect_ratio(self, preserve):\n \"\"\" Sets whether or not to preserve the aspect ratio of the \n image when scaling.\n\n \"\"\"\n self.widget.SetPreserveAspectRatio(preserve)\n\n def set_allow_upscaling(self, allow):\n \"\"\" Sets whether or not the image will scale beyond its natural\n size.\n\n \"\"\"\n self.widget.SetAllowUpscaling(allow)\n",
"step-3": "<mask token>\n\n\nclass wxBitmapWidget(wx.Panel):\n <mask token>\n\n def __init__(self, parent):\n \"\"\" Initialize a wxBitmapWidget.\n\n Parameters\n ----------\n parent : wx.Window\n The wx.Window object which serves as the widget parent.\n \n \"\"\"\n super(wxBitmapWidget, self).__init__(parent)\n self._bitmap = None\n self._scaled_contents = False\n self._preserve_aspect_ratio = False\n self._allow_upscaling = False\n self._resize_timer = None\n self._resizing = False\n self.Bind(wx.EVT_PAINT, self.OnPaint)\n\n def OnPaint(self, event):\n \"\"\" The paint event handler for the widget.\n\n \"\"\"\n bmp = self._bitmap\n if bmp is None:\n return\n bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()\n if bmp_width == 0 or bmp_height == 0:\n return\n evt_x = 0\n evt_y = 0\n evt_width, evt_height = self.GetSize().asTuple()\n if not self._scaled_contents:\n paint_x = max(0, int(evt_width / 2.0 - bmp_width / 2.0 + evt_x))\n paint_y = max(0, int(evt_height / 2.0 - bmp_height / 2.0 + evt_y))\n paint_width = bmp_width\n paint_height = bmp_height\n else:\n if self._preserve_aspect_ratio:\n bmp_ratio = float(bmp_width) / bmp_height\n evt_ratio = float(evt_width) / evt_height\n if evt_ratio >= bmp_ratio:\n if self._allow_upscaling:\n paint_height = evt_height\n else:\n paint_height = min(bmp_height, evt_height)\n paint_width = int(paint_height * bmp_ratio)\n else:\n if self._allow_upscaling:\n paint_width = evt_width\n else:\n paint_width = min(bmp_width, evt_width)\n paint_height = int(paint_width / bmp_ratio)\n elif self._allow_upscaling:\n paint_height = evt_height\n paint_width = evt_width\n else:\n paint_height = min(bmp_height, evt_height)\n paint_width = min(bmp_width, evt_width)\n paint_x = int(evt_width / 2.0 - paint_width / 2.0 + evt_x)\n paint_y = int(evt_height / 2.0 - paint_height / 2.0 + evt_y)\n if paint_width != bmp_width or paint_height != bmp_height:\n img = bmp.ConvertToImage()\n if self._resizing:\n quality = wx.IMAGE_QUALITY_NORMAL\n 
else:\n quality = wx.IMAGE_QUALITY_HIGH\n img.Rescale(paint_width, paint_height, quality)\n bmp = wx.BitmapFromImage(img)\n dc = wx.PaintDC(self)\n dc.DrawBitmap(bmp, paint_x, paint_y)\n\n def OnResize(self, event):\n \"\"\" The resize event handler for the widget.\n\n This method is only bound and called when content scaling is\n enabled. It starts(restarts) a timer to perform a high quality\n scaled repaint when resizing is finished.\n\n \"\"\"\n self._resizing = True\n self._resize_timer.Start(60, True)\n <mask token>\n\n def GetBestSize(self):\n \"\"\" Overridden method to return the size of the bitmap as the \n best size for the widget.\n\n \"\"\"\n bmp = self._bitmap\n return wx.Size(bmp.GetWidth(), bmp.GetHeight())\n\n def GetBestSizeTuple(self):\n \"\"\" Overridden method to return the size of the bitmap as the \n best size for the widget.\n\n \"\"\"\n return self.GetBestSize().asTuple()\n\n def GetBitmap(self, bitmap):\n \"\"\" Get the underlying wx.Bitmap used to paint the control.\n\n Returns\n -------\n result : wx.Bitmap or None\n The bitmap being used to paint the control, or None if\n no bitmap has been supplied.\n\n \"\"\"\n return self._bitmap\n\n def SetBitmap(self, bitmap):\n \"\"\" Set the underlying wx.Bitmap and refresh the widget.\n\n Parameters\n ----------\n bitmap : wx.Bitmap\n The bitmap to paint on the widget.\n \n \"\"\"\n self._bitmap = bitmap\n self.Refresh()\n\n def GetScaledContents(self):\n \"\"\" Whether or not the bitmap is scaled to fit the bounds.\n\n Returns\n -------\n result : bool\n Whether or not the bitmap is scaled to fit the bounds of\n the widget.\n \n \"\"\"\n return self._scaled_contents\n <mask token>\n\n def GetPreserveAspectRatio(self):\n \"\"\" Returns whether or not the aspect ratio of the image is \n maintained during a resize.\n\n \"\"\"\n return self._preserve_aspect_ratio\n\n def SetPreserveAspectRatio(self, preserve):\n \"\"\" Set whether or not to preserve the image aspect ratio.\n\n Parameters\n 
----------\n preserve : bool\n If True then the aspect ratio of the image will be preserved\n if it is scaled to fit. Otherwise, the aspect ratio will be\n ignored.\n\n \"\"\"\n self._preserve_aspect_ratio = preserve\n self.Refresh()\n <mask token>\n\n def SetAllowUpscaling(self, allow):\n \"\"\" Set whether or not to allow the image to be scaled beyond\n its natural size.\n\n Parameters\n ----------\n allow : bool\n If True, then the image may be scaled larger than its \n natural if it is scaled to fit. If False, the image will\n never be scaled larger than its natural size. In either\n case, the image may be scaled smaller.\n\n \"\"\"\n self._allow_upscaling = allow\n self.Refresh()\n\n\nclass WXImageView(WXControl, AbstractTkImageView):\n \"\"\" A Wx implementation of ImageView.\n\n \"\"\"\n _cached_size_hint = None\n\n def create(self, parent):\n \"\"\" Creates the underlying wxBitmapWidget control.\n\n \"\"\"\n self.widget = wxBitmapWidget(parent)\n\n def initialize(self):\n \"\"\" Initializes the attributes on the underlying control.\n\n \"\"\"\n super(WXImageView, self).initialize()\n shell = self.shell_obj\n self.set_image(shell.image)\n self.set_scale_to_fit(shell.scale_to_fit)\n self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)\n self.set_allow_upscaling(shell.allow_upscaling)\n\n def shell_image_changed(self, image):\n \"\"\" The change handler for the 'image' attribute on the shell \n component.\n\n \"\"\"\n self.set_image(image)\n\n def shell_scale_to_fit_changed(self, scale_to_fit):\n \"\"\" The change handler for the 'scale_to_fit' attribute on the \n shell component.\n\n \"\"\"\n self.set_scale_to_fit(scale_to_fit)\n\n def shell_preserve_aspect_ratio_changed(self, preserve):\n \"\"\" The change handler for the 'preserve_aspect_ratio' attribute\n on the shell component.\n\n \"\"\"\n self.set_preserve_aspect_ratio(preserve)\n\n def shell_allow_upscaling_changed(self, allow):\n \"\"\" The change handler for the 'allow_upscaling' attribute on 
\n the shell component.\n\n \"\"\"\n self.set_allow_upscaling(allow)\n\n def set_image(self, image):\n \"\"\" Sets the image on the underlying wxBitmapWidget.\n\n \"\"\"\n bmp = image.as_wxBitmap() if image is not None else None\n self.widget.SetBitmap(bmp)\n cached = self._cached_size_hint\n hint = self._cached_size_hint = self.size_hint()\n if cached != hint:\n self.shell_obj.size_hint_updated()\n\n def set_scale_to_fit(self, scale_to_fit):\n \"\"\" Sets whether or not the image scales with the underlying \n control.\n\n \"\"\"\n self.widget.SetScaledContents(scale_to_fit)\n\n def set_preserve_aspect_ratio(self, preserve):\n \"\"\" Sets whether or not to preserve the aspect ratio of the \n image when scaling.\n\n \"\"\"\n self.widget.SetPreserveAspectRatio(preserve)\n\n def set_allow_upscaling(self, allow):\n \"\"\" Sets whether or not the image will scale beyond its natural\n size.\n\n \"\"\"\n self.widget.SetAllowUpscaling(allow)\n",
"step-4": "<mask token>\n\n\nclass wxBitmapWidget(wx.Panel):\n <mask token>\n\n def __init__(self, parent):\n \"\"\" Initialize a wxBitmapWidget.\n\n Parameters\n ----------\n parent : wx.Window\n The wx.Window object which serves as the widget parent.\n \n \"\"\"\n super(wxBitmapWidget, self).__init__(parent)\n self._bitmap = None\n self._scaled_contents = False\n self._preserve_aspect_ratio = False\n self._allow_upscaling = False\n self._resize_timer = None\n self._resizing = False\n self.Bind(wx.EVT_PAINT, self.OnPaint)\n\n def OnPaint(self, event):\n \"\"\" The paint event handler for the widget.\n\n \"\"\"\n bmp = self._bitmap\n if bmp is None:\n return\n bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()\n if bmp_width == 0 or bmp_height == 0:\n return\n evt_x = 0\n evt_y = 0\n evt_width, evt_height = self.GetSize().asTuple()\n if not self._scaled_contents:\n paint_x = max(0, int(evt_width / 2.0 - bmp_width / 2.0 + evt_x))\n paint_y = max(0, int(evt_height / 2.0 - bmp_height / 2.0 + evt_y))\n paint_width = bmp_width\n paint_height = bmp_height\n else:\n if self._preserve_aspect_ratio:\n bmp_ratio = float(bmp_width) / bmp_height\n evt_ratio = float(evt_width) / evt_height\n if evt_ratio >= bmp_ratio:\n if self._allow_upscaling:\n paint_height = evt_height\n else:\n paint_height = min(bmp_height, evt_height)\n paint_width = int(paint_height * bmp_ratio)\n else:\n if self._allow_upscaling:\n paint_width = evt_width\n else:\n paint_width = min(bmp_width, evt_width)\n paint_height = int(paint_width / bmp_ratio)\n elif self._allow_upscaling:\n paint_height = evt_height\n paint_width = evt_width\n else:\n paint_height = min(bmp_height, evt_height)\n paint_width = min(bmp_width, evt_width)\n paint_x = int(evt_width / 2.0 - paint_width / 2.0 + evt_x)\n paint_y = int(evt_height / 2.0 - paint_height / 2.0 + evt_y)\n if paint_width != bmp_width or paint_height != bmp_height:\n img = bmp.ConvertToImage()\n if self._resizing:\n quality = wx.IMAGE_QUALITY_NORMAL\n 
else:\n quality = wx.IMAGE_QUALITY_HIGH\n img.Rescale(paint_width, paint_height, quality)\n bmp = wx.BitmapFromImage(img)\n dc = wx.PaintDC(self)\n dc.DrawBitmap(bmp, paint_x, paint_y)\n\n def OnResize(self, event):\n \"\"\" The resize event handler for the widget.\n\n This method is only bound and called when content scaling is\n enabled. It starts(restarts) a timer to perform a high quality\n scaled repaint when resizing is finished.\n\n \"\"\"\n self._resizing = True\n self._resize_timer.Start(60, True)\n\n def OnResizeEnd(self, event):\n \"\"\" The repaint timer event handler.\n\n This method is only bound and called when content scaling is\n enabled and resizing has completed. It triggers a high quality\n repaint.\n\n \"\"\"\n self._resizing = False\n self.Refresh()\n\n def GetBestSize(self):\n \"\"\" Overridden method to return the size of the bitmap as the \n best size for the widget.\n\n \"\"\"\n bmp = self._bitmap\n return wx.Size(bmp.GetWidth(), bmp.GetHeight())\n\n def GetBestSizeTuple(self):\n \"\"\" Overridden method to return the size of the bitmap as the \n best size for the widget.\n\n \"\"\"\n return self.GetBestSize().asTuple()\n\n def GetBitmap(self, bitmap):\n \"\"\" Get the underlying wx.Bitmap used to paint the control.\n\n Returns\n -------\n result : wx.Bitmap or None\n The bitmap being used to paint the control, or None if\n no bitmap has been supplied.\n\n \"\"\"\n return self._bitmap\n\n def SetBitmap(self, bitmap):\n \"\"\" Set the underlying wx.Bitmap and refresh the widget.\n\n Parameters\n ----------\n bitmap : wx.Bitmap\n The bitmap to paint on the widget.\n \n \"\"\"\n self._bitmap = bitmap\n self.Refresh()\n\n def GetScaledContents(self):\n \"\"\" Whether or not the bitmap is scaled to fit the bounds.\n\n Returns\n -------\n result : bool\n Whether or not the bitmap is scaled to fit the bounds of\n the widget.\n \n \"\"\"\n return self._scaled_contents\n\n def SetScaledContents(self, scaled):\n \"\"\" Set whether or not the bitmap 
should be scaled to fit the\n bounds of the widget.\n\n Parameters\n ----------\n scaled : bool\n Whether or not to scale the bitmap to fit the bounds of the\n widget.\n \n \"\"\"\n if scaled:\n if not self._scaled_contents:\n self._scaled_contents = True\n self._resize_timer = wx.Timer(self)\n self.Bind(wx.EVT_TIMER, self.OnResizeEnd)\n self.Bind(wx.EVT_SIZE, self.OnResize)\n elif self._scaled_contents:\n self._scaled_contents = False\n self._timer = None\n self.Unbind(wx.EVT_TIMER, handler=self.OnResizeEnd)\n self.Unbind(wx.EVT_SIZE, handler=self.OnResize)\n self.Refresh()\n\n def GetPreserveAspectRatio(self):\n \"\"\" Returns whether or not the aspect ratio of the image is \n maintained during a resize.\n\n \"\"\"\n return self._preserve_aspect_ratio\n\n def SetPreserveAspectRatio(self, preserve):\n \"\"\" Set whether or not to preserve the image aspect ratio.\n\n Parameters\n ----------\n preserve : bool\n If True then the aspect ratio of the image will be preserved\n if it is scaled to fit. Otherwise, the aspect ratio will be\n ignored.\n\n \"\"\"\n self._preserve_aspect_ratio = preserve\n self.Refresh()\n\n def GetAllowUpscaling(self):\n \"\"\" Returns whether or not the image can be scaled greater than\n its natural size.\n\n \"\"\"\n return self._allow_upscaling\n\n def SetAllowUpscaling(self, allow):\n \"\"\" Set whether or not to allow the image to be scaled beyond\n its natural size.\n\n Parameters\n ----------\n allow : bool\n If True, then the image may be scaled larger than its \n natural if it is scaled to fit. If False, the image will\n never be scaled larger than its natural size. 
In either\n case, the image may be scaled smaller.\n\n \"\"\"\n self._allow_upscaling = allow\n self.Refresh()\n\n\nclass WXImageView(WXControl, AbstractTkImageView):\n \"\"\" A Wx implementation of ImageView.\n\n \"\"\"\n _cached_size_hint = None\n\n def create(self, parent):\n \"\"\" Creates the underlying wxBitmapWidget control.\n\n \"\"\"\n self.widget = wxBitmapWidget(parent)\n\n def initialize(self):\n \"\"\" Initializes the attributes on the underlying control.\n\n \"\"\"\n super(WXImageView, self).initialize()\n shell = self.shell_obj\n self.set_image(shell.image)\n self.set_scale_to_fit(shell.scale_to_fit)\n self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)\n self.set_allow_upscaling(shell.allow_upscaling)\n\n def shell_image_changed(self, image):\n \"\"\" The change handler for the 'image' attribute on the shell \n component.\n\n \"\"\"\n self.set_image(image)\n\n def shell_scale_to_fit_changed(self, scale_to_fit):\n \"\"\" The change handler for the 'scale_to_fit' attribute on the \n shell component.\n\n \"\"\"\n self.set_scale_to_fit(scale_to_fit)\n\n def shell_preserve_aspect_ratio_changed(self, preserve):\n \"\"\" The change handler for the 'preserve_aspect_ratio' attribute\n on the shell component.\n\n \"\"\"\n self.set_preserve_aspect_ratio(preserve)\n\n def shell_allow_upscaling_changed(self, allow):\n \"\"\" The change handler for the 'allow_upscaling' attribute on \n the shell component.\n\n \"\"\"\n self.set_allow_upscaling(allow)\n\n def set_image(self, image):\n \"\"\" Sets the image on the underlying wxBitmapWidget.\n\n \"\"\"\n bmp = image.as_wxBitmap() if image is not None else None\n self.widget.SetBitmap(bmp)\n cached = self._cached_size_hint\n hint = self._cached_size_hint = self.size_hint()\n if cached != hint:\n self.shell_obj.size_hint_updated()\n\n def set_scale_to_fit(self, scale_to_fit):\n \"\"\" Sets whether or not the image scales with the underlying \n control.\n\n \"\"\"\n 
self.widget.SetScaledContents(scale_to_fit)\n\n def set_preserve_aspect_ratio(self, preserve):\n \"\"\" Sets whether or not to preserve the aspect ratio of the \n image when scaling.\n\n \"\"\"\n self.widget.SetPreserveAspectRatio(preserve)\n\n def set_allow_upscaling(self, allow):\n \"\"\" Sets whether or not the image will scale beyond its natural\n size.\n\n \"\"\"\n self.widget.SetAllowUpscaling(allow)\n",
"step-5": "#------------------------------------------------------------------------------\n# Copyright (c) 2011, Enthought, Inc.\n# All rights reserved.\n#------------------------------------------------------------------------------\nimport wx\n\nfrom .wx_control import WXControl\n\nfrom ...components.image_view import AbstractTkImageView\n\n\nclass wxBitmapWidget(wx.Panel):\n \"\"\" A wx.Panel subclass which paints a provided wx.Bitmap. \n\n This differs from wx.StaticBitmap in that it provides the option to\n scale the provided bitmap to the bounds of the widget. If the widget\n is set to scale its contents, low quality scaling will occur during\n resize, with a high quality pass performed once resizing as finished.\n\n \"\"\"\n def __init__(self, parent):\n \"\"\" Initialize a wxBitmapWidget.\n\n Parameters\n ----------\n parent : wx.Window\n The wx.Window object which serves as the widget parent.\n \n \"\"\"\n super(wxBitmapWidget, self).__init__(parent)\n self._bitmap = None\n self._scaled_contents = False\n self._preserve_aspect_ratio = False\n self._allow_upscaling = False\n self._resize_timer = None\n self._resizing = False\n self.Bind(wx.EVT_PAINT, self.OnPaint)\n\n #--------------------------------------------------------------------------\n # Private API\n #--------------------------------------------------------------------------\n def OnPaint(self, event):\n \"\"\" The paint event handler for the widget.\n\n \"\"\"\n bmp = self._bitmap\n if bmp is None:\n return\n\n bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()\n if bmp_width == 0 or bmp_height == 0:\n return\n\n evt_x = 0\n evt_y = 0\n evt_width, evt_height = self.GetSize().asTuple()\n\n if not self._scaled_contents:\n # If the image isn't scaled, it is centered if possible.\n # Otherwise, it's painted at the origin and clipped.\n paint_x = max(0, int((evt_width / 2. - bmp_width / 2.) + evt_x))\n paint_y = max(0, int((evt_height / 2. - bmp_height / 2.) 
+ evt_y))\n paint_width = bmp_width\n paint_height = bmp_height\n else:\n # If the image *is* scaled, it's scaled size depends on the \n # size of the paint area as well as the other scaling flags.\n if self._preserve_aspect_ratio:\n bmp_ratio = float(bmp_width) / bmp_height\n evt_ratio = float(evt_width) / evt_height\n if evt_ratio >= bmp_ratio:\n if self._allow_upscaling:\n paint_height = evt_height\n else:\n paint_height = min(bmp_height, evt_height)\n paint_width = int(paint_height * bmp_ratio)\n else:\n if self._allow_upscaling:\n paint_width = evt_width\n else:\n paint_width = min(bmp_width, evt_width)\n paint_height = int(paint_width / bmp_ratio)\n else:\n if self._allow_upscaling:\n paint_height = evt_height\n paint_width = evt_width\n else:\n paint_height = min(bmp_height, evt_height)\n paint_width = min(bmp_width, evt_width)\n # In all cases of scaling, we know that the scaled image is\n # no larger than the paint area, and can thus be centered.\n paint_x = int((evt_width / 2. - paint_width / 2.) + evt_x)\n paint_y = int((evt_height / 2. - paint_height / 2.) + evt_y)\n\n # Scale the bitmap if needed, using a faster method if the\n # image is currently being resized\n if paint_width != bmp_width or paint_height != bmp_height:\n img = bmp.ConvertToImage()\n if self._resizing:\n quality = wx.IMAGE_QUALITY_NORMAL\n else:\n quality = wx.IMAGE_QUALITY_HIGH\n img.Rescale(paint_width, paint_height, quality)\n bmp = wx.BitmapFromImage(img)\n\n # Finally, draw the bitmap into the computed location\n dc = wx.PaintDC(self)\n dc.DrawBitmap(bmp, paint_x, paint_y)\n\n def OnResize(self, event):\n \"\"\" The resize event handler for the widget.\n\n This method is only bound and called when content scaling is\n enabled. 
It starts(restarts) a timer to perform a high quality\n scaled repaint when resizing is finished.\n\n \"\"\"\n self._resizing = True\n self._resize_timer.Start(60, True)\n\n def OnResizeEnd(self, event):\n \"\"\" The repaint timer event handler.\n\n This method is only bound and called when content scaling is\n enabled and resizing has completed. It triggers a high quality\n repaint.\n\n \"\"\"\n self._resizing = False\n self.Refresh()\n\n #--------------------------------------------------------------------------\n # Public API\n #--------------------------------------------------------------------------\n def GetBestSize(self):\n \"\"\" Overridden method to return the size of the bitmap as the \n best size for the widget.\n\n \"\"\"\n bmp = self._bitmap\n return wx.Size(bmp.GetWidth(), bmp.GetHeight())\n\n def GetBestSizeTuple(self):\n \"\"\" Overridden method to return the size of the bitmap as the \n best size for the widget.\n\n \"\"\"\n return self.GetBestSize().asTuple()\n\n def GetBitmap(self, bitmap):\n \"\"\" Get the underlying wx.Bitmap used to paint the control.\n\n Returns\n -------\n result : wx.Bitmap or None\n The bitmap being used to paint the control, or None if\n no bitmap has been supplied.\n\n \"\"\"\n return self._bitmap\n\n def SetBitmap(self, bitmap):\n \"\"\" Set the underlying wx.Bitmap and refresh the widget.\n\n Parameters\n ----------\n bitmap : wx.Bitmap\n The bitmap to paint on the widget.\n \n \"\"\"\n self._bitmap = bitmap\n self.Refresh()\n\n def GetScaledContents(self):\n \"\"\" Whether or not the bitmap is scaled to fit the bounds.\n\n Returns\n -------\n result : bool\n Whether or not the bitmap is scaled to fit the bounds of\n the widget.\n \n \"\"\"\n return self._scaled_contents\n \n def SetScaledContents(self, scaled):\n \"\"\" Set whether or not the bitmap should be scaled to fit the\n bounds of the widget.\n\n Parameters\n ----------\n scaled : bool\n Whether or not to scale the bitmap to fit the bounds of the\n widget.\n 
\n \"\"\"\n if scaled:\n if not self._scaled_contents:\n self._scaled_contents = True\n self._resize_timer = wx.Timer(self)\n self.Bind(wx.EVT_TIMER, self.OnResizeEnd)\n self.Bind(wx.EVT_SIZE, self.OnResize)\n else:\n if self._scaled_contents:\n self._scaled_contents = False\n self._timer = None\n self.Unbind(wx.EVT_TIMER, handler=self.OnResizeEnd)\n self.Unbind(wx.EVT_SIZE, handler=self.OnResize)\n self.Refresh()\n\n def GetPreserveAspectRatio(self):\n \"\"\" Returns whether or not the aspect ratio of the image is \n maintained during a resize.\n\n \"\"\"\n return self._preserve_aspect_ratio\n\n def SetPreserveAspectRatio(self, preserve):\n \"\"\" Set whether or not to preserve the image aspect ratio.\n\n Parameters\n ----------\n preserve : bool\n If True then the aspect ratio of the image will be preserved\n if it is scaled to fit. Otherwise, the aspect ratio will be\n ignored.\n\n \"\"\"\n self._preserve_aspect_ratio = preserve\n self.Refresh()\n \n def GetAllowUpscaling(self):\n \"\"\" Returns whether or not the image can be scaled greater than\n its natural size.\n\n \"\"\"\n return self._allow_upscaling\n\n def SetAllowUpscaling(self, allow):\n \"\"\" Set whether or not to allow the image to be scaled beyond\n its natural size.\n\n Parameters\n ----------\n allow : bool\n If True, then the image may be scaled larger than its \n natural if it is scaled to fit. If False, the image will\n never be scaled larger than its natural size. 
In either\n case, the image may be scaled smaller.\n\n \"\"\"\n self._allow_upscaling = allow\n self.Refresh()\n\n\nclass WXImageView(WXControl, AbstractTkImageView):\n \"\"\" A Wx implementation of ImageView.\n\n \"\"\"\n #: The internal cached size hint which is used to determine whether\n #: of not a size hint updated event should be emitted when the text\n #: in the label changes\n _cached_size_hint = None\n\n #--------------------------------------------------------------------------\n # Setup methods\n #--------------------------------------------------------------------------\n def create(self, parent):\n \"\"\" Creates the underlying wxBitmapWidget control.\n\n \"\"\"\n self.widget = wxBitmapWidget(parent)\n\n def initialize(self):\n \"\"\" Initializes the attributes on the underlying control.\n\n \"\"\"\n super(WXImageView, self).initialize()\n shell = self.shell_obj\n self.set_image(shell.image)\n self.set_scale_to_fit(shell.scale_to_fit)\n self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)\n self.set_allow_upscaling(shell.allow_upscaling)\n\n #--------------------------------------------------------------------------\n # Implementation\n #--------------------------------------------------------------------------\n def shell_image_changed(self, image):\n \"\"\" The change handler for the 'image' attribute on the shell \n component.\n\n \"\"\"\n self.set_image(image)\n \n def shell_scale_to_fit_changed(self, scale_to_fit):\n \"\"\" The change handler for the 'scale_to_fit' attribute on the \n shell component.\n\n \"\"\"\n self.set_scale_to_fit(scale_to_fit)\n\n def shell_preserve_aspect_ratio_changed(self, preserve):\n \"\"\" The change handler for the 'preserve_aspect_ratio' attribute\n on the shell component.\n\n \"\"\"\n self.set_preserve_aspect_ratio(preserve)\n\n def shell_allow_upscaling_changed(self, allow):\n \"\"\" The change handler for the 'allow_upscaling' attribute on \n the shell component.\n\n \"\"\"\n 
self.set_allow_upscaling(allow)\n\n #--------------------------------------------------------------------------\n # Widget Update Methods\n #--------------------------------------------------------------------------\n def set_image(self, image):\n \"\"\" Sets the image on the underlying wxBitmapWidget.\n\n \"\"\"\n bmp = image.as_wxBitmap() if image is not None else None\n self.widget.SetBitmap(bmp)\n # Emit a size hint updated event if the size hint has actually\n # changed. This is an optimization so that a constraints update\n # only occurs when the size hint has actually changed. This \n # logic must be implemented here so that the label has been\n # updated before the new size hint is computed. Placing this\n # logic on the shell object would not guarantee that the label\n # has been updated at the time the change handler is called.\n cached = self._cached_size_hint\n hint = self._cached_size_hint = self.size_hint()\n if cached != hint:\n self.shell_obj.size_hint_updated()\n \n def set_scale_to_fit(self, scale_to_fit): \n \"\"\" Sets whether or not the image scales with the underlying \n control.\n\n \"\"\"\n self.widget.SetScaledContents(scale_to_fit)\n\n def set_preserve_aspect_ratio(self, preserve):\n \"\"\" Sets whether or not to preserve the aspect ratio of the \n image when scaling.\n\n \"\"\"\n self.widget.SetPreserveAspectRatio(preserve)\n\n def set_allow_upscaling(self, allow):\n \"\"\" Sets whether or not the image will scale beyond its natural\n size.\n\n \"\"\"\n self.widget.SetAllowUpscaling(allow)\n\n",
"step-ids": [
18,
20,
25,
28,
31
]
}
|
[
18,
20,
25,
28,
31
] |
from temp_conversion_script import convert_c_to_f
from temp_conversion_script import fever_detection
def test_convert_c_to_f():
    """20 degrees Celsius must convert to exactly 68 degrees Fahrenheit."""
    assert convert_c_to_f(20.0) == 68.0
def test2():
    """-40 is where the Celsius and Fahrenheit scales coincide."""
    assert convert_c_to_f(-40.0) == -40.0
def test_fever_detection():
    """fever_detection should report the peak reading and flag a fever.

    Bug fix: the original reassigned ``is_fever = True`` after the call,
    clobbering the value returned by ``fever_detection`` and never
    asserting it, so a broken fever flag could not fail this test.
    """
    temp_list = [93.0, 98.0, 100.0, 105.0, 101.0]
    max_temp, is_fever = fever_detection(temp_list)
    assert max_temp == 105.0
    # 105.0 F is above any reasonable fever threshold, so the flag must be set.
    assert is_fever
|
normal
|
{
"blob_id": "d75187ed435c3d3aeeb31be4a0a4ed1754f8d160",
"index": 4436,
"step-1": "<mask token>\n\n\ndef test2():\n answer = convert_c_to_f(-40.0)\n expected = -40.0\n assert answer == expected\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_convert_c_to_f():\n answer = convert_c_to_f(20.0)\n expected = 68.0\n assert answer == expected\n\n\ndef test2():\n answer = convert_c_to_f(-40.0)\n expected = -40.0\n assert answer == expected\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_convert_c_to_f():\n answer = convert_c_to_f(20.0)\n expected = 68.0\n assert answer == expected\n\n\ndef test2():\n answer = convert_c_to_f(-40.0)\n expected = -40.0\n assert answer == expected\n\n\ndef test_fever_detection():\n temp_list = [93.0, 98.0, 100.0, 105.0, 101.0]\n max_temp, is_fever = fever_detection(temp_list)\n expected_max = 105.0\n is_fever = True\n assert max_temp == expected_max\n",
"step-4": "from temp_conversion_script import convert_c_to_f\nfrom temp_conversion_script import fever_detection\n\n\ndef test_convert_c_to_f():\n answer = convert_c_to_f(20.0)\n expected = 68.0\n assert answer == expected\n\n\ndef test2():\n answer = convert_c_to_f(-40.0)\n expected = -40.0\n assert answer == expected\n\n\ndef test_fever_detection():\n temp_list = [93.0, 98.0, 100.0, 105.0, 101.0]\n max_temp, is_fever = fever_detection(temp_list)\n expected_max = 105.0\n is_fever = True\n assert max_temp == expected_max\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
from django.urls import path
from group import views
# Namespace used when reversing these routes, e.g. "group:group_create".
app_name = 'group'

# (route, class-based view, url name) triples for the group app's
# creation endpoints.
_ROUTES = (
    ('group/', views.CreateGroup, 'group_create'),
    ('shift/', views.CreateShift, 'shift_create'),
    ('subject/', views.createSubject, 'subject_create'),
)

urlpatterns = [
    path(route, view.as_view(), name=url_name)
    for route, view, url_name in _ROUTES
]
|
normal
|
{
"blob_id": "0b7e858eb6d4a5f3cf6aca4fea994dae9f889caa",
"index": 4781,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'group'\nurlpatterns = [path('group/', views.CreateGroup.as_view(), name=\n 'group_create'), path('shift/', views.CreateShift.as_view(), name=\n 'shift_create'), path('subject/', views.createSubject.as_view(), name=\n 'subject_create')]\n",
"step-3": "from django.urls import path\nfrom group import views\napp_name = 'group'\nurlpatterns = [path('group/', views.CreateGroup.as_view(), name=\n 'group_create'), path('shift/', views.CreateShift.as_view(), name=\n 'shift_create'), path('subject/', views.createSubject.as_view(), name=\n 'subject_create')]\n",
"step-4": "from django.urls import path\nfrom group import views\n\napp_name = 'group'\n\nurlpatterns = [\n path('group/',views.CreateGroup.as_view(), name='group_create'),\n path('shift/',views.CreateShift.as_view(), name='shift_create'),\n path('subject/',views.createSubject.as_view(), name='subject_create'),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while True:
det, frame_with_color = camera.read()
frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)
frame_kp, frame_desc = sift.detectAndCompute(frame, None)
matches = bf.knnMatch(frame_desc, train_desc, k=2)
good = []
for m, n in matches:
if m.distance < 0.75 * n.distance:
good.append(m)
if len(good) > LOWEST_MATCHES_NUMBER:
train_points = []
frame_points = []
for m in good:
train_points.append(train_kp[m.trainIdx].pt)
frame_points.append(frame_kp[m.queryIdx].pt)
train_points, frame_points = np.float32((train_points, frame_points))
H, status = cv2.findHomography(train_points, frame_points, cv2.
RANSAC, 3.0)
h, w = train_img.shape
trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -
1, 0]]])
queryBorder = cv2.perspectiveTransform(trainBorder, H)
cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0,
0, 255), 5)
else:
print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),
LOWEST_MATCHES_NUMBER))
cv2.imshow('result', frame_with_color)
if cv2.waitKey(5) == ord('q'):
break
camera.release()
cv2.destroyAllWindows()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
LOWEST_MATCHES_NUMBER = 30
sift = cv2.xfeatures2d.SIFT_create()
bf = cv2.BFMatcher()
train_img = cv2.imread('Photo/demo2.jpg', 0)
train_kp, train_desc = sift.detectAndCompute(train_img, None)
camera = cv2.VideoCapture(0)
while True:
det, frame_with_color = camera.read()
frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)
frame_kp, frame_desc = sift.detectAndCompute(frame, None)
matches = bf.knnMatch(frame_desc, train_desc, k=2)
good = []
for m, n in matches:
if m.distance < 0.75 * n.distance:
good.append(m)
if len(good) > LOWEST_MATCHES_NUMBER:
train_points = []
frame_points = []
for m in good:
train_points.append(train_kp[m.trainIdx].pt)
frame_points.append(frame_kp[m.queryIdx].pt)
train_points, frame_points = np.float32((train_points, frame_points))
H, status = cv2.findHomography(train_points, frame_points, cv2.
RANSAC, 3.0)
h, w = train_img.shape
trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -
1, 0]]])
queryBorder = cv2.perspectiveTransform(trainBorder, H)
cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0,
0, 255), 5)
else:
print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),
LOWEST_MATCHES_NUMBER))
cv2.imshow('result', frame_with_color)
if cv2.waitKey(5) == ord('q'):
break
camera.release()
cv2.destroyAllWindows()
<|reserved_special_token_1|>
import cv2
import numpy as np
LOWEST_MATCHES_NUMBER = 30
sift = cv2.xfeatures2d.SIFT_create()
bf = cv2.BFMatcher()
train_img = cv2.imread('Photo/demo2.jpg', 0)
train_kp, train_desc = sift.detectAndCompute(train_img, None)
camera = cv2.VideoCapture(0)
while True:
det, frame_with_color = camera.read()
frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)
frame_kp, frame_desc = sift.detectAndCompute(frame, None)
matches = bf.knnMatch(frame_desc, train_desc, k=2)
good = []
for m, n in matches:
if m.distance < 0.75 * n.distance:
good.append(m)
if len(good) > LOWEST_MATCHES_NUMBER:
train_points = []
frame_points = []
for m in good:
train_points.append(train_kp[m.trainIdx].pt)
frame_points.append(frame_kp[m.queryIdx].pt)
train_points, frame_points = np.float32((train_points, frame_points))
H, status = cv2.findHomography(train_points, frame_points, cv2.
RANSAC, 3.0)
h, w = train_img.shape
trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -
1, 0]]])
queryBorder = cv2.perspectiveTransform(trainBorder, H)
cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0,
0, 255), 5)
else:
print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),
LOWEST_MATCHES_NUMBER))
cv2.imshow('result', frame_with_color)
if cv2.waitKey(5) == ord('q'):
break
camera.release()
cv2.destroyAllWindows()
<|reserved_special_token_1|>
import cv2
import numpy as np
# Minimum number of ratio-test survivors required before the homography is
# trusted enough to draw the detected object's outline.
LOWEST_MATCHES_NUMBER = 30
# Lowe's ratio-test threshold for rejecting ambiguous matches.
RATIO_TEST = 0.75

sift = cv2.xfeatures2d.SIFT_create()
bf = cv2.BFMatcher()

# Reference ("train") image, loaded as grayscale; fail fast if it is missing
# instead of crashing later inside detectAndCompute.
train_img = cv2.imread('Photo/demo2.jpg', 0)
if train_img is None:
    raise FileNotFoundError('Could not read training image Photo/demo2.jpg')
train_kp, train_desc = sift.detectAndCompute(train_img, None)

camera = cv2.VideoCapture(0)
while True:
    grabbed, frame_with_color = camera.read()
    if not grabbed:
        # Camera unplugged / stream ended: stop cleanly instead of crashing
        # on a None frame (the original ignored this flag).
        break
    frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)
    frame_kp, frame_desc = sift.detectAndCompute(frame, None)

    # Lowe's ratio test: keep a match only when it is clearly better than
    # the second-best candidate.
    good = []
    if frame_desc is not None:  # detectAndCompute yields None on featureless frames
        matches = bf.knnMatch(frame_desc, train_desc, k=2)
        good = [m for m, n in matches if m.distance < RATIO_TEST * n.distance]

    if len(good) > LOWEST_MATCHES_NUMBER:
        train_points = np.float32([train_kp[m.trainIdx].pt for m in good])
        frame_points = np.float32([frame_kp[m.queryIdx].pt for m in good])
        # Robustly estimate the reference->frame perspective transform.
        H, status = cv2.findHomography(train_points, frame_points, cv2.RANSAC, 3.0)
        if H is not None:  # RANSAC can fail and return None
            h, w = train_img.shape
            trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w - 1, 0]]])
            queryBorder = cv2.perspectiveTransform(trainBorder, H)
            # Outline the detected object in red on the color frame.
            cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0, 0, 255), 5)
    else:
        print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good), LOWEST_MATCHES_NUMBER))

    cv2.imshow('result', frame_with_color)
    if cv2.waitKey(5) == ord('q'):
        break

camera.release()
cv2.destroyAllWindows()
|
flexible
|
{
"blob_id": "1a78d9e0807824263fd46547d5b75c61610456d4",
"index": 1912,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n det, frame_with_color = camera.read()\n frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)\n frame_kp, frame_desc = sift.detectAndCompute(frame, None)\n matches = bf.knnMatch(frame_desc, train_desc, k=2)\n good = []\n for m, n in matches:\n if m.distance < 0.75 * n.distance:\n good.append(m)\n if len(good) > LOWEST_MATCHES_NUMBER:\n train_points = []\n frame_points = []\n for m in good:\n train_points.append(train_kp[m.trainIdx].pt)\n frame_points.append(frame_kp[m.queryIdx].pt)\n train_points, frame_points = np.float32((train_points, frame_points))\n H, status = cv2.findHomography(train_points, frame_points, cv2.\n RANSAC, 3.0)\n h, w = train_img.shape\n trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -\n 1, 0]]])\n queryBorder = cv2.perspectiveTransform(trainBorder, H)\n cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0, \n 0, 255), 5)\n else:\n print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),\n LOWEST_MATCHES_NUMBER))\n cv2.imshow('result', frame_with_color)\n if cv2.waitKey(5) == ord('q'):\n break\ncamera.release()\ncv2.destroyAllWindows()\n",
"step-3": "<mask token>\nLOWEST_MATCHES_NUMBER = 30\nsift = cv2.xfeatures2d.SIFT_create()\nbf = cv2.BFMatcher()\ntrain_img = cv2.imread('Photo/demo2.jpg', 0)\ntrain_kp, train_desc = sift.detectAndCompute(train_img, None)\ncamera = cv2.VideoCapture(0)\nwhile True:\n det, frame_with_color = camera.read()\n frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)\n frame_kp, frame_desc = sift.detectAndCompute(frame, None)\n matches = bf.knnMatch(frame_desc, train_desc, k=2)\n good = []\n for m, n in matches:\n if m.distance < 0.75 * n.distance:\n good.append(m)\n if len(good) > LOWEST_MATCHES_NUMBER:\n train_points = []\n frame_points = []\n for m in good:\n train_points.append(train_kp[m.trainIdx].pt)\n frame_points.append(frame_kp[m.queryIdx].pt)\n train_points, frame_points = np.float32((train_points, frame_points))\n H, status = cv2.findHomography(train_points, frame_points, cv2.\n RANSAC, 3.0)\n h, w = train_img.shape\n trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -\n 1, 0]]])\n queryBorder = cv2.perspectiveTransform(trainBorder, H)\n cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0, \n 0, 255), 5)\n else:\n print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),\n LOWEST_MATCHES_NUMBER))\n cv2.imshow('result', frame_with_color)\n if cv2.waitKey(5) == ord('q'):\n break\ncamera.release()\ncv2.destroyAllWindows()\n",
"step-4": "import cv2\nimport numpy as np\nLOWEST_MATCHES_NUMBER = 30\nsift = cv2.xfeatures2d.SIFT_create()\nbf = cv2.BFMatcher()\ntrain_img = cv2.imread('Photo/demo2.jpg', 0)\ntrain_kp, train_desc = sift.detectAndCompute(train_img, None)\ncamera = cv2.VideoCapture(0)\nwhile True:\n det, frame_with_color = camera.read()\n frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)\n frame_kp, frame_desc = sift.detectAndCompute(frame, None)\n matches = bf.knnMatch(frame_desc, train_desc, k=2)\n good = []\n for m, n in matches:\n if m.distance < 0.75 * n.distance:\n good.append(m)\n if len(good) > LOWEST_MATCHES_NUMBER:\n train_points = []\n frame_points = []\n for m in good:\n train_points.append(train_kp[m.trainIdx].pt)\n frame_points.append(frame_kp[m.queryIdx].pt)\n train_points, frame_points = np.float32((train_points, frame_points))\n H, status = cv2.findHomography(train_points, frame_points, cv2.\n RANSAC, 3.0)\n h, w = train_img.shape\n trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -\n 1, 0]]])\n queryBorder = cv2.perspectiveTransform(trainBorder, H)\n cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0, \n 0, 255), 5)\n else:\n print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),\n LOWEST_MATCHES_NUMBER))\n cv2.imshow('result', frame_with_color)\n if cv2.waitKey(5) == ord('q'):\n break\ncamera.release()\ncv2.destroyAllWindows()\n",
"step-5": "import cv2\nimport numpy as np\n\nLOWEST_MATCHES_NUMBER = 30\n\nsift = cv2.xfeatures2d.SIFT_create()\nbf = cv2.BFMatcher();\n\ntrain_img = cv2.imread('Photo/demo2.jpg', 0)\ntrain_kp, train_desc = sift.detectAndCompute(train_img, None);\n\ncamera = cv2.VideoCapture(0);\nwhile (True):\n det, frame_with_color = camera.read();\n frame = cv2.cvtColor(frame_with_color,cv2.COLOR_BGR2GRAY)\n frame_kp, frame_desc = sift.detectAndCompute(frame,None)\n matches=bf.knnMatch(frame_desc,train_desc,k=2)\n good = []\n for m,n in matches:\n if(m.distance < 0.75*n.distance):\n good.append(m)\n if(len(good)> LOWEST_MATCHES_NUMBER):\n train_points = []\n frame_points = []\n for m in good:\n train_points.append(train_kp[m.trainIdx].pt)\n frame_points.append(frame_kp[m.queryIdx].pt)\n train_points, frame_points=np.float32((train_points,frame_points))\n H,status=cv2.findHomography(train_points,frame_points,cv2.RANSAC,3.0)\n h,w=train_img.shape\n trainBorder=np.float32([[[0,0],[0,h-1],[w-1,h-1],[w-1,0]]])\n queryBorder=cv2.perspectiveTransform(trainBorder,H)\n cv2.polylines(frame_with_color,[np.int32(queryBorder)],True,(0,0,255),5)\n else:\n print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good), LOWEST_MATCHES_NUMBER))\n cv2.imshow('result',frame_with_color)\n if cv2.waitKey(5)==ord('q'):\n break\ncamera.release()\ncv2.destroyAllWindows()\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from pymongo import MongoClient
from datetime import datetime
import sys
import requests
import urllib
import json
import xml.etree.ElementTree as ET
import xmltodict
import pandas
from lxml import etree
from bson.json_util import dumps
# Scrape yearly birth counts per county from Taiwan's MOI statistics site.
# read_html returns every <table> on the page; index 1 is the data table.
bornTables = pandas.read_html("http://statis.moi.gov.tw/micst/stmain.jsp?sys=220&ym=8700&ymt=10500&kind=21&type=1&funid=c0120101&cycle=4&outmode=0&compmode=0&outkind=1&fld4=1&codspc0=0,2,3,2,6,1,9,1,12,1,15,15,&rdm=ceppbtql")
bornTable = bornTables[1]
# Same page layout for yearly death counts (funid=c0120201).
deadTables = pandas.read_html("http://statis.moi.gov.tw/micst/stmain.jsp?sys=220&ym=8700&ymt=10500&kind=21&type=1&funid=c0120201&cycle=4&outmode=0&compmode=0&outkind=1&fld4=1&codspc0=0,2,3,2,6,1,9,1,12,1,15,14,&rdm=hf6pfAlV")
deadTable = deadTables[1]
# Fetch per-county average household income as XML.
# NOTE(review): urllib.urlopen and str.decode below are Python 2 APIs;
# this script will not run under Python 3 without porting.
res = urllib.urlopen("https://www.dgbas.gov.tw/public/data/open/localstat/009-%A6U%BF%A4%A5%AB%A7O%A5%AD%A7%A1%A8C%A4%E1%A9%D2%B1o%A6%AC%A4J%C1%60%ADp.xml")
sa = res.read()
# XML -> dict -> JSON text; decode('unicode-escape') turns the \uXXXX
# escapes emitted by json.dumps back into readable characters.
o = xmltodict.parse(sa)
salary = json.dumps(o)
salary = salary.decode('unicode-escape')
if __name__ == '__main__':
	# Load the scraped tables into a local MongoDB database named "CC".
	client = MongoClient('localhost',27017)
	db = client['CC']
	coll = db['test']
	# Build one JSON document per year (1998-2016) of the birth table:
	# {"Year": ..., <county>: <value>, ...} and insert it into "test".
	# NOTE(review): documents are assembled via string concatenation and
	# re-parsed with json.loads; building a dict directly would be safer.
	data = ''
	for i in range(1998,2017):
		data += '{"Year":"'+str(i)+'"'
		for j in range(1,22):
			# Row 1 of column j holds the county name; row i-1996 holds
			# that year's value (presumably -- verify against the page).
			data += ',"'+bornTable[j][1]+'":"'+bornTable[j][i-1996]+'"'
		data += '}'
		coll.insert_one(json.loads(data))
		data = ''
	# Same per-year documents for the death table, stored in "dead".
	db = client['CC']
	coll = db['dead']
	data = ''
	for i in range(1998,2017):
		data += '{"Year":"'+str(i)+'"'
		for j in range(1,22):
			data += ',"'+deadTable[j][1]+'":"'+deadTable[j][i-1996]+'"'
		data += '}'
		coll.insert_one(json.loads(data))
		data = ''
	# Store the income XML (as one JSON document) in "salary".
	db = client['CC']
	coll = db['salary']
	coll.insert_one(json.loads(salary))
	# Read the "salary" collection back and print it as one JSON array string.
	born = '['
	many_docs = coll.find()
	for doc in many_docs:
		temp = doc
		temp = dumps(temp)
		born += temp.decode('unicode-escape')
	born += ']'
	print born
# many_docs = coll.find()
# for doc in many_docs:
# salary = doc
#
# from bson.json_util import dumps
#
# salary = dumps(salary)
# salary = salary.decode('unicode-escape')
#
# print salary
|
normal
|
{
"blob_id": "7deaee28674c465694c348c21e87addbcc8ea923",
"index": 8237,
"step-1": "from pymongo import MongoClient\nfrom datetime import datetime\nimport sys\nimport requests\nimport urllib\nimport json\nimport xml.etree.ElementTree as ET\nimport xmltodict\nimport pandas\nfrom lxml import etree\nfrom bson.json_util import dumps\n\nbornTables = pandas.read_html(\"http://statis.moi.gov.tw/micst/stmain.jsp?sys=220&ym=8700&ymt=10500&kind=21&type=1&funid=c0120101&cycle=4&outmode=0&compmode=0&outkind=1&fld4=1&codspc0=0,2,3,2,6,1,9,1,12,1,15,15,&rdm=ceppbtql\")\n\nbornTable = bornTables[1]\n\ndeadTables = pandas.read_html(\"http://statis.moi.gov.tw/micst/stmain.jsp?sys=220&ym=8700&ymt=10500&kind=21&type=1&funid=c0120201&cycle=4&outmode=0&compmode=0&outkind=1&fld4=1&codspc0=0,2,3,2,6,1,9,1,12,1,15,14,&rdm=hf6pfAlV\")\n\ndeadTable = deadTables[1]\n\nres = urllib.urlopen(\"https://www.dgbas.gov.tw/public/data/open/localstat/009-%A6U%BF%A4%A5%AB%A7O%A5%AD%A7%A1%A8C%A4%E1%A9%D2%B1o%A6%AC%A4J%C1%60%ADp.xml\")\n\nsa = res.read()\no = xmltodict.parse(sa)\nsalary = json.dumps(o)\nsalary = salary.decode('unicode-escape')\n\n\nif __name__ == '__main__':\n\tclient = MongoClient('localhost',27017) \n\tdb = client['CC']\n\tcoll = db['test']\n\n\tdata = ''\n\tfor i in range(1998,2017):\n\t\tdata += '{\"Year\":\"'+str(i)+'\"'\n\t\tfor j in range(1,22):\n\t\t\tdata += ',\"'+bornTable[j][1]+'\":\"'+bornTable[j][i-1996]+'\"'\n\t\tdata += '}'\n\t\tcoll.insert_one(json.loads(data)) \n\t\tdata = ''\n\t\t\n\tdb = client['CC']\n\tcoll = db['dead']\n\t\n\tdata = ''\n\tfor i in range(1998,2017):\n\t\tdata += '{\"Year\":\"'+str(i)+'\"'\n\t\tfor j in range(1,22):\n\t\t\tdata += ',\"'+deadTable[j][1]+'\":\"'+deadTable[j][i-1996]+'\"'\n\t\tdata += '}'\n\t\tcoll.insert_one(json.loads(data))\n\t\tdata = ''\n\t\n\tdb = client['CC']\n\tcoll = db['salary']\n\t\n\tcoll.insert_one(json.loads(salary))\n\n\tborn = '['\n\tmany_docs = coll.find()\n\tfor doc in many_docs:\n\t\ttemp = doc\n\t\ttemp = dumps(temp)\n\t\tborn += temp.decode('unicode-escape')\n\tborn += ']'\n\tprint 
born\n\n\t\n#\tmany_docs = coll.find()\n#\tfor doc in many_docs:\n#\t\tsalary = doc\n#\t\n#\tfrom bson.json_util import dumps\n#\t\n#\tsalary = dumps(salary)\n#\tsalary = salary.decode('unicode-escape')\n#\t\n#\tprint salary\n\t\t\n\t",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class Session:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Session:
<|reserved_special_token_0|>
def start(self):
self.board.draw()
session = True
while session:
self.clock.tick(TICK_RATE[self.MODE])
self.pucman.move(self.board)
for ghast in self.ghasts:
sprite = self.ghasts[ghast]
sprite.move(self.pucman.pos, self.board)
if sprite.atPucman(self.pucman.pos):
session = False
print('You died to ' + sprite.name)
self.board.draw()
self.pucman.draw(self.board)
for ghast in self.ghasts:
self.ghasts[ghast].draw(self.board._)
pygame.display.update()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Session:
def __init__(self, MODE='PLAYING'):
pygame.init()
board = Board(size=BOARD_SIZE, color=COLOR['BACKGROUND'], level=LEVEL_1
)
pucman = Pucman(start=board.findUniquePos(BOARD_ELEMENT_MAP[
'PUCMAN_START']), size=board.tileSize, color=COLOR['PUCMAN'],
MODE=MODE)
ghasts = {'blinky': Ghast(name='Blinky', start=board.findUniquePos(
BOARD_ELEMENT_MAP['GHAST_SPAWN']), size=board.tileSize, color=
COLOR['BLINKY']), 'pinky': Ghast(name='Pinky', start=board.
findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']), size=board.
tileSize, color=COLOR['PINKY']), 'inky': Ghast(name='Inky',
start=board.findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']),
size=board.tileSize, color=COLOR['INKY']), 'clyde': Ghast(name=
'Clyde', start=board.findUniquePos(BOARD_ELEMENT_MAP[
'GHAST_SPAWN']), size=board.tileSize, color=COLOR['CLYDE'])}
self.board = board
self.pucman = pucman
self.ghasts = ghasts
self.clock = pygame.time.Clock()
self.MODE = MODE
def start(self):
self.board.draw()
session = True
while session:
self.clock.tick(TICK_RATE[self.MODE])
self.pucman.move(self.board)
for ghast in self.ghasts:
sprite = self.ghasts[ghast]
sprite.move(self.pucman.pos, self.board)
if sprite.atPucman(self.pucman.pos):
session = False
print('You died to ' + sprite.name)
self.board.draw()
self.pucman.draw(self.board)
for ghast in self.ghasts:
self.ghasts[ghast].draw(self.board._)
pygame.display.update()
<|reserved_special_token_1|>
import sys, math, random
import pygame
from src.config import *
from src.board.levels import LEVEL_1
from src.pucman import Pucman
from src.ghast import Ghast
from src.board.board import Board
class Session:
def __init__(self, MODE='PLAYING'):
pygame.init()
board = Board(size=BOARD_SIZE, color=COLOR['BACKGROUND'], level=LEVEL_1
)
pucman = Pucman(start=board.findUniquePos(BOARD_ELEMENT_MAP[
'PUCMAN_START']), size=board.tileSize, color=COLOR['PUCMAN'],
MODE=MODE)
ghasts = {'blinky': Ghast(name='Blinky', start=board.findUniquePos(
BOARD_ELEMENT_MAP['GHAST_SPAWN']), size=board.tileSize, color=
COLOR['BLINKY']), 'pinky': Ghast(name='Pinky', start=board.
findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']), size=board.
tileSize, color=COLOR['PINKY']), 'inky': Ghast(name='Inky',
start=board.findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']),
size=board.tileSize, color=COLOR['INKY']), 'clyde': Ghast(name=
'Clyde', start=board.findUniquePos(BOARD_ELEMENT_MAP[
'GHAST_SPAWN']), size=board.tileSize, color=COLOR['CLYDE'])}
self.board = board
self.pucman = pucman
self.ghasts = ghasts
self.clock = pygame.time.Clock()
self.MODE = MODE
def start(self):
self.board.draw()
session = True
while session:
self.clock.tick(TICK_RATE[self.MODE])
self.pucman.move(self.board)
for ghast in self.ghasts:
sprite = self.ghasts[ghast]
sprite.move(self.pucman.pos, self.board)
if sprite.atPucman(self.pucman.pos):
session = False
print('You died to ' + sprite.name)
self.board.draw()
self.pucman.draw(self.board)
for ghast in self.ghasts:
self.ghasts[ghast].draw(self.board._)
pygame.display.update()
<|reserved_special_token_1|>
# import core modules and community packages
import sys, math, random
import pygame
# import configuration settings
from src.config import *
from src.board.levels import LEVEL_1
# import game elements
from src.pucman import Pucman
from src.ghast import Ghast
from src.board.board import Board
class Session():
    """One game session: builds the board and sprites, then runs the loop."""

    def __init__(self, MODE="PLAYING"):
        """Initialise pygame and create the board, Pucman, and the four ghasts."""
        pygame.init()

        self.board = Board(
            size=BOARD_SIZE,
            color=COLOR['BACKGROUND'],
            level=LEVEL_1,
        )
        self.pucman = Pucman(
            start=self.board.findUniquePos(BOARD_ELEMENT_MAP['PUCMAN_START']),
            size=self.board.tileSize,
            color=COLOR['PUCMAN'],
            MODE=MODE,
        )
        # Each ghast spawns at its own unique spawn tile and keeps its
        # classic colour (COLOR key is the upper-cased dict key).
        self.ghasts = {
            key: Ghast(
                name=title,
                start=self.board.findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']),
                size=self.board.tileSize,
                color=COLOR[key.upper()],
            )
            for key, title in (
                ("blinky", "Blinky"),
                ("pinky", "Pinky"),
                ("inky", "Inky"),
                ("clyde", "Clyde"),
            )
        }
        self.clock = pygame.time.Clock()
        self.MODE = MODE

    def start(self):
        """Run the game loop until a ghast catches Pucman."""
        self.board.draw()
        running = True

        while running:
            # Cap the loop at the tick rate configured for the current mode.
            self.clock.tick(TICK_RATE[self.MODE])

            self.pucman.move(self.board)

            # Advance every ghast toward Pucman; contact ends the session.
            for sprite in self.ghasts.values():
                sprite.move(self.pucman.pos, self.board)
                if sprite.atPucman(self.pucman.pos):
                    running = False
                    print("You died to " + sprite.name)

            # Redraw back to front: board, player, then ghasts.
            self.board.draw()
            self.pucman.draw(self.board)
            for sprite in self.ghasts.values():
                sprite.draw(self.board._)

            pygame.display.update()
|
flexible
|
{
"blob_id": "f51a21ed71ede4e7462d9c77cb932a5f05b09e71",
"index": 9174,
"step-1": "<mask token>\n\n\nclass Session:\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Session:\n <mask token>\n\n def start(self):\n self.board.draw()\n session = True\n while session:\n self.clock.tick(TICK_RATE[self.MODE])\n self.pucman.move(self.board)\n for ghast in self.ghasts:\n sprite = self.ghasts[ghast]\n sprite.move(self.pucman.pos, self.board)\n if sprite.atPucman(self.pucman.pos):\n session = False\n print('You died to ' + sprite.name)\n self.board.draw()\n self.pucman.draw(self.board)\n for ghast in self.ghasts:\n self.ghasts[ghast].draw(self.board._)\n pygame.display.update()\n",
"step-3": "<mask token>\n\n\nclass Session:\n\n def __init__(self, MODE='PLAYING'):\n pygame.init()\n board = Board(size=BOARD_SIZE, color=COLOR['BACKGROUND'], level=LEVEL_1\n )\n pucman = Pucman(start=board.findUniquePos(BOARD_ELEMENT_MAP[\n 'PUCMAN_START']), size=board.tileSize, color=COLOR['PUCMAN'],\n MODE=MODE)\n ghasts = {'blinky': Ghast(name='Blinky', start=board.findUniquePos(\n BOARD_ELEMENT_MAP['GHAST_SPAWN']), size=board.tileSize, color=\n COLOR['BLINKY']), 'pinky': Ghast(name='Pinky', start=board.\n findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']), size=board.\n tileSize, color=COLOR['PINKY']), 'inky': Ghast(name='Inky',\n start=board.findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']),\n size=board.tileSize, color=COLOR['INKY']), 'clyde': Ghast(name=\n 'Clyde', start=board.findUniquePos(BOARD_ELEMENT_MAP[\n 'GHAST_SPAWN']), size=board.tileSize, color=COLOR['CLYDE'])}\n self.board = board\n self.pucman = pucman\n self.ghasts = ghasts\n self.clock = pygame.time.Clock()\n self.MODE = MODE\n\n def start(self):\n self.board.draw()\n session = True\n while session:\n self.clock.tick(TICK_RATE[self.MODE])\n self.pucman.move(self.board)\n for ghast in self.ghasts:\n sprite = self.ghasts[ghast]\n sprite.move(self.pucman.pos, self.board)\n if sprite.atPucman(self.pucman.pos):\n session = False\n print('You died to ' + sprite.name)\n self.board.draw()\n self.pucman.draw(self.board)\n for ghast in self.ghasts:\n self.ghasts[ghast].draw(self.board._)\n pygame.display.update()\n",
"step-4": "import sys, math, random\nimport pygame\nfrom src.config import *\nfrom src.board.levels import LEVEL_1\nfrom src.pucman import Pucman\nfrom src.ghast import Ghast\nfrom src.board.board import Board\n\n\nclass Session:\n\n def __init__(self, MODE='PLAYING'):\n pygame.init()\n board = Board(size=BOARD_SIZE, color=COLOR['BACKGROUND'], level=LEVEL_1\n )\n pucman = Pucman(start=board.findUniquePos(BOARD_ELEMENT_MAP[\n 'PUCMAN_START']), size=board.tileSize, color=COLOR['PUCMAN'],\n MODE=MODE)\n ghasts = {'blinky': Ghast(name='Blinky', start=board.findUniquePos(\n BOARD_ELEMENT_MAP['GHAST_SPAWN']), size=board.tileSize, color=\n COLOR['BLINKY']), 'pinky': Ghast(name='Pinky', start=board.\n findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']), size=board.\n tileSize, color=COLOR['PINKY']), 'inky': Ghast(name='Inky',\n start=board.findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']),\n size=board.tileSize, color=COLOR['INKY']), 'clyde': Ghast(name=\n 'Clyde', start=board.findUniquePos(BOARD_ELEMENT_MAP[\n 'GHAST_SPAWN']), size=board.tileSize, color=COLOR['CLYDE'])}\n self.board = board\n self.pucman = pucman\n self.ghasts = ghasts\n self.clock = pygame.time.Clock()\n self.MODE = MODE\n\n def start(self):\n self.board.draw()\n session = True\n while session:\n self.clock.tick(TICK_RATE[self.MODE])\n self.pucman.move(self.board)\n for ghast in self.ghasts:\n sprite = self.ghasts[ghast]\n sprite.move(self.pucman.pos, self.board)\n if sprite.atPucman(self.pucman.pos):\n session = False\n print('You died to ' + sprite.name)\n self.board.draw()\n self.pucman.draw(self.board)\n for ghast in self.ghasts:\n self.ghasts[ghast].draw(self.board._)\n pygame.display.update()\n",
"step-5": "# import core modules and community packages\nimport sys, math, random\nimport pygame\n\n# import configuration settings\nfrom src.config import *\nfrom src.board.levels import LEVEL_1\n\n# import game elements\nfrom src.pucman import Pucman\nfrom src.ghast import Ghast\nfrom src.board.board import Board\n\nclass Session():\n def __init__(self, MODE=\"PLAYING\"):\n # init all game props\n pygame.init()\n\n # initialize game elements\n board = Board(\n size=BOARD_SIZE, \n color=COLOR['BACKGROUND'], \n level=LEVEL_1\n )\n pucman = Pucman(\n start=board.findUniquePos(BOARD_ELEMENT_MAP['PUCMAN_START']), \n size=board.tileSize, \n color=COLOR['PUCMAN'], \n MODE=MODE\n )\n ghasts = {\n \"blinky\": Ghast(\n name=\"Blinky\",\n start=board.findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']), \n size=board.tileSize, \n color=COLOR['BLINKY']\n ),\n \"pinky\": Ghast(\n name=\"Pinky\",\n start=board.findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']), \n size=board.tileSize, \n color=COLOR['PINKY']\n ),\n \"inky\": Ghast(\n name=\"Inky\",\n start=board.findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']), \n size=board.tileSize, \n color=COLOR['INKY']\n ),\n \"clyde\": Ghast(\n name=\"Clyde\",\n start=board.findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']), \n size=board.tileSize, \n color=COLOR['CLYDE'] \n )\n }\n\n self.board = board\n self.pucman = pucman\n self.ghasts = ghasts\n self.clock = pygame.time.Clock()\n self.MODE = MODE\n\n def start(self):\n # draw background & begin session\n self.board.draw()\n session = True\n\n # while playing\n while session:\n # manage game time, 5 ticks per second\n self.clock.tick(TICK_RATE[self.MODE])\n # pygame.time.delay(50)\n\n # update player state\n self.pucman.move(self.board)\n\n # Ghast-AI behavior\n for ghast in self.ghasts:\n sprite = self.ghasts[ghast]\n\n sprite.move(self.pucman.pos, self.board)\n if(sprite.atPucman(self.pucman.pos)):\n session = False\n print(\"You died to \" + sprite.name)\n\n # begin drawing back to front\n 
self.board.draw()\n self.pucman.draw(self.board)\n for ghast in self.ghasts:\n self.ghasts[ghast].draw(self.board._)\n \n # update board\n pygame.display.update()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('page', '0004_auto_20200320_1521')]
operations = [migrations.AddField(model_name='menu', name='level',
field=models.PositiveIntegerField(default=0, editable=False),
preserve_default=False), migrations.AddField(model_name='menu',
name='lft', field=models.PositiveIntegerField(default=0, editable=
False), preserve_default=False), migrations.AddField(model_name=
'menu', name='rght', field=models.PositiveIntegerField(default=0,
editable=False), preserve_default=False), migrations.AddField(
model_name='menu', name='tree_id', field=models.
PositiveIntegerField(db_index=True, default=1, editable=False),
preserve_default=False), migrations.DeleteModel(name='Menu1')]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('page', '0004_auto_20200320_1521')]
operations = [migrations.AddField(model_name='menu', name='level',
field=models.PositiveIntegerField(default=0, editable=False),
preserve_default=False), migrations.AddField(model_name='menu',
name='lft', field=models.PositiveIntegerField(default=0, editable=
False), preserve_default=False), migrations.AddField(model_name=
'menu', name='rght', field=models.PositiveIntegerField(default=0,
editable=False), preserve_default=False), migrations.AddField(
model_name='menu', name='tree_id', field=models.
PositiveIntegerField(db_index=True, default=1, editable=False),
preserve_default=False), migrations.DeleteModel(name='Menu1')]
<|reserved_special_token_1|>
# Generated by Django 3.0.1 on 2020-03-20 09:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('page', '0004_auto_20200320_1521'),
]
operations = [
migrations.AddField(
model_name='menu',
name='level',
field=models.PositiveIntegerField(default=0, editable=False),
preserve_default=False,
),
migrations.AddField(
model_name='menu',
name='lft',
field=models.PositiveIntegerField(default=0, editable=False),
preserve_default=False,
),
migrations.AddField(
model_name='menu',
name='rght',
field=models.PositiveIntegerField(default=0, editable=False),
preserve_default=False,
),
migrations.AddField(
model_name='menu',
name='tree_id',
field=models.PositiveIntegerField(db_index=True, default=1, editable=False),
preserve_default=False,
),
migrations.DeleteModel(
name='Menu1',
),
]
|
flexible
|
{
"blob_id": "807b20f4912ab89bf73966961536a4cd4367f851",
"index": 6468,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('page', '0004_auto_20200320_1521')]\n operations = [migrations.AddField(model_name='menu', name='level',\n field=models.PositiveIntegerField(default=0, editable=False),\n preserve_default=False), migrations.AddField(model_name='menu',\n name='lft', field=models.PositiveIntegerField(default=0, editable=\n False), preserve_default=False), migrations.AddField(model_name=\n 'menu', name='rght', field=models.PositiveIntegerField(default=0,\n editable=False), preserve_default=False), migrations.AddField(\n model_name='menu', name='tree_id', field=models.\n PositiveIntegerField(db_index=True, default=1, editable=False),\n preserve_default=False), migrations.DeleteModel(name='Menu1')]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('page', '0004_auto_20200320_1521')]\n operations = [migrations.AddField(model_name='menu', name='level',\n field=models.PositiveIntegerField(default=0, editable=False),\n preserve_default=False), migrations.AddField(model_name='menu',\n name='lft', field=models.PositiveIntegerField(default=0, editable=\n False), preserve_default=False), migrations.AddField(model_name=\n 'menu', name='rght', field=models.PositiveIntegerField(default=0,\n editable=False), preserve_default=False), migrations.AddField(\n model_name='menu', name='tree_id', field=models.\n PositiveIntegerField(db_index=True, default=1, editable=False),\n preserve_default=False), migrations.DeleteModel(name='Menu1')]\n",
"step-5": "# Generated by Django 3.0.1 on 2020-03-20 09:59\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('page', '0004_auto_20200320_1521'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='menu',\n name='level',\n field=models.PositiveIntegerField(default=0, editable=False),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='menu',\n name='lft',\n field=models.PositiveIntegerField(default=0, editable=False),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='menu',\n name='rght',\n field=models.PositiveIntegerField(default=0, editable=False),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='menu',\n name='tree_id',\n field=models.PositiveIntegerField(db_index=True, default=1, editable=False),\n preserve_default=False,\n ),\n migrations.DeleteModel(\n name='Menu1',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Copyright (c) Facebook, Inc. and its affiliates.
from .build import build_backbone, BACKBONE_REGISTRY # noqa F401 isort:skip
from .backbone import Backbone
from .fpn import FPN
from .resnet import ResNet, ResNetBlockBase, build_resnet_backbone, make_stage
__all__ = [k for k in globals().keys() if not k.startswith("_")]
# TODO can expose more resnet blocks after careful consideration
|
normal
|
{
"blob_id": "502f405f48df92583757ebc9edb4b15910c1f76a",
"index": 2305,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__all__ = [k for k in globals().keys() if not k.startswith('_')]\n",
"step-3": "from .build import build_backbone, BACKBONE_REGISTRY\nfrom .backbone import Backbone\nfrom .fpn import FPN\nfrom .resnet import ResNet, ResNetBlockBase, build_resnet_backbone, make_stage\n__all__ = [k for k in globals().keys() if not k.startswith('_')]\n",
"step-4": "# Copyright (c) Facebook, Inc. and its affiliates.\r\nfrom .build import build_backbone, BACKBONE_REGISTRY # noqa F401 isort:skip\r\n\r\nfrom .backbone import Backbone\r\nfrom .fpn import FPN\r\nfrom .resnet import ResNet, ResNetBlockBase, build_resnet_backbone, make_stage\r\n\r\n__all__ = [k for k in globals().keys() if not k.startswith(\"_\")]\r\n# TODO can expose more resnet blocks after careful consideration\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def test_build_identifier():
assert 'role:server_base' == Node.build_identifier('server_base', 'role')
def test_identifier():
node = Node('server_base', 'role', 'irrelevant')
assert 'role:server_base' == node.identifier()
def test_add_successor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
parent.add_successor(child)
assert child in parent.successors
assert parent in child.predecessors
<|reserved_special_token_0|>
@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',
'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',
'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',
'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((
'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'),
False)])
def test_eq(this, other, equal):
this_node = Node(*this)
other_node = Node(*other)
assert equal and this_node == other_node or not equal and this_node != other_node
@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])
def test_eq_unequal_types(other):
this = Node('myname', 'mytype', 'mypath')
assert this != other
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_build_identifier():
assert 'role:server_base' == Node.build_identifier('server_base', 'role')
def test_identifier():
node = Node('server_base', 'role', 'irrelevant')
assert 'role:server_base' == node.identifier()
def test_add_successor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
parent.add_successor(child)
assert child in parent.successors
assert parent in child.predecessors
<|reserved_special_token_0|>
def test_str():
name = 'myname'
typestring = 'mytype'
path = 'mypath'
node = Node(name, typestring, path)
assert str((typestring, name, path)) == str(node)
@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',
'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',
'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',
'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((
'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'),
False)])
def test_eq(this, other, equal):
this_node = Node(*this)
other_node = Node(*other)
assert equal and this_node == other_node or not equal and this_node != other_node
@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])
def test_eq_unequal_types(other):
this = Node('myname', 'mytype', 'mypath')
assert this != other
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_build_identifier():
assert 'role:server_base' == Node.build_identifier('server_base', 'role')
def test_identifier():
node = Node('server_base', 'role', 'irrelevant')
assert 'role:server_base' == node.identifier()
def test_add_successor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
parent.add_successor(child)
assert child in parent.successors
assert parent in child.predecessors
def test_add_predecessor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
child.add_predecessor(parent)
assert child in parent.successors
assert parent in child.predecessors
def test_str():
name = 'myname'
typestring = 'mytype'
path = 'mypath'
node = Node(name, typestring, path)
assert str((typestring, name, path)) == str(node)
@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',
'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',
'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',
'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((
'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'),
False)])
def test_eq(this, other, equal):
this_node = Node(*this)
other_node = Node(*other)
assert equal and this_node == other_node or not equal and this_node != other_node
@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])
def test_eq_unequal_types(other):
this = Node('myname', 'mytype', 'mypath')
assert this != other
<|reserved_special_token_1|>
import pytest
from ansiblediscover.graph.node import Node
def test_build_identifier():
assert 'role:server_base' == Node.build_identifier('server_base', 'role')
def test_identifier():
node = Node('server_base', 'role', 'irrelevant')
assert 'role:server_base' == node.identifier()
def test_add_successor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
parent.add_successor(child)
assert child in parent.successors
assert parent in child.predecessors
def test_add_predecessor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
child.add_predecessor(parent)
assert child in parent.successors
assert parent in child.predecessors
def test_str():
name = 'myname'
typestring = 'mytype'
path = 'mypath'
node = Node(name, typestring, path)
assert str((typestring, name, path)) == str(node)
@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',
'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',
'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',
'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((
'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'),
False)])
def test_eq(this, other, equal):
this_node = Node(*this)
other_node = Node(*other)
assert equal and this_node == other_node or not equal and this_node != other_node
@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])
def test_eq_unequal_types(other):
this = Node('myname', 'mytype', 'mypath')
assert this != other
<|reserved_special_token_1|>
import pytest
from ansiblediscover.graph.node import Node
def test_build_identifier():
assert 'role:server_base' == Node.build_identifier('server_base', 'role')
def test_identifier():
node = Node('server_base', 'role', 'irrelevant')
assert 'role:server_base' == node.identifier()
def test_add_successor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
parent.add_successor(child)
assert child in parent.successors
assert parent in child.predecessors
def test_add_predecessor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
child.add_predecessor(parent)
assert child in parent.successors
assert parent in child.predecessors
def test_str():
name = 'myname'
typestring = 'mytype'
path = 'mypath'
node = Node(name, typestring, path)
assert str((typestring, name, path)) == str(node)
@pytest.mark.parametrize('this, other, equal', [
(('myname', 'mytype', 'mypath'), ('myname', 'mytype', 'mypath'), True),
(('myname', 'mytype', 'mypath'), ('othername', 'mytype', 'mypath'), False),
(('myname', 'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False),
(('myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'), False),
])
def test_eq(this, other, equal):
this_node = Node(*this)
other_node = Node(*other)
assert (equal and (this_node == other_node)) or (not equal and (this_node != other_node))
@pytest.mark.parametrize('other', [
None,
[],
('myname', 'mytype', 'mypath'),
])
def test_eq_unequal_types(other):
this = Node('myname', 'mytype', 'mypath')
assert this != other
|
flexible
|
{
"blob_id": "8e22db940124f92d3048055cf72dcaa79564cdc6",
"index": 1953,
"step-1": "<mask token>\n\n\ndef test_build_identifier():\n assert 'role:server_base' == Node.build_identifier('server_base', 'role')\n\n\ndef test_identifier():\n node = Node('server_base', 'role', 'irrelevant')\n assert 'role:server_base' == node.identifier()\n\n\ndef test_add_successor():\n parent = Node('appserver', 'playbook', 'appserver.yml')\n child = Node('server_base', 'role', 'roles/server_base')\n parent.add_successor(child)\n assert child in parent.successors\n assert parent in child.predecessors\n\n\n<mask token>\n\n\n@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',\n 'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',\n 'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',\n 'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((\n 'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'), \n False)])\ndef test_eq(this, other, equal):\n this_node = Node(*this)\n other_node = Node(*other)\n assert equal and this_node == other_node or not equal and this_node != other_node\n\n\n@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])\ndef test_eq_unequal_types(other):\n this = Node('myname', 'mytype', 'mypath')\n assert this != other\n",
"step-2": "<mask token>\n\n\ndef test_build_identifier():\n assert 'role:server_base' == Node.build_identifier('server_base', 'role')\n\n\ndef test_identifier():\n node = Node('server_base', 'role', 'irrelevant')\n assert 'role:server_base' == node.identifier()\n\n\ndef test_add_successor():\n parent = Node('appserver', 'playbook', 'appserver.yml')\n child = Node('server_base', 'role', 'roles/server_base')\n parent.add_successor(child)\n assert child in parent.successors\n assert parent in child.predecessors\n\n\n<mask token>\n\n\ndef test_str():\n name = 'myname'\n typestring = 'mytype'\n path = 'mypath'\n node = Node(name, typestring, path)\n assert str((typestring, name, path)) == str(node)\n\n\n@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',\n 'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',\n 'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',\n 'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((\n 'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'), \n False)])\ndef test_eq(this, other, equal):\n this_node = Node(*this)\n other_node = Node(*other)\n assert equal and this_node == other_node or not equal and this_node != other_node\n\n\n@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])\ndef test_eq_unequal_types(other):\n this = Node('myname', 'mytype', 'mypath')\n assert this != other\n",
"step-3": "<mask token>\n\n\ndef test_build_identifier():\n assert 'role:server_base' == Node.build_identifier('server_base', 'role')\n\n\ndef test_identifier():\n node = Node('server_base', 'role', 'irrelevant')\n assert 'role:server_base' == node.identifier()\n\n\ndef test_add_successor():\n parent = Node('appserver', 'playbook', 'appserver.yml')\n child = Node('server_base', 'role', 'roles/server_base')\n parent.add_successor(child)\n assert child in parent.successors\n assert parent in child.predecessors\n\n\ndef test_add_predecessor():\n parent = Node('appserver', 'playbook', 'appserver.yml')\n child = Node('server_base', 'role', 'roles/server_base')\n child.add_predecessor(parent)\n assert child in parent.successors\n assert parent in child.predecessors\n\n\ndef test_str():\n name = 'myname'\n typestring = 'mytype'\n path = 'mypath'\n node = Node(name, typestring, path)\n assert str((typestring, name, path)) == str(node)\n\n\n@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',\n 'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',\n 'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',\n 'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((\n 'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'), \n False)])\ndef test_eq(this, other, equal):\n this_node = Node(*this)\n other_node = Node(*other)\n assert equal and this_node == other_node or not equal and this_node != other_node\n\n\n@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])\ndef test_eq_unequal_types(other):\n this = Node('myname', 'mytype', 'mypath')\n assert this != other\n",
"step-4": "import pytest\nfrom ansiblediscover.graph.node import Node\n\n\ndef test_build_identifier():\n assert 'role:server_base' == Node.build_identifier('server_base', 'role')\n\n\ndef test_identifier():\n node = Node('server_base', 'role', 'irrelevant')\n assert 'role:server_base' == node.identifier()\n\n\ndef test_add_successor():\n parent = Node('appserver', 'playbook', 'appserver.yml')\n child = Node('server_base', 'role', 'roles/server_base')\n parent.add_successor(child)\n assert child in parent.successors\n assert parent in child.predecessors\n\n\ndef test_add_predecessor():\n parent = Node('appserver', 'playbook', 'appserver.yml')\n child = Node('server_base', 'role', 'roles/server_base')\n child.add_predecessor(parent)\n assert child in parent.successors\n assert parent in child.predecessors\n\n\ndef test_str():\n name = 'myname'\n typestring = 'mytype'\n path = 'mypath'\n node = Node(name, typestring, path)\n assert str((typestring, name, path)) == str(node)\n\n\n@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',\n 'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',\n 'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',\n 'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((\n 'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'), \n False)])\ndef test_eq(this, other, equal):\n this_node = Node(*this)\n other_node = Node(*other)\n assert equal and this_node == other_node or not equal and this_node != other_node\n\n\n@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])\ndef test_eq_unequal_types(other):\n this = Node('myname', 'mytype', 'mypath')\n assert this != other\n",
"step-5": "import pytest\n\nfrom ansiblediscover.graph.node import Node\n\n\ndef test_build_identifier():\n assert 'role:server_base' == Node.build_identifier('server_base', 'role')\n\n\ndef test_identifier():\n node = Node('server_base', 'role', 'irrelevant')\n assert 'role:server_base' == node.identifier()\n\n\ndef test_add_successor():\n parent = Node('appserver', 'playbook', 'appserver.yml')\n child = Node('server_base', 'role', 'roles/server_base')\n\n parent.add_successor(child)\n\n assert child in parent.successors\n assert parent in child.predecessors\n\n\ndef test_add_predecessor():\n parent = Node('appserver', 'playbook', 'appserver.yml')\n child = Node('server_base', 'role', 'roles/server_base')\n\n child.add_predecessor(parent)\n\n assert child in parent.successors\n assert parent in child.predecessors\n\n\ndef test_str():\n name = 'myname'\n typestring = 'mytype'\n path = 'mypath'\n node = Node(name, typestring, path)\n\n assert str((typestring, name, path)) == str(node)\n\n\n@pytest.mark.parametrize('this, other, equal', [\n (('myname', 'mytype', 'mypath'), ('myname', 'mytype', 'mypath'), True),\n (('myname', 'mytype', 'mypath'), ('othername', 'mytype', 'mypath'), False),\n (('myname', 'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False),\n (('myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'), False),\n])\ndef test_eq(this, other, equal):\n this_node = Node(*this)\n other_node = Node(*other)\n\n assert (equal and (this_node == other_node)) or (not equal and (this_node != other_node))\n\n\n@pytest.mark.parametrize('other', [\n None,\n [],\n ('myname', 'mytype', 'mypath'),\n])\ndef test_eq_unequal_types(other):\n this = Node('myname', 'mytype', 'mypath')\n assert this != other\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
from numba import jit
@jit
def resolve():
N = int(input())
ans = 0
for n in range(1, N+1):
for m in range(n, N+1, n):
ans += m
print(ans)
if __name__ == "__main__":
resolve()
|
normal
|
{
"blob_id": "8d8df517ca5486e62cc1b5ac23bbcfa65ed9c1ff",
"index": 6611,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@jit\ndef resolve():\n N = int(input())\n ans = 0\n for n in range(1, N + 1):\n for m in range(n, N + 1, n):\n ans += m\n print(ans)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@jit\ndef resolve():\n N = int(input())\n ans = 0\n for n in range(1, N + 1):\n for m in range(n, N + 1, n):\n ans += m\n print(ans)\n\n\nif __name__ == '__main__':\n resolve()\n",
"step-4": "from numba import jit\n\n\n@jit\ndef resolve():\n N = int(input())\n ans = 0\n for n in range(1, N + 1):\n for m in range(n, N + 1, n):\n ans += m\n print(ans)\n\n\nif __name__ == '__main__':\n resolve()\n",
"step-5": "from numba import jit\n\n@jit\ndef resolve():\n N = int(input())\n\n ans = 0\n for n in range(1, N+1):\n for m in range(n, N+1, n):\n ans += m\n print(ans)\n\nif __name__ == \"__main__\":\n resolve()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import scrapy
from kingfisher_scrapy.base_spiders import BigFileSpider
from kingfisher_scrapy.util import components, handle_http_error
class France(BigFileSpider):
"""
Domain
France
Swagger API documentation
https://doc.data.gouv.fr/api/reference/
"""
name = 'france'
# SimpleSpider
data_type = 'release_package'
def start_requests(self):
# A CKAN API JSON response.
# Ministère de l'économie, des finances et de la relance
# https://www.data.gouv.fr/fr/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/
url = 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers' \
'-consolides/'
yield scrapy.Request(url, meta={'file_name': 'page-1.json'}, callback=self.parse_list)
@handle_http_error
def parse_list(self, response):
for resource in response.json()['resources']:
description = resource['description']
if description and 'ocds' in description.lower():
yield self.build_request(resource['url'], formatter=components(-2))
|
normal
|
{
"blob_id": "369bffa21b5b8c0ca1d93da3aa30a38e2f4c82cc",
"index": 9451,
"step-1": "<mask token>\n\n\nclass France(BigFileSpider):\n <mask token>\n <mask token>\n <mask token>\n\n def start_requests(self):\n url = (\n 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'\n )\n yield scrapy.Request(url, meta={'file_name': 'page-1.json'},\n callback=self.parse_list)\n\n @handle_http_error\n def parse_list(self, response):\n for resource in response.json()['resources']:\n description = resource['description']\n if description and 'ocds' in description.lower():\n yield self.build_request(resource['url'], formatter=\n components(-2))\n",
"step-2": "<mask token>\n\n\nclass France(BigFileSpider):\n <mask token>\n name = 'france'\n data_type = 'release_package'\n\n def start_requests(self):\n url = (\n 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'\n )\n yield scrapy.Request(url, meta={'file_name': 'page-1.json'},\n callback=self.parse_list)\n\n @handle_http_error\n def parse_list(self, response):\n for resource in response.json()['resources']:\n description = resource['description']\n if description and 'ocds' in description.lower():\n yield self.build_request(resource['url'], formatter=\n components(-2))\n",
"step-3": "<mask token>\n\n\nclass France(BigFileSpider):\n \"\"\"\n Domain\n France\n Swagger API documentation\n https://doc.data.gouv.fr/api/reference/\n \"\"\"\n name = 'france'\n data_type = 'release_package'\n\n def start_requests(self):\n url = (\n 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'\n )\n yield scrapy.Request(url, meta={'file_name': 'page-1.json'},\n callback=self.parse_list)\n\n @handle_http_error\n def parse_list(self, response):\n for resource in response.json()['resources']:\n description = resource['description']\n if description and 'ocds' in description.lower():\n yield self.build_request(resource['url'], formatter=\n components(-2))\n",
"step-4": "import scrapy\nfrom kingfisher_scrapy.base_spiders import BigFileSpider\nfrom kingfisher_scrapy.util import components, handle_http_error\n\n\nclass France(BigFileSpider):\n \"\"\"\n Domain\n France\n Swagger API documentation\n https://doc.data.gouv.fr/api/reference/\n \"\"\"\n name = 'france'\n data_type = 'release_package'\n\n def start_requests(self):\n url = (\n 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/'\n )\n yield scrapy.Request(url, meta={'file_name': 'page-1.json'},\n callback=self.parse_list)\n\n @handle_http_error\n def parse_list(self, response):\n for resource in response.json()['resources']:\n description = resource['description']\n if description and 'ocds' in description.lower():\n yield self.build_request(resource['url'], formatter=\n components(-2))\n",
"step-5": "import scrapy\n\nfrom kingfisher_scrapy.base_spiders import BigFileSpider\nfrom kingfisher_scrapy.util import components, handle_http_error\n\n\nclass France(BigFileSpider):\n \"\"\"\n Domain\n France\n Swagger API documentation\n https://doc.data.gouv.fr/api/reference/\n \"\"\"\n name = 'france'\n\n # SimpleSpider\n data_type = 'release_package'\n\n def start_requests(self):\n # A CKAN API JSON response.\n # Ministère de l'économie, des finances et de la relance\n # https://www.data.gouv.fr/fr/datasets/donnees-essentielles-de-la-commande-publique-fichiers-consolides/\n url = 'https://www.data.gouv.fr/api/1/datasets/donnees-essentielles-de-la-commande-publique-fichiers' \\\n '-consolides/'\n yield scrapy.Request(url, meta={'file_name': 'page-1.json'}, callback=self.parse_list)\n\n @handle_http_error\n def parse_list(self, response):\n for resource in response.json()['resources']:\n description = resource['description']\n if description and 'ocds' in description.lower():\n yield self.build_request(resource['url'], formatter=components(-2))\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(re.fullmatch(re_str, '2a a'))
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
print(re.fullmatch('a*b', 'b'))
print(re.fullmatch('a+b', 'aaaab'))
print(re.fullmatch('[+-]?[1-9]\\d*', '+145345'))
<|reserved_special_token_0|>
if result:
print('密码正确')
else:
print('密码错误')
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(re.findall('\\d+', str1))
<|reserved_special_token_0|>
print(re.findall('a\\d+', str2))
print(re.findall('a(\\d+)', str2))
<|reserved_special_token_0|>
print(re.findall('^(http://)?www.(\\w+).com', str3))
<|reserved_special_token_0|>
print(re.findall(re_str, '123efa123123-ef'))
<|reserved_special_token_0|>
print(re.findall(re_str, str1))
print(re.findall(re_str1, str1))
<|reserved_special_token_0|>
print(re.fullmatch(re_str, 'a+(23)'))
<|reserved_special_token_0|>
print(re_obj.fullmatch('234'))
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
print(result.start(), result.end())
print(result.group())
print(result.group(1))
print(result.string)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_0|>
print(result)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
re_str = 'abc'
result = re.fullmatch(re_str, 'abc')
print(result)
re_str = 'a.c'
result = re.fullmatch(re_str, 'abc')
print(result)
re_str = '\\w\\w...'
result = re.fullmatch(re_str, '_a123')
print(result)
re_str = '\\w\\w\\s\\w'
result = re.fullmatch(re_str, 'hj\t8')
print(result)
re_str = '\\d\\d\\d..'
result = re.fullmatch(re_str, '082ww')
print(result)
re_str = 'hello\\bworld'
result = re.fullmatch(re_str, 'hello world')
print(result)
re_str = '\\bhello,\\bworld'
result = re.fullmatch(re_str, 'hello,world')
print(result)
re_str = '^The..'
result = re.fullmatch(re_str, 'The2;')
print(result)
re_str = 'The$'
result = re.fullmatch(re_str, 'The')
print(result)
re_str = '\\d\\D\\s\\s\\Ba'
print(re.fullmatch(re_str, '2a a'))
re_str = '\\d[bcd]'
result = re.fullmatch(re_str, '2d')
print(result)
re_str = '[1-7][abc-][a-z]'
result = re.fullmatch(re_str, '3-b')
print(result)
re_str = '[^a-z]'
result = re.fullmatch(re_str, '是')
print(result)
print(re.fullmatch('a*b', 'b'))
print(re.fullmatch('a+b', 'aaaab'))
print(re.fullmatch('[+-]?[1-9]\\d*', '+145345'))
re_str = '[a-zA-Z][a-zA-Z\\d]{5,11}'
str1 = 'ab123456'
result = re.fullmatch(re_str, str1)
if result:
print('密码正确')
else:
print('密码错误')
re_str = '[-+]?[1-9]\\d*[.]?\\d*|[-+]?0[.][0-9]*[1-9]|0'
result = re.fullmatch(re_str, '0.0000009')
print(result)
str1 = 'ahs123+34asdf24'
print(re.findall('\\d+', str1))
str2 = 'a153s123+34asfa24'
print(re.findall('a\\d+', str2))
print(re.findall('a(\\d+)', str2))
str3 = 'http://www.qq.com'
print(re.findall('^(http://)?www.(\\w+).com', str3))
re_str = '(\\d{3})([a-z]{2})a\\1{2}-\\2'
print(re.findall(re_str, '123efa123123-ef'))
re_str = 'a.+b'
re_str1 = 'a.+?b'
str1 = 'xxahdjbnnkhasssbkkkkk'
print(re.findall(re_str, str1))
print(re.findall(re_str1, str1))
re_str = 'a\\+\\(\\d{2}\\)'
print(re.fullmatch(re_str, 'a+(23)'))
re_str = '\\d{3}'
re_obj = re.compile(re_str)
print(re_obj.fullmatch('234'))
re_str = '\\d([A-Z]{2})'
result = re.fullmatch(re_str, '2HKdfsd')
print(result)
result = re.match(re_str, '8KLsifdfd==')
print(result)
print(result.start(), result.end())
print(result.group())
print(result.group(1))
print(result.string)
str1 = 'abc123hks362shjjk990kll'
result = re.search('\\d{3}[a-z]{2}', str1)
print(result)
str1 = 'ab+c7hdjd8jss-sk9s9kk*k'
result = re.split('\\d+|[+*-]+', str1)
print(result)
str = 'abcd1235asdf'
result = re.findall('a[a-zA-Z]+', str)
print(result)
<|reserved_special_token_1|>
import re
# Demo of re.fullmatch: it succeeds only if the ENTIRE string matches the
# pattern, returning a match object on success and None on failure.
# Plain characters match themselves.
re_str = 'abc'
result = re.fullmatch(re_str, 'abc')
print(result)
# . matches any single character.
re_str = 'a.c'
result = re.fullmatch(re_str, 'abc')
print(result)
# \w matches a letter, digit or underscore.
re_str = '\\w\\w...'
result = re.fullmatch(re_str, '_a123')
print(result)
# \s matches a whitespace character (space, \t, \r, \n, ...).
re_str = '\\w\\w\\s\\w'
result = re.fullmatch(re_str, 'hj\t8')
print(result)
# \d matches a digit character.
re_str = '\\d\\d\\d..'
result = re.fullmatch(re_str, '082ww')
print(result)
# \b asserts a word boundary (zero-width assertion).
re_str = 'hello\\bworld'
result = re.fullmatch(re_str, 'hello world')
print(result)
re_str = '\\bhello,\\bworld'
result = re.fullmatch(re_str, 'hello,world')
print(result)
# ^ anchors at the start of the string.
re_str = '^The..'
result = re.fullmatch(re_str, 'The2;')
print(result)
# $ anchors at the end of the string.
re_str = 'The$'
result = re.fullmatch(re_str, 'The')
print(result)
# Uppercase escapes negate the lowercase ones: \D non-digit, \S non-space,
# \B non-word-boundary.
re_str = '\\d\\D\\s\\s\\Ba'
print(re.fullmatch(re_str, '2a a'))
# [...] matches any one character listed inside the brackets.
re_str = '\\d[bcd]'
result = re.fullmatch(re_str, '2d')
print(result)
# Ranges like [1-7]/[a-z] are allowed; a trailing '-' is a literal hyphen.
re_str = '[1-7][abc-][a-z]'
result = re.fullmatch(re_str, '3-b')
print(result)
# [^...] negates the class: any character that is NOT a lowercase letter.
re_str = '[^a-z]'
result = re.fullmatch(re_str, '是')
print(result)
# Quantifiers: * zero or more, + one or more, ? zero or one.
print(re.fullmatch('a*b', 'b'))
print(re.fullmatch('a+b', 'aaaab'))
print(re.fullmatch('[+-]?[1-9]\\d*', '+145345'))
# {M,N} repeats between M and N times: a 6-12 character "password" that
# starts with a letter and continues with letters or digits.
re_str = '[a-zA-Z][a-zA-Z\\d]{5,11}'
str1 = 'ab123456'
result = re.fullmatch(re_str, str1)
if result:
    print('密码正确')
else:
    print('密码错误')
# Alternation (|): a pattern accepting ordinary decimal numbers.
re_str = '[-+]?[1-9]\\d*[.]?\\d*|[-+]?0[.][0-9]*[1-9]|0'
result = re.fullmatch(re_str, '0.0000009')
print(result)
# re.findall returns every non-overlapping match as a list.
str1 = 'ahs123+34asdf24'
print(re.findall('\\d+', str1))
str2 = 'a153s123+34asfa24'
print(re.findall('a\\d+', str2))
# With a capture group, findall returns only the group's text.
print(re.findall('a(\\d+)', str2))
str3 = 'http://www.qq.com'
print(re.findall('^(http://)?www.(\\w+).com', str3))
# \1 / \2 are backreferences to what groups 1 and 2 matched.
re_str = '(\\d{3})([a-z]{2})a\\1{2}-\\2'
print(re.findall(re_str, '123efa123123-ef'))
# Greedy (.+) vs non-greedy (.+?) repetition.
re_str = 'a.+b'
re_str1 = 'a.+?b'
str1 = 'xxahdjbnnkhasssbkkkkk'
print(re.findall(re_str, str1))
print(re.findall(re_str1, str1))
# A backslash escapes a metacharacter so it matches literally.
re_str = 'a\\+\\(\\d{2}\\)'
print(re.fullmatch(re_str, 'a+(23)'))
# re.compile precompiles a pattern into a reusable pattern object.
re_str = '\\d{3}'
re_obj = re.compile(re_str)
print(re_obj.fullmatch('234'))
# match anchors only at the start (prefix match); fullmatch must consume
# the whole string.
re_str = '\\d([A-Z]{2})'
result = re.fullmatch(re_str, '2HKdfsd')
print(result)
result = re.match(re_str, '8KLsifdfd==')
print(result)
# Match-object accessors: span indices, matched text, group text, subject.
print(result.start(), result.end())
print(result.group())
print(result.group(1))
print(result.string)
# re.search finds the first match anywhere in the string (or None).
str1 = 'abc123hks362shjjk990kll'
result = re.search('\\d{3}[a-z]{2}', str1)
print(result)
# re.split cuts the string on every match of the pattern.
str1 = 'ab+c7hdjd8jss-sk9s9kk*k'
result = re.split('\\d+|[+*-]+', str1)
print(result)
# NOTE(review): `str` shadows the built-in name; left unchanged here.
str = 'abcd1235asdf'
result = re.findall('a[a-zA-Z]+', str)
print(result)
<|reserved_special_token_1|>
# -*- coding:utf-8 -*-
import re
# A plain pattern matches itself literally.
re_str = r'abc'
result = re.fullmatch(re_str, 'abc')
print(result)
# . matches any single character (one . per character).
re_str = r'a.c'
result = re.fullmatch(re_str, 'abc')
print(result)
# \w matches a letter, digit or underscore (it also matches CJK characters).
# Here: a 5-character string whose first two characters are word characters,
# followed by any three characters.
re_str = r'\w\w...'
result = re.fullmatch(re_str, '_a123')
print(result)
# \s matches a whitespace character.
# Whitespace includes spaces, tabs and newlines: \t, \r, \n.
re_str = r'\w\w\s\w'
result = re.fullmatch(re_str, 'hj\t8')
print(result)
# \d matches a digit character.
re_str = r'\d\d\d..'
result = re.fullmatch(re_str, '082ww')
print(result)
# \b asserts a word boundary (zero-width).
re_str = r'hello\bworld'
result = re.fullmatch(re_str, 'hello world')
print(result)
re_str = r'\bhello,\bworld'
result = re.fullmatch(re_str, 'hello,world')
print(result)
# ^ anchors at the start of the string.
re_str = r'^The..'
result = re.fullmatch(re_str, 'The2;')
print(result)
# $ anchors at the end of the string.
re_str = r'The$'
result = re.fullmatch(re_str, 'The')
print(result)
# Each uppercase escape negates the lowercase one:
# \W matches a non-word character
# \D matches a non-digit character
# \S matches a non-whitespace character
# \B asserts a non-word-boundary
re_str = r'\d\D\s\s\Ba'
print(re.fullmatch(re_str, '2a a'))
# Character classes
# [...] matches any one character listed inside the brackets.
re_str = r'\d[bcd]'
result = re.fullmatch(re_str, '2d')
print(result)
# [a-z]            any lowercase letter
# [A-Z]            any uppercase letter
# [a-zA-Z]         any letter
# [1-7]            digits 1 through 7
# [\u4e00-\u9fa5]  any CJK (Chinese) character
# A trailing '-' inside [...] is a literal hyphen.
re_str = r'[1-7][abc-][a-z]'
result = re.fullmatch(re_str, '3-b')
print(result)
# [^abc]  any one character except a, b or c
# [^\d]   any non-digit character
# [^a-z]  any character that is not a lowercase letter
# [abc^]  any one of a, b, c or ^ (a non-leading ^ is literal)
re_str = r'[^a-z]'
result = re.fullmatch(re_str, '是')
print(result)
# Repetition counts
# * matches zero or more repetitions: a*, \d*, [abc]*, [A-F]* ...
print(re.fullmatch(r'a*b', 'b'))
# + matches one or more repetitions.
print(re.fullmatch(r'a+b', 'aaaab'))
# ? matches zero or one repetition.
print(re.fullmatch(r'[+-]?[1-9]\d*', '+145345'))
# {N}    exactly N times (a{3} matches three a's)
# {M,N}  between M and N times
# {,N}   at most N times
# {M,}   at least M times
re_str = r'[a-zA-Z][a-zA-Z\d]{5,11}'
# str1 = input('Enter a password: ')
str1 = 'ab123456'
result = re.fullmatch(re_str, str1)
if result:
    print('密码正确')
else:
    print('密码错误')
# Alternation, capturing and greediness
# Alternation: pattern1|pattern2 matches either alternative.
# \d{2}|[a-z] matches two digits or one lowercase letter.
# Alternation short-circuits: once the first alternative matches, the
# second one is not tried.
re_str = r'[-+]?[1-9]\d*[.]?\d*|[-+]?0[.][0-9]*[1-9]|0'
result = re.fullmatch(re_str, '0.0000009')
print(result)
# Capturing: with parentheses in the pattern, findall returns only the text
# matched by the group(s) instead of the whole match.
# re.findall(pattern, string) returns a list of every non-overlapping match.
str1 = 'ahs123+34asdf24'
print(re.findall(r'\d+', str1))
str2 = 'a153s123+34asfa24'
print(re.findall(r'a\d+', str2))
print(re.findall(r'a(\d+)', str2))
str3 = 'http://www.qq.com'
print(re.findall(r'^(http://)?www.(\w+).com', str3))
# Backreferences: \N re-matches exactly the text captured by group N.
re_str = r'(\d{3})([a-z]{2})a\1{2}-\2'
print(re.findall(re_str, '123efa123123-ef'))
# Non-greedy: a ? after a quantifier (*?, +?, ??, {M,N}?, {M,}?) repeats as
# few times as possible.
re_str = 'a.+b'
re_str1 = 'a.+?b'
str1 = 'xxahdjbnnkhasssbkkkkk'
print(re.findall(re_str, str1))
print(re.findall(re_str1, str1))
# Escaping: \ makes a metacharacter match literally.
re_str = r'a\+\(\d{2}\)'
print(re.fullmatch(re_str, 'a+(23)'))
# The re module
# re.compile precompiles a pattern into a reusable pattern object.
re_str = r'\d{3}'
re_obj = re.compile(re_str)
print(re_obj.fullmatch('234'))
# match anchors only at the start of the string (prefix match); it returns a
# match object on success and None on failure.
# fullmatch must consume the entire string, start to end.
re_str = r'\d([A-Z]{2})'
result = re.fullmatch(re_str, '2HKdfsd')
print(result)
result = re.match(re_str, '8KLsifdfd==')
print(result)
# Match objects
# start()/end() give the start/end index of the whole match;
# start(n)/end(n) give the span of capture group n.
print(result.start(), result.end())
# print(result.start(1), result.end(2))
# group() returns the text of the whole match;
# group(n) returns the text matched by capture group n.
print(result.group())
print(result.group(1))
# .string is the original subject string that was matched against.
print(result.string)
# re.search(pattern, string) finds the first substring matching the pattern
# anywhere in the string; returns a match object or None.
str1 = 'abc123hks362shjjk990kll'
result = re.search(r'\d{3}[a-z]{2}', str1)
print(result)
# re.split(pattern, string) splits the string on every match of the pattern.
str1 = 'ab+c7hdjd8jss-sk9s9kk*k'
result = re.split(r'\d+|[+*-]+', str1)
print(result)
# re.findall(pattern, string) returns all matching substrings as a list.
str = 'abcd1235asdf'
result = re.findall(r'a[a-zA-Z]+', str)
print(result)
# re.finditer(pattern, string) returns an iterator of match objects; a
# hand-rolled equivalent built on re.search is sketched below.
# def yt_finditer(pattern, string):
#     re1 = re.search(pattern, string)
#     while re1:
#         yield re1
#         string = string[re1.end():]
#         re1 = re.search(pattern, string)
#
# str1='haja37jjkd89sdhs909nnna238==='
# result = yt_finditer(r'[a-zA-Z]{2,}(\d+)(a-z)+?', str1)
# print(next(result))
|
flexible
|
{
"blob_id": "e0e00688a75021c2f8b608d4c942f5e68f6a6a48",
"index": 6282,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(result)\n<mask token>\nprint(result)\n<mask token>\nprint(result)\n<mask token>\nprint(result)\n<mask token>\nprint(result)\n<mask token>\nprint(result)\n<mask token>\nprint(result)\n<mask token>\nprint(result)\n<mask token>\nprint(result)\n<mask token>\nprint(re.fullmatch(re_str, '2a a'))\n<mask token>\nprint(result)\n<mask token>\nprint(result)\n<mask token>\nprint(result)\nprint(re.fullmatch('a*b', 'b'))\nprint(re.fullmatch('a+b', 'aaaab'))\nprint(re.fullmatch('[+-]?[1-9]\\\\d*', '+145345'))\n<mask token>\nif result:\n print('密码正确')\nelse:\n print('密码错误')\n<mask token>\nprint(result)\n<mask token>\nprint(re.findall('\\\\d+', str1))\n<mask token>\nprint(re.findall('a\\\\d+', str2))\nprint(re.findall('a(\\\\d+)', str2))\n<mask token>\nprint(re.findall('^(http://)?www.(\\\\w+).com', str3))\n<mask token>\nprint(re.findall(re_str, '123efa123123-ef'))\n<mask token>\nprint(re.findall(re_str, str1))\nprint(re.findall(re_str1, str1))\n<mask token>\nprint(re.fullmatch(re_str, 'a+(23)'))\n<mask token>\nprint(re_obj.fullmatch('234'))\n<mask token>\nprint(result)\n<mask token>\nprint(result)\nprint(result.start(), result.end())\nprint(result.group())\nprint(result.group(1))\nprint(result.string)\n<mask token>\nprint(result)\n<mask token>\nprint(result)\n<mask token>\nprint(result)\n",
"step-3": "<mask token>\nre_str = 'abc'\nresult = re.fullmatch(re_str, 'abc')\nprint(result)\nre_str = 'a.c'\nresult = re.fullmatch(re_str, 'abc')\nprint(result)\nre_str = '\\\\w\\\\w...'\nresult = re.fullmatch(re_str, '_a123')\nprint(result)\nre_str = '\\\\w\\\\w\\\\s\\\\w'\nresult = re.fullmatch(re_str, 'hj\\t8')\nprint(result)\nre_str = '\\\\d\\\\d\\\\d..'\nresult = re.fullmatch(re_str, '082ww')\nprint(result)\nre_str = 'hello\\\\bworld'\nresult = re.fullmatch(re_str, 'hello world')\nprint(result)\nre_str = '\\\\bhello,\\\\bworld'\nresult = re.fullmatch(re_str, 'hello,world')\nprint(result)\nre_str = '^The..'\nresult = re.fullmatch(re_str, 'The2;')\nprint(result)\nre_str = 'The$'\nresult = re.fullmatch(re_str, 'The')\nprint(result)\nre_str = '\\\\d\\\\D\\\\s\\\\s\\\\Ba'\nprint(re.fullmatch(re_str, '2a a'))\nre_str = '\\\\d[bcd]'\nresult = re.fullmatch(re_str, '2d')\nprint(result)\nre_str = '[1-7][abc-][a-z]'\nresult = re.fullmatch(re_str, '3-b')\nprint(result)\nre_str = '[^a-z]'\nresult = re.fullmatch(re_str, '是')\nprint(result)\nprint(re.fullmatch('a*b', 'b'))\nprint(re.fullmatch('a+b', 'aaaab'))\nprint(re.fullmatch('[+-]?[1-9]\\\\d*', '+145345'))\nre_str = '[a-zA-Z][a-zA-Z\\\\d]{5,11}'\nstr1 = 'ab123456'\nresult = re.fullmatch(re_str, str1)\nif result:\n print('密码正确')\nelse:\n print('密码错误')\nre_str = '[-+]?[1-9]\\\\d*[.]?\\\\d*|[-+]?0[.][0-9]*[1-9]|0'\nresult = re.fullmatch(re_str, '0.0000009')\nprint(result)\nstr1 = 'ahs123+34asdf24'\nprint(re.findall('\\\\d+', str1))\nstr2 = 'a153s123+34asfa24'\nprint(re.findall('a\\\\d+', str2))\nprint(re.findall('a(\\\\d+)', str2))\nstr3 = 'http://www.qq.com'\nprint(re.findall('^(http://)?www.(\\\\w+).com', str3))\nre_str = '(\\\\d{3})([a-z]{2})a\\\\1{2}-\\\\2'\nprint(re.findall(re_str, '123efa123123-ef'))\nre_str = 'a.+b'\nre_str1 = 'a.+?b'\nstr1 = 'xxahdjbnnkhasssbkkkkk'\nprint(re.findall(re_str, str1))\nprint(re.findall(re_str1, str1))\nre_str = 'a\\\\+\\\\(\\\\d{2}\\\\)'\nprint(re.fullmatch(re_str, 'a+(23)'))\nre_str = 
'\\\\d{3}'\nre_obj = re.compile(re_str)\nprint(re_obj.fullmatch('234'))\nre_str = '\\\\d([A-Z]{2})'\nresult = re.fullmatch(re_str, '2HKdfsd')\nprint(result)\nresult = re.match(re_str, '8KLsifdfd==')\nprint(result)\nprint(result.start(), result.end())\nprint(result.group())\nprint(result.group(1))\nprint(result.string)\nstr1 = 'abc123hks362shjjk990kll'\nresult = re.search('\\\\d{3}[a-z]{2}', str1)\nprint(result)\nstr1 = 'ab+c7hdjd8jss-sk9s9kk*k'\nresult = re.split('\\\\d+|[+*-]+', str1)\nprint(result)\nstr = 'abcd1235asdf'\nresult = re.findall('a[a-zA-Z]+', str)\nprint(result)\n",
"step-4": "import re\nre_str = 'abc'\nresult = re.fullmatch(re_str, 'abc')\nprint(result)\nre_str = 'a.c'\nresult = re.fullmatch(re_str, 'abc')\nprint(result)\nre_str = '\\\\w\\\\w...'\nresult = re.fullmatch(re_str, '_a123')\nprint(result)\nre_str = '\\\\w\\\\w\\\\s\\\\w'\nresult = re.fullmatch(re_str, 'hj\\t8')\nprint(result)\nre_str = '\\\\d\\\\d\\\\d..'\nresult = re.fullmatch(re_str, '082ww')\nprint(result)\nre_str = 'hello\\\\bworld'\nresult = re.fullmatch(re_str, 'hello world')\nprint(result)\nre_str = '\\\\bhello,\\\\bworld'\nresult = re.fullmatch(re_str, 'hello,world')\nprint(result)\nre_str = '^The..'\nresult = re.fullmatch(re_str, 'The2;')\nprint(result)\nre_str = 'The$'\nresult = re.fullmatch(re_str, 'The')\nprint(result)\nre_str = '\\\\d\\\\D\\\\s\\\\s\\\\Ba'\nprint(re.fullmatch(re_str, '2a a'))\nre_str = '\\\\d[bcd]'\nresult = re.fullmatch(re_str, '2d')\nprint(result)\nre_str = '[1-7][abc-][a-z]'\nresult = re.fullmatch(re_str, '3-b')\nprint(result)\nre_str = '[^a-z]'\nresult = re.fullmatch(re_str, '是')\nprint(result)\nprint(re.fullmatch('a*b', 'b'))\nprint(re.fullmatch('a+b', 'aaaab'))\nprint(re.fullmatch('[+-]?[1-9]\\\\d*', '+145345'))\nre_str = '[a-zA-Z][a-zA-Z\\\\d]{5,11}'\nstr1 = 'ab123456'\nresult = re.fullmatch(re_str, str1)\nif result:\n print('密码正确')\nelse:\n print('密码错误')\nre_str = '[-+]?[1-9]\\\\d*[.]?\\\\d*|[-+]?0[.][0-9]*[1-9]|0'\nresult = re.fullmatch(re_str, '0.0000009')\nprint(result)\nstr1 = 'ahs123+34asdf24'\nprint(re.findall('\\\\d+', str1))\nstr2 = 'a153s123+34asfa24'\nprint(re.findall('a\\\\d+', str2))\nprint(re.findall('a(\\\\d+)', str2))\nstr3 = 'http://www.qq.com'\nprint(re.findall('^(http://)?www.(\\\\w+).com', str3))\nre_str = '(\\\\d{3})([a-z]{2})a\\\\1{2}-\\\\2'\nprint(re.findall(re_str, '123efa123123-ef'))\nre_str = 'a.+b'\nre_str1 = 'a.+?b'\nstr1 = 'xxahdjbnnkhasssbkkkkk'\nprint(re.findall(re_str, str1))\nprint(re.findall(re_str1, str1))\nre_str = 'a\\\\+\\\\(\\\\d{2}\\\\)'\nprint(re.fullmatch(re_str, 'a+(23)'))\nre_str = 
'\\\\d{3}'\nre_obj = re.compile(re_str)\nprint(re_obj.fullmatch('234'))\nre_str = '\\\\d([A-Z]{2})'\nresult = re.fullmatch(re_str, '2HKdfsd')\nprint(result)\nresult = re.match(re_str, '8KLsifdfd==')\nprint(result)\nprint(result.start(), result.end())\nprint(result.group())\nprint(result.group(1))\nprint(result.string)\nstr1 = 'abc123hks362shjjk990kll'\nresult = re.search('\\\\d{3}[a-z]{2}', str1)\nprint(result)\nstr1 = 'ab+c7hdjd8jss-sk9s9kk*k'\nresult = re.split('\\\\d+|[+*-]+', str1)\nprint(result)\nstr = 'abcd1235asdf'\nresult = re.findall('a[a-zA-Z]+', str)\nprint(result)\n",
"step-5": "# -*- coding:utf-8 -*-\nimport re\n\n# 普通字符串 匹配本身\nre_str = r'abc'\nresult = re.fullmatch(re_str, 'abc')\nprint(result)\n# 匹配任意字符 一个.只能匹配一个字符\nre_str = r'a.c'\nresult = re.fullmatch(re_str, 'abc')\nprint(result)\n# \\w匹配字母数字或下划线\n# 匹配一个长度是5的字符串并且字符串的前两位是数字字母或者下划线后面是三个任意字符串 \\w中文也能匹配\nre_str = r'\\w\\w...'\nresult = re.fullmatch(re_str, '_a123')\nprint(result)\n# \\s匹配空白字符\n# 空白字符串包括空格,制表符,换行符:\\t,\\r,\\n\nre_str = r'\\w\\w\\s\\w'\nresult = re.fullmatch(re_str, 'hj\\t8')\nprint(result)\n# \\d匹配数字字符\nre_str = r'\\d\\d\\d..'\nresult = re.fullmatch(re_str, '082ww')\nprint(result)\n# \\b检测单词边界\nre_str = r'hello\\bworld'\nresult = re.fullmatch(re_str, 'hello world')\nprint(result)\nre_str = r'\\bhello,\\bworld'\nresult = re.fullmatch(re_str, 'hello,world')\nprint(result)\n# ^检测字符串开头\nre_str = r'^The..'\nresult = re.fullmatch(re_str, 'The2;')\nprint(result)\n# $检测字符串结尾\nre_str = r'The$'\nresult = re.fullmatch(re_str, 'The')\nprint(result)\n# \\大写字母对应的功能是\\小写字母功能取反\n# \\W 匹配非字母数字下划线\n# \\D 匹配非数字字符\n# \\S 匹配空白字符串\n# \\B 检测非单词边界\nre_str = r'\\d\\D\\s\\s\\Ba'\nprint(re.fullmatch(re_str, '2a a'))\n# 字符集\n# 匹配中括号出现的任意一个字符\nre_str = r'\\d[bcd]'\nresult = re.fullmatch(re_str, '2d')\nprint(result)\n# [a-z] 表示匹配所有的小写字母\n# [A_Z] 表示匹配所有的大写字母\n# [a-zA-Z] 匹配所有的字母\n# [1-7] 匹配数字字符1到7\n# [\\u4e00-\\u9fa5] 匹配所有的中文\n# [字符1字符2-] 这儿的-表示减号本身\nre_str = r'[1-7][abc-][a-z]'\nresult = re.fullmatch(re_str, '3-b')\nprint(result)\n# [^abc] 匹配不再abc以外的任意一个字符\n# [^\\d] 匹配除了数字字符以外的任意一个字符\n# [^a-z] 匹配除了小写字母以外的其他任意一个字符\n# [abc^] 匹配abc^中的任意一个字符\nre_str = r'[^a-z]'\nresult = re.fullmatch(re_str, '是')\nprint(result)\n# 正则控制匹配次数\n# *(匹配0次或者多次) a* a出现0次或多次 \\d* 任意数字出现0次或多次 [abc]* a,b,c出现0次或多次 [A-F] A到F中任意字符出现0次或多次\nprint(re.fullmatch(r'a*b', 'b'))\n# +(匹配1次或者多次)\nprint(re.fullmatch(r'a+b', 'aaaab'))\n# ?(匹配0次或1次)\nprint(re.fullmatch(r'[+-]?[1-9]\\d*', '+145345'))\n# {N} 匹配N次 a{3} 匹配三个a\n# {M,N}} 匹配M到N次\n# {,N} 最多匹配N次\n# {M,} 至少匹配M次\nre_str = r'[a-zA-Z][a-zA-Z\\d]{5,11}'\n# str1 = 
input('请输入密码:')\nstr1 = 'ab123456'\nresult = re.fullmatch(re_str, str1)\nif result:\n print('密码正确')\nelse:\n print('密码错误')\n\n# 分之、捕获、贪婪\n# 分之 条件1|条件2 匹配条件1或条件2\n# \\d{2}|[a-z] 匹配两个数字字符或者一个小写字母\n# 正则中的分之也会出现短路,当条件1可以匹配就不会在使用条件2匹配\nre_str = r'[-+]?[1-9]\\d*[.]?\\d*|[-+]?0[.][0-9]*[1-9]|0'\nresult = re.fullmatch(re_str, '0.0000009')\nprint(result)\n# 捕获 通过正则获取符合条件的字串的时候可以在正则表达式中加括号,匹配后之获取括号里面匹配到的内容\n# re.findall(正则表达式,字符串) 在字符串中获取符合正则表达式条件的所有的字串返回一个列表\nstr1 = 'ahs123+34asdf24'\nprint(re.findall(r'\\d+', str1))\n\nstr2 = 'a153s123+34asfa24'\nprint(re.findall(r'a\\d+', str2))\nprint(re.findall(r'a(\\d+)', str2))\n\nstr3 = 'http://www.qq.com'\nprint(re.findall(r'^(http://)?www.(\\w+).com', str3))\n# 重复匹配 带多个分组的正则表达式可以在分组的后面通过添加\\数字来重复前面第几个分组中匹配到的内容\nre_str = r'(\\d{3})([a-z]{2})a\\1{2}-\\2'\nprint(re.findall(re_str, '123efa123123-ef'))\n# 贪婪 匹配次数后加?就是贪婪匹配:*?,+?,??,{M,N}?,{M,}?表示尽可能少的重复\nre_str = 'a.+b'\nre_str1 = 'a.+?b'\nstr1 = 'xxahdjbnnkhasssbkkkkk'\nprint(re.findall(re_str, str1))\nprint(re.findall(re_str1, str1))\n# 转义字符 \\\nre_str = r'a\\+\\(\\d{2}\\)'\nprint(re.fullmatch(re_str, 'a+(23)'))\n# re模块\n# complie\nre_str = r'\\d{3}'\nre_obj = re.compile(re_str)\nprint(re_obj.fullmatch('234'))\n# match 不完全匹配之匹配字符串开头 之匹配字符串开头 匹配成功返回匹配对象匹配失败返回None\n# fullmatch 完全匹配从字符串开头匹配到字符串结束\nre_str = r'\\d([A-Z]{2})'\nresult = re.fullmatch(re_str, '2HKdfsd')\nprint(result)\nresult = re.match(re_str, '8KLsifdfd==')\nprint(result)\n# 匹配对象\n# start,end 获取匹配结果的开始下标和结束下标\n# 匹配对象.start(n)/匹配对象.end(n) 获取正则表达式中第n个分组匹配到的开始下标/结束下标\nprint(result.start(), result.end())\n# print(result.start(1), result.end(2))\n# ggroup 获取匹配到的内容\n# 匹配对象.group() 获取整个正则表达式匹配到的内容\n# 匹配对象.group(n) 获取正则表达式第n个分组匹配到的内容\nprint(result.group())\nprint(result.group(1))\n# string 获取匹配的原字符串\n# 匹配对象.string\nprint(result.string)\n# search\n# search(正则表达式,字符串)匹配字符串中第一个满足正则表达式的字串,如果匹配成功返回匹配对象否则返回None\nstr1 = 'abc123hks362shjjk990kll'\nresult = re.search(r'\\d{3}[a-z]{2}', str1)\nprint(result)\n# split split(正则表达式,字符串) 
在字符串中按照满足正则表达式条件的字串对字符串进行切割\nstr1 = 'ab+c7hdjd8jss-sk9s9kk*k'\nresult = re.split(r'\\d+|[+*-]+', str1)\nprint(result)\n# findall findall(正则表达式,字符串) 在字符串中获取满足正则表达式的所有的字符返回一个列表列表元素是字符串\nstr = 'abcd1235asdf'\nresult = re.findall(r'a[a-zA-Z]+', str)\nprint(result)\n\n\n# finditer finditer(正则表达式,字符串) 获取字符串中满足正则表达式的内容返回的是一个迭代器\n# def yt_finditer(pattern, string):\n# re1 = re.search(pattern, string)\n# while re1:\n# yield re1\n# string = string[re1.end():]\n# re1 = re.search(pattern, string)\n#\n# str1='haja37jjkd89sdhs909nnna238==='\n# result = yt_finditer(r'[a-zA-Z]{2,}(\\d+)(a-z)+?', str1)\n# print(next(result))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 2 22:49:00 2020
@author: Drew
____________________________________________________________________
basic_github_auto_uploader.py - A Basic Automated GitHub Uploader
____________________________________________________________________
1. Requirements:
Version: Python 3.7
Built-in Libs: base64, os, shutil, time, datetime
Dependencies: pygithub, Git (maybe)
2. Description:
This file automatically uploads subdirectories as new repositories in
GitHub. You will need an internet connection to do this.
The first function [subdir_maker(directory)] will sort the subdirectories
in the folder.
The second function [daily_github_upload(subdirs)] will do the actual repo
creation and commit.
    The second function can be run on a schedule using a for loop and
time.sleep or a dedicated scheduling library. You need to restart the
script if you add new subdirectories that you want to upload.
3. Running Instructions:
Place this file in a root directory where you keep your project
subdirectories. Keep the file structure in the subdirectories flat (don't
make subdirectories in the subdirectory) as this is not handled in this
simplified script. Also, support for PDFs is a bit sketchy.
Be sure to replace the Github key in the second function with your own
generated key. You can configure the README.MD file as well to say a
custom message.
4. Performance:
Performance is poor for now. The script needs to run constantly and uses
quite a bit of memory. A more efficient future version will be made.
"""
# Import libraries that we need to use.
import os, shutil, base64, time, datetime
from github import Github, InputGitTreeElement
# Function 1: Given a directory/file path, return all the subdirectories in
# the given directory in a list of strings. Uses the os library.
# Individual files should not be left in the directory.
def subdir_maker(directory):
    """Return every subdirectory found under *directory*, recursively.

    os.walk yields *directory* itself as its first entry; that entry is
    dropped so only true subdirectories are returned.  If *directory*
    does not exist (os.walk yields nothing), an empty list is returned
    instead of raising, unlike the original remove-first-element version.
    """
    # Collect every directory path os.walk visits; dir/file listings unused.
    walked = [path for path, _dirs, _files in os.walk(directory)]
    # The first entry is the root directory itself, not a subdirectory.
    return walked[1:]
# Function 2: When invoked with a filepath, upload all the files.
# Does not support subdirectories within the subdirectory.
# Also, cannot be empty!
def daily_github_upload(sub_to_repo):
    """Create a new GitHub repository named after *sub_to_repo* and commit
    every file in that directory to it, then delete the local directory.

    Subdirectories inside *sub_to_repo* are walked, but files keep only
    their bare names in the repo (the original flat-layout assumption).
    Requires network access and a valid GitHub personal access token.
    """
    # Github object used for all API calls; initialize with a 40-character
    # personal access token generated on GitHub itself.
    g = Github('****************************************')
    current_subdir = sub_to_repo
    # The subdirectory's base name becomes the repository name.
    # (os.path.basename replaces the original Windows-only rindex("\\").)
    title = os.path.basename(current_subdir)
    # Create the repo and seed it with a README so 'heads/master' exists.
    repo = g.get_user().create_repo(title)
    repo.create_file("README.MD", "A readme file", "This was an auto-upload on "
                     + str(datetime.datetime.now()))
    commit_message = "This was automatically committed."
    # Collect full paths and bare names of every file in the subdirectory.
    file_list = []
    file_names = []
    for subdir, dirs, files in os.walk(current_subdir):
        for file in files:
            print(os.path.join(subdir, file))
            file_list.append(os.path.join(subdir, file))
            file_names.append(file)
    # Resolve the master branch head so the new tree can be parented on it.
    master_ref = repo.get_git_ref('heads/master')
    master_sha = master_ref.object.sha
    base_tree = repo.get_git_tree(master_sha)
    element_list = list()
    for i, entry in enumerate(file_list):
        # BUG FIX: the original tested entry.endswith('.png' or '.pdf' or
        # '.xlsx'), which evaluates to entry.endswith('.png') only.
        # str.endswith accepts a tuple of suffixes.
        if entry.endswith(('.png', '.pdf', '.xlsx')):
            # Binary formats must be read in binary mode; b64encode requires
            # bytes (the original's text-mode read raised TypeError).
            with open(entry, 'rb') as input_file:
                data = base64.b64encode(input_file.read())
        else:
            with open(entry) as input_file:
                data = input_file.read()
        # '100644' is the git mode for a normal (non-executable) blob.
        element = InputGitTreeElement(file_names[i], '100644', 'blob', data)
        element_list.append(element)
    # Build the tree, commit it on top of master, and advance the ref.
    tree = repo.create_git_tree(element_list, base_tree)
    parent = repo.get_git_commit(master_sha)
    commit = repo.create_git_commit(commit_message, tree, [parent])
    master_ref.edit(commit.sha)
    # Remove the subdirectory so it is not uploaded again on the next pass.
    shutil.rmtree(current_subdir)
def main():
    """Upload every subdirectory beside this script to GitHub, one per day."""
    # Collect the subdirectories once at start-up; folders added afterwards
    # are only picked up after a restart.
    subs = subdir_maker(os.path.dirname(os.path.realpath(__file__)))
    # Upload each subdirectory as its own repository, then report and wait.
    for index, sub in enumerate(subs):
        daily_github_upload(sub)
        banner = "_" * 40
        print(banner + "\n\n" + "Uploaded {0} to Github. ".format(index) + "\n" + banner)
        # Sleep for 24 hours before processing the next subdirectory.
        time.sleep(86400)
|
normal
|
{
"blob_id": "bcc3d4e9be0de575c97bb3bf11eeb379ab5be458",
"index": 5380,
"step-1": "<mask token>\n\n\ndef main():\n subs = subdir_maker(os.path.dirname(os.path.realpath(__file__)))\n for i in range(len(subs)):\n daily_github_upload(subs[i])\n print('_' * 40 + '\\n\\n' + 'Uploaded {0} to Github. '.format(i) +\n '\\n' + '_' * 40)\n time.sleep(86400)\n",
"step-2": "<mask token>\n\n\ndef daily_github_upload(sub_to_repo):\n g = Github('****************************************')\n current_subdir = sub_to_repo\n title = current_subdir[current_subdir.rindex('\\\\') + 1:]\n repo = g.get_user().create_repo(title)\n repo.create_file('README.MD', 'A readme file', \n 'This was an auto-upload on ' + str(datetime.datetime.now()))\n commit_message = 'This was automatically committed.'\n file_list = []\n file_names = []\n for subdir, dirs, files in os.walk(current_subdir):\n for file in files:\n print(os.path.join(subdir, file))\n file_list.append(os.path.join(subdir, file))\n file_names.append(file)\n master_ref = repo.get_git_ref('heads/master')\n master_sha = master_ref.object.sha\n base_tree = repo.get_git_tree(master_sha)\n element_list = list()\n for i, entry in enumerate(file_list):\n with open(entry) as input_file:\n data = input_file.read()\n if entry.endswith('.png' or '.pdf' or '.xlsx'):\n data = base64.b64encode(data)\n element = InputGitTreeElement(file_names[i], '100644', 'blob', data)\n element_list.append(element)\n tree = repo.create_git_tree(element_list, base_tree)\n parent = repo.get_git_commit(master_sha)\n commit = repo.create_git_commit(commit_message, tree, [parent])\n master_ref.edit(commit.sha)\n shutil.rmtree(current_subdir)\n\n\ndef main():\n subs = subdir_maker(os.path.dirname(os.path.realpath(__file__)))\n for i in range(len(subs)):\n daily_github_upload(subs[i])\n print('_' * 40 + '\\n\\n' + 'Uploaded {0} to Github. '.format(i) +\n '\\n' + '_' * 40)\n time.sleep(86400)\n",
"step-3": "<mask token>\n\n\ndef subdir_maker(directory):\n subdirs = []\n for i, j, y in os.walk(directory):\n subdirs.append(i)\n subdirs.remove(subdirs[0])\n return subdirs\n\n\ndef daily_github_upload(sub_to_repo):\n g = Github('****************************************')\n current_subdir = sub_to_repo\n title = current_subdir[current_subdir.rindex('\\\\') + 1:]\n repo = g.get_user().create_repo(title)\n repo.create_file('README.MD', 'A readme file', \n 'This was an auto-upload on ' + str(datetime.datetime.now()))\n commit_message = 'This was automatically committed.'\n file_list = []\n file_names = []\n for subdir, dirs, files in os.walk(current_subdir):\n for file in files:\n print(os.path.join(subdir, file))\n file_list.append(os.path.join(subdir, file))\n file_names.append(file)\n master_ref = repo.get_git_ref('heads/master')\n master_sha = master_ref.object.sha\n base_tree = repo.get_git_tree(master_sha)\n element_list = list()\n for i, entry in enumerate(file_list):\n with open(entry) as input_file:\n data = input_file.read()\n if entry.endswith('.png' or '.pdf' or '.xlsx'):\n data = base64.b64encode(data)\n element = InputGitTreeElement(file_names[i], '100644', 'blob', data)\n element_list.append(element)\n tree = repo.create_git_tree(element_list, base_tree)\n parent = repo.get_git_commit(master_sha)\n commit = repo.create_git_commit(commit_message, tree, [parent])\n master_ref.edit(commit.sha)\n shutil.rmtree(current_subdir)\n\n\ndef main():\n subs = subdir_maker(os.path.dirname(os.path.realpath(__file__)))\n for i in range(len(subs)):\n daily_github_upload(subs[i])\n print('_' * 40 + '\\n\\n' + 'Uploaded {0} to Github. '.format(i) +\n '\\n' + '_' * 40)\n time.sleep(86400)\n",
"step-4": "<mask token>\nimport os, shutil, base64, time, datetime\nfrom github import Github, InputGitTreeElement\n\n\ndef subdir_maker(directory):\n subdirs = []\n for i, j, y in os.walk(directory):\n subdirs.append(i)\n subdirs.remove(subdirs[0])\n return subdirs\n\n\ndef daily_github_upload(sub_to_repo):\n g = Github('****************************************')\n current_subdir = sub_to_repo\n title = current_subdir[current_subdir.rindex('\\\\') + 1:]\n repo = g.get_user().create_repo(title)\n repo.create_file('README.MD', 'A readme file', \n 'This was an auto-upload on ' + str(datetime.datetime.now()))\n commit_message = 'This was automatically committed.'\n file_list = []\n file_names = []\n for subdir, dirs, files in os.walk(current_subdir):\n for file in files:\n print(os.path.join(subdir, file))\n file_list.append(os.path.join(subdir, file))\n file_names.append(file)\n master_ref = repo.get_git_ref('heads/master')\n master_sha = master_ref.object.sha\n base_tree = repo.get_git_tree(master_sha)\n element_list = list()\n for i, entry in enumerate(file_list):\n with open(entry) as input_file:\n data = input_file.read()\n if entry.endswith('.png' or '.pdf' or '.xlsx'):\n data = base64.b64encode(data)\n element = InputGitTreeElement(file_names[i], '100644', 'blob', data)\n element_list.append(element)\n tree = repo.create_git_tree(element_list, base_tree)\n parent = repo.get_git_commit(master_sha)\n commit = repo.create_git_commit(commit_message, tree, [parent])\n master_ref.edit(commit.sha)\n shutil.rmtree(current_subdir)\n\n\ndef main():\n subs = subdir_maker(os.path.dirname(os.path.realpath(__file__)))\n for i in range(len(subs)):\n daily_github_upload(subs[i])\n print('_' * 40 + '\\n\\n' + 'Uploaded {0} to Github. '.format(i) +\n '\\n' + '_' * 40)\n time.sleep(86400)\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Jan 2 22:49:00 2020\n@author: Drew\n\n____________________________________________________________________\n\nbasic_github_auto_uploader.py - A Basic Automated GitHub Uploader\n____________________________________________________________________\n\n1. Requirements:\n Version: Python 3.7\n Built-in Libs: base64, os, shutil, time, datetime\n Dependencies: pygithub, Git (maybe)\n\n2. Description:\n This file automatically uploads subdirectories as new repositories in\n GitHub. You will need an internet connection to do this.\n The first function [subdir_maker(directory)] will sort the subdirectories\n in the folder. \n The second function [daily_github_upload(subdirs)] will do the actual repo \n creation and commit.\n The second function can be fun on a schedule using a for loop and \n time.sleep or a dedicated scheduling library. You need to restart the\n script if you add new subdirectories that you want to upload. \n\n3. Running Instructions:\n Place this file in a root directory where you keep your project\n subdirectories. Keep the file structure in the subdirectories flat (don't\n make subdirectories in the subdirectory) as this is not handled in this \n simplified script. Also, support for PDFs is a bit sketchy. \n Be sure to replace the Github key in the second function with your own \n generated key. You can configure the README.MD file as well to say a \n custom message. \n \n4. Performance:\n Performance is poor for now. The script needs to run constantly and uses\n quite a bit of memory. A more efficient future version will be made. \n\"\"\"\n\n# Import libraries that we need to use.\nimport os, shutil, base64, time, datetime\nfrom github import Github, InputGitTreeElement\n\n# Function 1: Given a directory/file path, return all the subdirectories in\n# the given directory in a list of strings. 
Uses the os library.\n# Individual files should not be left in the directory.\ndef subdir_maker(directory):\n # Create an empty list to store the resultant subdirectories in.\n subdirs = []\n # Walk through the directory and add items to the empty list we made.\n for i,j,y in os.walk(directory):\n subdirs.append(i)\n # os.walk's first element is the directory itself, so remove it. \n subdirs.remove(subdirs[0])\n # Return the list of subdirectories. \n return subdirs\n\n# Function 2: When invoked with a filepath, upload all the files. \n# Does not support subdirectories within the subdirectory. \n# Also, cannot be empty!\ndef daily_github_upload(sub_to_repo):\n # Create a Github object that we can use to connect to Github and do work.\n # It must be initialized with a 40-character secret key. You generate this\n # on Github itself. \n g = Github('****************************************')\n # Copy the location to a local variable. \n current_subdir = sub_to_repo\n # Extract the subdirectory name - this will be the Repo name. \n title = current_subdir[current_subdir.rindex(\"\\\\\")+1:]\n # Create Repo through Github object. We will not work on the repo object.\n repo = g.get_user().create_repo(title)\n # Initialize with a README.MD file. You can configure this as needed. \n repo.create_file(\"README.MD\",\"A readme file\",\"This was an auto-upload on \"\n + str(datetime.datetime.now()))\n # The message we will add under the commit. \n commit_message = \"This was automatically committed.\"\n # Create a list of file objects.\n file_list = []\n # Create a list of file names.\n file_names = []\n # Do a walk through the subdirectory. \n for subdir, dirs, files in os.walk(current_subdir):\n # For the files in the subdirectory, print them and then add them to\n # list we created, along with the name to the other list. 
\n for file in files:\n print(os.path.join(subdir, file))\n file_list.append(os.path.join(subdir, file))\n file_names.append(file)\n # Get the branch to add to. \n master_ref = repo.get_git_ref('heads/master')\n master_sha = master_ref.object.sha\n base_tree = repo.get_git_tree(master_sha)\n # Create an empty list to add files to. \n element_list = list()\n # For each file in list of file objects, read and adjust as needed.\n for i, entry in enumerate(file_list):\n # If normal file type.\n with open(entry) as input_file:\n data = input_file.read()\n # If proprietary file type, encode it. \n if entry.endswith('.png' or '.pdf' or '.xlsx'):\n data = base64.b64encode(data)\n # Put each file that was encoded from above into an appropriate format \n # to add to a branch.\n element = InputGitTreeElement(file_names[i], '100644', 'blob', data)\n # Append the object created above to the list made before the loop. \n element_list.append(element)\n # Create a tree with the elements and specify settings to add the element\n # list to the repo. \n tree = repo.create_git_tree(element_list, base_tree)\n parent = repo.get_git_commit(master_sha)\n # Commit!\n commit = repo.create_git_commit(commit_message, tree, [parent])\n master_ref.edit(commit.sha)\n # Remove the subdirectory from the folder so we don't repeat. \n shutil.rmtree(current_subdir)\n\ndef main():\n # Invoke the subdir_maker() function with the current directory at runtime. \n subs = subdir_maker(os.path.dirname(os.path.realpath(__file__)))\n # Use a loop to call the daily_github_upload() function for each subdir in\n # the subs list. We keep the subs in case we want to see what was uploaded. \n for i in range(len(subs)):\n # Call the function for each elem of the list. \n daily_github_upload(subs[i])\n # Print what was done. \n print(\"_\"*40 + \"\\n\\n\" + \"Uploaded {0} to Github. \".format(i) + \"\\n\" + \"_\"*40)\n # Sleep for 24 hours then do it again. \n time.sleep(86400)\n \n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# Definition for singly-linked list.
# class ListNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.next = None

class Solution(object):
    def splitListToParts(self, root, k):
        """Split a linked list into k consecutive parts (LeetCode 725).

        :type root: ListNode
        :type k: int
        :rtype: List[ListNode]

        Parts are as equal in length as possible: every part holds
        n // k nodes and the first n % k parts hold one extra, so no
        later part is ever longer than an earlier one.  Missing parts
        (when k > n) are None.

        Fix: the original used Python-2-only integer division (``n/k``);
        under Python 3 that yields a float, the size comparison never
        matched, and the result was all None.
        """
        # Count the nodes in the list.
        n = 0
        node = root
        while node:
            node = node.next
            n += 1

        # base nodes per part; the first `extra` parts get one more.
        base, extra = divmod(n, k)

        res = [None] * k
        cur = root
        for i in range(k):
            if cur is None:
                break
            res[i] = cur
            part_len = base + (1 if i < extra else 0)
            # Walk to the last node of this part.
            for _ in range(part_len - 1):
                cur = cur.next
            # Detach this part and advance to the next part's head.
            cur.next, cur = None, cur.next
        return res
|
normal
|
{
"blob_id": "6a609c91122f8b66f57279cff221ee76e7fadb8c",
"index": 7059,
"step-1": "# Definition for singly-linked list.\n# class ListNode(object):\n# def __init__(self, x):\n# self.val = x\n# self.next = None\n\nclass Solution(object):\n\tdef splitListToParts(self, root, k):\n\t\t\"\"\"\n\t\t:type root: ListNode\n\t\t:type k: int\n\t\t:rtype: List[ListNode]\n\t\t\"\"\"\n\t\tif not root:\n\t\t\treturn [None]*k\n\t\tres,p,q,n = [None]*k,root,root,0\n\t\twhile p:\n\t\t\tp,n = p.next,n+1\n\t\tper_len,per = 1 if n/k==0 else n/k,1\n\t\textra_count,index = 0 if n<=k else n%k,0\t\n\t\t\n\t\t#print \"per_len-->\"+str(per_len)+\" extra_count-->\"+str(extra_count)\n\t\tper_link_start = q\n\t\twhile q:\n\t\t\tif per==per_len:\n\t\t\t\ttmp = q.next\n\t\t\t\tif extra_count:\n\t\t\t\t\tp,tmp.next = tmp.next,None\n\t\t\t\t\ttmp,extra_count = p,extra_count-1\n\t\t\t\telse:\n\t\t\t\t\tq.next = None\n\t\t\t\tres[index],q,index = per_link_start,tmp,index+1\n\t\t\t\tper_link_start = tmp\n\t\t\t\tper = 1\n\t\t\t\tcontinue\n\t\t\tq,per = q.next,per+1\n\t\t\t#print \"cur PER -->\"+str(per)\n\t\treturn res",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import math
import random
import sys
import time

import cv2
import numpy as np
import serial
import serial.tools.list_ports
#from house import House
#Arduino Serials
# --- Arduino serial setup ---------------------------------------------------
# Scan all serial ports and connect to the first one that reports "Arduino".
ports = list(serial.tools.list_ports.comports())
print(ports)
ser = None
for p in ports:
    print(p[1])
    if "Arduino" in p[1]:
        ser = serial.Serial(port=p[0])
        break
if ser is None:
    # Fix: the original printed this message once per NON-Arduino port and
    # left `ser` unbound, crashing later with NameError on ser.write().
    # Report once and exit cleanly instead.
    print("No Arduino Device was found connected to the computer")
    sys.exit(1)

# --- Face detection setup ---------------------------------------------------
cap = cv2.VideoCapture(1)
face_cascade = cv2.CascadeClassifier('./haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('./haarcascade_eye.xml')

# Tracking state: previous/current angle, distance estimate and x-offset.
lastpos = 0
currentpos = 0
lastdis = 0
currentdis = 0
lastx_d = 0
currentx_d = 0
shoot = 0  # consecutive on-target frames; fire once it exceeds 1

ct = 0  # frame counter (used to seed the last* values on the first frame)
while True:
    ct += 1
    # Grab a frame and detect faces.
    ret, img = cap.read()
    center = [img.shape[0] / 2, img.shape[1] / 2]
    faces = face_cascade.detectMultiScale(img, 1.3, 5)
    if len(faces) > 1:
        # Ambiguous target; skip tracking for this frame.
        print("too many faces")
    else:
        for (x, y, w, h) in faces:
            img = cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
            roi_color = img[y:y + h, x:x + w]

            # Horizontal offset of the face centre from the calibrated
            # frame centre (325 + 73 px), and a rough linear distance
            # estimate from the face width.
            x_d = x + w / 2 - 325 - 73
            dis = (-0.88 * w + 220)
            angle = x_d
            currentpos = angle
            currentdis = dis
            currentx_d = x_d
            if ct == 1:
                # First frame: no history yet, so seed it.
                lastpos = currentpos
                lastdis = currentdis
                lastx_d = currentx_d

            print(str(int(angle)).encode())
            # Send the signed angle; non-negative values need an explicit
            # '+' prefix for the firmware protocol.
            if angle < 0:
                ser.write(str(int(angle)).encode())
            else:
                ser.write(("+" + str(int(angle))).encode())
            time.sleep(1)

            # Target considered stable and centred: count it, and fire
            # (command 10000) after two consecutive on-target frames.
            if (lastpos - currentpos) < 10 and abs(angle) < 15:
                shoot += 1
            if shoot > 1:
                time.sleep(2)
                ser.write(str(10000).encode())
                time.sleep(2)
                shoot = 0

            lastpos = currentpos
            lastdis = currentdis
            lastx_d = currentx_d

    cv2.imshow('img', img)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()
|
normal
|
{
"blob_id": "7c80c98e32f386362003ac3cd729fa9b279b8e8e",
"index": 7316,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(ports)\nfor p in ports:\n print(p[1])\n if 'Arduino' in p[1]:\n ser = serial.Serial(port=p[0])\n else:\n print('No Arduino Device was found connected to the computer')\n<mask token>\nwhile True:\n ct += 1\n ret, img = cap.read()\n center = [img.shape[0] / 2, img.shape[1] / 2]\n faces = face_cascade.detectMultiScale(img, 1.3, 5)\n tmp = 0\n for x, y, w, h in faces:\n tmp += 1\n if tmp > 1:\n print('too many faces')\n else:\n for x, y, w, h in faces:\n img = cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)\n roi_color = img[y:y + h, x:x + w]\n x_d = x + w / 2 - 325 - 73\n dis = -0.88 * w + 220\n angle = x_d\n currentpos = angle\n currentdis = dis\n currentx_d = x_d\n if ct == 1:\n lastpos = currentpos\n lastdis = currentdis\n lastx_d = currentx_d\n print(str(int(angle)).encode())\n if angle < 0:\n ser.write(str(int(angle)).encode())\n else:\n ser.write(('+' + str(int(angle))).encode())\n time.sleep(1)\n if lastpos - currentpos < 10 and abs(angle) < 15:\n shoot += 1\n if shoot > 1:\n time.sleep(2)\n ser.write(str(10000).encode())\n time.sleep(2)\n shoot = 0\n lastpos = currentpos\n lastdis = currentdis\n lastx_d = currentx_d\n cv2.imshow('img', img)\n if cv2.waitKey(1) & 255 == ord('q'):\n break\ncap.release()\ncv2.destroyAllWindows()\n",
"step-3": "<mask token>\nports = list(serial.tools.list_ports.comports())\nprint(ports)\nfor p in ports:\n print(p[1])\n if 'Arduino' in p[1]:\n ser = serial.Serial(port=p[0])\n else:\n print('No Arduino Device was found connected to the computer')\ncap = cv2.VideoCapture(1)\nface_cascade = cv2.CascadeClassifier('./haarcascade_frontalface_default.xml')\neye_cascade = cv2.CascadeClassifier('./haarcascade_eye.xml')\nlastpos = 0\ncurrentpos = 0\nlastdis = 0\ncurrentdis = 0\nlastx_d = 0\ncurrentx_d = 0\nshoot = 0\nct = 0\nwhile True:\n ct += 1\n ret, img = cap.read()\n center = [img.shape[0] / 2, img.shape[1] / 2]\n faces = face_cascade.detectMultiScale(img, 1.3, 5)\n tmp = 0\n for x, y, w, h in faces:\n tmp += 1\n if tmp > 1:\n print('too many faces')\n else:\n for x, y, w, h in faces:\n img = cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)\n roi_color = img[y:y + h, x:x + w]\n x_d = x + w / 2 - 325 - 73\n dis = -0.88 * w + 220\n angle = x_d\n currentpos = angle\n currentdis = dis\n currentx_d = x_d\n if ct == 1:\n lastpos = currentpos\n lastdis = currentdis\n lastx_d = currentx_d\n print(str(int(angle)).encode())\n if angle < 0:\n ser.write(str(int(angle)).encode())\n else:\n ser.write(('+' + str(int(angle))).encode())\n time.sleep(1)\n if lastpos - currentpos < 10 and abs(angle) < 15:\n shoot += 1\n if shoot > 1:\n time.sleep(2)\n ser.write(str(10000).encode())\n time.sleep(2)\n shoot = 0\n lastpos = currentpos\n lastdis = currentdis\n lastx_d = currentx_d\n cv2.imshow('img', img)\n if cv2.waitKey(1) & 255 == ord('q'):\n break\ncap.release()\ncv2.destroyAllWindows()\n",
"step-4": "import numpy as np\nimport cv2\nimport serial\nimport serial.tools.list_ports\nimport time\nimport random\nimport math\nports = list(serial.tools.list_ports.comports())\nprint(ports)\nfor p in ports:\n print(p[1])\n if 'Arduino' in p[1]:\n ser = serial.Serial(port=p[0])\n else:\n print('No Arduino Device was found connected to the computer')\ncap = cv2.VideoCapture(1)\nface_cascade = cv2.CascadeClassifier('./haarcascade_frontalface_default.xml')\neye_cascade = cv2.CascadeClassifier('./haarcascade_eye.xml')\nlastpos = 0\ncurrentpos = 0\nlastdis = 0\ncurrentdis = 0\nlastx_d = 0\ncurrentx_d = 0\nshoot = 0\nct = 0\nwhile True:\n ct += 1\n ret, img = cap.read()\n center = [img.shape[0] / 2, img.shape[1] / 2]\n faces = face_cascade.detectMultiScale(img, 1.3, 5)\n tmp = 0\n for x, y, w, h in faces:\n tmp += 1\n if tmp > 1:\n print('too many faces')\n else:\n for x, y, w, h in faces:\n img = cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)\n roi_color = img[y:y + h, x:x + w]\n x_d = x + w / 2 - 325 - 73\n dis = -0.88 * w + 220\n angle = x_d\n currentpos = angle\n currentdis = dis\n currentx_d = x_d\n if ct == 1:\n lastpos = currentpos\n lastdis = currentdis\n lastx_d = currentx_d\n print(str(int(angle)).encode())\n if angle < 0:\n ser.write(str(int(angle)).encode())\n else:\n ser.write(('+' + str(int(angle))).encode())\n time.sleep(1)\n if lastpos - currentpos < 10 and abs(angle) < 15:\n shoot += 1\n if shoot > 1:\n time.sleep(2)\n ser.write(str(10000).encode())\n time.sleep(2)\n shoot = 0\n lastpos = currentpos\n lastdis = currentdis\n lastx_d = currentx_d\n cv2.imshow('img', img)\n if cv2.waitKey(1) & 255 == ord('q'):\n break\ncap.release()\ncv2.destroyAllWindows()\n",
"step-5": "import numpy as np\nimport cv2\nimport serial\nimport serial.tools.list_ports\nimport time\nimport random\nimport math\n#import mcpi.minecraft as minecraft\n#import mcpi.block as block\n#from house import House\n\n\n\n#Arduino Serials\nports = list(serial.tools.list_ports.comports())\nprint (ports)\nfor p in ports:\n print (p[1])\n if \"Arduino\" in p[1]:\n\t ser=serial.Serial(port=p[0])\n else :\n\t print (\"No Arduino Device was found connected to the computer\")\n#time.sleep(2)\n#face detection\t \ncap =cv2.VideoCapture(1)\nface_cascade = cv2.CascadeClassifier('./haarcascade_frontalface_default.xml')\neye_cascade = cv2.CascadeClassifier('./haarcascade_eye.xml')\n\nlastpos=0\ncurrentpos=0\nlastdis=0\ncurrentdis=0\nlastx_d=0\ncurrentx_d=0\nshoot=0\n#MC\n#mc=minecraft.Minecraft.create()\n#pos=mc.player.getTilePos()\n#pos0=[]\n#pos0.append(pos.x)\n#pos0.append(pos.y)\n#pos0.append(pos.z)\n#des=House([pos.x+20,pos.y,pos.z],mc,block.GOLD_BLOCK.id,block.GLASS.id)\n#des.buildall()\n\nct=0\nwhile(True):\n ct+=1\n #到达目的地了吗\n #if(des.isInsideHouse()):\n #mc.postToChat(\"You win\")\n #break\n #人脸识别,一方面投石机追踪,一方面控制MC里面人到Destinatioin\n ret,img=cap.read()\n center=[img.shape[0]/2,img.shape[1]/2]\n faces = face_cascade.detectMultiScale(img, 1.3, 5)\n tmp=0\n for(x,y,w,h) in faces:\n tmp+=1\n if(tmp>1):\n print(\"too many faces\")\n else:\n for (x,y,w,h) in faces:\n img = cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2)\n roi_color = img[y:y+h, x:x+w]\n \n x_d=x+w/2-325-73\n dis=(-0.88*w+220)\n angle=x_d#math.atan(x_d/dis)/3.1415926535897*180\n currentpos=angle\n currentdis=dis\n currentx_d=x_d\n if(ct==1):\n lastpos=currentpos\n lastdis=currentdis\n lastx_d=currentx_d\n #pos=mc.player.getTilePos()\n #mc.player.setTilePos([pos.x+(currentx_d-lastx_d)/5,pos.y,pos.z+(currentdis-lastdis)/5])\n #print(x_d)\n #print(angle)\n #ser.write\n print(str(int(angle)).encode())\n #ser.write\n if(angle<0):\n ser.write(str(int(angle)).encode())\n else:\n 
ser.write((\"+\"+str(int(angle))).encode())\n time.sleep(1)\n if((lastpos-currentpos)<10 and abs(angle)<15):\n shoot+=1\n if(shoot>1):\n time.sleep(2)\n #mc.player.setTilePos([0,-1000,0])\n ser.write(str(10000).encode())\n time.sleep(2)\n shoot=0\n lastpos=currentpos\n lastdis=currentdis\n lastx_d=currentx_d\n cv2.imshow('img',img)\n if cv2.waitKey(1)& 0xFF==ord('q'):\n break\n \n\ncap.release()\ncv2.destroyAllWindows()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
from plone import api
from plone.dexterity.content import Container
from sc.microsite.interfaces import IMicrosite
from zope.interface import implementer
@implementer(IMicrosite)
class Microsite(Container):
    """A microsite."""

    def getLocallyAllowedTypes(self):
        """
        By now we allow all allowed types without constrain.
        TODO: fully implement ISelectableConstrainTypes
        """
        types_tool = api.portal.get_tool('portal_types')
        own_fti = types_tool.getTypeInfo(self)
        allowed = []
        for fti in types_tool.listTypeInfo():
            # Keep only types this container's FTI allows and that may
            # actually be constructed here.
            if not own_fti.allowType(fti.getId()):
                continue
            if fti.isConstructionAllowed(self):
                allowed.append(fti)
        return allowed

    def getImmediatelyAddableTypes(self, context=None):
        """
        By now we allow all allowed types without constrain.
        TODO: fully implement ISelectableConstrainTypes
        """
        return self.getLocallyAllowedTypes()
|
normal
|
{
"blob_id": "3d5d88edca5d746b830363cc9451bda94c1d7aa4",
"index": 2905,
"step-1": "<mask token>\n\n\n@implementer(IMicrosite)\nclass Microsite(Container):\n <mask token>\n\n def getLocallyAllowedTypes(self):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n portal_types = api.portal.get_tool('portal_types')\n my_type = portal_types.getTypeInfo(self)\n result = portal_types.listTypeInfo()\n return [t for t in result if my_type.allowType(t.getId()) and t.\n isConstructionAllowed(self)]\n <mask token>\n",
"step-2": "<mask token>\n\n\n@implementer(IMicrosite)\nclass Microsite(Container):\n <mask token>\n\n def getLocallyAllowedTypes(self):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n portal_types = api.portal.get_tool('portal_types')\n my_type = portal_types.getTypeInfo(self)\n result = portal_types.listTypeInfo()\n return [t for t in result if my_type.allowType(t.getId()) and t.\n isConstructionAllowed(self)]\n\n def getImmediatelyAddableTypes(self, context=None):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n return self.getLocallyAllowedTypes()\n",
"step-3": "<mask token>\n\n\n@implementer(IMicrosite)\nclass Microsite(Container):\n \"\"\"A microsite.\"\"\"\n\n def getLocallyAllowedTypes(self):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n portal_types = api.portal.get_tool('portal_types')\n my_type = portal_types.getTypeInfo(self)\n result = portal_types.listTypeInfo()\n return [t for t in result if my_type.allowType(t.getId()) and t.\n isConstructionAllowed(self)]\n\n def getImmediatelyAddableTypes(self, context=None):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n return self.getLocallyAllowedTypes()\n",
"step-4": "from plone import api\nfrom plone.dexterity.content import Container\nfrom sc.microsite.interfaces import IMicrosite\nfrom zope.interface import implementer\n\n\n@implementer(IMicrosite)\nclass Microsite(Container):\n \"\"\"A microsite.\"\"\"\n\n def getLocallyAllowedTypes(self):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n portal_types = api.portal.get_tool('portal_types')\n my_type = portal_types.getTypeInfo(self)\n result = portal_types.listTypeInfo()\n return [t for t in result if my_type.allowType(t.getId()) and t.\n isConstructionAllowed(self)]\n\n def getImmediatelyAddableTypes(self, context=None):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n return self.getLocallyAllowedTypes()\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom plone import api\nfrom plone.dexterity.content import Container\nfrom sc.microsite.interfaces import IMicrosite\nfrom zope.interface import implementer\n\n\n@implementer(IMicrosite)\nclass Microsite(Container):\n \"\"\"A microsite.\"\"\"\n\n def getLocallyAllowedTypes(self):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n portal_types = api.portal.get_tool('portal_types')\n my_type = portal_types.getTypeInfo(self)\n result = portal_types.listTypeInfo()\n return [t for t in result if my_type.allowType(t.getId()) and\n t.isConstructionAllowed(self)]\n\n def getImmediatelyAddableTypes(self, context=None):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n return self.getLocallyAllowedTypes()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#!/usr/bin/env python3
# Copyright (c) 2018 Nobody
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test perforance of descendant package (chained transactions)"""
import time
import copy
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import COIN
from test_framework.blocktools import *
"""Read optional arguments from command line"""
# Number of chained (descendant) transactions to create; argv[1] overrides.
CHAINED_TX = 25
if len(sys.argv)>1:
    CHAINED_TX = int(sys.argv[1])
# How many getblocktemplate timing runs to perform; argv[2] overrides.
TEST_ITERATIONS = 1
if len(sys.argv)>2:
    TEST_ITERATIONS = int(sys.argv[2])
# Extra bitcoind flag appended to node 0's args (always truthy here).
DEBUG_MODE = '-printtoconsole'
MAX_ANCESTORS = CHAINED_TX
MAX_DESCENDANTS = CHAINED_TX
# Far-future timestamp passed to -magneticanomalyactivationtime; presumably
# keeps that rule set inactive for the test's duration — TODO confirm.
MAGNETIC_ANOMALY_START_TIME = 2000000000
class ChainedTest(BitcoinTestFramework):
    # Performance test: time sendrawtransaction while filling the mempool
    # with a long chain of descendant transactions, then time
    # getblocktemplate assembling a block from that chain.
    def set_test_params(self):
        ''' our test network requires a peer node so that getblocktemplate succeeds '''
        self.num_nodes = 2
        # Raise the default ancestor/descendant package limits so the node
        # accepts chains of up to 2000 transactions.
        chained_args = ["-limitancestorcount=2000", "-limitdescendantcount=2000",
                "-limitancestorsize=1000", "-limitdescendantsize=1000",
                "-magneticanomalyactivationtime=%d" % MAGNETIC_ANOMALY_START_TIME
            ]
        # Node 1 gets the same limits, but only node 0 gets the debug flag.
        config_node2 = chained_args.copy()
        if DEBUG_MODE:
            chained_args.append(DEBUG_MODE)
        self.extra_args = [chained_args, config_node2]
    # Build a transaction that spends parent_txid:vout.
    # Returns (new txid, amount sent per output, seconds spent inside
    # sendrawtransaction, serialized tx size in bytes).
    def chain_transaction(self, node, parent_txid, vout, value, fee, num_outputs):
        # Split the parent's value (minus fee) evenly across the outputs.
        send_value = satoshi_round((value - fee) / num_outputs)
        inputs = [{'txid': parent_txid, 'vout': vout}]
        outputs = {}
        for i in range(num_outputs):
            outputs[node.getnewaddress()] = send_value
        rawtx = node.createrawtransaction(inputs, outputs)
        signedtx = node.signrawtransaction(rawtx)
        # measure the performance of sending the raw transaction to the node
        sendtx_start = time.perf_counter()
        new_txid = node.sendrawtransaction(signedtx['hex'])
        sendtx_stop = time.perf_counter()
        fulltx = node.getrawtransaction(new_txid, 1)
        #self.log.info('{0} => {1}'.format(parent_txid, fulltx['vout'][0]))
        # make sure we didn't generate a change output
        assert(len(fulltx['vout']) == num_outputs)
        return (new_txid, send_value, sendtx_stop - sendtx_start, fulltx['size'])
    def mine_blocks(self):
        ''' Mine some blocks and have them mature. '''
        # Generate 101 blocks, then seed the chain from the first mature UTXO.
        self.nodes[0].generate(101)
        self.utxo = self.nodes[0].listunspent(10)
        self.txid = self.utxo[0]['txid']
        self.coinbasetx = self.txid
        self.vout = self.utxo[0]['vout']
        self.value = self.utxo[0]['amount']
        self.fee = Decimal("0.0001")
        # Record the current best block hash (as int) and a starting block time.
        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
        self.block_time = int(time.time()) + 1
    def send_chain_to_node(self):
        ''' Generates tx chain and send it to node '''
        for i in range(CHAINED_TX):
            # Each new tx spends output 0 of the previous one, forming a chain.
            (sent_txid, sent_value, this_sendtx, tx_size) = self.chain_transaction(
                self.nodes[0], self.txid, 0, self.value, self.fee, 1)
            if not self.chain_top:
                # Remember the first txid in the chain (the chain "top").
                self.chain_top = sent_txid
            self.txid = sent_txid
            self.value = sent_value
            self.chain.append(sent_txid)
            # Accumulate total send time and total serialized size for reporting.
            self.mempool_send += this_sendtx
            self.mempool_size += tx_size
    def create_new_block(self):
        ''' Create a new block with an anyone-can-spend coinbase '''
        block = create_block(
            self.tip, create_coinbase(self.height), self.block_time)
        self.block_time += 1
        block.solve()
        return block
    def mempool_count(self):
        ''' get count of tx in mempool '''
        mininginfo = self.nodes[0].getmininginfo()
        return mininginfo['pooledtx']
    def dumppool(self, mempool):
        ''' Show list of chained tx in mempool with parent(depends) '''
        def sortdepends(e):
            # NOTE(review): unused helper; the sort below uses a lambda instead.
            return e['descendantcount']
        sortedlist = [[k,v] for k,v in mempool.items()]
        # Sort so the tx with the most descendants (the chain top) comes first.
        sortedlist = sorted(sortedlist, key=lambda l: l[1]['descendantcount'], reverse=True)
        for memkv in sortedlist:
            memtx = memkv[1]
            self.log.info('{} {} {}'.format(memkv[0], memtx['descendantcount'], memtx['depends']))
    def run_test(self):
        """Fill the mempool with a CHAINED_TX-long chain, then time getblocktemplate."""
        self.log.info('Starting Test with {0} Chained Transactions'.format(CHAINED_TX))
        self.chain_top = None
        self.mine_blocks()
        self.mempool_send = 0
        self.mempool_size = 0
        self.chain = []
        self.send_chain_to_node()
        # mempool should have all our tx
        assert(self.mempool_count() == CHAINED_TX)
        mempool = self.nodes[0].getrawmempool(True)
        self.log.info('tx at top has {} descendants'.format(mempool[self.chain_top]["descendantcount"]))
        assert(mempool[self.chain_top]["descendantcount"] == CHAINED_TX)
        #self.dumppool(mempool)
        self.height = 1
        # create new block and save coinbase
        self.block1 = self.create_new_block()
        self.tip = self.block1.sha256
        self.height += 1
        # mature the block so we can spend the coinbase
        for i in range(100):
            block = self.create_new_block()
            self.tip = block.sha256
            self.height += 1
        # sync pool not needed as long as we are using node 0 which has all the tx we sent to it
        #sync_mempools(self.nodes, wait=1, timeout=100)
        self.runs=[]
        for test_iteration in range(TEST_ITERATIONS):
            # do not use perf_counter. use timer from -printtoconsole instead
            gbt_start = time.perf_counter()
            # assemble a block and validate all tx in it
            templat = self.nodes[0].getblocktemplate()
            gbt_stop = time.perf_counter()
            # make sure all tx got mined
            assert(len(templat['transactions']) == CHAINED_TX)
            self.runs.append(gbt_stop - gbt_start)
        #assert(self.mempool_count() == 0)
        # Report totals and the average getblocktemplate time over all runs.
        self.log.info('Mempool size {0}'.format(self.mempool_size))
        self.log.info('Send Tx took {0:.5f}s'.format(self.mempool_send))
        if len(self.runs) > 1:
            self.log.info('run times {}'.format(self.runs))
        self.log.info('GetBlkT took {0:.5f}s'.format(sum(self.runs)/len(self.runs)))
if __name__ == '__main__':
    # Run the chained-transaction getblocktemplate performance test when
    # executed as a script.
    ChainedTest().main()
|
normal
|
{
"blob_id": "661eef8500309191514fd760b7518014dee2bb5f",
"index": 9225,
"step-1": "<mask token>\n\n\nclass ChainedTest(BitcoinTestFramework):\n\n def set_test_params(self):\n \"\"\" our test network requires a peer node so that getblocktemplate succeeds \"\"\"\n self.num_nodes = 2\n chained_args = ['-limitancestorcount=2000',\n '-limitdescendantcount=2000', '-limitancestorsize=1000',\n '-limitdescendantsize=1000', \n '-magneticanomalyactivationtime=%d' % MAGNETIC_ANOMALY_START_TIME]\n config_node2 = chained_args.copy()\n if DEBUG_MODE:\n chained_args.append(DEBUG_MODE)\n self.extra_args = [chained_args, config_node2]\n\n def chain_transaction(self, node, parent_txid, vout, value, fee,\n num_outputs):\n send_value = satoshi_round((value - fee) / num_outputs)\n inputs = [{'txid': parent_txid, 'vout': vout}]\n outputs = {}\n for i in range(num_outputs):\n outputs[node.getnewaddress()] = send_value\n rawtx = node.createrawtransaction(inputs, outputs)\n signedtx = node.signrawtransaction(rawtx)\n sendtx_start = time.perf_counter()\n new_txid = node.sendrawtransaction(signedtx['hex'])\n sendtx_stop = time.perf_counter()\n fulltx = node.getrawtransaction(new_txid, 1)\n assert len(fulltx['vout']) == num_outputs\n return new_txid, send_value, sendtx_stop - sendtx_start, fulltx['size']\n\n def mine_blocks(self):\n \"\"\" Mine some blocks and have them mature. 
\"\"\"\n self.nodes[0].generate(101)\n self.utxo = self.nodes[0].listunspent(10)\n self.txid = self.utxo[0]['txid']\n self.coinbasetx = self.txid\n self.vout = self.utxo[0]['vout']\n self.value = self.utxo[0]['amount']\n self.fee = Decimal('0.0001')\n self.tip = int('0x' + self.nodes[0].getbestblockhash(), 0)\n self.block_time = int(time.time()) + 1\n\n def send_chain_to_node(self):\n \"\"\" Generates tx chain and send it to node \"\"\"\n for i in range(CHAINED_TX):\n sent_txid, sent_value, this_sendtx, tx_size = (self.\n chain_transaction(self.nodes[0], self.txid, 0, self.value,\n self.fee, 1))\n if not self.chain_top:\n self.chain_top = sent_txid\n self.txid = sent_txid\n self.value = sent_value\n self.chain.append(sent_txid)\n self.mempool_send += this_sendtx\n self.mempool_size += tx_size\n\n def create_new_block(self):\n \"\"\" Create a new block with an anyone-can-spend coinbase \"\"\"\n block = create_block(self.tip, create_coinbase(self.height), self.\n block_time)\n self.block_time += 1\n block.solve()\n return block\n <mask token>\n <mask token>\n\n def run_test(self):\n self.log.info('Starting Test with {0} Chained Transactions'.format(\n CHAINED_TX))\n self.chain_top = None\n self.mine_blocks()\n self.mempool_send = 0\n self.mempool_size = 0\n self.chain = []\n self.send_chain_to_node()\n assert self.mempool_count() == CHAINED_TX\n mempool = self.nodes[0].getrawmempool(True)\n self.log.info('tx at top has {} descendants'.format(mempool[self.\n chain_top]['descendantcount']))\n assert mempool[self.chain_top]['descendantcount'] == CHAINED_TX\n self.height = 1\n self.block1 = self.create_new_block()\n self.tip = self.block1.sha256\n self.height += 1\n for i in range(100):\n block = self.create_new_block()\n self.tip = block.sha256\n self.height += 1\n self.runs = []\n for test_iteration in range(TEST_ITERATIONS):\n gbt_start = time.perf_counter()\n templat = self.nodes[0].getblocktemplate()\n gbt_stop = time.perf_counter()\n assert 
len(templat['transactions']) == CHAINED_TX\n self.runs.append(gbt_stop - gbt_start)\n self.log.info('Mempool size {0}'.format(self.mempool_size))\n self.log.info('Send Tx took {0:.5f}s'.format(self.mempool_send))\n if len(self.runs) > 1:\n self.log.info('run times {}'.format(self.runs))\n self.log.info('GetBlkT took {0:.5f}s'.format(sum(self.runs) / len(\n self.runs)))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ChainedTest(BitcoinTestFramework):\n\n def set_test_params(self):\n \"\"\" our test network requires a peer node so that getblocktemplate succeeds \"\"\"\n self.num_nodes = 2\n chained_args = ['-limitancestorcount=2000',\n '-limitdescendantcount=2000', '-limitancestorsize=1000',\n '-limitdescendantsize=1000', \n '-magneticanomalyactivationtime=%d' % MAGNETIC_ANOMALY_START_TIME]\n config_node2 = chained_args.copy()\n if DEBUG_MODE:\n chained_args.append(DEBUG_MODE)\n self.extra_args = [chained_args, config_node2]\n\n def chain_transaction(self, node, parent_txid, vout, value, fee,\n num_outputs):\n send_value = satoshi_round((value - fee) / num_outputs)\n inputs = [{'txid': parent_txid, 'vout': vout}]\n outputs = {}\n for i in range(num_outputs):\n outputs[node.getnewaddress()] = send_value\n rawtx = node.createrawtransaction(inputs, outputs)\n signedtx = node.signrawtransaction(rawtx)\n sendtx_start = time.perf_counter()\n new_txid = node.sendrawtransaction(signedtx['hex'])\n sendtx_stop = time.perf_counter()\n fulltx = node.getrawtransaction(new_txid, 1)\n assert len(fulltx['vout']) == num_outputs\n return new_txid, send_value, sendtx_stop - sendtx_start, fulltx['size']\n\n def mine_blocks(self):\n \"\"\" Mine some blocks and have them mature. 
\"\"\"\n self.nodes[0].generate(101)\n self.utxo = self.nodes[0].listunspent(10)\n self.txid = self.utxo[0]['txid']\n self.coinbasetx = self.txid\n self.vout = self.utxo[0]['vout']\n self.value = self.utxo[0]['amount']\n self.fee = Decimal('0.0001')\n self.tip = int('0x' + self.nodes[0].getbestblockhash(), 0)\n self.block_time = int(time.time()) + 1\n\n def send_chain_to_node(self):\n \"\"\" Generates tx chain and send it to node \"\"\"\n for i in range(CHAINED_TX):\n sent_txid, sent_value, this_sendtx, tx_size = (self.\n chain_transaction(self.nodes[0], self.txid, 0, self.value,\n self.fee, 1))\n if not self.chain_top:\n self.chain_top = sent_txid\n self.txid = sent_txid\n self.value = sent_value\n self.chain.append(sent_txid)\n self.mempool_send += this_sendtx\n self.mempool_size += tx_size\n\n def create_new_block(self):\n \"\"\" Create a new block with an anyone-can-spend coinbase \"\"\"\n block = create_block(self.tip, create_coinbase(self.height), self.\n block_time)\n self.block_time += 1\n block.solve()\n return block\n <mask token>\n\n def dumppool(self, mempool):\n \"\"\" Show list of chained tx in mempool with parent(depends) \"\"\"\n\n def sortdepends(e):\n return e['descendantcount']\n sortedlist = [[k, v] for k, v in mempool.items()]\n sortedlist = sorted(sortedlist, key=lambda l: l[1][\n 'descendantcount'], reverse=True)\n for memkv in sortedlist:\n memtx = memkv[1]\n self.log.info('{} {} {}'.format(memkv[0], memtx[\n 'descendantcount'], memtx['depends']))\n\n def run_test(self):\n self.log.info('Starting Test with {0} Chained Transactions'.format(\n CHAINED_TX))\n self.chain_top = None\n self.mine_blocks()\n self.mempool_send = 0\n self.mempool_size = 0\n self.chain = []\n self.send_chain_to_node()\n assert self.mempool_count() == CHAINED_TX\n mempool = self.nodes[0].getrawmempool(True)\n self.log.info('tx at top has {} descendants'.format(mempool[self.\n chain_top]['descendantcount']))\n assert mempool[self.chain_top]['descendantcount'] == 
CHAINED_TX\n self.height = 1\n self.block1 = self.create_new_block()\n self.tip = self.block1.sha256\n self.height += 1\n for i in range(100):\n block = self.create_new_block()\n self.tip = block.sha256\n self.height += 1\n self.runs = []\n for test_iteration in range(TEST_ITERATIONS):\n gbt_start = time.perf_counter()\n templat = self.nodes[0].getblocktemplate()\n gbt_stop = time.perf_counter()\n assert len(templat['transactions']) == CHAINED_TX\n self.runs.append(gbt_stop - gbt_start)\n self.log.info('Mempool size {0}'.format(self.mempool_size))\n self.log.info('Send Tx took {0:.5f}s'.format(self.mempool_send))\n if len(self.runs) > 1:\n self.log.info('run times {}'.format(self.runs))\n self.log.info('GetBlkT took {0:.5f}s'.format(sum(self.runs) / len(\n self.runs)))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ChainedTest(BitcoinTestFramework):\n\n def set_test_params(self):\n \"\"\" our test network requires a peer node so that getblocktemplate succeeds \"\"\"\n self.num_nodes = 2\n chained_args = ['-limitancestorcount=2000',\n '-limitdescendantcount=2000', '-limitancestorsize=1000',\n '-limitdescendantsize=1000', \n '-magneticanomalyactivationtime=%d' % MAGNETIC_ANOMALY_START_TIME]\n config_node2 = chained_args.copy()\n if DEBUG_MODE:\n chained_args.append(DEBUG_MODE)\n self.extra_args = [chained_args, config_node2]\n\n def chain_transaction(self, node, parent_txid, vout, value, fee,\n num_outputs):\n send_value = satoshi_round((value - fee) / num_outputs)\n inputs = [{'txid': parent_txid, 'vout': vout}]\n outputs = {}\n for i in range(num_outputs):\n outputs[node.getnewaddress()] = send_value\n rawtx = node.createrawtransaction(inputs, outputs)\n signedtx = node.signrawtransaction(rawtx)\n sendtx_start = time.perf_counter()\n new_txid = node.sendrawtransaction(signedtx['hex'])\n sendtx_stop = time.perf_counter()\n fulltx = node.getrawtransaction(new_txid, 1)\n assert len(fulltx['vout']) == num_outputs\n return new_txid, send_value, sendtx_stop - sendtx_start, fulltx['size']\n\n def mine_blocks(self):\n \"\"\" Mine some blocks and have them mature. 
\"\"\"\n self.nodes[0].generate(101)\n self.utxo = self.nodes[0].listunspent(10)\n self.txid = self.utxo[0]['txid']\n self.coinbasetx = self.txid\n self.vout = self.utxo[0]['vout']\n self.value = self.utxo[0]['amount']\n self.fee = Decimal('0.0001')\n self.tip = int('0x' + self.nodes[0].getbestblockhash(), 0)\n self.block_time = int(time.time()) + 1\n\n def send_chain_to_node(self):\n \"\"\" Generates tx chain and send it to node \"\"\"\n for i in range(CHAINED_TX):\n sent_txid, sent_value, this_sendtx, tx_size = (self.\n chain_transaction(self.nodes[0], self.txid, 0, self.value,\n self.fee, 1))\n if not self.chain_top:\n self.chain_top = sent_txid\n self.txid = sent_txid\n self.value = sent_value\n self.chain.append(sent_txid)\n self.mempool_send += this_sendtx\n self.mempool_size += tx_size\n\n def create_new_block(self):\n \"\"\" Create a new block with an anyone-can-spend coinbase \"\"\"\n block = create_block(self.tip, create_coinbase(self.height), self.\n block_time)\n self.block_time += 1\n block.solve()\n return block\n\n def mempool_count(self):\n \"\"\" get count of tx in mempool \"\"\"\n mininginfo = self.nodes[0].getmininginfo()\n return mininginfo['pooledtx']\n\n def dumppool(self, mempool):\n \"\"\" Show list of chained tx in mempool with parent(depends) \"\"\"\n\n def sortdepends(e):\n return e['descendantcount']\n sortedlist = [[k, v] for k, v in mempool.items()]\n sortedlist = sorted(sortedlist, key=lambda l: l[1][\n 'descendantcount'], reverse=True)\n for memkv in sortedlist:\n memtx = memkv[1]\n self.log.info('{} {} {}'.format(memkv[0], memtx[\n 'descendantcount'], memtx['depends']))\n\n def run_test(self):\n self.log.info('Starting Test with {0} Chained Transactions'.format(\n CHAINED_TX))\n self.chain_top = None\n self.mine_blocks()\n self.mempool_send = 0\n self.mempool_size = 0\n self.chain = []\n self.send_chain_to_node()\n assert self.mempool_count() == CHAINED_TX\n mempool = self.nodes[0].getrawmempool(True)\n self.log.info('tx at top has 
{} descendants'.format(mempool[self.\n chain_top]['descendantcount']))\n assert mempool[self.chain_top]['descendantcount'] == CHAINED_TX\n self.height = 1\n self.block1 = self.create_new_block()\n self.tip = self.block1.sha256\n self.height += 1\n for i in range(100):\n block = self.create_new_block()\n self.tip = block.sha256\n self.height += 1\n self.runs = []\n for test_iteration in range(TEST_ITERATIONS):\n gbt_start = time.perf_counter()\n templat = self.nodes[0].getblocktemplate()\n gbt_stop = time.perf_counter()\n assert len(templat['transactions']) == CHAINED_TX\n self.runs.append(gbt_stop - gbt_start)\n self.log.info('Mempool size {0}'.format(self.mempool_size))\n self.log.info('Send Tx took {0:.5f}s'.format(self.mempool_send))\n if len(self.runs) > 1:\n self.log.info('run times {}'.format(self.runs))\n self.log.info('GetBlkT took {0:.5f}s'.format(sum(self.runs) / len(\n self.runs)))\n\n\n<mask token>\n",
"step-4": "<mask token>\nif len(sys.argv) > 1:\n CHAINED_TX = int(sys.argv[1])\n<mask token>\nif len(sys.argv) > 2:\n TEST_ITERATIONS = int(sys.argv[2])\n<mask token>\n\n\nclass ChainedTest(BitcoinTestFramework):\n\n def set_test_params(self):\n \"\"\" our test network requires a peer node so that getblocktemplate succeeds \"\"\"\n self.num_nodes = 2\n chained_args = ['-limitancestorcount=2000',\n '-limitdescendantcount=2000', '-limitancestorsize=1000',\n '-limitdescendantsize=1000', \n '-magneticanomalyactivationtime=%d' % MAGNETIC_ANOMALY_START_TIME]\n config_node2 = chained_args.copy()\n if DEBUG_MODE:\n chained_args.append(DEBUG_MODE)\n self.extra_args = [chained_args, config_node2]\n\n def chain_transaction(self, node, parent_txid, vout, value, fee,\n num_outputs):\n send_value = satoshi_round((value - fee) / num_outputs)\n inputs = [{'txid': parent_txid, 'vout': vout}]\n outputs = {}\n for i in range(num_outputs):\n outputs[node.getnewaddress()] = send_value\n rawtx = node.createrawtransaction(inputs, outputs)\n signedtx = node.signrawtransaction(rawtx)\n sendtx_start = time.perf_counter()\n new_txid = node.sendrawtransaction(signedtx['hex'])\n sendtx_stop = time.perf_counter()\n fulltx = node.getrawtransaction(new_txid, 1)\n assert len(fulltx['vout']) == num_outputs\n return new_txid, send_value, sendtx_stop - sendtx_start, fulltx['size']\n\n def mine_blocks(self):\n \"\"\" Mine some blocks and have them mature. 
\"\"\"\n self.nodes[0].generate(101)\n self.utxo = self.nodes[0].listunspent(10)\n self.txid = self.utxo[0]['txid']\n self.coinbasetx = self.txid\n self.vout = self.utxo[0]['vout']\n self.value = self.utxo[0]['amount']\n self.fee = Decimal('0.0001')\n self.tip = int('0x' + self.nodes[0].getbestblockhash(), 0)\n self.block_time = int(time.time()) + 1\n\n def send_chain_to_node(self):\n \"\"\" Generates tx chain and send it to node \"\"\"\n for i in range(CHAINED_TX):\n sent_txid, sent_value, this_sendtx, tx_size = (self.\n chain_transaction(self.nodes[0], self.txid, 0, self.value,\n self.fee, 1))\n if not self.chain_top:\n self.chain_top = sent_txid\n self.txid = sent_txid\n self.value = sent_value\n self.chain.append(sent_txid)\n self.mempool_send += this_sendtx\n self.mempool_size += tx_size\n\n def create_new_block(self):\n \"\"\" Create a new block with an anyone-can-spend coinbase \"\"\"\n block = create_block(self.tip, create_coinbase(self.height), self.\n block_time)\n self.block_time += 1\n block.solve()\n return block\n\n def mempool_count(self):\n \"\"\" get count of tx in mempool \"\"\"\n mininginfo = self.nodes[0].getmininginfo()\n return mininginfo['pooledtx']\n\n def dumppool(self, mempool):\n \"\"\" Show list of chained tx in mempool with parent(depends) \"\"\"\n\n def sortdepends(e):\n return e['descendantcount']\n sortedlist = [[k, v] for k, v in mempool.items()]\n sortedlist = sorted(sortedlist, key=lambda l: l[1][\n 'descendantcount'], reverse=True)\n for memkv in sortedlist:\n memtx = memkv[1]\n self.log.info('{} {} {}'.format(memkv[0], memtx[\n 'descendantcount'], memtx['depends']))\n\n def run_test(self):\n self.log.info('Starting Test with {0} Chained Transactions'.format(\n CHAINED_TX))\n self.chain_top = None\n self.mine_blocks()\n self.mempool_send = 0\n self.mempool_size = 0\n self.chain = []\n self.send_chain_to_node()\n assert self.mempool_count() == CHAINED_TX\n mempool = self.nodes[0].getrawmempool(True)\n self.log.info('tx at top has 
{} descendants'.format(mempool[self.\n chain_top]['descendantcount']))\n assert mempool[self.chain_top]['descendantcount'] == CHAINED_TX\n self.height = 1\n self.block1 = self.create_new_block()\n self.tip = self.block1.sha256\n self.height += 1\n for i in range(100):\n block = self.create_new_block()\n self.tip = block.sha256\n self.height += 1\n self.runs = []\n for test_iteration in range(TEST_ITERATIONS):\n gbt_start = time.perf_counter()\n templat = self.nodes[0].getblocktemplate()\n gbt_stop = time.perf_counter()\n assert len(templat['transactions']) == CHAINED_TX\n self.runs.append(gbt_stop - gbt_start)\n self.log.info('Mempool size {0}'.format(self.mempool_size))\n self.log.info('Send Tx took {0:.5f}s'.format(self.mempool_send))\n if len(self.runs) > 1:\n self.log.info('run times {}'.format(self.runs))\n self.log.info('GetBlkT took {0:.5f}s'.format(sum(self.runs) / len(\n self.runs)))\n\n\nif __name__ == '__main__':\n ChainedTest().main()\n",
"step-5": "#!/usr/bin/env python3\n# Copyright (c) 2018 Nobody\n# Distributed under the MIT software license, see the accompanying\n# file COPYING or http://www.opensource.org/licenses/mit-license.php.\n\"\"\"Test perforance of descendant package (chained transactions)\"\"\"\nimport time\nimport copy\nfrom test_framework.test_framework import BitcoinTestFramework\nfrom test_framework.util import *\nfrom test_framework.mininode import COIN\nfrom test_framework.blocktools import *\n\n\"\"\"Read optional arguments from command line\"\"\"\nCHAINED_TX = 25\nif len(sys.argv)>1:\n CHAINED_TX = int(sys.argv[1])\nTEST_ITERATIONS = 1\nif len(sys.argv)>2:\n TEST_ITERATIONS = int(sys.argv[2])\nDEBUG_MODE = '-printtoconsole'\n\nMAX_ANCESTORS = CHAINED_TX\nMAX_DESCENDANTS = CHAINED_TX\n\nMAGNETIC_ANOMALY_START_TIME = 2000000000\n\nclass ChainedTest(BitcoinTestFramework):\n\n def set_test_params(self):\n ''' our test network requires a peer node so that getblocktemplate succeeds '''\n self.num_nodes = 2\n chained_args = [\"-limitancestorcount=2000\", \"-limitdescendantcount=2000\",\n \"-limitancestorsize=1000\", \"-limitdescendantsize=1000\",\n \"-magneticanomalyactivationtime=%d\" % MAGNETIC_ANOMALY_START_TIME\n ]\n config_node2 = chained_args.copy()\n if DEBUG_MODE:\n chained_args.append(DEBUG_MODE)\n self.extra_args = [chained_args, config_node2]\n\n # Build a transaction that spends parent_txid:vout\n # Return amount sent\n def chain_transaction(self, node, parent_txid, vout, value, fee, num_outputs):\n send_value = satoshi_round((value - fee) / num_outputs)\n inputs = [{'txid': parent_txid, 'vout': vout}]\n outputs = {}\n for i in range(num_outputs):\n outputs[node.getnewaddress()] = send_value\n rawtx = node.createrawtransaction(inputs, outputs)\n signedtx = node.signrawtransaction(rawtx)\n\n #measure the performance of sending the raw transaction to the node\n sendtx_start = time.perf_counter()\n new_txid = node.sendrawtransaction(signedtx['hex'])\n sendtx_stop = 
time.perf_counter()\n fulltx = node.getrawtransaction(new_txid, 1)\n\n #self.log.info('{0} => {1}'.format(parent_txid, fulltx['vout'][0]))\n\n # make sure we didn't generate a change output\n assert(len(fulltx['vout']) == num_outputs)\n return (new_txid, send_value, sendtx_stop - sendtx_start, fulltx['size'])\n\n def mine_blocks(self):\n ''' Mine some blocks and have them mature. '''\n self.nodes[0].generate(101)\n self.utxo = self.nodes[0].listunspent(10)\n self.txid = self.utxo[0]['txid']\n self.coinbasetx = self.txid\n self.vout = self.utxo[0]['vout']\n self.value = self.utxo[0]['amount']\n self.fee = Decimal(\"0.0001\")\n self.tip = int(\"0x\" + self.nodes[0].getbestblockhash(), 0)\n self.block_time = int(time.time()) + 1\n\n def send_chain_to_node(self):\n ''' Generates tx chain and send it to node '''\n for i in range(CHAINED_TX):\n (sent_txid, sent_value, this_sendtx, tx_size) = self.chain_transaction(\n self.nodes[0], self.txid, 0, self.value, self.fee, 1)\n if not self.chain_top:\n self.chain_top = sent_txid\n self.txid = sent_txid\n self.value = sent_value\n self.chain.append(sent_txid)\n self.mempool_send += this_sendtx\n self.mempool_size += tx_size\n\n def create_new_block(self):\n ''' Create a new block with an anyone-can-spend coinbase '''\n block = create_block(\n self.tip, create_coinbase(self.height), self.block_time)\n self.block_time += 1\n block.solve()\n return block\n\n def mempool_count(self):\n ''' get count of tx in mempool '''\n mininginfo = self.nodes[0].getmininginfo()\n return mininginfo['pooledtx']\n\n def dumppool(self, mempool):\n ''' Show list of chained tx in mempool with parent(depends) '''\n def sortdepends(e):\n return e['descendantcount']\n sortedlist = [[k,v] for k,v in mempool.items()]\n sortedlist = sorted(sortedlist, key=lambda l: l[1]['descendantcount'], reverse=True)\n for memkv in sortedlist:\n memtx = memkv[1]\n self.log.info('{} {} {}'.format(memkv[0], memtx['descendantcount'], memtx['depends']))\n\n def 
run_test(self):\n self.log.info('Starting Test with {0} Chained Transactions'.format(CHAINED_TX))\n self.chain_top = None\n\n self.mine_blocks()\n\n self.mempool_send = 0\n self.mempool_size = 0\n self.chain = []\n\n self.send_chain_to_node()\n\n # mempool should have all our tx\n assert(self.mempool_count() == CHAINED_TX)\n mempool = self.nodes[0].getrawmempool(True)\n self.log.info('tx at top has {} descendants'.format(mempool[self.chain_top][\"descendantcount\"]))\n assert(mempool[self.chain_top][\"descendantcount\"] == CHAINED_TX)\n\n #self.dumppool(mempool)\n\n self.height = 1\n\n # create new block and save coinbase\n self.block1 = self.create_new_block()\n self.tip = self.block1.sha256\n self.height += 1\n\n #mature the block so we can spend the coinbase\n for i in range(100):\n block = self.create_new_block()\n self.tip = block.sha256\n self.height += 1\n\n #sync pool not needed as long as we are using node 0 which has all the tx we sent to it\n #sync_mempools(self.nodes, wait=1, timeout=100)\n\n self.runs=[]\n for test_iteration in range(TEST_ITERATIONS):\n # do not use perf_counter. use timer from -printtoconsole instead\n gbt_start = time.perf_counter()\n # assemble a block and validate all tx in it\n templat = self.nodes[0].getblocktemplate()\n gbt_stop = time.perf_counter()\n # make sure all tx got mined\n assert(len(templat['transactions']) == CHAINED_TX)\n self.runs.append(gbt_stop - gbt_start)\n\n #assert(self.mempool_count() == 0)\n\n self.log.info('Mempool size {0}'.format(self.mempool_size))\n self.log.info('Send Tx took {0:.5f}s'.format(self.mempool_send))\n if len(self.runs) > 1:\n self.log.info('run times {}'.format(self.runs))\n self.log.info('GetBlkT took {0:.5f}s'.format(sum(self.runs)/len(self.runs)))\n\nif __name__ == '__main__':\n ChainedTest().main()\n",
"step-ids": [
7,
8,
9,
10,
13
]
}
|
[
7,
8,
9,
10,
13
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.